body_hash
stringlengths
64
64
body
stringlengths
23
109k
docstring
stringlengths
1
57k
path
stringlengths
4
198
name
stringlengths
1
115
repository_name
stringlengths
7
111
repository_stars
float64
0
191k
lang
stringclasses
1 value
body_without_docstring
stringlengths
14
108k
unified
stringlengths
45
133k
1e9ae4b5f1b6eaf582411a38253e12f215a45eb5cf331ef02933cc759768ffb5
@property def mcs(self): 'Get set of MCS.' return self._mcs
Get set of MCS.
empower/core/txpolicy.py
mcs
EstefaniaCC/empower-runtime-5g-essence-multicast
0
python
@property def mcs(self): return self._mcs
@property def mcs(self): return self._mcs<|docstring|>Get set of MCS.<|endoftext|>
3ee64d9ccfd9274e0ad512527530bcdcc0999f0f750fc8e133b9b801fa44f688
@mcs.setter def mcs(self, mcs): 'Set the list of MCS.' self.set_mcs(mcs) self.block.wtp.connection.send_set_tx_policy(self)
Set the list of MCS.
empower/core/txpolicy.py
mcs
EstefaniaCC/empower-runtime-5g-essence-multicast
0
python
@mcs.setter def mcs(self, mcs): self.set_mcs(mcs) self.block.wtp.connection.send_set_tx_policy(self)
@mcs.setter def mcs(self, mcs): self.set_mcs(mcs) self.block.wtp.connection.send_set_tx_policy(self)<|docstring|>Set the list of MCS.<|endoftext|>
3e13fefd9518e83c1e41ca668e099ef6c6b7dfe7c2d0227b99ccd2fc0e7a8aaf
def set_mcs(self, mcs): 'Set the list of MCS without sending anything.' self._mcs = (self.block.supports & set(mcs)) if (not self._mcs): self._mcs = self.block.supports
Set the list of MCS without sending anything.
empower/core/txpolicy.py
set_mcs
EstefaniaCC/empower-runtime-5g-essence-multicast
0
python
def set_mcs(self, mcs): self._mcs = (self.block.supports & set(mcs)) if (not self._mcs): self._mcs = self.block.supports
def set_mcs(self, mcs): self._mcs = (self.block.supports & set(mcs)) if (not self._mcs): self._mcs = self.block.supports<|docstring|>Set the list of MCS without sending anything.<|endoftext|>
6b49e0e0d0a8094172830434a81fa5c90bdb0d723ad13c892c2404734f4e8c48
@property def ht_mcs(self): 'Get set of HT MCS.' return self._ht_mcs
Get set of HT MCS.
empower/core/txpolicy.py
ht_mcs
EstefaniaCC/empower-runtime-5g-essence-multicast
0
python
@property def ht_mcs(self): return self._ht_mcs
@property def ht_mcs(self): return self._ht_mcs<|docstring|>Get set of HT MCS.<|endoftext|>
455460de923adda548036d907c4da274dd33832d2c035081d5cc09cbd2857ff2
@ht_mcs.setter def ht_mcs(self, ht_mcs): 'Set the list of MCS.' self.set_ht_mcs(ht_mcs) self.block.wtp.connection.send_set_tx_policy(self)
Set the list of MCS.
empower/core/txpolicy.py
ht_mcs
EstefaniaCC/empower-runtime-5g-essence-multicast
0
python
@ht_mcs.setter def ht_mcs(self, ht_mcs): self.set_ht_mcs(ht_mcs) self.block.wtp.connection.send_set_tx_policy(self)
@ht_mcs.setter def ht_mcs(self, ht_mcs): self.set_ht_mcs(ht_mcs) self.block.wtp.connection.send_set_tx_policy(self)<|docstring|>Set the list of MCS.<|endoftext|>
2e752018bd7a4fc2dec9ed5a5c1e357159e5a7e84143aa1119c27ffaae7e5048
def set_ht_mcs(self, ht_mcs): 'Set the list of HT MCS without sending anything.' self._ht_mcs = (self.block.ht_supports & set(ht_mcs)) if (not self._ht_mcs): self._ht_mcs = self.block.ht_supports
Set the list of HT MCS without sending anything.
empower/core/txpolicy.py
set_ht_mcs
EstefaniaCC/empower-runtime-5g-essence-multicast
0
python
def set_ht_mcs(self, ht_mcs): self._ht_mcs = (self.block.ht_supports & set(ht_mcs)) if (not self._ht_mcs): self._ht_mcs = self.block.ht_supports
def set_ht_mcs(self, ht_mcs): self._ht_mcs = (self.block.ht_supports & set(ht_mcs)) if (not self._ht_mcs): self._ht_mcs = self.block.ht_supports<|docstring|>Set the list of HT MCS without sending anything.<|endoftext|>
59dbe7df388b2824898fb1cd232e0c90e89ad78ccb19e0064ceeae26526f08bd
@property def no_ack(self): 'Get no ack flag.' return self._no_ack
Get no ack flag.
empower/core/txpolicy.py
no_ack
EstefaniaCC/empower-runtime-5g-essence-multicast
0
python
@property def no_ack(self): return self._no_ack
@property def no_ack(self): return self._no_ack<|docstring|>Get no ack flag.<|endoftext|>
3d8b9c347fe44631bf317d0aa2978408b7b56ddf624213484e5ae8c94bcdd38b
@no_ack.setter def no_ack(self, no_ack): 'Set the no ack flag.' self.set_no_ack(no_ack) self.block.wtp.connection.send_set_tx_policy(self)
Set the no ack flag.
empower/core/txpolicy.py
no_ack
EstefaniaCC/empower-runtime-5g-essence-multicast
0
python
@no_ack.setter def no_ack(self, no_ack): self.set_no_ack(no_ack) self.block.wtp.connection.send_set_tx_policy(self)
@no_ack.setter def no_ack(self, no_ack): self.set_no_ack(no_ack) self.block.wtp.connection.send_set_tx_policy(self)<|docstring|>Set the no ack flag.<|endoftext|>
39f181d4dbac2c907a21e9cb3e89ed623c7514126b56bac70dc3084e5c518964
def set_no_ack(self, no_ack): 'Set the no ack flag without sending anything.' self._no_ack = bool(no_ack)
Set the no ack flag without sending anything.
empower/core/txpolicy.py
set_no_ack
EstefaniaCC/empower-runtime-5g-essence-multicast
0
python
def set_no_ack(self, no_ack): self._no_ack = bool(no_ack)
def set_no_ack(self, no_ack): self._no_ack = bool(no_ack)<|docstring|>Set the no ack flag without sending anything.<|endoftext|>
ae77879e00511a221082867de36134aa3f0f8520ebe23e577b6d13580d83a269
@property def rts_cts(self): 'Get rts_cts.' return self._rts_cts
Get rts_cts.
empower/core/txpolicy.py
rts_cts
EstefaniaCC/empower-runtime-5g-essence-multicast
0
python
@property def rts_cts(self): return self._rts_cts
@property def rts_cts(self): return self._rts_cts<|docstring|>Get rts_cts.<|endoftext|>
47560d6139e2113f5e08bda41bcde04ccf05909b3f5394d44169c163ee9b4bd0
@rts_cts.setter def rts_cts(self, rts_cts): 'Set rts_cts.' self.set_rts_cts(rts_cts) self.block.wtp.connection.send_set_tx_policy(self)
Set rts_cts.
empower/core/txpolicy.py
rts_cts
EstefaniaCC/empower-runtime-5g-essence-multicast
0
python
@rts_cts.setter def rts_cts(self, rts_cts): self.set_rts_cts(rts_cts) self.block.wtp.connection.send_set_tx_policy(self)
@rts_cts.setter def rts_cts(self, rts_cts): self.set_rts_cts(rts_cts) self.block.wtp.connection.send_set_tx_policy(self)<|docstring|>Set rts_cts.<|endoftext|>
0de9a8943696bb9ddda82a452f7af7c85551db1bd1006708a1c6cc41583de2b5
def set_rts_cts(self, rts_cts): 'Set rts_cts without sending anything.' self._rts_cts = int(rts_cts)
Set rts_cts without sending anything.
empower/core/txpolicy.py
set_rts_cts
EstefaniaCC/empower-runtime-5g-essence-multicast
0
python
def set_rts_cts(self, rts_cts): self._rts_cts = int(rts_cts)
def set_rts_cts(self, rts_cts): self._rts_cts = int(rts_cts)<|docstring|>Set rts_cts without sending anything.<|endoftext|>
f03f13071ed31c1c9555c6ebb7c5681c6c29d5543d172a449dde95b5ba43bcf8
@property def max_amsdu_len(self): 'Get max_amsdu_len.' return self._max_amsdu_len
Get max_amsdu_len.
empower/core/txpolicy.py
max_amsdu_len
EstefaniaCC/empower-runtime-5g-essence-multicast
0
python
@property def max_amsdu_len(self): return self._max_amsdu_len
@property def max_amsdu_len(self): return self._max_amsdu_len<|docstring|>Get max_amsdu_len.<|endoftext|>
bb470a7713121eca7c3b317ef11cc58e273dce7c0238c0a475ca16bf1deab2a4
@max_amsdu_len.setter def max_amsdu_len(self, max_amsdu_len): 'Set max_amsdu_len.' self.set_max_amsdu_len(max_amsdu_len) self.block.wtp.connection.send_set_tx_policy(self)
Set max_amsdu_len.
empower/core/txpolicy.py
max_amsdu_len
EstefaniaCC/empower-runtime-5g-essence-multicast
0
python
@max_amsdu_len.setter def max_amsdu_len(self, max_amsdu_len): self.set_max_amsdu_len(max_amsdu_len) self.block.wtp.connection.send_set_tx_policy(self)
@max_amsdu_len.setter def max_amsdu_len(self, max_amsdu_len): self.set_max_amsdu_len(max_amsdu_len) self.block.wtp.connection.send_set_tx_policy(self)<|docstring|>Set max_amsdu_len.<|endoftext|>
223afe5ca547203c4638538790d180ad3f02492fe8ff6ec818fdffd83fdba58f
def set_max_amsdu_len(self, max_amsdu_len): 'Set max_amsdu_len without sending anything.' self._max_amsdu_len = int(max_amsdu_len)
Set max_amsdu_len without sending anything.
empower/core/txpolicy.py
set_max_amsdu_len
EstefaniaCC/empower-runtime-5g-essence-multicast
0
python
def set_max_amsdu_len(self, max_amsdu_len): self._max_amsdu_len = int(max_amsdu_len)
def set_max_amsdu_len(self, max_amsdu_len): self._max_amsdu_len = int(max_amsdu_len)<|docstring|>Set max_amsdu_len without sending anything.<|endoftext|>
354bb9283b359e05fa7e634ac68809524ef4339770942f3ec5a9e1d4c33dccd7
def to_str(self): 'Return an ASCII representation of the object.' mcs = ', '.join([str(x) for x in self.mcs]) ht_mcs = ', '.join([str(x) for x in self.ht_mcs]) if (self.block.band == BT_HT20): state = ('%s no_ack %s rts_cts %u max_amsdu %u mcast %s ur_count %u ht_mcs %s' % (self.addr, self.no_ack, self.rts_cts, self.max_amsdu_len, TX_MCAST[self.mcast], self.ur_count, ht_mcs)) else: state = ('%s no_ack %s rts_cts %u max_amsdu %u mcast %s ur_count %u mcs %s' % (self.addr, self.no_ack, self.rts_cts, self.max_amsdu_len, TX_MCAST[self.mcast], self.ur_count, mcs)) return state
Return an ASCII representation of the object.
empower/core/txpolicy.py
to_str
EstefaniaCC/empower-runtime-5g-essence-multicast
0
python
def to_str(self): mcs = ', '.join([str(x) for x in self.mcs]) ht_mcs = ', '.join([str(x) for x in self.ht_mcs]) if (self.block.band == BT_HT20): state = ('%s no_ack %s rts_cts %u max_amsdu %u mcast %s ur_count %u ht_mcs %s' % (self.addr, self.no_ack, self.rts_cts, self.max_amsdu_len, TX_MCAST[self.mcast], self.ur_count, ht_mcs)) else: state = ('%s no_ack %s rts_cts %u max_amsdu %u mcast %s ur_count %u mcs %s' % (self.addr, self.no_ack, self.rts_cts, self.max_amsdu_len, TX_MCAST[self.mcast], self.ur_count, mcs)) return state
def to_str(self): mcs = ', '.join([str(x) for x in self.mcs]) ht_mcs = ', '.join([str(x) for x in self.ht_mcs]) if (self.block.band == BT_HT20): state = ('%s no_ack %s rts_cts %u max_amsdu %u mcast %s ur_count %u ht_mcs %s' % (self.addr, self.no_ack, self.rts_cts, self.max_amsdu_len, TX_MCAST[self.mcast], self.ur_count, ht_mcs)) else: state = ('%s no_ack %s rts_cts %u max_amsdu %u mcast %s ur_count %u mcs %s' % (self.addr, self.no_ack, self.rts_cts, self.max_amsdu_len, TX_MCAST[self.mcast], self.ur_count, mcs)) return state<|docstring|>Return an ASCII representation of the object.<|endoftext|>
61fd8de805457ee04133dafc86e73bbb35231d785d289f4b769f1ed6a367e76f
@route('/', methods=['POST']) @requires_permissions('write') def new(self): 'Create a new Observable\n\n Create a new Observable from the JSON object passed in the ``POST`` data.\n\n :<json object params: JSON object containing fields to set\n :<json boolean refang: If set, the observable will be refanged before being added to the database\n ' params = request.json if ('id' in params): obs = self.objectmanager.objects.get(id=params.pop('id')) else: forced_type = params.pop('force_type', None) try: if params.pop('refang', None): obs = self.objectmanager.add_text(refang(params.pop('value')), force_type=forced_type) else: obs = self.objectmanager.add_text(params.pop('value'), force_type=forced_type) except (GenericYetiError, ObservableValidationError) as e: logging.error(e) abort(400) return render(self._modify_observable(obs, params))
Create a new Observable Create a new Observable from the JSON object passed in the ``POST`` data. :<json object params: JSON object containing fields to set :<json boolean refang: If set, the observable will be refanged before being added to the database
core/web/api/observable.py
new
0xRet/yeti
1,250
python
@route('/', methods=['POST']) @requires_permissions('write') def new(self): 'Create a new Observable\n\n Create a new Observable from the JSON object passed in the ``POST`` data.\n\n :<json object params: JSON object containing fields to set\n :<json boolean refang: If set, the observable will be refanged before being added to the database\n ' params = request.json if ('id' in params): obs = self.objectmanager.objects.get(id=params.pop('id')) else: forced_type = params.pop('force_type', None) try: if params.pop('refang', None): obs = self.objectmanager.add_text(refang(params.pop('value')), force_type=forced_type) else: obs = self.objectmanager.add_text(params.pop('value'), force_type=forced_type) except (GenericYetiError, ObservableValidationError) as e: logging.error(e) abort(400) return render(self._modify_observable(obs, params))
@route('/', methods=['POST']) @requires_permissions('write') def new(self): 'Create a new Observable\n\n Create a new Observable from the JSON object passed in the ``POST`` data.\n\n :<json object params: JSON object containing fields to set\n :<json boolean refang: If set, the observable will be refanged before being added to the database\n ' params = request.json if ('id' in params): obs = self.objectmanager.objects.get(id=params.pop('id')) else: forced_type = params.pop('force_type', None) try: if params.pop('refang', None): obs = self.objectmanager.add_text(refang(params.pop('value')), force_type=forced_type) else: obs = self.objectmanager.add_text(params.pop('value'), force_type=forced_type) except (GenericYetiError, ObservableValidationError) as e: logging.error(e) abort(400) return render(self._modify_observable(obs, params))<|docstring|>Create a new Observable Create a new Observable from the JSON object passed in the ``POST`` data. :<json object params: JSON object containing fields to set :<json boolean refang: If set, the observable will be refanged before being added to the database<|endoftext|>
1d21fac1a2dc3a54251cb8f08c564742c91474d549ba44721efb1efdf338f7c0
@route('/bulk', methods=['POST']) @requires_permissions('write') def bulk(self): 'Bulk-add observables\n\n Bulk-add Observables from an array of strings.\n\n :<json [{string: observable, tags: [string]}] observables: Array of Strings representing observables (URLs, IPs, hostnames, etc.)\n :<json boolean refang: If set, the observables will be refanged before being added to the database\n ' added = [] params = request.json bulk = params.pop('observables', []) _refang = params.pop('refang', False) for item in bulk: value = item['value'] tags = item.get('tags', []) forced_type = item.get('force_type', None) if _refang: obs = self.objectmanager.add_text(refang(value), tags=tags, force_type=forced_type) else: obs = self.objectmanager.add_text(value, tags=tags, force_type=forced_type) added.append(self._modify_observable(obs, {'source': item.get('source'), 'context': item.get('context')})) return render(added)
Bulk-add observables Bulk-add Observables from an array of strings. :<json [{string: observable, tags: [string]}] observables: Array of Strings representing observables (URLs, IPs, hostnames, etc.) :<json boolean refang: If set, the observables will be refanged before being added to the database
core/web/api/observable.py
bulk
0xRet/yeti
1,250
python
@route('/bulk', methods=['POST']) @requires_permissions('write') def bulk(self): 'Bulk-add observables\n\n Bulk-add Observables from an array of strings.\n\n :<json [{string: observable, tags: [string]}] observables: Array of Strings representing observables (URLs, IPs, hostnames, etc.)\n :<json boolean refang: If set, the observables will be refanged before being added to the database\n ' added = [] params = request.json bulk = params.pop('observables', []) _refang = params.pop('refang', False) for item in bulk: value = item['value'] tags = item.get('tags', []) forced_type = item.get('force_type', None) if _refang: obs = self.objectmanager.add_text(refang(value), tags=tags, force_type=forced_type) else: obs = self.objectmanager.add_text(value, tags=tags, force_type=forced_type) added.append(self._modify_observable(obs, {'source': item.get('source'), 'context': item.get('context')})) return render(added)
@route('/bulk', methods=['POST']) @requires_permissions('write') def bulk(self): 'Bulk-add observables\n\n Bulk-add Observables from an array of strings.\n\n :<json [{string: observable, tags: [string]}] observables: Array of Strings representing observables (URLs, IPs, hostnames, etc.)\n :<json boolean refang: If set, the observables will be refanged before being added to the database\n ' added = [] params = request.json bulk = params.pop('observables', []) _refang = params.pop('refang', False) for item in bulk: value = item['value'] tags = item.get('tags', []) forced_type = item.get('force_type', None) if _refang: obs = self.objectmanager.add_text(refang(value), tags=tags, force_type=forced_type) else: obs = self.objectmanager.add_text(value, tags=tags, force_type=forced_type) added.append(self._modify_observable(obs, {'source': item.get('source'), 'context': item.get('context')})) return render(added)<|docstring|>Bulk-add observables Bulk-add Observables from an array of strings. :<json [{string: observable, tags: [string]}] observables: Array of Strings representing observables (URLs, IPs, hostnames, etc.) :<json boolean refang: If set, the observables will be refanged before being added to the database<|endoftext|>
6ad11c81a849f0b9b1431f149d958a2a49d4ad9131403d64775aad0089796a54
@route('/<id>/context', methods=['POST']) @requires_permissions('read') def context(self, id): 'Add context to an observable\n\n :<json object context: Context JSON to be added. Must include a ``source`` key.\n :<json string old_source: String defining the source to be replaced.\n :>json object: The context object that was actually added\n ' observable = get_object_or_404(self.objectmanager, id=id) context = request.json.pop('context', {}) old_source = request.json.pop('old_source', None) observable.add_context(context, replace_source=old_source) return render(context)
Add context to an observable :<json object context: Context JSON to be added. Must include a ``source`` key. :<json string old_source: String defining the source to be replaced. :>json object: The context object that was actually added
core/web/api/observable.py
context
0xRet/yeti
1,250
python
@route('/<id>/context', methods=['POST']) @requires_permissions('read') def context(self, id): 'Add context to an observable\n\n :<json object context: Context JSON to be added. Must include a ``source`` key.\n :<json string old_source: String defining the source to be replaced.\n :>json object: The context object that was actually added\n ' observable = get_object_or_404(self.objectmanager, id=id) context = request.json.pop('context', {}) old_source = request.json.pop('old_source', None) observable.add_context(context, replace_source=old_source) return render(context)
@route('/<id>/context', methods=['POST']) @requires_permissions('read') def context(self, id): 'Add context to an observable\n\n :<json object context: Context JSON to be added. Must include a ``source`` key.\n :<json string old_source: String defining the source to be replaced.\n :>json object: The context object that was actually added\n ' observable = get_object_or_404(self.objectmanager, id=id) context = request.json.pop('context', {}) old_source = request.json.pop('old_source', None) observable.add_context(context, replace_source=old_source) return render(context)<|docstring|>Add context to an observable :<json object context: Context JSON to be added. Must include a ``source`` key. :<json string old_source: String defining the source to be replaced. :>json object: The context object that was actually added<|endoftext|>
823dd4d4ab9b935cabbe7194b14ebaa9aef43d62bb31e787b774f8d961f005c7
@route('/<id>/context', methods=['DELETE']) @requires_permissions('write') def remove_context(self, id): 'Removes context from an observable\n\n :<json object context: Context JSON to be added. Must include a ``source`` key.\n :>json object: The context object that was actually delete\n ' observable = get_object_or_404(self.objectmanager, id=id) context = request.json.pop('context', {}) observable.remove_context(context) return render(context)
Removes context from an observable :<json object context: Context JSON to be added. Must include a ``source`` key. :>json object: The context object that was actually delete
core/web/api/observable.py
remove_context
0xRet/yeti
1,250
python
@route('/<id>/context', methods=['DELETE']) @requires_permissions('write') def remove_context(self, id): 'Removes context from an observable\n\n :<json object context: Context JSON to be added. Must include a ``source`` key.\n :>json object: The context object that was actually delete\n ' observable = get_object_or_404(self.objectmanager, id=id) context = request.json.pop('context', {}) observable.remove_context(context) return render(context)
@route('/<id>/context', methods=['DELETE']) @requires_permissions('write') def remove_context(self, id): 'Removes context from an observable\n\n :<json object context: Context JSON to be added. Must include a ``source`` key.\n :>json object: The context object that was actually delete\n ' observable = get_object_or_404(self.objectmanager, id=id) context = request.json.pop('context', {}) observable.remove_context(context) return render(context)<|docstring|>Removes context from an observable :<json object context: Context JSON to be added. Must include a ``source`` key. :>json object: The context object that was actually delete<|endoftext|>
7dc7d50b3dc14603063f5bdec968c7d60938555eebda277cc4eee8268287ac4d
def __init__(self, channel): 'Constructor.\n\n Args:\n channel: A grpc.Channel.\n ' self.GetAdGroup = channel.unary_unary('/google.ads.googleads.v5.services.AdGroupService/GetAdGroup', request_serializer=google_dot_ads_dot_googleads__v5_dot_proto_dot_services_dot_ad__group__service__pb2.GetAdGroupRequest.SerializeToString, response_deserializer=google_dot_ads_dot_googleads__v5_dot_proto_dot_resources_dot_ad__group__pb2.AdGroup.FromString) self.MutateAdGroups = channel.unary_unary('/google.ads.googleads.v5.services.AdGroupService/MutateAdGroups', request_serializer=google_dot_ads_dot_googleads__v5_dot_proto_dot_services_dot_ad__group__service__pb2.MutateAdGroupsRequest.SerializeToString, response_deserializer=google_dot_ads_dot_googleads__v5_dot_proto_dot_services_dot_ad__group__service__pb2.MutateAdGroupsResponse.FromString)
Constructor. Args: channel: A grpc.Channel.
google/ads/google_ads/v5/proto/services/ad_group_service_pb2_grpc.py
__init__
arammaliachi/google-ads-python
1
python
def __init__(self, channel): 'Constructor.\n\n Args:\n channel: A grpc.Channel.\n ' self.GetAdGroup = channel.unary_unary('/google.ads.googleads.v5.services.AdGroupService/GetAdGroup', request_serializer=google_dot_ads_dot_googleads__v5_dot_proto_dot_services_dot_ad__group__service__pb2.GetAdGroupRequest.SerializeToString, response_deserializer=google_dot_ads_dot_googleads__v5_dot_proto_dot_resources_dot_ad__group__pb2.AdGroup.FromString) self.MutateAdGroups = channel.unary_unary('/google.ads.googleads.v5.services.AdGroupService/MutateAdGroups', request_serializer=google_dot_ads_dot_googleads__v5_dot_proto_dot_services_dot_ad__group__service__pb2.MutateAdGroupsRequest.SerializeToString, response_deserializer=google_dot_ads_dot_googleads__v5_dot_proto_dot_services_dot_ad__group__service__pb2.MutateAdGroupsResponse.FromString)
def __init__(self, channel): 'Constructor.\n\n Args:\n channel: A grpc.Channel.\n ' self.GetAdGroup = channel.unary_unary('/google.ads.googleads.v5.services.AdGroupService/GetAdGroup', request_serializer=google_dot_ads_dot_googleads__v5_dot_proto_dot_services_dot_ad__group__service__pb2.GetAdGroupRequest.SerializeToString, response_deserializer=google_dot_ads_dot_googleads__v5_dot_proto_dot_resources_dot_ad__group__pb2.AdGroup.FromString) self.MutateAdGroups = channel.unary_unary('/google.ads.googleads.v5.services.AdGroupService/MutateAdGroups', request_serializer=google_dot_ads_dot_googleads__v5_dot_proto_dot_services_dot_ad__group__service__pb2.MutateAdGroupsRequest.SerializeToString, response_deserializer=google_dot_ads_dot_googleads__v5_dot_proto_dot_services_dot_ad__group__service__pb2.MutateAdGroupsResponse.FromString)<|docstring|>Constructor. Args: channel: A grpc.Channel.<|endoftext|>
0f8a50908fa5acdd45bb858c48a791f268a800dc7fac26fb90a7345f4a13675f
def GetAdGroup(self, request, context): 'Returns the requested ad group in full detail.\n ' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
Returns the requested ad group in full detail.
google/ads/google_ads/v5/proto/services/ad_group_service_pb2_grpc.py
GetAdGroup
arammaliachi/google-ads-python
1
python
def GetAdGroup(self, request, context): '\n ' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
def GetAdGroup(self, request, context): '\n ' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')<|docstring|>Returns the requested ad group in full detail.<|endoftext|>
8c8799d0fb9ce973ee7cdd89e6f2981ceee14a910f97c576eebaa13dc2f21a4d
def MutateAdGroups(self, request, context): 'Creates, updates, or removes ad groups. Operation statuses are returned.\n ' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
Creates, updates, or removes ad groups. Operation statuses are returned.
google/ads/google_ads/v5/proto/services/ad_group_service_pb2_grpc.py
MutateAdGroups
arammaliachi/google-ads-python
1
python
def MutateAdGroups(self, request, context): '\n ' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
def MutateAdGroups(self, request, context): '\n ' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')<|docstring|>Creates, updates, or removes ad groups. Operation statuses are returned.<|endoftext|>
c505ee286890de3bb49e8142e65e5613a561ffcdcae0e93aed18f892450daa0f
def segment(sentence, cut_type='word', pos=False): "\n 切词\n :param sentence:\n :param cut_type: 'word' use jieba.lcut; 'char' use list(sentence)\n :param pos: enable POS\n :return: list\n " if pos: if (cut_type == 'word'): word_pos_seq = posseg.lcut(sentence) (word_seq, pos_seq) = ([], []) for (w, p) in word_pos_seq: word_seq.append(w) pos_seq.append(p) return (word_seq, pos_seq) elif (cut_type == 'char'): word_seq = list(sentence) pos_seq = [] for w in word_seq: w_p = posseg.lcut(w) pos_seq.append(w_p[0].flag) return (word_seq, pos_seq) elif (cut_type == 'word'): return jieba.lcut(sentence) elif (cut_type == 'char'): return list(sentence)
切词 :param sentence: :param cut_type: 'word' use jieba.lcut; 'char' use list(sentence) :param pos: enable POS :return: list
utils/tokenizer.py
segment
rogue809/TextSummary_AutoMaster
3
python
def segment(sentence, cut_type='word', pos=False): "\n 切词\n :param sentence:\n :param cut_type: 'word' use jieba.lcut; 'char' use list(sentence)\n :param pos: enable POS\n :return: list\n " if pos: if (cut_type == 'word'): word_pos_seq = posseg.lcut(sentence) (word_seq, pos_seq) = ([], []) for (w, p) in word_pos_seq: word_seq.append(w) pos_seq.append(p) return (word_seq, pos_seq) elif (cut_type == 'char'): word_seq = list(sentence) pos_seq = [] for w in word_seq: w_p = posseg.lcut(w) pos_seq.append(w_p[0].flag) return (word_seq, pos_seq) elif (cut_type == 'word'): return jieba.lcut(sentence) elif (cut_type == 'char'): return list(sentence)
def segment(sentence, cut_type='word', pos=False): "\n 切词\n :param sentence:\n :param cut_type: 'word' use jieba.lcut; 'char' use list(sentence)\n :param pos: enable POS\n :return: list\n " if pos: if (cut_type == 'word'): word_pos_seq = posseg.lcut(sentence) (word_seq, pos_seq) = ([], []) for (w, p) in word_pos_seq: word_seq.append(w) pos_seq.append(p) return (word_seq, pos_seq) elif (cut_type == 'char'): word_seq = list(sentence) pos_seq = [] for w in word_seq: w_p = posseg.lcut(w) pos_seq.append(w_p[0].flag) return (word_seq, pos_seq) elif (cut_type == 'word'): return jieba.lcut(sentence) elif (cut_type == 'char'): return list(sentence)<|docstring|>切词 :param sentence: :param cut_type: 'word' use jieba.lcut; 'char' use list(sentence) :param pos: enable POS :return: list<|endoftext|>
c06f2ba415f2f82ee2879633949276f5d817281e0a29167ed1fb065cdbf555ff
def read_settings(): 'Read application settings.\n\n Returns\n -------\n settings : dict\n The restored settings values are returned in a dictionary for further\n processing.\n ' settings = QSettings() recent = settings.value('recent') if (not recent): recent = [] toolbar = settings.value('toolbar') if (toolbar is None): toolbar = True statusbar = settings.value('statusbar') if (statusbar is None): statusbar = True geometry = settings.value('geometry') state = settings.value('state') return {'recent': recent, 'statusbar': statusbar, 'geometry': geometry, 'state': state, 'toolbar': toolbar}
Read application settings. Returns ------- settings : dict The restored settings values are returned in a dictionary for further processing.
mnelab/mainwindow.py
read_settings
stralu/mnelab
1
python
def read_settings(): 'Read application settings.\n\n Returns\n -------\n settings : dict\n The restored settings values are returned in a dictionary for further\n processing.\n ' settings = QSettings() recent = settings.value('recent') if (not recent): recent = [] toolbar = settings.value('toolbar') if (toolbar is None): toolbar = True statusbar = settings.value('statusbar') if (statusbar is None): statusbar = True geometry = settings.value('geometry') state = settings.value('state') return {'recent': recent, 'statusbar': statusbar, 'geometry': geometry, 'state': state, 'toolbar': toolbar}
def read_settings(): 'Read application settings.\n\n Returns\n -------\n settings : dict\n The restored settings values are returned in a dictionary for further\n processing.\n ' settings = QSettings() recent = settings.value('recent') if (not recent): recent = [] toolbar = settings.value('toolbar') if (toolbar is None): toolbar = True statusbar = settings.value('statusbar') if (statusbar is None): statusbar = True geometry = settings.value('geometry') state = settings.value('state') return {'recent': recent, 'statusbar': statusbar, 'geometry': geometry, 'state': state, 'toolbar': toolbar}<|docstring|>Read application settings. Returns ------- settings : dict The restored settings values are returned in a dictionary for further processing.<|endoftext|>
f5a429ca14d5050aeda29fa37fb75e31db0e58e62dd12466480313ee825e2206
def write_settings(**kwargs): 'Write application settings.' settings = QSettings() for (key, value) in kwargs.items(): settings.setValue(key, value)
Write application settings.
mnelab/mainwindow.py
write_settings
stralu/mnelab
1
python
def write_settings(**kwargs): settings = QSettings() for (key, value) in kwargs.items(): settings.setValue(key, value)
def write_settings(**kwargs): settings = QSettings() for (key, value) in kwargs.items(): settings.setValue(key, value)<|docstring|>Write application settings.<|endoftext|>
539669d04987866dd1e8f865d97252ef83c97755f1651792dc406e71f2b0faee
def __init__(self, model): 'Initialize MNELAB main window.\n\n Parameters\n ----------\n model : mnelab.model.Model instance\n The main window needs to connect to a model containing all data\n sets. This decouples the GUI from the data (model/view).\n ' super().__init__() self.model = model self.setWindowTitle('MNELAB') settings = read_settings() self.recent = settings['recent'] if settings['geometry']: self.restoreGeometry(settings['geometry']) else: self.setGeometry(300, 300, 1000, 750) self.move((QApplication.screens()[0].geometry().center() - self.rect().center())) if settings['state']: self.restoreState(settings['state']) self.actions = {} file_menu = self.menuBar().addMenu('&File') icon = QIcon(image_path('open_file.svg')) self.actions['open_file'] = file_menu.addAction(icon, '&Open...', self.open_data, QKeySequence.Open) self.recent_menu = file_menu.addMenu('Open recent') self.recent_menu.aboutToShow.connect(self._update_recent_menu) self.recent_menu.triggered.connect(self._load_recent) if (not self.recent): self.recent_menu.setEnabled(False) self.actions['close_file'] = file_menu.addAction('&Close', self.model.remove_data, QKeySequence.Close) self.actions['close_all'] = file_menu.addAction('Close all', self.close_all) file_menu.addSeparator() icon = QIcon(image_path('meta_info.svg')) self.actions['meta_info'] = file_menu.addAction(icon, 'Show information...', self.meta_info) file_menu.addSeparator() self.actions['import_bads'] = file_menu.addAction('Import bad channels...', (lambda : self.import_file(model.import_bads, 'Import bad channels', '*.csv'))) self.actions['import_events'] = file_menu.addAction('Import events...', (lambda : self.import_file(model.import_events, 'Import events', '*.csv'))) self.actions['import_annotations'] = file_menu.addAction('Import annotations...', (lambda : self.import_file(model.import_annotations, 'Import annotations', '*.csv'))) self.actions['import_ica'] = file_menu.addAction('Import &ICA...', (lambda : 
self.open_file(model.import_ica, 'Import ICA', '*.fif *.fif.gz'))) file_menu.addSeparator() self.export_menu = file_menu.addMenu('Export data') for (name, ext) in EXPORT_FORMATS.items(): self.actions[('export_data_' + ext)] = self.export_menu.addAction(f'{name} ({ext[1:].upper()})...', partial(self.export_file, model.export_data, 'Export data', ext)) self.actions['export_bads'] = file_menu.addAction('Export &bad channels...', (lambda : self.export_file(model.export_bads, 'Export bad channels', '*.csv'))) self.actions['export_events'] = file_menu.addAction('Export &events...', (lambda : self.export_file(model.export_events, 'Export events', '*.csv'))) self.actions['export_annotations'] = file_menu.addAction('Export &annotations...', (lambda : self.export_file(model.export_annotations, 'Export annotations', '*.csv'))) self.actions['export_ica'] = file_menu.addAction('Export ICA...', (lambda : self.export_file(model.export_ica, 'Export ICA', '*.fif *.fif.gz'))) file_menu.addSeparator() self.actions['quit'] = file_menu.addAction('&Quit', self.close, QKeySequence.Quit) edit_menu = self.menuBar().addMenu('&Edit') self.actions['pick_chans'] = edit_menu.addAction('P&ick channels...', self.pick_channels) icon = QIcon(image_path('chan_props.svg')) self.actions['chan_props'] = edit_menu.addAction(icon, 'Channel &properties...', self.channel_properties) self.actions['set_montage'] = edit_menu.addAction('Set &montage...', self.set_montage) edit_menu.addSeparator() self.actions['set_ref'] = edit_menu.addAction('Set &reference...', self.set_reference) edit_menu.addSeparator() self.actions['annotations'] = edit_menu.addAction('&Annotations...', self.edit_annotations) self.actions['events'] = edit_menu.addAction('&Events...', self.edit_events) edit_menu.addSeparator() self.actions['crop'] = edit_menu.addAction('&Crop data...', self.crop) self.actions['append_data'] = edit_menu.addAction('Appen&d data...', self.append_data) plot_menu = self.menuBar().addMenu('&Plot') icon = 
QIcon(image_path('plot_data.svg')) self.actions['plot_data'] = plot_menu.addAction(icon, '&Data...', self.plot_data) icon = QIcon(image_path('plot_psd.svg')) self.actions['plot_psd'] = plot_menu.addAction(icon, '&Power spectral density...', self.plot_psd) icon = QIcon(image_path('plot_montage.svg')) self.actions['plot_montage'] = plot_menu.addAction(icon, '&Montage...', self.plot_montage) plot_menu.addSeparator() self.actions['plot_ica_components'] = plot_menu.addAction('ICA &components...', self.plot_ica_components) self.actions['plot_ica_sources'] = plot_menu.addAction('ICA &sources...', self.plot_ica_sources) tools_menu = self.menuBar().addMenu('&Tools') icon = QIcon(image_path('filter.svg')) self.actions['filter'] = tools_menu.addAction(icon, '&Filter data...', self.filter_data) icon = QIcon(image_path('find_events.svg')) self.actions['find_events'] = tools_menu.addAction(icon, 'Find &events...', self.find_events) self.actions['events_from_annotations'] = tools_menu.addAction('Create events from annotations', self.events_from_annotations) tools_menu.addSeparator() icon = QIcon(image_path('run_ica.svg')) self.actions['run_ica'] = tools_menu.addAction(icon, 'Run &ICA...', self.run_ica) self.actions['apply_ica'] = tools_menu.addAction('Apply &ICA', self.apply_ica) tools_menu.addSeparator() self.actions['interpolate_bads'] = tools_menu.addAction('Interpolate bad channels...', self.interpolate_bads) tools_menu.addSeparator() icon = QIcon(image_path('epoch_data.svg')) self.actions['epoch_data'] = tools_menu.addAction(icon, 'Create Epochs...', self.epoch_data) view_menu = self.menuBar().addMenu('&View') self.actions['history'] = view_menu.addAction('&History...', self.show_history) self.actions['toolbar'] = view_menu.addAction('&Toolbar', self._toggle_toolbar) self.actions['toolbar'].setCheckable(True) self.actions['statusbar'] = view_menu.addAction('&Statusbar', self._toggle_statusbar) self.actions['statusbar'].setCheckable(True) help_menu = 
self.menuBar().addMenu('&Help') self.actions['about'] = help_menu.addAction('&About', self.show_about) self.actions['about_qt'] = help_menu.addAction('About &Qt', self.show_about_qt) self.always_enabled = ['open_file', 'about', 'about_qt', 'quit', 'toolbar', 'statusbar'] self.toolbar = self.addToolBar('toolbar') self.toolbar.setObjectName('toolbar') self.toolbar.addAction(self.actions['open_file']) self.toolbar.addAction(self.actions['meta_info']) self.toolbar.addSeparator() self.toolbar.addAction(self.actions['chan_props']) self.toolbar.addSeparator() self.toolbar.addAction(self.actions['plot_data']) self.toolbar.addAction(self.actions['plot_psd']) self.toolbar.addAction(self.actions['plot_montage']) self.toolbar.addSeparator() self.toolbar.addAction(self.actions['filter']) self.toolbar.addAction(self.actions['find_events']) self.toolbar.addAction(self.actions['epoch_data']) self.toolbar.addAction(self.actions['run_ica']) self.setUnifiedTitleAndToolBarOnMac(True) if settings['toolbar']: self.toolbar.show() self.actions['toolbar'].setChecked(True) else: self.toolbar.hide() self.actions['toolbar'].setChecked(False) self.names = QStringListModel() self.names.dataChanged.connect(self._update_names) splitter = QSplitter() self.sidebar = QListView() self.sidebar.setFrameStyle(QFrame.NoFrame) self.sidebar.setFocusPolicy(Qt.NoFocus) self.sidebar.setModel(self.names) self.sidebar.clicked.connect(self._update_data) splitter.addWidget(self.sidebar) self.infowidget = InfoWidget() splitter.addWidget(self.infowidget) width = splitter.size().width() splitter.setSizes((int((width * 0.3)), int((width * 0.7)))) self.setCentralWidget(splitter) self.status_label = QLabel() self.statusBar().addPermanentWidget(self.status_label) if settings['statusbar']: self.statusBar().show() self.actions['statusbar'].setChecked(True) else: self.statusBar().hide() self.actions['statusbar'].setChecked(False) self.setAcceptDrops(True) self.data_changed()
Initialize MNELAB main window. Parameters ---------- model : mnelab.model.Model instance The main window needs to connect to a model containing all data sets. This decouples the GUI from the data (model/view).
mnelab/mainwindow.py
__init__
stralu/mnelab
1
python
def __init__(self, model): 'Initialize MNELAB main window.\n\n Parameters\n ----------\n model : mnelab.model.Model instance\n The main window needs to connect to a model containing all data\n sets. This decouples the GUI from the data (model/view).\n ' super().__init__() self.model = model self.setWindowTitle('MNELAB') settings = read_settings() self.recent = settings['recent'] if settings['geometry']: self.restoreGeometry(settings['geometry']) else: self.setGeometry(300, 300, 1000, 750) self.move((QApplication.screens()[0].geometry().center() - self.rect().center())) if settings['state']: self.restoreState(settings['state']) self.actions = {} file_menu = self.menuBar().addMenu('&File') icon = QIcon(image_path('open_file.svg')) self.actions['open_file'] = file_menu.addAction(icon, '&Open...', self.open_data, QKeySequence.Open) self.recent_menu = file_menu.addMenu('Open recent') self.recent_menu.aboutToShow.connect(self._update_recent_menu) self.recent_menu.triggered.connect(self._load_recent) if (not self.recent): self.recent_menu.setEnabled(False) self.actions['close_file'] = file_menu.addAction('&Close', self.model.remove_data, QKeySequence.Close) self.actions['close_all'] = file_menu.addAction('Close all', self.close_all) file_menu.addSeparator() icon = QIcon(image_path('meta_info.svg')) self.actions['meta_info'] = file_menu.addAction(icon, 'Show information...', self.meta_info) file_menu.addSeparator() self.actions['import_bads'] = file_menu.addAction('Import bad channels...', (lambda : self.import_file(model.import_bads, 'Import bad channels', '*.csv'))) self.actions['import_events'] = file_menu.addAction('Import events...', (lambda : self.import_file(model.import_events, 'Import events', '*.csv'))) self.actions['import_annotations'] = file_menu.addAction('Import annotations...', (lambda : self.import_file(model.import_annotations, 'Import annotations', '*.csv'))) self.actions['import_ica'] = file_menu.addAction('Import &ICA...', (lambda : 
self.open_file(model.import_ica, 'Import ICA', '*.fif *.fif.gz'))) file_menu.addSeparator() self.export_menu = file_menu.addMenu('Export data') for (name, ext) in EXPORT_FORMATS.items(): self.actions[('export_data_' + ext)] = self.export_menu.addAction(f'{name} ({ext[1:].upper()})...', partial(self.export_file, model.export_data, 'Export data', ext)) self.actions['export_bads'] = file_menu.addAction('Export &bad channels...', (lambda : self.export_file(model.export_bads, 'Export bad channels', '*.csv'))) self.actions['export_events'] = file_menu.addAction('Export &events...', (lambda : self.export_file(model.export_events, 'Export events', '*.csv'))) self.actions['export_annotations'] = file_menu.addAction('Export &annotations...', (lambda : self.export_file(model.export_annotations, 'Export annotations', '*.csv'))) self.actions['export_ica'] = file_menu.addAction('Export ICA...', (lambda : self.export_file(model.export_ica, 'Export ICA', '*.fif *.fif.gz'))) file_menu.addSeparator() self.actions['quit'] = file_menu.addAction('&Quit', self.close, QKeySequence.Quit) edit_menu = self.menuBar().addMenu('&Edit') self.actions['pick_chans'] = edit_menu.addAction('P&ick channels...', self.pick_channels) icon = QIcon(image_path('chan_props.svg')) self.actions['chan_props'] = edit_menu.addAction(icon, 'Channel &properties...', self.channel_properties) self.actions['set_montage'] = edit_menu.addAction('Set &montage...', self.set_montage) edit_menu.addSeparator() self.actions['set_ref'] = edit_menu.addAction('Set &reference...', self.set_reference) edit_menu.addSeparator() self.actions['annotations'] = edit_menu.addAction('&Annotations...', self.edit_annotations) self.actions['events'] = edit_menu.addAction('&Events...', self.edit_events) edit_menu.addSeparator() self.actions['crop'] = edit_menu.addAction('&Crop data...', self.crop) self.actions['append_data'] = edit_menu.addAction('Appen&d data...', self.append_data) plot_menu = self.menuBar().addMenu('&Plot') icon = 
QIcon(image_path('plot_data.svg')) self.actions['plot_data'] = plot_menu.addAction(icon, '&Data...', self.plot_data) icon = QIcon(image_path('plot_psd.svg')) self.actions['plot_psd'] = plot_menu.addAction(icon, '&Power spectral density...', self.plot_psd) icon = QIcon(image_path('plot_montage.svg')) self.actions['plot_montage'] = plot_menu.addAction(icon, '&Montage...', self.plot_montage) plot_menu.addSeparator() self.actions['plot_ica_components'] = plot_menu.addAction('ICA &components...', self.plot_ica_components) self.actions['plot_ica_sources'] = plot_menu.addAction('ICA &sources...', self.plot_ica_sources) tools_menu = self.menuBar().addMenu('&Tools') icon = QIcon(image_path('filter.svg')) self.actions['filter'] = tools_menu.addAction(icon, '&Filter data...', self.filter_data) icon = QIcon(image_path('find_events.svg')) self.actions['find_events'] = tools_menu.addAction(icon, 'Find &events...', self.find_events) self.actions['events_from_annotations'] = tools_menu.addAction('Create events from annotations', self.events_from_annotations) tools_menu.addSeparator() icon = QIcon(image_path('run_ica.svg')) self.actions['run_ica'] = tools_menu.addAction(icon, 'Run &ICA...', self.run_ica) self.actions['apply_ica'] = tools_menu.addAction('Apply &ICA', self.apply_ica) tools_menu.addSeparator() self.actions['interpolate_bads'] = tools_menu.addAction('Interpolate bad channels...', self.interpolate_bads) tools_menu.addSeparator() icon = QIcon(image_path('epoch_data.svg')) self.actions['epoch_data'] = tools_menu.addAction(icon, 'Create Epochs...', self.epoch_data) view_menu = self.menuBar().addMenu('&View') self.actions['history'] = view_menu.addAction('&History...', self.show_history) self.actions['toolbar'] = view_menu.addAction('&Toolbar', self._toggle_toolbar) self.actions['toolbar'].setCheckable(True) self.actions['statusbar'] = view_menu.addAction('&Statusbar', self._toggle_statusbar) self.actions['statusbar'].setCheckable(True) help_menu = 
self.menuBar().addMenu('&Help') self.actions['about'] = help_menu.addAction('&About', self.show_about) self.actions['about_qt'] = help_menu.addAction('About &Qt', self.show_about_qt) self.always_enabled = ['open_file', 'about', 'about_qt', 'quit', 'toolbar', 'statusbar'] self.toolbar = self.addToolBar('toolbar') self.toolbar.setObjectName('toolbar') self.toolbar.addAction(self.actions['open_file']) self.toolbar.addAction(self.actions['meta_info']) self.toolbar.addSeparator() self.toolbar.addAction(self.actions['chan_props']) self.toolbar.addSeparator() self.toolbar.addAction(self.actions['plot_data']) self.toolbar.addAction(self.actions['plot_psd']) self.toolbar.addAction(self.actions['plot_montage']) self.toolbar.addSeparator() self.toolbar.addAction(self.actions['filter']) self.toolbar.addAction(self.actions['find_events']) self.toolbar.addAction(self.actions['epoch_data']) self.toolbar.addAction(self.actions['run_ica']) self.setUnifiedTitleAndToolBarOnMac(True) if settings['toolbar']: self.toolbar.show() self.actions['toolbar'].setChecked(True) else: self.toolbar.hide() self.actions['toolbar'].setChecked(False) self.names = QStringListModel() self.names.dataChanged.connect(self._update_names) splitter = QSplitter() self.sidebar = QListView() self.sidebar.setFrameStyle(QFrame.NoFrame) self.sidebar.setFocusPolicy(Qt.NoFocus) self.sidebar.setModel(self.names) self.sidebar.clicked.connect(self._update_data) splitter.addWidget(self.sidebar) self.infowidget = InfoWidget() splitter.addWidget(self.infowidget) width = splitter.size().width() splitter.setSizes((int((width * 0.3)), int((width * 0.7)))) self.setCentralWidget(splitter) self.status_label = QLabel() self.statusBar().addPermanentWidget(self.status_label) if settings['statusbar']: self.statusBar().show() self.actions['statusbar'].setChecked(True) else: self.statusBar().hide() self.actions['statusbar'].setChecked(False) self.setAcceptDrops(True) self.data_changed()
def __init__(self, model): 'Initialize MNELAB main window.\n\n Parameters\n ----------\n model : mnelab.model.Model instance\n The main window needs to connect to a model containing all data\n sets. This decouples the GUI from the data (model/view).\n ' super().__init__() self.model = model self.setWindowTitle('MNELAB') settings = read_settings() self.recent = settings['recent'] if settings['geometry']: self.restoreGeometry(settings['geometry']) else: self.setGeometry(300, 300, 1000, 750) self.move((QApplication.screens()[0].geometry().center() - self.rect().center())) if settings['state']: self.restoreState(settings['state']) self.actions = {} file_menu = self.menuBar().addMenu('&File') icon = QIcon(image_path('open_file.svg')) self.actions['open_file'] = file_menu.addAction(icon, '&Open...', self.open_data, QKeySequence.Open) self.recent_menu = file_menu.addMenu('Open recent') self.recent_menu.aboutToShow.connect(self._update_recent_menu) self.recent_menu.triggered.connect(self._load_recent) if (not self.recent): self.recent_menu.setEnabled(False) self.actions['close_file'] = file_menu.addAction('&Close', self.model.remove_data, QKeySequence.Close) self.actions['close_all'] = file_menu.addAction('Close all', self.close_all) file_menu.addSeparator() icon = QIcon(image_path('meta_info.svg')) self.actions['meta_info'] = file_menu.addAction(icon, 'Show information...', self.meta_info) file_menu.addSeparator() self.actions['import_bads'] = file_menu.addAction('Import bad channels...', (lambda : self.import_file(model.import_bads, 'Import bad channels', '*.csv'))) self.actions['import_events'] = file_menu.addAction('Import events...', (lambda : self.import_file(model.import_events, 'Import events', '*.csv'))) self.actions['import_annotations'] = file_menu.addAction('Import annotations...', (lambda : self.import_file(model.import_annotations, 'Import annotations', '*.csv'))) self.actions['import_ica'] = file_menu.addAction('Import &ICA...', (lambda : 
self.open_file(model.import_ica, 'Import ICA', '*.fif *.fif.gz'))) file_menu.addSeparator() self.export_menu = file_menu.addMenu('Export data') for (name, ext) in EXPORT_FORMATS.items(): self.actions[('export_data_' + ext)] = self.export_menu.addAction(f'{name} ({ext[1:].upper()})...', partial(self.export_file, model.export_data, 'Export data', ext)) self.actions['export_bads'] = file_menu.addAction('Export &bad channels...', (lambda : self.export_file(model.export_bads, 'Export bad channels', '*.csv'))) self.actions['export_events'] = file_menu.addAction('Export &events...', (lambda : self.export_file(model.export_events, 'Export events', '*.csv'))) self.actions['export_annotations'] = file_menu.addAction('Export &annotations...', (lambda : self.export_file(model.export_annotations, 'Export annotations', '*.csv'))) self.actions['export_ica'] = file_menu.addAction('Export ICA...', (lambda : self.export_file(model.export_ica, 'Export ICA', '*.fif *.fif.gz'))) file_menu.addSeparator() self.actions['quit'] = file_menu.addAction('&Quit', self.close, QKeySequence.Quit) edit_menu = self.menuBar().addMenu('&Edit') self.actions['pick_chans'] = edit_menu.addAction('P&ick channels...', self.pick_channels) icon = QIcon(image_path('chan_props.svg')) self.actions['chan_props'] = edit_menu.addAction(icon, 'Channel &properties...', self.channel_properties) self.actions['set_montage'] = edit_menu.addAction('Set &montage...', self.set_montage) edit_menu.addSeparator() self.actions['set_ref'] = edit_menu.addAction('Set &reference...', self.set_reference) edit_menu.addSeparator() self.actions['annotations'] = edit_menu.addAction('&Annotations...', self.edit_annotations) self.actions['events'] = edit_menu.addAction('&Events...', self.edit_events) edit_menu.addSeparator() self.actions['crop'] = edit_menu.addAction('&Crop data...', self.crop) self.actions['append_data'] = edit_menu.addAction('Appen&d data...', self.append_data) plot_menu = self.menuBar().addMenu('&Plot') icon = 
QIcon(image_path('plot_data.svg')) self.actions['plot_data'] = plot_menu.addAction(icon, '&Data...', self.plot_data) icon = QIcon(image_path('plot_psd.svg')) self.actions['plot_psd'] = plot_menu.addAction(icon, '&Power spectral density...', self.plot_psd) icon = QIcon(image_path('plot_montage.svg')) self.actions['plot_montage'] = plot_menu.addAction(icon, '&Montage...', self.plot_montage) plot_menu.addSeparator() self.actions['plot_ica_components'] = plot_menu.addAction('ICA &components...', self.plot_ica_components) self.actions['plot_ica_sources'] = plot_menu.addAction('ICA &sources...', self.plot_ica_sources) tools_menu = self.menuBar().addMenu('&Tools') icon = QIcon(image_path('filter.svg')) self.actions['filter'] = tools_menu.addAction(icon, '&Filter data...', self.filter_data) icon = QIcon(image_path('find_events.svg')) self.actions['find_events'] = tools_menu.addAction(icon, 'Find &events...', self.find_events) self.actions['events_from_annotations'] = tools_menu.addAction('Create events from annotations', self.events_from_annotations) tools_menu.addSeparator() icon = QIcon(image_path('run_ica.svg')) self.actions['run_ica'] = tools_menu.addAction(icon, 'Run &ICA...', self.run_ica) self.actions['apply_ica'] = tools_menu.addAction('Apply &ICA', self.apply_ica) tools_menu.addSeparator() self.actions['interpolate_bads'] = tools_menu.addAction('Interpolate bad channels...', self.interpolate_bads) tools_menu.addSeparator() icon = QIcon(image_path('epoch_data.svg')) self.actions['epoch_data'] = tools_menu.addAction(icon, 'Create Epochs...', self.epoch_data) view_menu = self.menuBar().addMenu('&View') self.actions['history'] = view_menu.addAction('&History...', self.show_history) self.actions['toolbar'] = view_menu.addAction('&Toolbar', self._toggle_toolbar) self.actions['toolbar'].setCheckable(True) self.actions['statusbar'] = view_menu.addAction('&Statusbar', self._toggle_statusbar) self.actions['statusbar'].setCheckable(True) help_menu = 
self.menuBar().addMenu('&Help') self.actions['about'] = help_menu.addAction('&About', self.show_about) self.actions['about_qt'] = help_menu.addAction('About &Qt', self.show_about_qt) self.always_enabled = ['open_file', 'about', 'about_qt', 'quit', 'toolbar', 'statusbar'] self.toolbar = self.addToolBar('toolbar') self.toolbar.setObjectName('toolbar') self.toolbar.addAction(self.actions['open_file']) self.toolbar.addAction(self.actions['meta_info']) self.toolbar.addSeparator() self.toolbar.addAction(self.actions['chan_props']) self.toolbar.addSeparator() self.toolbar.addAction(self.actions['plot_data']) self.toolbar.addAction(self.actions['plot_psd']) self.toolbar.addAction(self.actions['plot_montage']) self.toolbar.addSeparator() self.toolbar.addAction(self.actions['filter']) self.toolbar.addAction(self.actions['find_events']) self.toolbar.addAction(self.actions['epoch_data']) self.toolbar.addAction(self.actions['run_ica']) self.setUnifiedTitleAndToolBarOnMac(True) if settings['toolbar']: self.toolbar.show() self.actions['toolbar'].setChecked(True) else: self.toolbar.hide() self.actions['toolbar'].setChecked(False) self.names = QStringListModel() self.names.dataChanged.connect(self._update_names) splitter = QSplitter() self.sidebar = QListView() self.sidebar.setFrameStyle(QFrame.NoFrame) self.sidebar.setFocusPolicy(Qt.NoFocus) self.sidebar.setModel(self.names) self.sidebar.clicked.connect(self._update_data) splitter.addWidget(self.sidebar) self.infowidget = InfoWidget() splitter.addWidget(self.infowidget) width = splitter.size().width() splitter.setSizes((int((width * 0.3)), int((width * 0.7)))) self.setCentralWidget(splitter) self.status_label = QLabel() self.statusBar().addPermanentWidget(self.status_label) if settings['statusbar']: self.statusBar().show() self.actions['statusbar'].setChecked(True) else: self.statusBar().hide() self.actions['statusbar'].setChecked(False) self.setAcceptDrops(True) self.data_changed()<|docstring|>Initialize MNELAB main window. 
Parameters ---------- model : mnelab.model.Model instance The main window needs to connect to a model containing all data sets. This decouples the GUI from the data (model/view).<|endoftext|>
e0c2fa8630205ba01fdcc5c30b59a469b788b94dd34392261e4221b2058df430
def open_data(self, fname=None): 'Open raw file.' if (fname is None): fname = QFileDialog.getOpenFileName(self, 'Open raw', filter='*')[0] if fname: if (not (isfile(fname) or isdir(fname))): self._remove_recent(fname) QMessageBox.critical(self, 'File does not exist', f'File {fname} does not exist anymore.') return (name, ext, ftype) = split_fname(fname, IMPORT_FORMATS) if (ext in ['.xdf', '.xdfz', '.xdf.gz']): streams = parse_chunks(parse_xdf(fname)) (rows, disabled) = ([], []) for (idx, s) in enumerate(streams): rows.append([s['stream_id'], s['name'], s['type'], s['channel_count'], s['channel_format'], s['nominal_srate']]) is_marker = ((s['nominal_srate'] == 0) or (s['channel_format'] == 'string')) if is_marker: disabled.append(idx) enabled = list((set(range(len(rows))) - set(disabled))) if enabled: selected = enabled[0] else: selected = None dialog = XDFStreamsDialog(self, rows, selected=selected, disabled=disabled) if dialog.exec_(): row = dialog.view.selectionModel().selectedRows()[0].row() stream_id = dialog.model.data(dialog.model.index(row, 0)) self.model.load(fname, stream_id=stream_id) else: try: self.model.load(fname) except FileNotFoundError as e: QMessageBox.critical(self, 'File not found', str(e)) except UnknownFileTypeError as e: QMessageBox.critical(self, 'Unknown file type', str(e))
Open raw file.
mnelab/mainwindow.py
open_data
stralu/mnelab
1
python
def open_data(self, fname=None): if (fname is None): fname = QFileDialog.getOpenFileName(self, 'Open raw', filter='*')[0] if fname: if (not (isfile(fname) or isdir(fname))): self._remove_recent(fname) QMessageBox.critical(self, 'File does not exist', f'File {fname} does not exist anymore.') return (name, ext, ftype) = split_fname(fname, IMPORT_FORMATS) if (ext in ['.xdf', '.xdfz', '.xdf.gz']): streams = parse_chunks(parse_xdf(fname)) (rows, disabled) = ([], []) for (idx, s) in enumerate(streams): rows.append([s['stream_id'], s['name'], s['type'], s['channel_count'], s['channel_format'], s['nominal_srate']]) is_marker = ((s['nominal_srate'] == 0) or (s['channel_format'] == 'string')) if is_marker: disabled.append(idx) enabled = list((set(range(len(rows))) - set(disabled))) if enabled: selected = enabled[0] else: selected = None dialog = XDFStreamsDialog(self, rows, selected=selected, disabled=disabled) if dialog.exec_(): row = dialog.view.selectionModel().selectedRows()[0].row() stream_id = dialog.model.data(dialog.model.index(row, 0)) self.model.load(fname, stream_id=stream_id) else: try: self.model.load(fname) except FileNotFoundError as e: QMessageBox.critical(self, 'File not found', str(e)) except UnknownFileTypeError as e: QMessageBox.critical(self, 'Unknown file type', str(e))
def open_data(self, fname=None): if (fname is None): fname = QFileDialog.getOpenFileName(self, 'Open raw', filter='*')[0] if fname: if (not (isfile(fname) or isdir(fname))): self._remove_recent(fname) QMessageBox.critical(self, 'File does not exist', f'File {fname} does not exist anymore.') return (name, ext, ftype) = split_fname(fname, IMPORT_FORMATS) if (ext in ['.xdf', '.xdfz', '.xdf.gz']): streams = parse_chunks(parse_xdf(fname)) (rows, disabled) = ([], []) for (idx, s) in enumerate(streams): rows.append([s['stream_id'], s['name'], s['type'], s['channel_count'], s['channel_format'], s['nominal_srate']]) is_marker = ((s['nominal_srate'] == 0) or (s['channel_format'] == 'string')) if is_marker: disabled.append(idx) enabled = list((set(range(len(rows))) - set(disabled))) if enabled: selected = enabled[0] else: selected = None dialog = XDFStreamsDialog(self, rows, selected=selected, disabled=disabled) if dialog.exec_(): row = dialog.view.selectionModel().selectedRows()[0].row() stream_id = dialog.model.data(dialog.model.index(row, 0)) self.model.load(fname, stream_id=stream_id) else: try: self.model.load(fname) except FileNotFoundError as e: QMessageBox.critical(self, 'File not found', str(e)) except UnknownFileTypeError as e: QMessageBox.critical(self, 'Unknown file type', str(e))<|docstring|>Open raw file.<|endoftext|>
c06c4ef19b87d1f39a935c733906610aa60818bb9ee69cec1a02286dbf5e003b
def open_file(self, f, text, ffilter='*'): 'Open file.' fname = QFileDialog.getOpenFileName(self, text, filter=ffilter)[0] if fname: f(fname)
Open file.
mnelab/mainwindow.py
open_file
stralu/mnelab
1
python
def open_file(self, f, text, ffilter='*'): fname = QFileDialog.getOpenFileName(self, text, filter=ffilter)[0] if fname: f(fname)
def open_file(self, f, text, ffilter='*'): fname = QFileDialog.getOpenFileName(self, text, filter=ffilter)[0] if fname: f(fname)<|docstring|>Open file.<|endoftext|>
7abd356a16aed83307dbcf09ee5ebcc1e23ccda538bfc18cee25ac6d14cc01e6
def export_file(self, f, text, ffilter='*'): 'Export to file.' fname = QFileDialog.getSaveFileName(self, text, filter=ffilter)[0] if fname: f(fname, ffilter)
Export to file.
mnelab/mainwindow.py
export_file
stralu/mnelab
1
python
def export_file(self, f, text, ffilter='*'): fname = QFileDialog.getSaveFileName(self, text, filter=ffilter)[0] if fname: f(fname, ffilter)
def export_file(self, f, text, ffilter='*'): fname = QFileDialog.getSaveFileName(self, text, filter=ffilter)[0] if fname: f(fname, ffilter)<|docstring|>Export to file.<|endoftext|>
531fa23ba5a6ae1f24a437562ed6b769541a790788e2123908543ce1ecd2531a
def import_file(self, f, text, ffilter='*'): 'Import file.' fname = QFileDialog.getOpenFileName(self, text, filter=ffilter)[0] if fname: try: f(fname) except LabelsNotFoundError as e: QMessageBox.critical(self, 'Channel labels not found', str(e)) except InvalidAnnotationsError as e: QMessageBox.critical(self, 'Invalid annotations', str(e))
Import file.
mnelab/mainwindow.py
import_file
stralu/mnelab
1
python
def import_file(self, f, text, ffilter='*'): fname = QFileDialog.getOpenFileName(self, text, filter=ffilter)[0] if fname: try: f(fname) except LabelsNotFoundError as e: QMessageBox.critical(self, 'Channel labels not found', str(e)) except InvalidAnnotationsError as e: QMessageBox.critical(self, 'Invalid annotations', str(e))
def import_file(self, f, text, ffilter='*'): fname = QFileDialog.getOpenFileName(self, text, filter=ffilter)[0] if fname: try: f(fname) except LabelsNotFoundError as e: QMessageBox.critical(self, 'Channel labels not found', str(e)) except InvalidAnnotationsError as e: QMessageBox.critical(self, 'Invalid annotations', str(e))<|docstring|>Import file.<|endoftext|>
f4dd151aa731562a1e16f5412ec6876bb6f32079b1ad8e44463a4f510dd4b89c
def close_all(self): 'Close all currently open data sets.' msg = QMessageBox.question(self, 'Close all data sets', 'Close all data sets?') if (msg == QMessageBox.Yes): while (len(self.model) > 0): self.model.remove_data()
Close all currently open data sets.
mnelab/mainwindow.py
close_all
stralu/mnelab
1
python
def close_all(self): msg = QMessageBox.question(self, 'Close all data sets', 'Close all data sets?') if (msg == QMessageBox.Yes): while (len(self.model) > 0): self.model.remove_data()
def close_all(self): msg = QMessageBox.question(self, 'Close all data sets', 'Close all data sets?') if (msg == QMessageBox.Yes): while (len(self.model) > 0): self.model.remove_data()<|docstring|>Close all currently open data sets.<|endoftext|>
4564efa6cc8db82abff5379d7903f2aeff99cf1652e748df04747890c5285d0e
def pick_channels(self): 'Pick channels in current data set.' channels = self.model.current['data'].info['ch_names'] dialog = PickChannelsDialog(self, channels, selected=channels) if dialog.exec_(): picks = [item.data(0) for item in dialog.channels.selectedItems()] drops = (set(channels) - set(picks)) if drops: self.auto_duplicate() self.model.drop_channels(drops) self.model.history.append(f'raw.drop({drops})')
Pick channels in current data set.
mnelab/mainwindow.py
pick_channels
stralu/mnelab
1
python
def pick_channels(self): channels = self.model.current['data'].info['ch_names'] dialog = PickChannelsDialog(self, channels, selected=channels) if dialog.exec_(): picks = [item.data(0) for item in dialog.channels.selectedItems()] drops = (set(channels) - set(picks)) if drops: self.auto_duplicate() self.model.drop_channels(drops) self.model.history.append(f'raw.drop({drops})')
def pick_channels(self): channels = self.model.current['data'].info['ch_names'] dialog = PickChannelsDialog(self, channels, selected=channels) if dialog.exec_(): picks = [item.data(0) for item in dialog.channels.selectedItems()] drops = (set(channels) - set(picks)) if drops: self.auto_duplicate() self.model.drop_channels(drops) self.model.history.append(f'raw.drop({drops})')<|docstring|>Pick channels in current data set.<|endoftext|>
e8dcbd40a10ba2351000f33e3dc48520976511587510158c431f0e4ec346f14a
def channel_properties(self): 'Show channel properties dialog.' info = self.model.current['data'].info dialog = ChannelPropertiesDialog(self, info) if dialog.exec_(): dialog.model.sort(0) bads = [] renamed = {} types = {} for i in range(dialog.model.rowCount()): new_label = dialog.model.item(i, 1).data(Qt.DisplayRole) old_label = info['ch_names'][i] if (new_label != old_label): renamed[old_label] = new_label new_type = dialog.model.item(i, 2).data(Qt.DisplayRole).lower() old_type = channel_type(info, i).lower() if (new_type != old_type): types[new_label] = new_type if (dialog.model.item(i, 3).checkState() == Qt.Checked): bads.append(info['ch_names'][i]) self.model.set_channel_properties(bads, renamed, types)
Show channel properties dialog.
mnelab/mainwindow.py
channel_properties
stralu/mnelab
1
python
def channel_properties(self): info = self.model.current['data'].info dialog = ChannelPropertiesDialog(self, info) if dialog.exec_(): dialog.model.sort(0) bads = [] renamed = {} types = {} for i in range(dialog.model.rowCount()): new_label = dialog.model.item(i, 1).data(Qt.DisplayRole) old_label = info['ch_names'][i] if (new_label != old_label): renamed[old_label] = new_label new_type = dialog.model.item(i, 2).data(Qt.DisplayRole).lower() old_type = channel_type(info, i).lower() if (new_type != old_type): types[new_label] = new_type if (dialog.model.item(i, 3).checkState() == Qt.Checked): bads.append(info['ch_names'][i]) self.model.set_channel_properties(bads, renamed, types)
def channel_properties(self): info = self.model.current['data'].info dialog = ChannelPropertiesDialog(self, info) if dialog.exec_(): dialog.model.sort(0) bads = [] renamed = {} types = {} for i in range(dialog.model.rowCount()): new_label = dialog.model.item(i, 1).data(Qt.DisplayRole) old_label = info['ch_names'][i] if (new_label != old_label): renamed[old_label] = new_label new_type = dialog.model.item(i, 2).data(Qt.DisplayRole).lower() old_type = channel_type(info, i).lower() if (new_type != old_type): types[new_label] = new_type if (dialog.model.item(i, 3).checkState() == Qt.Checked): bads.append(info['ch_names'][i]) self.model.set_channel_properties(bads, renamed, types)<|docstring|>Show channel properties dialog.<|endoftext|>
7d559400c569bab8aa3769ac0e071e3036f942ee876dc04e8075281a753aca2c
def set_montage(self): 'Set montage.' montages = mne.channels.get_builtin_montages() dialog = MontageDialog(self, montages, selected=self.model.current['montage']) if dialog.exec_(): name = dialog.montages.selectedItems()[0].data(0) montage = mne.channels.make_standard_montage(name) ch_names = self.model.current['data'].info['ch_names'] if (set(ch_names) & set(montage.ch_names)): self.model.set_montage(name) else: QMessageBox.critical(self, 'No matching channel names', 'Channel names defined in the montage do not match any channel name in the data.')
Set montage.
mnelab/mainwindow.py
set_montage
stralu/mnelab
1
python
def set_montage(self): montages = mne.channels.get_builtin_montages() dialog = MontageDialog(self, montages, selected=self.model.current['montage']) if dialog.exec_(): name = dialog.montages.selectedItems()[0].data(0) montage = mne.channels.make_standard_montage(name) ch_names = self.model.current['data'].info['ch_names'] if (set(ch_names) & set(montage.ch_names)): self.model.set_montage(name) else: QMessageBox.critical(self, 'No matching channel names', 'Channel names defined in the montage do not match any channel name in the data.')
def set_montage(self): montages = mne.channels.get_builtin_montages() dialog = MontageDialog(self, montages, selected=self.model.current['montage']) if dialog.exec_(): name = dialog.montages.selectedItems()[0].data(0) montage = mne.channels.make_standard_montage(name) ch_names = self.model.current['data'].info['ch_names'] if (set(ch_names) & set(montage.ch_names)): self.model.set_montage(name) else: QMessageBox.critical(self, 'No matching channel names', 'Channel names defined in the montage do not match any channel name in the data.')<|docstring|>Set montage.<|endoftext|>
0b4f09c21976603b881d21c784a2ee3ab9e334c51b4598aacc30c4e47cef5283
def crop(self): 'Crop data.' fs = self.model.current['data'].info['sfreq'] length = (self.model.current['data'].n_times / fs) dialog = CropDialog(self, 0, length) if dialog.exec_(): self.auto_duplicate() self.model.crop((dialog.start or 0), dialog.stop)
Crop data.
mnelab/mainwindow.py
crop
stralu/mnelab
1
python
def crop(self): fs = self.model.current['data'].info['sfreq'] length = (self.model.current['data'].n_times / fs) dialog = CropDialog(self, 0, length) if dialog.exec_(): self.auto_duplicate() self.model.crop((dialog.start or 0), dialog.stop)
def crop(self): fs = self.model.current['data'].info['sfreq'] length = (self.model.current['data'].n_times / fs) dialog = CropDialog(self, 0, length) if dialog.exec_(): self.auto_duplicate() self.model.crop((dialog.start or 0), dialog.stop)<|docstring|>Crop data.<|endoftext|>
35d98a87fd39029a71fe2b241cbc829dff4d7fb0b9a8ce9a576137b8ae7bd5f1
def append_data(self): 'Concatenate raw data objects to current one.' compatibles = self.model.get_compatibles() dialog = AppendDialog(self, compatibles) if dialog.exec_(): self.auto_duplicate() self.model.append_data(dialog.names)
Concatenate raw data objects to current one.
mnelab/mainwindow.py
append_data
stralu/mnelab
1
python
def append_data(self): compatibles = self.model.get_compatibles() dialog = AppendDialog(self, compatibles) if dialog.exec_(): self.auto_duplicate() self.model.append_data(dialog.names)
def append_data(self): compatibles = self.model.get_compatibles() dialog = AppendDialog(self, compatibles) if dialog.exec_(): self.auto_duplicate() self.model.append_data(dialog.names)<|docstring|>Concatenate raw data objects to current one.<|endoftext|>
9e3f2844b29ab707709235eb0ee8979010036d632798d19cb7b7f734f60a6d99
def plot_data(self): 'Plot data.' self.bads = self.model.current['data'].info['bads'] events = self.model.current['events'] nchan = self.model.current['data'].info['nchan'] fig = self.model.current['data'].plot(events=events, n_channels=nchan, title=self.model.current['name'], scalings='auto', show=False) if (events is not None): hist = f'data.plot(events=events, n_channels={nchan})' else: hist = f'data.plot(n_channels={nchan})' self.model.history.append(hist) win = fig.canvas.manager.window win.setWindowTitle(self.model.current['name']) win.findChild(QStatusBar).hide() win.installEventFilter(self) try: fig._mne_params['close_key'] = None except AttributeError: pass fig.show()
Plot data.
mnelab/mainwindow.py
plot_data
stralu/mnelab
1
python
def plot_data(self): self.bads = self.model.current['data'].info['bads'] events = self.model.current['events'] nchan = self.model.current['data'].info['nchan'] fig = self.model.current['data'].plot(events=events, n_channels=nchan, title=self.model.current['name'], scalings='auto', show=False) if (events is not None): hist = f'data.plot(events=events, n_channels={nchan})' else: hist = f'data.plot(n_channels={nchan})' self.model.history.append(hist) win = fig.canvas.manager.window win.setWindowTitle(self.model.current['name']) win.findChild(QStatusBar).hide() win.installEventFilter(self) try: fig._mne_params['close_key'] = None except AttributeError: pass fig.show()
def plot_data(self): self.bads = self.model.current['data'].info['bads'] events = self.model.current['events'] nchan = self.model.current['data'].info['nchan'] fig = self.model.current['data'].plot(events=events, n_channels=nchan, title=self.model.current['name'], scalings='auto', show=False) if (events is not None): hist = f'data.plot(events=events, n_channels={nchan})' else: hist = f'data.plot(n_channels={nchan})' self.model.history.append(hist) win = fig.canvas.manager.window win.setWindowTitle(self.model.current['name']) win.findChild(QStatusBar).hide() win.installEventFilter(self) try: fig._mne_params['close_key'] = None except AttributeError: pass fig.show()<|docstring|>Plot data.<|endoftext|>
b8bb8083c2a914de78255a55f4e498336afa44b1e6d4455e932e62e1dc236299
def plot_psd(self): 'Plot power spectral density (PSD).' kwds = {} if (self.model.current['dtype'] == 'raw'): kwds.update({'average': False, 'spatial_colors': False}) fig = self.model.current['data'].plot_psd(show=False, **kwds) if kwds: tmp = ', '.join((f'{key}={value}' for (key, value) in kwds.items())) hist = f'data.plot_psd({tmp})' else: hist = 'data.plot_psd()' self.model.history.append(hist) win = fig.canvas.manager.window win.setWindowTitle('Power spectral density') fig.show()
Plot power spectral density (PSD).
mnelab/mainwindow.py
plot_psd
stralu/mnelab
1
python
def plot_psd(self): kwds = {} if (self.model.current['dtype'] == 'raw'): kwds.update({'average': False, 'spatial_colors': False}) fig = self.model.current['data'].plot_psd(show=False, **kwds) if kwds: tmp = ', '.join((f'{key}={value}' for (key, value) in kwds.items())) hist = f'data.plot_psd({tmp})' else: hist = 'data.plot_psd()' self.model.history.append(hist) win = fig.canvas.manager.window win.setWindowTitle('Power spectral density') fig.show()
def plot_psd(self): kwds = {} if (self.model.current['dtype'] == 'raw'): kwds.update({'average': False, 'spatial_colors': False}) fig = self.model.current['data'].plot_psd(show=False, **kwds) if kwds: tmp = ', '.join((f'{key}={value}' for (key, value) in kwds.items())) hist = f'data.plot_psd({tmp})' else: hist = 'data.plot_psd()' self.model.history.append(hist) win = fig.canvas.manager.window win.setWindowTitle('Power spectral density') fig.show()<|docstring|>Plot power spectral density (PSD).<|endoftext|>
39464b9a6b369d49b883b1ea9de075571815352b29eebf248555bc4a330ec194
def plot_montage(self): 'Plot current montage.' fig = self.model.current['data'].plot_sensors(show_names=True, show=False) win = fig.canvas.manager.window win.setWindowTitle('Montage') win.findChild(QStatusBar).hide() win.findChild(QToolBar).hide() fig.show()
Plot current montage.
mnelab/mainwindow.py
plot_montage
stralu/mnelab
1
python
def plot_montage(self): fig = self.model.current['data'].plot_sensors(show_names=True, show=False) win = fig.canvas.manager.window win.setWindowTitle('Montage') win.findChild(QStatusBar).hide() win.findChild(QToolBar).hide() fig.show()
def plot_montage(self): fig = self.model.current['data'].plot_sensors(show_names=True, show=False) win = fig.canvas.manager.window win.setWindowTitle('Montage') win.findChild(QStatusBar).hide() win.findChild(QToolBar).hide() fig.show()<|docstring|>Plot current montage.<|endoftext|>
916e43c261d8a019f33d2c73a2c247d5d660e852ca3a6457663abdee086653a3
def run_ica(self): 'Run ICA calculation.' dialog = RunICADialog(self, self.model.current['data'].info['nchan'], have['picard'], have['sklearn']) if dialog.exec_(): calc = CalcDialog(self, 'Calculating ICA', 'Calculating ICA.') method = dialog.method.currentText() exclude_bad_segments = dialog.exclude_bad_segments.isChecked() fit_params = {} if (not dialog.extended.isHidden()): fit_params['extended'] = dialog.extended.isChecked() if (not dialog.ortho.isHidden()): fit_params['ortho'] = dialog.ortho.isChecked() ica = mne.preprocessing.ICA(method=dialog.methods[method], fit_params=fit_params) self.model.history.append(f'ica = mne.preprocessing.ICA(method={dialog.methods[method]}, fit_params={fit_params})') kwds = {'reject_by_annotation': exclude_bad_segments} pool = pebble.ProcessPool(max_workers=1) process = pool.schedule(function=ica.fit, args=(self.model.current['data'],), kwargs=kwds) process.add_done_callback((lambda x: calc.accept())) pool.close() if (not calc.exec_()): pool.stop() pool.join() else: self.model.current['ica'] = process.result() self.model.history.append(f'ica.fit(inst=raw, reject_by_annotation={exclude_bad_segments})') self.data_changed() pool.join()
Run ICA calculation.
mnelab/mainwindow.py
run_ica
stralu/mnelab
1
python
def run_ica(self): dialog = RunICADialog(self, self.model.current['data'].info['nchan'], have['picard'], have['sklearn']) if dialog.exec_(): calc = CalcDialog(self, 'Calculating ICA', 'Calculating ICA.') method = dialog.method.currentText() exclude_bad_segments = dialog.exclude_bad_segments.isChecked() fit_params = {} if (not dialog.extended.isHidden()): fit_params['extended'] = dialog.extended.isChecked() if (not dialog.ortho.isHidden()): fit_params['ortho'] = dialog.ortho.isChecked() ica = mne.preprocessing.ICA(method=dialog.methods[method], fit_params=fit_params) self.model.history.append(f'ica = mne.preprocessing.ICA(method={dialog.methods[method]}, fit_params={fit_params})') kwds = {'reject_by_annotation': exclude_bad_segments} pool = pebble.ProcessPool(max_workers=1) process = pool.schedule(function=ica.fit, args=(self.model.current['data'],), kwargs=kwds) process.add_done_callback((lambda x: calc.accept())) pool.close() if (not calc.exec_()): pool.stop() pool.join() else: self.model.current['ica'] = process.result() self.model.history.append(f'ica.fit(inst=raw, reject_by_annotation={exclude_bad_segments})') self.data_changed() pool.join()
def run_ica(self): dialog = RunICADialog(self, self.model.current['data'].info['nchan'], have['picard'], have['sklearn']) if dialog.exec_(): calc = CalcDialog(self, 'Calculating ICA', 'Calculating ICA.') method = dialog.method.currentText() exclude_bad_segments = dialog.exclude_bad_segments.isChecked() fit_params = {} if (not dialog.extended.isHidden()): fit_params['extended'] = dialog.extended.isChecked() if (not dialog.ortho.isHidden()): fit_params['ortho'] = dialog.ortho.isChecked() ica = mne.preprocessing.ICA(method=dialog.methods[method], fit_params=fit_params) self.model.history.append(f'ica = mne.preprocessing.ICA(method={dialog.methods[method]}, fit_params={fit_params})') kwds = {'reject_by_annotation': exclude_bad_segments} pool = pebble.ProcessPool(max_workers=1) process = pool.schedule(function=ica.fit, args=(self.model.current['data'],), kwargs=kwds) process.add_done_callback((lambda x: calc.accept())) pool.close() if (not calc.exec_()): pool.stop() pool.join() else: self.model.current['ica'] = process.result() self.model.history.append(f'ica.fit(inst=raw, reject_by_annotation={exclude_bad_segments})') self.data_changed() pool.join()<|docstring|>Run ICA calculation.<|endoftext|>
5e9544471231f42aa9b7ed67cb047735756923ac4ef5271e8049a80dd6e93d90
def apply_ica(self): 'Apply current fitted ICA.' self.auto_duplicate() self.model.apply_ica()
Apply current fitted ICA.
mnelab/mainwindow.py
apply_ica
stralu/mnelab
1
python
def apply_ica(self): self.auto_duplicate() self.model.apply_ica()
def apply_ica(self): self.auto_duplicate() self.model.apply_ica()<|docstring|>Apply current fitted ICA.<|endoftext|>
799b41c897530e6ad4cc50625c90332933da5a5d1f161917e2eb9f9cfa491625
def interpolate_bads(self): 'Interpolate bad channels' dialog = InterpolateBadsDialog(self) if dialog.exec_(): duplicated = self.auto_duplicate() try: self.model.interpolate_bads(dialog.reset_bads, dialog.mode, dialog.origin) except ValueError as e: if duplicated: self.model.remove_data() self.model.index -= 1 self.data_changed() msgbox = ErrorMessageBox(self, 'Could not interpolate bad channels', str(e), traceback.format_exc()) msgbox.show()
Interpolate bad channels
mnelab/mainwindow.py
interpolate_bads
stralu/mnelab
1
python
def interpolate_bads(self): dialog = InterpolateBadsDialog(self) if dialog.exec_(): duplicated = self.auto_duplicate() try: self.model.interpolate_bads(dialog.reset_bads, dialog.mode, dialog.origin) except ValueError as e: if duplicated: self.model.remove_data() self.model.index -= 1 self.data_changed() msgbox = ErrorMessageBox(self, 'Could not interpolate bad channels', str(e), traceback.format_exc()) msgbox.show()
def interpolate_bads(self): dialog = InterpolateBadsDialog(self) if dialog.exec_(): duplicated = self.auto_duplicate() try: self.model.interpolate_bads(dialog.reset_bads, dialog.mode, dialog.origin) except ValueError as e: if duplicated: self.model.remove_data() self.model.index -= 1 self.data_changed() msgbox = ErrorMessageBox(self, 'Could not interpolate bad channels', str(e), traceback.format_exc()) msgbox.show()<|docstring|>Interpolate bad channels<|endoftext|>
4aaefc4066c69428437bbfc2797507d5e278726385c1b50ad40cd02a97968bb6
def filter_data(self): 'Filter data.' dialog = FilterDialog(self) if dialog.exec_(): self.auto_duplicate() self.model.filter(dialog.low, dialog.high)
Filter data.
mnelab/mainwindow.py
filter_data
stralu/mnelab
1
python
def filter_data(self): dialog = FilterDialog(self) if dialog.exec_(): self.auto_duplicate() self.model.filter(dialog.low, dialog.high)
def filter_data(self): dialog = FilterDialog(self) if dialog.exec_(): self.auto_duplicate() self.model.filter(dialog.low, dialog.high)<|docstring|>Filter data.<|endoftext|>
ce0aa16338d2dbb795c416fbab3aa19e8dfb2a35a907ef8f6ba9ffa07873535b
def epoch_data(self): 'Epoch raw data.' dialog = EpochDialog(self, self.model.current['events']) if dialog.exec_(): events = [int(item.text()) for item in dialog.events.selectedItems()] tmin = dialog.tmin.value() tmax = dialog.tmax.value() if dialog.baseline.isChecked(): baseline = (dialog.a.value(), dialog.b.value()) else: baseline = None duplicated = self.auto_duplicate() try: self.model.epoch_data(events, tmin, tmax, baseline) except ValueError as e: if duplicated: self.model.remove_data() self.model.index -= 1 self.data_changed() msgbox = ErrorMessageBox(self, 'Could not create epochs', str(e), traceback.format_exc()) msgbox.show()
Epoch raw data.
mnelab/mainwindow.py
epoch_data
stralu/mnelab
1
python
def epoch_data(self): dialog = EpochDialog(self, self.model.current['events']) if dialog.exec_(): events = [int(item.text()) for item in dialog.events.selectedItems()] tmin = dialog.tmin.value() tmax = dialog.tmax.value() if dialog.baseline.isChecked(): baseline = (dialog.a.value(), dialog.b.value()) else: baseline = None duplicated = self.auto_duplicate() try: self.model.epoch_data(events, tmin, tmax, baseline) except ValueError as e: if duplicated: self.model.remove_data() self.model.index -= 1 self.data_changed() msgbox = ErrorMessageBox(self, 'Could not create epochs', str(e), traceback.format_exc()) msgbox.show()
def epoch_data(self): dialog = EpochDialog(self, self.model.current['events']) if dialog.exec_(): events = [int(item.text()) for item in dialog.events.selectedItems()] tmin = dialog.tmin.value() tmax = dialog.tmax.value() if dialog.baseline.isChecked(): baseline = (dialog.a.value(), dialog.b.value()) else: baseline = None duplicated = self.auto_duplicate() try: self.model.epoch_data(events, tmin, tmax, baseline) except ValueError as e: if duplicated: self.model.remove_data() self.model.index -= 1 self.data_changed() msgbox = ErrorMessageBox(self, 'Could not create epochs', str(e), traceback.format_exc()) msgbox.show()<|docstring|>Epoch raw data.<|endoftext|>
78012d283818451bc5e0922b99c7fd0a8634c502be54f4a65a8e957ced00b5e9
def set_reference(self): 'Set reference.' dialog = ReferenceDialog(self) if dialog.exec_(): self.auto_duplicate() if dialog.average.isChecked(): self.model.set_reference('average') else: ref = [c.strip() for c in dialog.channellist.text().split(',')] self.model.set_reference(ref)
Set reference.
mnelab/mainwindow.py
set_reference
stralu/mnelab
1
python
def set_reference(self): dialog = ReferenceDialog(self) if dialog.exec_(): self.auto_duplicate() if dialog.average.isChecked(): self.model.set_reference('average') else: ref = [c.strip() for c in dialog.channellist.text().split(',')] self.model.set_reference(ref)
def set_reference(self): dialog = ReferenceDialog(self) if dialog.exec_(): self.auto_duplicate() if dialog.average.isChecked(): self.model.set_reference('average') else: ref = [c.strip() for c in dialog.channellist.text().split(',')] self.model.set_reference(ref)<|docstring|>Set reference.<|endoftext|>
79a3ce0f1843128308163ddcc374b9159a52341301ac2bc8b8ef4f163ec69aed
def show_history(self): 'Show history.' dialog = HistoryDialog(self, '\n'.join(self.model.history)) dialog.exec_()
Show history.
mnelab/mainwindow.py
show_history
stralu/mnelab
1
python
def show_history(self): dialog = HistoryDialog(self, '\n'.join(self.model.history)) dialog.exec_()
def show_history(self): dialog = HistoryDialog(self, '\n'.join(self.model.history)) dialog.exec_()<|docstring|>Show history.<|endoftext|>
13a1265899cc9dcd46235d13229e553f4c606ffa6f077e071396a8047acfcaab
def show_about(self): 'Show About dialog.' msg_box = QMessageBox(self) text = f"<img src='{image_path('mnelab_logo.png')}'><p>MNELAB {__version__}</p>" msg_box.setText(text) mnelab_url = 'github.com/cbrnr/mnelab' mne_url = 'github.com/mne-tools/mne-python' pkgs = [] for (key, value) in have.items(): if value: pkgs.append(f'{key}&nbsp;({value})') else: pkgs.append(f'{key}&nbsp;(not installed)') text = f"<nobr><p>This program uses Python {'.'.join((str(k) for k in version_info[:3]))} and the following packages:</p></nobr><p>{', '.join(pkgs)}</p><nobr><p>MNELAB repository: <a href=https://{mnelab_url}>{mnelab_url}</a></p></nobr><nobr><p>MNE repository: <a href=https://{mne_url}>{mne_url}</a></p></nobr><p>Licensed under the BSD 3-clause license.</p><p>Copyright 2017-2020 by Clemens Brunner.</p>" msg_box.setInformativeText(text) msg_box.exec_()
Show About dialog.
mnelab/mainwindow.py
show_about
stralu/mnelab
1
python
def show_about(self): msg_box = QMessageBox(self) text = f"<img src='{image_path('mnelab_logo.png')}'><p>MNELAB {__version__}</p>" msg_box.setText(text) mnelab_url = 'github.com/cbrnr/mnelab' mne_url = 'github.com/mne-tools/mne-python' pkgs = [] for (key, value) in have.items(): if value: pkgs.append(f'{key}&nbsp;({value})') else: pkgs.append(f'{key}&nbsp;(not installed)') text = f"<nobr><p>This program uses Python {'.'.join((str(k) for k in version_info[:3]))} and the following packages:</p></nobr><p>{', '.join(pkgs)}</p><nobr><p>MNELAB repository: <a href=https://{mnelab_url}>{mnelab_url}</a></p></nobr><nobr><p>MNE repository: <a href=https://{mne_url}>{mne_url}</a></p></nobr><p>Licensed under the BSD 3-clause license.</p><p>Copyright 2017-2020 by Clemens Brunner.</p>" msg_box.setInformativeText(text) msg_box.exec_()
def show_about(self): msg_box = QMessageBox(self) text = f"<img src='{image_path('mnelab_logo.png')}'><p>MNELAB {__version__}</p>" msg_box.setText(text) mnelab_url = 'github.com/cbrnr/mnelab' mne_url = 'github.com/mne-tools/mne-python' pkgs = [] for (key, value) in have.items(): if value: pkgs.append(f'{key}&nbsp;({value})') else: pkgs.append(f'{key}&nbsp;(not installed)') text = f"<nobr><p>This program uses Python {'.'.join((str(k) for k in version_info[:3]))} and the following packages:</p></nobr><p>{', '.join(pkgs)}</p><nobr><p>MNELAB repository: <a href=https://{mnelab_url}>{mnelab_url}</a></p></nobr><nobr><p>MNE repository: <a href=https://{mne_url}>{mne_url}</a></p></nobr><p>Licensed under the BSD 3-clause license.</p><p>Copyright 2017-2020 by Clemens Brunner.</p>" msg_box.setInformativeText(text) msg_box.exec_()<|docstring|>Show About dialog.<|endoftext|>
65437c8bf673f96d4a0006c66de6cd1407943742cf872068c48def50824274d6
def show_about_qt(self): 'Show About Qt dialog.' QMessageBox.aboutQt(self, 'About Qt')
Show About Qt dialog.
mnelab/mainwindow.py
show_about_qt
stralu/mnelab
1
python
def show_about_qt(self): QMessageBox.aboutQt(self, 'About Qt')
def show_about_qt(self): QMessageBox.aboutQt(self, 'About Qt')<|docstring|>Show About Qt dialog.<|endoftext|>
1f312f93c1c9fb8d5b5e58080b3d944bf007d6f43a47c775603faf5754a32ffd
def auto_duplicate(self): 'Automatically duplicate current data set.\n\n If the current data set is stored in a file (i.e. was loaded directly\n from a file), a new data set is automatically created. If the current\n data set is not stored in a file (i.e. was created by operations in\n MNELAB), a dialog box asks the user if the current data set should be\n overwritten or duplicated.\n\n Returns\n -------\n duplicated : bool\n True if the current data set was automatically duplicated, False if\n the current data set was overwritten.\n ' if self.model.current['fname']: self.model.duplicate_data() return True else: msg = QMessageBox.question(self, 'Overwrite existing data set', 'Overwrite existing data set?') if (msg == QMessageBox.No): self.model.duplicate_data() return True return False
Automatically duplicate current data set. If the current data set is stored in a file (i.e. was loaded directly from a file), a new data set is automatically created. If the current data set is not stored in a file (i.e. was created by operations in MNELAB), a dialog box asks the user if the current data set should be overwritten or duplicated. Returns ------- duplicated : bool True if the current data set was automatically duplicated, False if the current data set was overwritten.
mnelab/mainwindow.py
auto_duplicate
stralu/mnelab
1
python
def auto_duplicate(self): 'Automatically duplicate current data set.\n\n If the current data set is stored in a file (i.e. was loaded directly\n from a file), a new data set is automatically created. If the current\n data set is not stored in a file (i.e. was created by operations in\n MNELAB), a dialog box asks the user if the current data set should be\n overwritten or duplicated.\n\n Returns\n -------\n duplicated : bool\n True if the current data set was automatically duplicated, False if\n the current data set was overwritten.\n ' if self.model.current['fname']: self.model.duplicate_data() return True else: msg = QMessageBox.question(self, 'Overwrite existing data set', 'Overwrite existing data set?') if (msg == QMessageBox.No): self.model.duplicate_data() return True return False
def auto_duplicate(self): 'Automatically duplicate current data set.\n\n If the current data set is stored in a file (i.e. was loaded directly\n from a file), a new data set is automatically created. If the current\n data set is not stored in a file (i.e. was created by operations in\n MNELAB), a dialog box asks the user if the current data set should be\n overwritten or duplicated.\n\n Returns\n -------\n duplicated : bool\n True if the current data set was automatically duplicated, False if\n the current data set was overwritten.\n ' if self.model.current['fname']: self.model.duplicate_data() return True else: msg = QMessageBox.question(self, 'Overwrite existing data set', 'Overwrite existing data set?') if (msg == QMessageBox.No): self.model.duplicate_data() return True return False<|docstring|>Automatically duplicate current data set. If the current data set is stored in a file (i.e. was loaded directly from a file), a new data set is automatically created. If the current data set is not stored in a file (i.e. was created by operations in MNELAB), a dialog box asks the user if the current data set should be overwritten or duplicated. Returns ------- duplicated : bool True if the current data set was automatically duplicated, False if the current data set was overwritten.<|endoftext|>
22414191ccee517d49440e70be4723b76f5dcb4030a264e5fc395b88cfe23d6e
def _add_recent(self, fname): 'Add a file to recent file list.\n\n Parameters\n ----------\n fname : str\n File name.\n ' if (fname in self.recent): self.recent.remove(fname) self.recent.insert(0, fname) while (len(self.recent) > MAX_RECENT): self.recent.pop() write_settings(recent=self.recent) if (not self.recent_menu.isEnabled()): self.recent_menu.setEnabled(True)
Add a file to recent file list. Parameters ---------- fname : str File name.
mnelab/mainwindow.py
_add_recent
stralu/mnelab
1
python
def _add_recent(self, fname): 'Add a file to recent file list.\n\n Parameters\n ----------\n fname : str\n File name.\n ' if (fname in self.recent): self.recent.remove(fname) self.recent.insert(0, fname) while (len(self.recent) > MAX_RECENT): self.recent.pop() write_settings(recent=self.recent) if (not self.recent_menu.isEnabled()): self.recent_menu.setEnabled(True)
def _add_recent(self, fname): 'Add a file to recent file list.\n\n Parameters\n ----------\n fname : str\n File name.\n ' if (fname in self.recent): self.recent.remove(fname) self.recent.insert(0, fname) while (len(self.recent) > MAX_RECENT): self.recent.pop() write_settings(recent=self.recent) if (not self.recent_menu.isEnabled()): self.recent_menu.setEnabled(True)<|docstring|>Add a file to recent file list. Parameters ---------- fname : str File name.<|endoftext|>
817772627d311831e0b596d3a0c127631b13e935d2d62de575874ff256086ee9
def _remove_recent(self, fname): 'Remove file from recent file list.\n\n Parameters\n ----------\n fname : str\n File name.\n ' if (fname in self.recent): self.recent.remove(fname) write_settings(recent=self.recent) if (not self.recent): self.recent_menu.setEnabled(False)
Remove file from recent file list. Parameters ---------- fname : str File name.
mnelab/mainwindow.py
_remove_recent
stralu/mnelab
1
python
def _remove_recent(self, fname): 'Remove file from recent file list.\n\n Parameters\n ----------\n fname : str\n File name.\n ' if (fname in self.recent): self.recent.remove(fname) write_settings(recent=self.recent) if (not self.recent): self.recent_menu.setEnabled(False)
def _remove_recent(self, fname): 'Remove file from recent file list.\n\n Parameters\n ----------\n fname : str\n File name.\n ' if (fname in self.recent): self.recent.remove(fname) write_settings(recent=self.recent) if (not self.recent): self.recent_menu.setEnabled(False)<|docstring|>Remove file from recent file list. Parameters ---------- fname : str File name.<|endoftext|>
3897e4cab4d1a18bf6d56a513a1ce70edb0ec49fd146dc54dee9fd80f24862e9
@Slot(QModelIndex) def _update_data(self, selected): 'Update index and information based on the state of the sidebar.\n\n Parameters\n ----------\n selected : QModelIndex\n Index of the selected row.\n ' if (selected.row() != self.model.index): self.model.index = selected.row() self.data_changed() self.model.history.append(f'data = datasets[{self.model.index}]')
Update index and information based on the state of the sidebar. Parameters ---------- selected : QModelIndex Index of the selected row.
mnelab/mainwindow.py
_update_data
stralu/mnelab
1
python
@Slot(QModelIndex) def _update_data(self, selected): 'Update index and information based on the state of the sidebar.\n\n Parameters\n ----------\n selected : QModelIndex\n Index of the selected row.\n ' if (selected.row() != self.model.index): self.model.index = selected.row() self.data_changed() self.model.history.append(f'data = datasets[{self.model.index}]')
@Slot(QModelIndex) def _update_data(self, selected): 'Update index and information based on the state of the sidebar.\n\n Parameters\n ----------\n selected : QModelIndex\n Index of the selected row.\n ' if (selected.row() != self.model.index): self.model.index = selected.row() self.data_changed() self.model.history.append(f'data = datasets[{self.model.index}]')<|docstring|>Update index and information based on the state of the sidebar. Parameters ---------- selected : QModelIndex Index of the selected row.<|endoftext|>
696f029b772baa81f95af32ab170997cb6150304913ed82a40ddb40ec3e32b47
@Slot(QModelIndex, QModelIndex) def _update_names(self, start, stop): 'Update names in DataSets after changes in sidebar.' for index in range(start.row(), (stop.row() + 1)): self.model.data[index]['name'] = self.names.stringList()[index]
Update names in DataSets after changes in sidebar.
mnelab/mainwindow.py
_update_names
stralu/mnelab
1
python
@Slot(QModelIndex, QModelIndex) def _update_names(self, start, stop): for index in range(start.row(), (stop.row() + 1)): self.model.data[index]['name'] = self.names.stringList()[index]
@Slot(QModelIndex, QModelIndex) def _update_names(self, start, stop): for index in range(start.row(), (stop.row() + 1)): self.model.data[index]['name'] = self.names.stringList()[index]<|docstring|>Update names in DataSets after changes in sidebar.<|endoftext|>
0cf2d9815cb4e11d828530f1d4dacdae263ac7b1c638ede58f81b5e630799775
@Slot(QEvent) def closeEvent(self, event): 'Close application.\n\n Parameters\n ----------\n event : QEvent\n Close event.\n ' write_settings(geometry=self.saveGeometry(), state=self.saveState()) if self.model.history: print('\nCommand History') print('===============') print('\n'.join(self.model.history)) QApplication.quit()
Close application. Parameters ---------- event : QEvent Close event.
mnelab/mainwindow.py
closeEvent
stralu/mnelab
1
python
@Slot(QEvent) def closeEvent(self, event): 'Close application.\n\n Parameters\n ----------\n event : QEvent\n Close event.\n ' write_settings(geometry=self.saveGeometry(), state=self.saveState()) if self.model.history: print('\nCommand History') print('===============') print('\n'.join(self.model.history)) QApplication.quit()
@Slot(QEvent) def closeEvent(self, event): 'Close application.\n\n Parameters\n ----------\n event : QEvent\n Close event.\n ' write_settings(geometry=self.saveGeometry(), state=self.saveState()) if self.model.history: print('\nCommand History') print('===============') print('\n'.join(self.model.history)) QApplication.quit()<|docstring|>Close application. Parameters ---------- event : QEvent Close event.<|endoftext|>
3d22101fd56a1dfa63ac993ea08df318d000c070fa2eeb3cda83b23b1f6ac32a
def image_add(gray_img1, gray_img2): "This is a function used to add images. The numpy addition function '+' is used. This is a modulo operation\n rather than the cv2.add fxn which is a saturation operation. ddepth = -1 specifies that the dimensions of output\n image will be the same as the input image.\n\n Inputs:\n gray_img1 = Grayscale image data to be added to image 2\n gray_img2 = Grayscale image data to be added to image 1\n\n Returns:\n added_img = summed images\n\n :param gray_img1: numpy.ndarray\n :param gray_img2: numpy.ndarray\n :return added_img: numpy.ndarray\n " added_img = (gray_img1 + gray_img2) params.device += 1 if (params.debug == 'print'): print_image(added_img, os.path.join(params.debug_outdir, ((str(params.device) + '_added') + '.png'))) elif (params.debug == 'plot'): plot_image(added_img, cmap='gray') return added_img
This is a function used to add images. The numpy addition function '+' is used. This is a modulo operation rather than the cv2.add fxn which is a saturation operation. ddepth = -1 specifies that the dimensions of output image will be the same as the input image. Inputs: gray_img1 = Grayscale image data to be added to image 2 gray_img2 = Grayscale image data to be added to image 1 Returns: added_img = summed images :param gray_img1: numpy.ndarray :param gray_img2: numpy.ndarray :return added_img: numpy.ndarray
plantcv/plantcv/image_add.py
image_add
typelogic/plantcv
2
python
def image_add(gray_img1, gray_img2): "This is a function used to add images. The numpy addition function '+' is used. This is a modulo operation\n rather than the cv2.add fxn which is a saturation operation. ddepth = -1 specifies that the dimensions of output\n image will be the same as the input image.\n\n Inputs:\n gray_img1 = Grayscale image data to be added to image 2\n gray_img2 = Grayscale image data to be added to image 1\n\n Returns:\n added_img = summed images\n\n :param gray_img1: numpy.ndarray\n :param gray_img2: numpy.ndarray\n :return added_img: numpy.ndarray\n " added_img = (gray_img1 + gray_img2) params.device += 1 if (params.debug == 'print'): print_image(added_img, os.path.join(params.debug_outdir, ((str(params.device) + '_added') + '.png'))) elif (params.debug == 'plot'): plot_image(added_img, cmap='gray') return added_img
def image_add(gray_img1, gray_img2): "This is a function used to add images. The numpy addition function '+' is used. This is a modulo operation\n rather than the cv2.add fxn which is a saturation operation. ddepth = -1 specifies that the dimensions of output\n image will be the same as the input image.\n\n Inputs:\n gray_img1 = Grayscale image data to be added to image 2\n gray_img2 = Grayscale image data to be added to image 1\n\n Returns:\n added_img = summed images\n\n :param gray_img1: numpy.ndarray\n :param gray_img2: numpy.ndarray\n :return added_img: numpy.ndarray\n " added_img = (gray_img1 + gray_img2) params.device += 1 if (params.debug == 'print'): print_image(added_img, os.path.join(params.debug_outdir, ((str(params.device) + '_added') + '.png'))) elif (params.debug == 'plot'): plot_image(added_img, cmap='gray') return added_img<|docstring|>This is a function used to add images. The numpy addition function '+' is used. This is a modulo operation rather than the cv2.add fxn which is a saturation operation. ddepth = -1 specifies that the dimensions of output image will be the same as the input image. Inputs: gray_img1 = Grayscale image data to be added to image 2 gray_img2 = Grayscale image data to be added to image 1 Returns: added_img = summed images :param gray_img1: numpy.ndarray :param gray_img2: numpy.ndarray :return added_img: numpy.ndarray<|endoftext|>
9e73397bc1d4da3720204318d2ebc21f6f3bfcd6fcba9c8294f84871a76afd95
def maximum_run(shape1, shape2, dtype, attrs_op={}, attrs={}): 'maximum_run' attrs.update(attrs_op) if ('tuning' in attrs.keys()): t = attrs.get('tuning', False) kernel_name = attrs.get('kernel_name', False) mod = utils.op_build_test(Maximum, [shape1, shape2], [dtype, dtype], kernel_name=kernel_name, attrs=attrs, tuning=t) if t: (expect, lhd, output, rhd) = gen_data(dtype, shape1, shape2) return (mod, expect, (lhd, rhd, output)) return mod else: mod = utils.op_build_test(Maximum, [shape1, shape2], [dtype, dtype], kernel_name='maximum', attrs=attrs) (expect, lhd, output, rhd) = gen_data(dtype, shape1, shape2) output = utils.mod_launch(mod, (lhd, rhd, output), expect=expect) if attrs.get('profiling', False): import akg target_name = attrs['target'].split()[0] args_list = to_tvm_nd_array([lhd, rhd, output], akg.tvm.context(target_name, 0)) target_profiling(mod, *args_list, target=target_name, repeat_time=attrs['repeat_times']) compare_result = compare_tensor(output, expect, rtol=0.005, equal_nan=True) return ((lhd, rhd), output, expect, compare_result)
maximum_run
tests/common/test_run/maximum_run.py
maximum_run
tianjiashuo/akg
286
python
def (shape1, shape2, dtype, attrs_op={}, attrs={}): attrs.update(attrs_op) if ('tuning' in attrs.keys()): t = attrs.get('tuning', False) kernel_name = attrs.get('kernel_name', False) mod = utils.op_build_test(Maximum, [shape1, shape2], [dtype, dtype], kernel_name=kernel_name, attrs=attrs, tuning=t) if t: (expect, lhd, output, rhd) = gen_data(dtype, shape1, shape2) return (mod, expect, (lhd, rhd, output)) return mod else: mod = utils.op_build_test(Maximum, [shape1, shape2], [dtype, dtype], kernel_name='maximum', attrs=attrs) (expect, lhd, output, rhd) = gen_data(dtype, shape1, shape2) output = utils.mod_launch(mod, (lhd, rhd, output), expect=expect) if attrs.get('profiling', False): import akg target_name = attrs['target'].split()[0] args_list = to_tvm_nd_array([lhd, rhd, output], akg.tvm.context(target_name, 0)) target_profiling(mod, *args_list, target=target_name, repeat_time=attrs['repeat_times']) compare_result = compare_tensor(output, expect, rtol=0.005, equal_nan=True) return ((lhd, rhd), output, expect, compare_result)
def (shape1, shape2, dtype, attrs_op={}, attrs={}): attrs.update(attrs_op) if ('tuning' in attrs.keys()): t = attrs.get('tuning', False) kernel_name = attrs.get('kernel_name', False) mod = utils.op_build_test(Maximum, [shape1, shape2], [dtype, dtype], kernel_name=kernel_name, attrs=attrs, tuning=t) if t: (expect, lhd, output, rhd) = gen_data(dtype, shape1, shape2) return (mod, expect, (lhd, rhd, output)) return mod else: mod = utils.op_build_test(Maximum, [shape1, shape2], [dtype, dtype], kernel_name='maximum', attrs=attrs) (expect, lhd, output, rhd) = gen_data(dtype, shape1, shape2) output = utils.mod_launch(mod, (lhd, rhd, output), expect=expect) if attrs.get('profiling', False): import akg target_name = attrs['target'].split()[0] args_list = to_tvm_nd_array([lhd, rhd, output], akg.tvm.context(target_name, 0)) target_profiling(mod, *args_list, target=target_name, repeat_time=attrs['repeat_times']) compare_result = compare_tensor(output, expect, rtol=0.005, equal_nan=True) return ((lhd, rhd), output, expect, compare_result)<|docstring|>maximum_run<|endoftext|>
d221737b33aaf1bc37bc5cef8dc44d02858c85f62c262c7bfce44bbf10fbfa51
def gen_data(dtype, shape1, shape2): 'gen_data' vc_util.check_shape(shape1) vc_util.check_shape(shape2) support_list = {'float16': np.float16, 'float32': np.float32, 'int32': np.int32, 'int8': np.int8, 'uint8': np.uint8} if (not (dtype.lower() in support_list)): raise RuntimeError(('maximum_cce only support %s while dtype is %s' % (','.join(support_list.keys()), dtype))) lhd = random_gaussian(shape1, miu=1, sigma=1).astype(support_list[dtype]) rhd = random_gaussian(shape2, miu=1, sigma=1).astype(support_list[dtype]) expect = np.maximum(lhd, rhd) output = np.full(shape1, np.nan, dtype) return (expect, lhd, output, rhd)
gen_data
tests/common/test_run/maximum_run.py
gen_data
tianjiashuo/akg
286
python
def (dtype, shape1, shape2): vc_util.check_shape(shape1) vc_util.check_shape(shape2) support_list = {'float16': np.float16, 'float32': np.float32, 'int32': np.int32, 'int8': np.int8, 'uint8': np.uint8} if (not (dtype.lower() in support_list)): raise RuntimeError(('maximum_cce only support %s while dtype is %s' % (','.join(support_list.keys()), dtype))) lhd = random_gaussian(shape1, miu=1, sigma=1).astype(support_list[dtype]) rhd = random_gaussian(shape2, miu=1, sigma=1).astype(support_list[dtype]) expect = np.maximum(lhd, rhd) output = np.full(shape1, np.nan, dtype) return (expect, lhd, output, rhd)
def (dtype, shape1, shape2): vc_util.check_shape(shape1) vc_util.check_shape(shape2) support_list = {'float16': np.float16, 'float32': np.float32, 'int32': np.int32, 'int8': np.int8, 'uint8': np.uint8} if (not (dtype.lower() in support_list)): raise RuntimeError(('maximum_cce only support %s while dtype is %s' % (','.join(support_list.keys()), dtype))) lhd = random_gaussian(shape1, miu=1, sigma=1).astype(support_list[dtype]) rhd = random_gaussian(shape2, miu=1, sigma=1).astype(support_list[dtype]) expect = np.maximum(lhd, rhd) output = np.full(shape1, np.nan, dtype) return (expect, lhd, output, rhd)<|docstring|>gen_data<|endoftext|>
e103414af14ca02c9e824acd8708b858c195aa8c82b73094204f0d9abeefe459
@cuda.jit('(i8, f8[:], f8[:], i8, f8[:], f8[:], f8[:], f8[:], f8[:],f8[:], f8[:], i8, b1, i8, f8[:, :], i8[:, :], b1)') def _compute_and_update_PI_kernel(i, T_A, T_B, m, QT_even, QT_odd, QT_first, M_T, Σ_T, μ_Q, σ_Q, k, ignore_trivial, excl_zone, profile, indices, compute_QT): '\n A Numba CUDA kernel to update the matrix profile and matrix profile indices\n\n Parameters\n ----------\n i : int\n sliding window `i`\n\n T_A : ndarray\n The time series or sequence for which to compute the dot product\n\n T_B : ndarray\n The time series or sequence that contain your query subsequence\n of interest\n\n m : int\n Window size\n\n QT_even : ndarray\n The input QT array (dot product between the query sequence,`Q`, and\n time series, `T`) to use when `i` is even\n\n QT_odd : ndarray\n The input QT array (dot product between the query sequence,`Q`, and\n time series, `T`) to use when `i` is odd\n\n QT_first : ndarray\n Dot product between the first query sequence,`Q`, and time series, `T`\n\n M_T : ndarray\n Sliding mean of time series, `T`\n\n Σ_T : ndarray\n Sliding standard deviation of time series, `T`\n\n μ_Q : ndarray\n Mean of the query sequence, `Q`\n\n σ_Q : ndarray\n Standard deviation of the query sequence, `Q`\n\n k : int\n The total number of sliding windows to iterate over\n\n ignore_trivial : bool\n Set to `True` if this is a self-join. Otherwise, for AB-join, set this to\n `False`.\n\n excl_zone : int\n The half width for the exclusion zone relative to the current\n sliding window\n\n profile : ndarray\n Matrix profile. 
The first column consists of the global matrix profile,\n the second column consists of the left matrix profile, and the third\n column consists of the right matrix profile.\n\n indices : ndarray\n The first column consists of the matrix profile indices, the second\n column consists of the left matrix profile indices, and the third\n column consists of the right matrix profile indices.\n\n compute_QT : bool\n A boolean flag for whether or not to compute QT\n\n Returns\n -------\n None\n\n Notes\n -----\n `DOI: 10.1109/ICDM.2016.0085 <https://www.cs.ucr.edu/~eamonn/STOMP_GPU_final_submission_camera_ready.pdf>`__\n\n See Table II, Figure 5, and Figure 6\n ' start = cuda.grid(1) stride = cuda.gridsize(1) if ((i % 2) == 0): QT_out = QT_even QT_in = QT_odd else: QT_out = QT_odd QT_in = QT_even for j in range(start, QT_out.shape[0], stride): zone_start = max(0, (j - excl_zone)) zone_stop = min(k, (j + excl_zone)) if compute_QT: QT_out[j] = ((QT_in[(j - 1)] - (T_B[(i - 1)] * T_A[(j - 1)])) + (T_B[((i + m) - 1)] * T_A[((j + m) - 1)])) QT_out[0] = QT_first[i] if (math.isinf(M_T[j]) or math.isinf(μ_Q[i])): D = np.inf else: if ((σ_Q[i] < config.STUMPY_STDDEV_THRESHOLD) or (Σ_T[j] < config.STUMPY_STDDEV_THRESHOLD)): D = m else: denom = ((m * σ_Q[i]) * Σ_T[j]) if (math.fabs(denom) < config.STUMPY_DENOM_THRESHOLD): denom = config.STUMPY_DENOM_THRESHOLD D = abs(((2 * m) * (1.0 - ((QT_out[j] - ((m * μ_Q[i]) * M_T[j])) / denom)))) if (((σ_Q[i] < config.STUMPY_STDDEV_THRESHOLD) and (Σ_T[j] < config.STUMPY_STDDEV_THRESHOLD)) or (D < config.STUMPY_D_SQUARED_THRESHOLD)): D = 0 if ignore_trivial: if ((i <= zone_stop) and (i >= zone_start)): D = np.inf if ((D < profile[(j, 1)]) and (i < j)): profile[(j, 1)] = D indices[(j, 1)] = i if ((D < profile[(j, 2)]) and (i > j)): profile[(j, 2)] = D indices[(j, 2)] = i if (D < profile[(j, 0)]): profile[(j, 0)] = D indices[(j, 0)] = i
A Numba CUDA kernel to update the matrix profile and matrix profile indices Parameters ---------- i : int sliding window `i` T_A : ndarray The time series or sequence for which to compute the dot product T_B : ndarray The time series or sequence that contain your query subsequence of interest m : int Window size QT_even : ndarray The input QT array (dot product between the query sequence,`Q`, and time series, `T`) to use when `i` is even QT_odd : ndarray The input QT array (dot product between the query sequence,`Q`, and time series, `T`) to use when `i` is odd QT_first : ndarray Dot product between the first query sequence,`Q`, and time series, `T` M_T : ndarray Sliding mean of time series, `T` Σ_T : ndarray Sliding standard deviation of time series, `T` μ_Q : ndarray Mean of the query sequence, `Q` σ_Q : ndarray Standard deviation of the query sequence, `Q` k : int The total number of sliding windows to iterate over ignore_trivial : bool Set to `True` if this is a self-join. Otherwise, for AB-join, set this to `False`. excl_zone : int The half width for the exclusion zone relative to the current sliding window profile : ndarray Matrix profile. The first column consists of the global matrix profile, the second column consists of the left matrix profile, and the third column consists of the right matrix profile. indices : ndarray The first column consists of the matrix profile indices, the second column consists of the left matrix profile indices, and the third column consists of the right matrix profile indices. compute_QT : bool A boolean flag for whether or not to compute QT Returns ------- None Notes ----- `DOI: 10.1109/ICDM.2016.0085 <https://www.cs.ucr.edu/~eamonn/STOMP_GPU_final_submission_camera_ready.pdf>`__ See Table II, Figure 5, and Figure 6
stumpy/gpu_stump.py
_compute_and_update_PI_kernel
abbasnikbakht/stumpy
1
python
@cuda.jit('(i8, f8[:], f8[:], i8, f8[:], f8[:], f8[:], f8[:], f8[:],f8[:], f8[:], i8, b1, i8, f8[:, :], i8[:, :], b1)') def _compute_and_update_PI_kernel(i, T_A, T_B, m, QT_even, QT_odd, QT_first, M_T, Σ_T, μ_Q, σ_Q, k, ignore_trivial, excl_zone, profile, indices, compute_QT): '\n A Numba CUDA kernel to update the matrix profile and matrix profile indices\n\n Parameters\n ----------\n i : int\n sliding window `i`\n\n T_A : ndarray\n The time series or sequence for which to compute the dot product\n\n T_B : ndarray\n The time series or sequence that contain your query subsequence\n of interest\n\n m : int\n Window size\n\n QT_even : ndarray\n The input QT array (dot product between the query sequence,`Q`, and\n time series, `T`) to use when `i` is even\n\n QT_odd : ndarray\n The input QT array (dot product between the query sequence,`Q`, and\n time series, `T`) to use when `i` is odd\n\n QT_first : ndarray\n Dot product between the first query sequence,`Q`, and time series, `T`\n\n M_T : ndarray\n Sliding mean of time series, `T`\n\n Σ_T : ndarray\n Sliding standard deviation of time series, `T`\n\n μ_Q : ndarray\n Mean of the query sequence, `Q`\n\n σ_Q : ndarray\n Standard deviation of the query sequence, `Q`\n\n k : int\n The total number of sliding windows to iterate over\n\n ignore_trivial : bool\n Set to `True` if this is a self-join. Otherwise, for AB-join, set this to\n `False`.\n\n excl_zone : int\n The half width for the exclusion zone relative to the current\n sliding window\n\n profile : ndarray\n Matrix profile. 
The first column consists of the global matrix profile,\n the second column consists of the left matrix profile, and the third\n column consists of the right matrix profile.\n\n indices : ndarray\n The first column consists of the matrix profile indices, the second\n column consists of the left matrix profile indices, and the third\n column consists of the right matrix profile indices.\n\n compute_QT : bool\n A boolean flag for whether or not to compute QT\n\n Returns\n -------\n None\n\n Notes\n -----\n `DOI: 10.1109/ICDM.2016.0085 <https://www.cs.ucr.edu/~eamonn/STOMP_GPU_final_submission_camera_ready.pdf>`__\n\n See Table II, Figure 5, and Figure 6\n ' start = cuda.grid(1) stride = cuda.gridsize(1) if ((i % 2) == 0): QT_out = QT_even QT_in = QT_odd else: QT_out = QT_odd QT_in = QT_even for j in range(start, QT_out.shape[0], stride): zone_start = max(0, (j - excl_zone)) zone_stop = min(k, (j + excl_zone)) if compute_QT: QT_out[j] = ((QT_in[(j - 1)] - (T_B[(i - 1)] * T_A[(j - 1)])) + (T_B[((i + m) - 1)] * T_A[((j + m) - 1)])) QT_out[0] = QT_first[i] if (math.isinf(M_T[j]) or math.isinf(μ_Q[i])): D = np.inf else: if ((σ_Q[i] < config.STUMPY_STDDEV_THRESHOLD) or (Σ_T[j] < config.STUMPY_STDDEV_THRESHOLD)): D = m else: denom = ((m * σ_Q[i]) * Σ_T[j]) if (math.fabs(denom) < config.STUMPY_DENOM_THRESHOLD): denom = config.STUMPY_DENOM_THRESHOLD D = abs(((2 * m) * (1.0 - ((QT_out[j] - ((m * μ_Q[i]) * M_T[j])) / denom)))) if (((σ_Q[i] < config.STUMPY_STDDEV_THRESHOLD) and (Σ_T[j] < config.STUMPY_STDDEV_THRESHOLD)) or (D < config.STUMPY_D_SQUARED_THRESHOLD)): D = 0 if ignore_trivial: if ((i <= zone_stop) and (i >= zone_start)): D = np.inf if ((D < profile[(j, 1)]) and (i < j)): profile[(j, 1)] = D indices[(j, 1)] = i if ((D < profile[(j, 2)]) and (i > j)): profile[(j, 2)] = D indices[(j, 2)] = i if (D < profile[(j, 0)]): profile[(j, 0)] = D indices[(j, 0)] = i
@cuda.jit('(i8, f8[:], f8[:], i8, f8[:], f8[:], f8[:], f8[:], f8[:],f8[:], f8[:], i8, b1, i8, f8[:, :], i8[:, :], b1)') def _compute_and_update_PI_kernel(i, T_A, T_B, m, QT_even, QT_odd, QT_first, M_T, Σ_T, μ_Q, σ_Q, k, ignore_trivial, excl_zone, profile, indices, compute_QT): '\n A Numba CUDA kernel to update the matrix profile and matrix profile indices\n\n Parameters\n ----------\n i : int\n sliding window `i`\n\n T_A : ndarray\n The time series or sequence for which to compute the dot product\n\n T_B : ndarray\n The time series or sequence that contain your query subsequence\n of interest\n\n m : int\n Window size\n\n QT_even : ndarray\n The input QT array (dot product between the query sequence,`Q`, and\n time series, `T`) to use when `i` is even\n\n QT_odd : ndarray\n The input QT array (dot product between the query sequence,`Q`, and\n time series, `T`) to use when `i` is odd\n\n QT_first : ndarray\n Dot product between the first query sequence,`Q`, and time series, `T`\n\n M_T : ndarray\n Sliding mean of time series, `T`\n\n Σ_T : ndarray\n Sliding standard deviation of time series, `T`\n\n μ_Q : ndarray\n Mean of the query sequence, `Q`\n\n σ_Q : ndarray\n Standard deviation of the query sequence, `Q`\n\n k : int\n The total number of sliding windows to iterate over\n\n ignore_trivial : bool\n Set to `True` if this is a self-join. Otherwise, for AB-join, set this to\n `False`.\n\n excl_zone : int\n The half width for the exclusion zone relative to the current\n sliding window\n\n profile : ndarray\n Matrix profile. 
The first column consists of the global matrix profile,\n the second column consists of the left matrix profile, and the third\n column consists of the right matrix profile.\n\n indices : ndarray\n The first column consists of the matrix profile indices, the second\n column consists of the left matrix profile indices, and the third\n column consists of the right matrix profile indices.\n\n compute_QT : bool\n A boolean flag for whether or not to compute QT\n\n Returns\n -------\n None\n\n Notes\n -----\n `DOI: 10.1109/ICDM.2016.0085 <https://www.cs.ucr.edu/~eamonn/STOMP_GPU_final_submission_camera_ready.pdf>`__\n\n See Table II, Figure 5, and Figure 6\n ' start = cuda.grid(1) stride = cuda.gridsize(1) if ((i % 2) == 0): QT_out = QT_even QT_in = QT_odd else: QT_out = QT_odd QT_in = QT_even for j in range(start, QT_out.shape[0], stride): zone_start = max(0, (j - excl_zone)) zone_stop = min(k, (j + excl_zone)) if compute_QT: QT_out[j] = ((QT_in[(j - 1)] - (T_B[(i - 1)] * T_A[(j - 1)])) + (T_B[((i + m) - 1)] * T_A[((j + m) - 1)])) QT_out[0] = QT_first[i] if (math.isinf(M_T[j]) or math.isinf(μ_Q[i])): D = np.inf else: if ((σ_Q[i] < config.STUMPY_STDDEV_THRESHOLD) or (Σ_T[j] < config.STUMPY_STDDEV_THRESHOLD)): D = m else: denom = ((m * σ_Q[i]) * Σ_T[j]) if (math.fabs(denom) < config.STUMPY_DENOM_THRESHOLD): denom = config.STUMPY_DENOM_THRESHOLD D = abs(((2 * m) * (1.0 - ((QT_out[j] - ((m * μ_Q[i]) * M_T[j])) / denom)))) if (((σ_Q[i] < config.STUMPY_STDDEV_THRESHOLD) and (Σ_T[j] < config.STUMPY_STDDEV_THRESHOLD)) or (D < config.STUMPY_D_SQUARED_THRESHOLD)): D = 0 if ignore_trivial: if ((i <= zone_stop) and (i >= zone_start)): D = np.inf if ((D < profile[(j, 1)]) and (i < j)): profile[(j, 1)] = D indices[(j, 1)] = i if ((D < profile[(j, 2)]) and (i > j)): profile[(j, 2)] = D indices[(j, 2)] = i if (D < profile[(j, 0)]): profile[(j, 0)] = D indices[(j, 0)] = i<|docstring|>A Numba CUDA kernel to update the matrix profile and matrix profile indices Parameters ---------- i : 
int sliding window `i` T_A : ndarray The time series or sequence for which to compute the dot product T_B : ndarray The time series or sequence that contain your query subsequence of interest m : int Window size QT_even : ndarray The input QT array (dot product between the query sequence,`Q`, and time series, `T`) to use when `i` is even QT_odd : ndarray The input QT array (dot product between the query sequence,`Q`, and time series, `T`) to use when `i` is odd QT_first : ndarray Dot product between the first query sequence,`Q`, and time series, `T` M_T : ndarray Sliding mean of time series, `T` Σ_T : ndarray Sliding standard deviation of time series, `T` μ_Q : ndarray Mean of the query sequence, `Q` σ_Q : ndarray Standard deviation of the query sequence, `Q` k : int The total number of sliding windows to iterate over ignore_trivial : bool Set to `True` if this is a self-join. Otherwise, for AB-join, set this to `False`. excl_zone : int The half width for the exclusion zone relative to the current sliding window profile : ndarray Matrix profile. The first column consists of the global matrix profile, the second column consists of the left matrix profile, and the third column consists of the right matrix profile. indices : ndarray The first column consists of the matrix profile indices, the second column consists of the left matrix profile indices, and the third column consists of the right matrix profile indices. compute_QT : bool A boolean flag for whether or not to compute QT Returns ------- None Notes ----- `DOI: 10.1109/ICDM.2016.0085 <https://www.cs.ucr.edu/~eamonn/STOMP_GPU_final_submission_camera_ready.pdf>`__ See Table II, Figure 5, and Figure 6<|endoftext|>
e180687f034c920532e812251e526e44447f248ce5cd434535ffad29a472eb5d
def _gpu_stump(
    T_A_fname,
    T_B_fname,
    m,
    range_stop,
    excl_zone,
    M_T_fname,
    Σ_T_fname,
    QT_fname,
    QT_first_fname,
    μ_Q_fname,
    σ_Q_fname,
    k,
    ignore_trivial=True,
    range_start=1,
    device_id=0,
):
    """
    A Numba CUDA version of STOMP for parallel computation of the
    matrix profile, matrix profile indices, left matrix profile indices,
    and right matrix profile indices.

    Parameters
    ----------
    T_A_fname : str
        The file name for the time series or sequence for which to compute
        the matrix profile

    T_B_fname : str
        The file name for the time series or sequence that contains your
        query subsequences of interest

    m : int
        Window size

    range_stop : int
        The index value along T_B at which to stop the matrix profile
        calculation. This parameter is here for consistency with the
        distributed `stumped` algorithm.

    excl_zone : int
        The half width for the exclusion zone relative to the current
        sliding window

    M_T_fname : str
        The file name for the sliding mean of time series, `T`

    Σ_T_fname : str
        The file name for the sliding standard deviation of time series, `T`

    QT_fname : str
        The file name for the dot product between some query sequence, `Q`,
        and time series, `T`

    QT_first_fname : str
        The file name for the QT for the first window relative to the
        current sliding window

    μ_Q_fname : str
        The file name for the mean of the query sequence, `Q`, relative to
        the current sliding window

    σ_Q_fname : str
        The file name for the standard deviation of the query sequence,
        `Q`, relative to the current sliding window

    k : int
        The total number of sliding windows to iterate over

    ignore_trivial : bool
        Set to `True` if this is a self-join. Otherwise, for AB-join, set
        this to `False`. Default is `True`.

    range_start : int
        The starting index value along T_B at which to start the matrix
        profile calculation. Default is 1.

    device_id : int
        The (GPU) device number to use. The default value is `0`.

    Returns
    -------
    profile_fname : str
        The file name for the matrix profile

    indices_fname : str
        The file name for the matrix profile indices. The first column of
        the array consists of the matrix profile indices, the second column
        consists of the left matrix profile indices, and the third column
        consists of the right matrix profile indices.

    Notes
    -----
    `DOI: 10.1109/ICDM.2016.0085 \
    <https://www.cs.ucr.edu/~eamonn/STOMP_GPU_final_submission_camera_ready.pdf>`__

    See Table II, Figure 5, and Figure 6

    Timeseries, T_B, will be annotated with the distance location
    (or index) of all its subsequences in another time series, T_A.

    Return: For every subsequence, Q, in T_B, you will get a distance
    and index for the closest subsequence in T_A. Thus, the array
    returned will have length T_B.shape[0]-m+1. Additionally, the
    left and right matrix profiles are also returned.

    Note: Unlike in the Table II where T_A.shape is expected to be equal
    to T_B.shape, this implementation is generalized so that the shapes of
    T_A and T_B can be different. In the case where T_A.shape == T_B.shape,
    then our algorithm reduces down to the same algorithm found in Table II.

    Additionally, unlike STAMP where the exclusion zone is m/2, the default
    exclusion zone for STOMP is m/4 (See Definition 3 and Figure 3).

    For self-joins, set `ignore_trivial = True` in order to avoid the
    trivial match.

    Note that left and right matrix profiles are only available for
    self-joins.
    """
    # One thread per sliding window; round the grid size up so that all `k`
    # windows are covered.
    threads_per_block = config.STUMPY_THREADS_PER_BLOCK
    blocks_per_grid = math.ceil((k / threads_per_block))

    # Inputs arrive as file names rather than arrays so that this function can
    # be dispatched to a separate process per GPU without pickling large
    # arrays (see the multi-GPU fan-out in the public `gpu_stump` wrapper).
    T_A = np.load(T_A_fname, allow_pickle=False)
    T_B = np.load(T_B_fname, allow_pickle=False)
    QT = np.load(QT_fname, allow_pickle=False)
    QT_first = np.load(QT_first_fname, allow_pickle=False)
    M_T = np.load(M_T_fname, allow_pickle=False)
    Σ_T = np.load(Σ_T_fname, allow_pickle=False)
    μ_Q = np.load(μ_Q_fname, allow_pickle=False)
    σ_Q = np.load(σ_Q_fname, allow_pickle=False)

    # All device allocations and kernel launches happen inside the context of
    # the selected GPU.
    with cuda.gpus[device_id]:
        device_T_A = cuda.to_device(T_A)
        # Two device-side QT buffers: the kernel ping-pongs between them
        # (selected by the parity of `i`) so that iteration `i` can read the
        # QT values produced by iteration `i - 1`.
        device_QT_odd = cuda.to_device(QT)
        device_QT_even = cuda.to_device(QT)
        device_QT_first = cuda.to_device(QT_first)
        device_μ_Q = cuda.to_device(μ_Q)
        device_σ_Q = cuda.to_device(σ_Q)
        if ignore_trivial:
            # Self-join: T_B is the same series as T_A, so alias the device
            # arrays already transferred instead of copying them again.
            device_T_B = device_T_A
            device_M_T = device_μ_Q
            device_Σ_T = device_σ_Q
        else:
            device_T_B = cuda.to_device(T_B)
            device_M_T = cuda.to_device(M_T)
            device_Σ_T = cuda.to_device(Σ_T)

        # Columns are (global, left, right); distances start at +inf and
        # indices at the sentinel -1 (meaning "no match found yet").
        profile = np.full((k, 3), np.inf)
        indices = np.full((k, 3), (- 1), dtype=np.int64)

        device_profile = cuda.to_device(profile)
        device_indices = cuda.to_device(indices)

        # First launch passes compute_QT=False: the QT values for row
        # `range_start - 1` were precomputed on the host (loaded from
        # QT_fname/QT_first_fname above), so the kernel only updates P/I.
        _compute_and_update_PI_kernel[(blocks_per_grid, threads_per_block)](
            (range_start - 1),
            device_T_A,
            device_T_B,
            m,
            device_QT_even,
            device_QT_odd,
            device_QT_first,
            device_M_T,
            device_Σ_T,
            device_μ_Q,
            device_σ_Q,
            k,
            ignore_trivial,
            excl_zone,
            device_profile,
            device_indices,
            False,
        )

        # Subsequent launches pass compute_QT=True so each iteration derives
        # its QT row from the previous one (the STOMP recurrence).
        for i in range(range_start, range_stop):
            _compute_and_update_PI_kernel[(blocks_per_grid, threads_per_block)](
                i,
                device_T_A,
                device_T_B,
                m,
                device_QT_even,
                device_QT_odd,
                device_QT_first,
                device_M_T,
                device_Σ_T,
                device_μ_Q,
                device_σ_Q,
                k,
                ignore_trivial,
                excl_zone,
                device_profile,
                device_indices,
                True,
            )

        profile = device_profile.copy_to_host()
        indices = device_indices.copy_to_host()
        # The kernel maintains squared distances on the device; convert once
        # on the host.
        profile = np.sqrt(profile)

        # Results are written to temp files (and their names returned) for
        # the same reason the inputs were: cheap hand-off across processes.
        profile_fname = core.array_to_temp_file(profile)
        indices_fname = core.array_to_temp_file(indices)

    return (profile_fname, indices_fname)
A Numba CUDA version of STOMP for parallel computation of the matrix profile, matrix profile indices, left matrix profile indices, and right matrix profile indices. Parameters ---------- T_A_fname : str The file name for the time series or sequence for which to compute the matrix profile T_B_fname : str The file name for the time series or sequence that contain your query subsequences of interest m : int Window size range_stop : int The index value along T_B for which to stop the matrix profile calculation. This parameter is here for consistency with the distributed `stumped` algorithm. excl_zone : int The half width for the exclusion zone relative to the current sliding window M_T_fname : str The file name for the sliding mean of time series, `T` Σ_T_fname : str The file name for the sliding standard deviation of time series, `T` QT_fname : str The file name for the dot product between some query sequence,`Q`, and time series, `T` QT_first_fname : str The file name for the QT for the first window relative to the current sliding window μ_Q_fname : str The file name for the mean of the query sequence, `Q`, relative to the current sliding window σ_Q_fname : str The file name for the standard deviation of the query sequence, `Q`, relative to the current sliding window k : int The total number of sliding windows to iterate over ignore_trivial : bool Set to `True` if this is a self-join. Otherwise, for AB-join, set this to `False`. Default is `True`. range_start : int The starting index value along T_B for which to start the matrix profile calculation. Default is 1. device_id : int The (GPU) device number to use. The default value is `0`. Returns ------- profile_fname : str The file name for the matrix profile indices_fname : str The file name for the matrix profile indices. 
The first column of the array consists of the matrix profile indices, the second column consists of the left matrix profile indices, and the third column consists of the right matrix profile indices. Notes ----- `DOI: 10.1109/ICDM.2016.0085 <https://www.cs.ucr.edu/~eamonn/STOMP_GPU_final_submission_camera_ready.pdf>`__ See Table II, Figure 5, and Figure 6 Timeseries, T_B, will be annotated with the distance location (or index) of all its subsequences in another times series, T_A. Return: For every subsequence, Q, in T_B, you will get a distance and index for the closest subsequence in T_A. Thus, the array returned will have length T_B.shape[0]-m+1. Additionally, the left and right matrix profiles are also returned. Note: Unlike in the Table II where T_A.shape is expected to be equal to T_B.shape, this implementation is generalized so that the shapes of T_A and T_B can be different. In the case where T_A.shape == T_B.shape, then our algorithm reduces down to the same algorithm found in Table II. Additionally, unlike STAMP where the exclusion zone is m/2, the default exclusion zone for STOMP is m/4 (See Definition 3 and Figure 3). For self-joins, set `ignore_trivial = True` in order to avoid the trivial match. Note that left and right matrix profiles are only available for self-joins.
stumpy/gpu_stump.py
_gpu_stump
abbasnikbakht/stumpy
1
python
def _gpu_stump(T_A_fname, T_B_fname, m, range_stop, excl_zone, M_T_fname, Σ_T_fname, QT_fname, QT_first_fname, μ_Q_fname, σ_Q_fname, k, ignore_trivial=True, range_start=1, device_id=0): '\n A Numba CUDA version of STOMP for parallel computation of the\n matrix profile, matrix profile indices, left matrix profile indices,\n and right matrix profile indices.\n\n Parameters\n ----------\n T_A_fname : str\n The file name for the time series or sequence for which to compute\n the matrix profile\n\n T_B_fname : str\n The file name for the time series or sequence that contain your\n query subsequences of interest\n\n m : int\n Window size\n\n range_stop : int\n The index value along T_B for which to stop the matrix profile\n calculation. This parameter is here for consistency with the\n distributed `stumped` algorithm.\n\n excl_zone : int\n The half width for the exclusion zone relative to the current\n sliding window\n\n M_T_fname : str\n The file name for the sliding mean of time series, `T`\n\n Σ_T_fname : str\n The file name for the sliding standard deviation of time series, `T`\n\n QT_fname : str\n The file name for the dot product between some query sequence,`Q`,\n and time series, `T`\n\n QT_first_fname : str\n The file name for the QT for the first window relative to the current\n sliding window\n\n μ_Q_fname : str\n The file name for the mean of the query sequence, `Q`, relative to\n the current sliding window\n\n σ_Q_fname : str\n The file name for the standard deviation of the query sequence, `Q`,\n relative to the current sliding window\n\n k : int\n The total number of sliding windows to iterate over\n\n ignore_trivial : bool\n Set to `True` if this is a self-join. Otherwise, for AB-join, set this to\n `False`. Default is `True`.\n\n range_start : int\n The starting index value along T_B for which to start the matrix\n profile calculation. Default is 1.\n\n device_id : int\n The (GPU) device number to use. 
The default value is `0`.\n\n Returns\n -------\n profile_fname : str\n The file name for the matrix profile\n\n indices_fname : str\n The file name for the matrix profile indices. The first column of the\n array consists of the matrix profile indices, the second column consists\n of the left matrix profile indices, and the third column consists of the\n right matrix profile indices.\n\n Notes\n -----\n `DOI: 10.1109/ICDM.2016.0085 <https://www.cs.ucr.edu/~eamonn/STOMP_GPU_final_submission_camera_ready.pdf>`__\n\n See Table II, Figure 5, and Figure 6\n\n Timeseries, T_B, will be annotated with the distance location\n (or index) of all its subsequences in another times series, T_A.\n\n Return: For every subsequence, Q, in T_B, you will get a distance\n and index for the closest subsequence in T_A. Thus, the array\n returned will have length T_B.shape[0]-m+1. Additionally, the\n left and right matrix profiles are also returned.\n\n Note: Unlike in the Table II where T_A.shape is expected to be equal\n to T_B.shape, this implementation is generalized so that the shapes of\n T_A and T_B can be different. 
In the case where T_A.shape == T_B.shape,\n then our algorithm reduces down to the same algorithm found in Table II.\n\n Additionally, unlike STAMP where the exclusion zone is m/2, the default\n exclusion zone for STOMP is m/4 (See Definition 3 and Figure 3).\n\n For self-joins, set `ignore_trivial = True` in order to avoid the\n trivial match.\n\n Note that left and right matrix profiles are only available for self-joins.\n ' threads_per_block = config.STUMPY_THREADS_PER_BLOCK blocks_per_grid = math.ceil((k / threads_per_block)) T_A = np.load(T_A_fname, allow_pickle=False) T_B = np.load(T_B_fname, allow_pickle=False) QT = np.load(QT_fname, allow_pickle=False) QT_first = np.load(QT_first_fname, allow_pickle=False) M_T = np.load(M_T_fname, allow_pickle=False) Σ_T = np.load(Σ_T_fname, allow_pickle=False) μ_Q = np.load(μ_Q_fname, allow_pickle=False) σ_Q = np.load(σ_Q_fname, allow_pickle=False) with cuda.gpus[device_id]: device_T_A = cuda.to_device(T_A) device_QT_odd = cuda.to_device(QT) device_QT_even = cuda.to_device(QT) device_QT_first = cuda.to_device(QT_first) device_μ_Q = cuda.to_device(μ_Q) device_σ_Q = cuda.to_device(σ_Q) if ignore_trivial: device_T_B = device_T_A device_M_T = device_μ_Q device_Σ_T = device_σ_Q else: device_T_B = cuda.to_device(T_B) device_M_T = cuda.to_device(M_T) device_Σ_T = cuda.to_device(Σ_T) profile = np.full((k, 3), np.inf) indices = np.full((k, 3), (- 1), dtype=np.int64) device_profile = cuda.to_device(profile) device_indices = cuda.to_device(indices) _compute_and_update_PI_kernel[(blocks_per_grid, threads_per_block)]((range_start - 1), device_T_A, device_T_B, m, device_QT_even, device_QT_odd, device_QT_first, device_M_T, device_Σ_T, device_μ_Q, device_σ_Q, k, ignore_trivial, excl_zone, device_profile, device_indices, False) for i in range(range_start, range_stop): _compute_and_update_PI_kernel[(blocks_per_grid, threads_per_block)](i, device_T_A, device_T_B, m, device_QT_even, device_QT_odd, device_QT_first, device_M_T, device_Σ_T, 
device_μ_Q, device_σ_Q, k, ignore_trivial, excl_zone, device_profile, device_indices, True) profile = device_profile.copy_to_host() indices = device_indices.copy_to_host() profile = np.sqrt(profile) profile_fname = core.array_to_temp_file(profile) indices_fname = core.array_to_temp_file(indices) return (profile_fname, indices_fname)
def _gpu_stump(T_A_fname, T_B_fname, m, range_stop, excl_zone, M_T_fname, Σ_T_fname, QT_fname, QT_first_fname, μ_Q_fname, σ_Q_fname, k, ignore_trivial=True, range_start=1, device_id=0): '\n A Numba CUDA version of STOMP for parallel computation of the\n matrix profile, matrix profile indices, left matrix profile indices,\n and right matrix profile indices.\n\n Parameters\n ----------\n T_A_fname : str\n The file name for the time series or sequence for which to compute\n the matrix profile\n\n T_B_fname : str\n The file name for the time series or sequence that contain your\n query subsequences of interest\n\n m : int\n Window size\n\n range_stop : int\n The index value along T_B for which to stop the matrix profile\n calculation. This parameter is here for consistency with the\n distributed `stumped` algorithm.\n\n excl_zone : int\n The half width for the exclusion zone relative to the current\n sliding window\n\n M_T_fname : str\n The file name for the sliding mean of time series, `T`\n\n Σ_T_fname : str\n The file name for the sliding standard deviation of time series, `T`\n\n QT_fname : str\n The file name for the dot product between some query sequence,`Q`,\n and time series, `T`\n\n QT_first_fname : str\n The file name for the QT for the first window relative to the current\n sliding window\n\n μ_Q_fname : str\n The file name for the mean of the query sequence, `Q`, relative to\n the current sliding window\n\n σ_Q_fname : str\n The file name for the standard deviation of the query sequence, `Q`,\n relative to the current sliding window\n\n k : int\n The total number of sliding windows to iterate over\n\n ignore_trivial : bool\n Set to `True` if this is a self-join. Otherwise, for AB-join, set this to\n `False`. Default is `True`.\n\n range_start : int\n The starting index value along T_B for which to start the matrix\n profile calculation. Default is 1.\n\n device_id : int\n The (GPU) device number to use. 
The default value is `0`.\n\n Returns\n -------\n profile_fname : str\n The file name for the matrix profile\n\n indices_fname : str\n The file name for the matrix profile indices. The first column of the\n array consists of the matrix profile indices, the second column consists\n of the left matrix profile indices, and the third column consists of the\n right matrix profile indices.\n\n Notes\n -----\n `DOI: 10.1109/ICDM.2016.0085 <https://www.cs.ucr.edu/~eamonn/STOMP_GPU_final_submission_camera_ready.pdf>`__\n\n See Table II, Figure 5, and Figure 6\n\n Timeseries, T_B, will be annotated with the distance location\n (or index) of all its subsequences in another times series, T_A.\n\n Return: For every subsequence, Q, in T_B, you will get a distance\n and index for the closest subsequence in T_A. Thus, the array\n returned will have length T_B.shape[0]-m+1. Additionally, the\n left and right matrix profiles are also returned.\n\n Note: Unlike in the Table II where T_A.shape is expected to be equal\n to T_B.shape, this implementation is generalized so that the shapes of\n T_A and T_B can be different. 
In the case where T_A.shape == T_B.shape,\n then our algorithm reduces down to the same algorithm found in Table II.\n\n Additionally, unlike STAMP where the exclusion zone is m/2, the default\n exclusion zone for STOMP is m/4 (See Definition 3 and Figure 3).\n\n For self-joins, set `ignore_trivial = True` in order to avoid the\n trivial match.\n\n Note that left and right matrix profiles are only available for self-joins.\n ' threads_per_block = config.STUMPY_THREADS_PER_BLOCK blocks_per_grid = math.ceil((k / threads_per_block)) T_A = np.load(T_A_fname, allow_pickle=False) T_B = np.load(T_B_fname, allow_pickle=False) QT = np.load(QT_fname, allow_pickle=False) QT_first = np.load(QT_first_fname, allow_pickle=False) M_T = np.load(M_T_fname, allow_pickle=False) Σ_T = np.load(Σ_T_fname, allow_pickle=False) μ_Q = np.load(μ_Q_fname, allow_pickle=False) σ_Q = np.load(σ_Q_fname, allow_pickle=False) with cuda.gpus[device_id]: device_T_A = cuda.to_device(T_A) device_QT_odd = cuda.to_device(QT) device_QT_even = cuda.to_device(QT) device_QT_first = cuda.to_device(QT_first) device_μ_Q = cuda.to_device(μ_Q) device_σ_Q = cuda.to_device(σ_Q) if ignore_trivial: device_T_B = device_T_A device_M_T = device_μ_Q device_Σ_T = device_σ_Q else: device_T_B = cuda.to_device(T_B) device_M_T = cuda.to_device(M_T) device_Σ_T = cuda.to_device(Σ_T) profile = np.full((k, 3), np.inf) indices = np.full((k, 3), (- 1), dtype=np.int64) device_profile = cuda.to_device(profile) device_indices = cuda.to_device(indices) _compute_and_update_PI_kernel[(blocks_per_grid, threads_per_block)]((range_start - 1), device_T_A, device_T_B, m, device_QT_even, device_QT_odd, device_QT_first, device_M_T, device_Σ_T, device_μ_Q, device_σ_Q, k, ignore_trivial, excl_zone, device_profile, device_indices, False) for i in range(range_start, range_stop): _compute_and_update_PI_kernel[(blocks_per_grid, threads_per_block)](i, device_T_A, device_T_B, m, device_QT_even, device_QT_odd, device_QT_first, device_M_T, device_Σ_T, 
device_μ_Q, device_σ_Q, k, ignore_trivial, excl_zone, device_profile, device_indices, True) profile = device_profile.copy_to_host() indices = device_indices.copy_to_host() profile = np.sqrt(profile) profile_fname = core.array_to_temp_file(profile) indices_fname = core.array_to_temp_file(indices) return (profile_fname, indices_fname)<|docstring|>A Numba CUDA version of STOMP for parallel computation of the matrix profile, matrix profile indices, left matrix profile indices, and right matrix profile indices. Parameters ---------- T_A_fname : str The file name for the time series or sequence for which to compute the matrix profile T_B_fname : str The file name for the time series or sequence that contain your query subsequences of interest m : int Window size range_stop : int The index value along T_B for which to stop the matrix profile calculation. This parameter is here for consistency with the distributed `stumped` algorithm. excl_zone : int The half width for the exclusion zone relative to the current sliding window M_T_fname : str The file name for the sliding mean of time series, `T` Σ_T_fname : str The file name for the sliding standard deviation of time series, `T` QT_fname : str The file name for the dot product between some query sequence,`Q`, and time series, `T` QT_first_fname : str The file name for the QT for the first window relative to the current sliding window μ_Q_fname : str The file name for the mean of the query sequence, `Q`, relative to the current sliding window σ_Q_fname : str The file name for the standard deviation of the query sequence, `Q`, relative to the current sliding window k : int The total number of sliding windows to iterate over ignore_trivial : bool Set to `True` if this is a self-join. Otherwise, for AB-join, set this to `False`. Default is `True`. range_start : int The starting index value along T_B for which to start the matrix profile calculation. Default is 1. device_id : int The (GPU) device number to use. 
The default value is `0`. Returns ------- profile_fname : str The file name for the matrix profile indices_fname : str The file name for the matrix profile indices. The first column of the array consists of the matrix profile indices, the second column consists of the left matrix profile indices, and the third column consists of the right matrix profile indices. Notes ----- `DOI: 10.1109/ICDM.2016.0085 <https://www.cs.ucr.edu/~eamonn/STOMP_GPU_final_submission_camera_ready.pdf>`__ See Table II, Figure 5, and Figure 6 Timeseries, T_B, will be annotated with the distance location (or index) of all its subsequences in another times series, T_A. Return: For every subsequence, Q, in T_B, you will get a distance and index for the closest subsequence in T_A. Thus, the array returned will have length T_B.shape[0]-m+1. Additionally, the left and right matrix profiles are also returned. Note: Unlike in the Table II where T_A.shape is expected to be equal to T_B.shape, this implementation is generalized so that the shapes of T_A and T_B can be different. In the case where T_A.shape == T_B.shape, then our algorithm reduces down to the same algorithm found in Table II. Additionally, unlike STAMP where the exclusion zone is m/2, the default exclusion zone for STOMP is m/4 (See Definition 3 and Figure 3). For self-joins, set `ignore_trivial = True` in order to avoid the trivial match. Note that left and right matrix profiles are only available for self-joins.<|endoftext|>
ccce30af0dbb42fa2a23c2a95ed4bbd753fc3b3a650ce149d9ed056a05a616e1
def gpu_stump(T_A, m, T_B=None, ignore_trivial=True, device_id=0): '\n Compute the matrix profile with GPU-STOMP\n\n This is a convenience wrapper around the Numba `cuda.jit` `_gpu_stump` function\n which computes the matrix profile according to GPU-STOMP.\n\n Parameters\n ----------\n T_A : ndarray\n The time series or sequence for which to compute the matrix profile\n\n m : int\n Window size\n\n T_B : (optional) ndarray\n The time series or sequence that contain your query subsequences\n of interest. Default is `None` which corresponds to a self-join.\n\n ignore_trivial : bool\n Set to `True` if this is a self-join. Otherwise, for AB-join, set this\n to `False`. Default is `True`.\n\n device_id : int or list\n The (GPU) device number to use. The default value is `0`. A list of\n valid device ids (int) may also be provided for parallel GPU-STUMP\n computation. A list of all valid device ids can be obtained by\n executing `[device.id for device in cuda.list_devices()]`.\n\n Returns\n -------\n out : ndarray\n The first column consists of the matrix profile, the second column\n consists of the matrix profile indices, the third column consists of\n the left matrix profile indices, and the fourth column consists of\n the right matrix profile indices.\n\n Notes\n -----\n `DOI: 10.1109/ICDM.2016.0085 <https://www.cs.ucr.edu/~eamonn/STOMP_GPU_final_submission_camera_ready.pdf>`__\n\n See Table II, Figure 5, and Figure 6\n\n Timeseries, T_B, will be annotated with the distance location\n (or index) of all its subsequences in another times series, T_A.\n\n Return: For every subsequence, Q, in T_B, you will get a distance\n and index for the closest subsequence in T_A. Thus, the array\n returned will have length T_B.shape[0]-m+1. Additionally, the\n left and right matrix profiles are also returned.\n\n Note: Unlike in the Table II where T_A.shape is expected to be equal\n to T_B.shape, this implementation is generalized so that the shapes of\n T_A and T_B can be different. 
In the case where T_A.shape == T_B.shape,\n then our algorithm reduces down to the same algorithm found in Table II.\n\n Additionally, unlike STAMP where the exclusion zone is m/2, the default\n exclusion zone for STOMP is m/4 (See Definition 3 and Figure 3).\n\n For self-joins, set `ignore_trivial = True` in order to avoid the\n trivial match.\n\n Note that left and right matrix profiles are only available for self-joins.\n ' if (T_B is None): T_B = T_A ignore_trivial = True tmp_T = T_A T_A = T_B T_B = tmp_T (T_A, M_T, Σ_T) = core.preprocess(T_A, m) (T_B, μ_Q, σ_Q) = core.preprocess(T_B, m) if (T_A.ndim != 1): raise ValueError(f'T_A is {T_A.ndim}-dimensional and must be 1-dimensional. For multidimensional STUMP use `stumpy.mstump` or `stumpy.mstumped`') if (T_B.ndim != 1): raise ValueError(f'T_B is {T_B.ndim}-dimensional and must be 1-dimensional. For multidimensional STUMP use `stumpy.mstump` or `stumpy.mstumped`') core.check_dtype(T_A) core.check_dtype(T_B) core.check_window_size(m) if ((ignore_trivial is False) and core.are_arrays_equal(T_A, T_B)): logger.warning('Arrays T_A, T_B are equal, which implies a self-join.') logger.warning('Try setting `ignore_trivial = True`.') if (ignore_trivial and (core.are_arrays_equal(T_A, T_B) is False)): logger.warning('Arrays T_A, T_B are not equal, which implies an AB-join.') logger.warning('Try setting `ignore_trivial = False`.') n = T_B.shape[0] k = ((T_A.shape[0] - m) + 1) l = ((n - m) + 1) excl_zone = int(np.ceil((m / 4))) T_A_fname = core.array_to_temp_file(T_A) T_B_fname = core.array_to_temp_file(T_B) M_T_fname = core.array_to_temp_file(M_T) Σ_T_fname = core.array_to_temp_file(Σ_T) μ_Q_fname = core.array_to_temp_file(μ_Q) σ_Q_fname = core.array_to_temp_file(σ_Q) out = np.empty((k, 4), dtype=object) if isinstance(device_id, int): device_ids = [device_id] else: device_ids = device_id profile = ([None] * len(device_ids)) indices = ([None] * len(device_ids)) for _id in device_ids: with cuda.gpus[_id]: if 
(cuda.current_context().__class__.__name__ != 'FakeCUDAContext'): cuda.current_context().deallocations.clear() step = (1 + (l // len(device_ids))) if (len(device_ids) > 1): mp.set_start_method('spawn', force=True) p = mp.Pool(processes=len(device_ids)) results = ([None] * len(device_ids)) QT_fnames = [] QT_first_fnames = [] for (idx, start) in enumerate(range(0, l, step)): stop = min(l, (start + step)) (QT, QT_first) = core._get_QT(start, T_A, T_B, m) QT_fname = core.array_to_temp_file(QT) QT_first_fname = core.array_to_temp_file(QT_first) QT_fnames.append(QT_fname) QT_first_fnames.append(QT_first_fname) if ((len(device_ids) > 1) and (idx < (len(device_ids) - 1))): results[idx] = p.apply_async(_gpu_stump, (T_A_fname, T_B_fname, m, stop, excl_zone, M_T_fname, Σ_T_fname, QT_fname, QT_first_fname, μ_Q_fname, σ_Q_fname, k, ignore_trivial, (start + 1), device_ids[idx])) else: (profile[idx], indices[idx]) = _gpu_stump(T_A_fname, T_B_fname, m, stop, excl_zone, M_T_fname, Σ_T_fname, QT_fname, QT_first_fname, μ_Q_fname, σ_Q_fname, k, ignore_trivial, (start + 1), device_ids[idx]) if (len(device_ids) > 1): p.close() p.join() for (idx, result) in enumerate(results): if (result is not None): (profile[idx], indices[idx]) = result.get() os.remove(T_A_fname) os.remove(T_B_fname) os.remove(M_T_fname) os.remove(Σ_T_fname) os.remove(μ_Q_fname) os.remove(σ_Q_fname) for QT_fname in QT_fnames: os.remove(QT_fname) for QT_first_fname in QT_first_fnames: os.remove(QT_first_fname) for idx in range(len(device_ids)): profile_fname = profile[idx] indices_fname = indices[idx] profile[idx] = np.load(profile_fname, allow_pickle=False) indices[idx] = np.load(indices_fname, allow_pickle=False) os.remove(profile_fname) os.remove(indices_fname) for i in range(1, len(device_ids)): for col in range(profile[0].shape[1]): cond = (profile[0][(:, col)] < profile[i][(:, col)]) profile[0][(:, col)] = np.where(cond, profile[0][(:, col)], profile[i][(:, col)]) indices[0][(:, col)] = np.where(cond, 
indices[0][(:, col)], indices[i][(:, col)]) out[(:, 0)] = profile[0][(:, 0)] out[(:, 1:4)] = indices[0][(:, :)] threshold = 1e-05 if core.are_distances_too_small(out[(:, 0)], threshold=threshold): logger.warning(f'A large number of values are smaller than {threshold}.') logger.warning('For a self-join, try setting `ignore_trivial = True`.') return out
Compute the matrix profile with GPU-STOMP This is a convenience wrapper around the Numba `cuda.jit` `_gpu_stump` function which computes the matrix profile according to GPU-STOMP. Parameters ---------- T_A : ndarray The time series or sequence for which to compute the matrix profile m : int Window size T_B : (optional) ndarray The time series or sequence that contain your query subsequences of interest. Default is `None` which corresponds to a self-join. ignore_trivial : bool Set to `True` if this is a self-join. Otherwise, for AB-join, set this to `False`. Default is `True`. device_id : int or list The (GPU) device number to use. The default value is `0`. A list of valid device ids (int) may also be provided for parallel GPU-STUMP computation. A list of all valid device ids can be obtained by executing `[device.id for device in cuda.list_devices()]`. Returns ------- out : ndarray The first column consists of the matrix profile, the second column consists of the matrix profile indices, the third column consists of the left matrix profile indices, and the fourth column consists of the right matrix profile indices. Notes ----- `DOI: 10.1109/ICDM.2016.0085 <https://www.cs.ucr.edu/~eamonn/STOMP_GPU_final_submission_camera_ready.pdf>`__ See Table II, Figure 5, and Figure 6 Timeseries, T_B, will be annotated with the distance location (or index) of all its subsequences in another times series, T_A. Return: For every subsequence, Q, in T_B, you will get a distance and index for the closest subsequence in T_A. Thus, the array returned will have length T_B.shape[0]-m+1. Additionally, the left and right matrix profiles are also returned. Note: Unlike in the Table II where T_A.shape is expected to be equal to T_B.shape, this implementation is generalized so that the shapes of T_A and T_B can be different. In the case where T_A.shape == T_B.shape, then our algorithm reduces down to the same algorithm found in Table II. 
Additionally, unlike STAMP where the exclusion zone is m/2, the default exclusion zone for STOMP is m/4 (See Definition 3 and Figure 3). For self-joins, set `ignore_trivial = True` in order to avoid the trivial match. Note that left and right matrix profiles are only available for self-joins.
stumpy/gpu_stump.py
gpu_stump
abbasnikbakht/stumpy
1
python
def gpu_stump(T_A, m, T_B=None, ignore_trivial=True, device_id=0): '\n Compute the matrix profile with GPU-STOMP\n\n This is a convenience wrapper around the Numba `cuda.jit` `_gpu_stump` function\n which computes the matrix profile according to GPU-STOMP.\n\n Parameters\n ----------\n T_A : ndarray\n The time series or sequence for which to compute the matrix profile\n\n m : int\n Window size\n\n T_B : (optional) ndarray\n The time series or sequence that contain your query subsequences\n of interest. Default is `None` which corresponds to a self-join.\n\n ignore_trivial : bool\n Set to `True` if this is a self-join. Otherwise, for AB-join, set this\n to `False`. Default is `True`.\n\n device_id : int or list\n The (GPU) device number to use. The default value is `0`. A list of\n valid device ids (int) may also be provided for parallel GPU-STUMP\n computation. A list of all valid device ids can be obtained by\n executing `[device.id for device in cuda.list_devices()]`.\n\n Returns\n -------\n out : ndarray\n The first column consists of the matrix profile, the second column\n consists of the matrix profile indices, the third column consists of\n the left matrix profile indices, and the fourth column consists of\n the right matrix profile indices.\n\n Notes\n -----\n `DOI: 10.1109/ICDM.2016.0085 <https://www.cs.ucr.edu/~eamonn/STOMP_GPU_final_submission_camera_ready.pdf>`__\n\n See Table II, Figure 5, and Figure 6\n\n Timeseries, T_B, will be annotated with the distance location\n (or index) of all its subsequences in another times series, T_A.\n\n Return: For every subsequence, Q, in T_B, you will get a distance\n and index for the closest subsequence in T_A. Thus, the array\n returned will have length T_B.shape[0]-m+1. Additionally, the\n left and right matrix profiles are also returned.\n\n Note: Unlike in the Table II where T_A.shape is expected to be equal\n to T_B.shape, this implementation is generalized so that the shapes of\n T_A and T_B can be different. 
In the case where T_A.shape == T_B.shape,\n then our algorithm reduces down to the same algorithm found in Table II.\n\n Additionally, unlike STAMP where the exclusion zone is m/2, the default\n exclusion zone for STOMP is m/4 (See Definition 3 and Figure 3).\n\n For self-joins, set `ignore_trivial = True` in order to avoid the\n trivial match.\n\n Note that left and right matrix profiles are only available for self-joins.\n ' if (T_B is None): T_B = T_A ignore_trivial = True tmp_T = T_A T_A = T_B T_B = tmp_T (T_A, M_T, Σ_T) = core.preprocess(T_A, m) (T_B, μ_Q, σ_Q) = core.preprocess(T_B, m) if (T_A.ndim != 1): raise ValueError(f'T_A is {T_A.ndim}-dimensional and must be 1-dimensional. For multidimensional STUMP use `stumpy.mstump` or `stumpy.mstumped`') if (T_B.ndim != 1): raise ValueError(f'T_B is {T_B.ndim}-dimensional and must be 1-dimensional. For multidimensional STUMP use `stumpy.mstump` or `stumpy.mstumped`') core.check_dtype(T_A) core.check_dtype(T_B) core.check_window_size(m) if ((ignore_trivial is False) and core.are_arrays_equal(T_A, T_B)): logger.warning('Arrays T_A, T_B are equal, which implies a self-join.') logger.warning('Try setting `ignore_trivial = True`.') if (ignore_trivial and (core.are_arrays_equal(T_A, T_B) is False)): logger.warning('Arrays T_A, T_B are not equal, which implies an AB-join.') logger.warning('Try setting `ignore_trivial = False`.') n = T_B.shape[0] k = ((T_A.shape[0] - m) + 1) l = ((n - m) + 1) excl_zone = int(np.ceil((m / 4))) T_A_fname = core.array_to_temp_file(T_A) T_B_fname = core.array_to_temp_file(T_B) M_T_fname = core.array_to_temp_file(M_T) Σ_T_fname = core.array_to_temp_file(Σ_T) μ_Q_fname = core.array_to_temp_file(μ_Q) σ_Q_fname = core.array_to_temp_file(σ_Q) out = np.empty((k, 4), dtype=object) if isinstance(device_id, int): device_ids = [device_id] else: device_ids = device_id profile = ([None] * len(device_ids)) indices = ([None] * len(device_ids)) for _id in device_ids: with cuda.gpus[_id]: if 
(cuda.current_context().__class__.__name__ != 'FakeCUDAContext'): cuda.current_context().deallocations.clear() step = (1 + (l // len(device_ids))) if (len(device_ids) > 1): mp.set_start_method('spawn', force=True) p = mp.Pool(processes=len(device_ids)) results = ([None] * len(device_ids)) QT_fnames = [] QT_first_fnames = [] for (idx, start) in enumerate(range(0, l, step)): stop = min(l, (start + step)) (QT, QT_first) = core._get_QT(start, T_A, T_B, m) QT_fname = core.array_to_temp_file(QT) QT_first_fname = core.array_to_temp_file(QT_first) QT_fnames.append(QT_fname) QT_first_fnames.append(QT_first_fname) if ((len(device_ids) > 1) and (idx < (len(device_ids) - 1))): results[idx] = p.apply_async(_gpu_stump, (T_A_fname, T_B_fname, m, stop, excl_zone, M_T_fname, Σ_T_fname, QT_fname, QT_first_fname, μ_Q_fname, σ_Q_fname, k, ignore_trivial, (start + 1), device_ids[idx])) else: (profile[idx], indices[idx]) = _gpu_stump(T_A_fname, T_B_fname, m, stop, excl_zone, M_T_fname, Σ_T_fname, QT_fname, QT_first_fname, μ_Q_fname, σ_Q_fname, k, ignore_trivial, (start + 1), device_ids[idx]) if (len(device_ids) > 1): p.close() p.join() for (idx, result) in enumerate(results): if (result is not None): (profile[idx], indices[idx]) = result.get() os.remove(T_A_fname) os.remove(T_B_fname) os.remove(M_T_fname) os.remove(Σ_T_fname) os.remove(μ_Q_fname) os.remove(σ_Q_fname) for QT_fname in QT_fnames: os.remove(QT_fname) for QT_first_fname in QT_first_fnames: os.remove(QT_first_fname) for idx in range(len(device_ids)): profile_fname = profile[idx] indices_fname = indices[idx] profile[idx] = np.load(profile_fname, allow_pickle=False) indices[idx] = np.load(indices_fname, allow_pickle=False) os.remove(profile_fname) os.remove(indices_fname) for i in range(1, len(device_ids)): for col in range(profile[0].shape[1]): cond = (profile[0][(:, col)] < profile[i][(:, col)]) profile[0][(:, col)] = np.where(cond, profile[0][(:, col)], profile[i][(:, col)]) indices[0][(:, col)] = np.where(cond, 
indices[0][(:, col)], indices[i][(:, col)]) out[(:, 0)] = profile[0][(:, 0)] out[(:, 1:4)] = indices[0][(:, :)] threshold = 1e-05 if core.are_distances_too_small(out[(:, 0)], threshold=threshold): logger.warning(f'A large number of values are smaller than {threshold}.') logger.warning('For a self-join, try setting `ignore_trivial = True`.') return out
def gpu_stump(T_A, m, T_B=None, ignore_trivial=True, device_id=0): '\n Compute the matrix profile with GPU-STOMP\n\n This is a convenience wrapper around the Numba `cuda.jit` `_gpu_stump` function\n which computes the matrix profile according to GPU-STOMP.\n\n Parameters\n ----------\n T_A : ndarray\n The time series or sequence for which to compute the matrix profile\n\n m : int\n Window size\n\n T_B : (optional) ndarray\n The time series or sequence that contain your query subsequences\n of interest. Default is `None` which corresponds to a self-join.\n\n ignore_trivial : bool\n Set to `True` if this is a self-join. Otherwise, for AB-join, set this\n to `False`. Default is `True`.\n\n device_id : int or list\n The (GPU) device number to use. The default value is `0`. A list of\n valid device ids (int) may also be provided for parallel GPU-STUMP\n computation. A list of all valid device ids can be obtained by\n executing `[device.id for device in cuda.list_devices()]`.\n\n Returns\n -------\n out : ndarray\n The first column consists of the matrix profile, the second column\n consists of the matrix profile indices, the third column consists of\n the left matrix profile indices, and the fourth column consists of\n the right matrix profile indices.\n\n Notes\n -----\n `DOI: 10.1109/ICDM.2016.0085 <https://www.cs.ucr.edu/~eamonn/STOMP_GPU_final_submission_camera_ready.pdf>`__\n\n See Table II, Figure 5, and Figure 6\n\n Timeseries, T_B, will be annotated with the distance location\n (or index) of all its subsequences in another times series, T_A.\n\n Return: For every subsequence, Q, in T_B, you will get a distance\n and index for the closest subsequence in T_A. Thus, the array\n returned will have length T_B.shape[0]-m+1. Additionally, the\n left and right matrix profiles are also returned.\n\n Note: Unlike in the Table II where T_A.shape is expected to be equal\n to T_B.shape, this implementation is generalized so that the shapes of\n T_A and T_B can be different. 
In the case where T_A.shape == T_B.shape,\n then our algorithm reduces down to the same algorithm found in Table II.\n\n Additionally, unlike STAMP where the exclusion zone is m/2, the default\n exclusion zone for STOMP is m/4 (See Definition 3 and Figure 3).\n\n For self-joins, set `ignore_trivial = True` in order to avoid the\n trivial match.\n\n Note that left and right matrix profiles are only available for self-joins.\n ' if (T_B is None): T_B = T_A ignore_trivial = True tmp_T = T_A T_A = T_B T_B = tmp_T (T_A, M_T, Σ_T) = core.preprocess(T_A, m) (T_B, μ_Q, σ_Q) = core.preprocess(T_B, m) if (T_A.ndim != 1): raise ValueError(f'T_A is {T_A.ndim}-dimensional and must be 1-dimensional. For multidimensional STUMP use `stumpy.mstump` or `stumpy.mstumped`') if (T_B.ndim != 1): raise ValueError(f'T_B is {T_B.ndim}-dimensional and must be 1-dimensional. For multidimensional STUMP use `stumpy.mstump` or `stumpy.mstumped`') core.check_dtype(T_A) core.check_dtype(T_B) core.check_window_size(m) if ((ignore_trivial is False) and core.are_arrays_equal(T_A, T_B)): logger.warning('Arrays T_A, T_B are equal, which implies a self-join.') logger.warning('Try setting `ignore_trivial = True`.') if (ignore_trivial and (core.are_arrays_equal(T_A, T_B) is False)): logger.warning('Arrays T_A, T_B are not equal, which implies an AB-join.') logger.warning('Try setting `ignore_trivial = False`.') n = T_B.shape[0] k = ((T_A.shape[0] - m) + 1) l = ((n - m) + 1) excl_zone = int(np.ceil((m / 4))) T_A_fname = core.array_to_temp_file(T_A) T_B_fname = core.array_to_temp_file(T_B) M_T_fname = core.array_to_temp_file(M_T) Σ_T_fname = core.array_to_temp_file(Σ_T) μ_Q_fname = core.array_to_temp_file(μ_Q) σ_Q_fname = core.array_to_temp_file(σ_Q) out = np.empty((k, 4), dtype=object) if isinstance(device_id, int): device_ids = [device_id] else: device_ids = device_id profile = ([None] * len(device_ids)) indices = ([None] * len(device_ids)) for _id in device_ids: with cuda.gpus[_id]: if 
(cuda.current_context().__class__.__name__ != 'FakeCUDAContext'): cuda.current_context().deallocations.clear() step = (1 + (l // len(device_ids))) if (len(device_ids) > 1): mp.set_start_method('spawn', force=True) p = mp.Pool(processes=len(device_ids)) results = ([None] * len(device_ids)) QT_fnames = [] QT_first_fnames = [] for (idx, start) in enumerate(range(0, l, step)): stop = min(l, (start + step)) (QT, QT_first) = core._get_QT(start, T_A, T_B, m) QT_fname = core.array_to_temp_file(QT) QT_first_fname = core.array_to_temp_file(QT_first) QT_fnames.append(QT_fname) QT_first_fnames.append(QT_first_fname) if ((len(device_ids) > 1) and (idx < (len(device_ids) - 1))): results[idx] = p.apply_async(_gpu_stump, (T_A_fname, T_B_fname, m, stop, excl_zone, M_T_fname, Σ_T_fname, QT_fname, QT_first_fname, μ_Q_fname, σ_Q_fname, k, ignore_trivial, (start + 1), device_ids[idx])) else: (profile[idx], indices[idx]) = _gpu_stump(T_A_fname, T_B_fname, m, stop, excl_zone, M_T_fname, Σ_T_fname, QT_fname, QT_first_fname, μ_Q_fname, σ_Q_fname, k, ignore_trivial, (start + 1), device_ids[idx]) if (len(device_ids) > 1): p.close() p.join() for (idx, result) in enumerate(results): if (result is not None): (profile[idx], indices[idx]) = result.get() os.remove(T_A_fname) os.remove(T_B_fname) os.remove(M_T_fname) os.remove(Σ_T_fname) os.remove(μ_Q_fname) os.remove(σ_Q_fname) for QT_fname in QT_fnames: os.remove(QT_fname) for QT_first_fname in QT_first_fnames: os.remove(QT_first_fname) for idx in range(len(device_ids)): profile_fname = profile[idx] indices_fname = indices[idx] profile[idx] = np.load(profile_fname, allow_pickle=False) indices[idx] = np.load(indices_fname, allow_pickle=False) os.remove(profile_fname) os.remove(indices_fname) for i in range(1, len(device_ids)): for col in range(profile[0].shape[1]): cond = (profile[0][(:, col)] < profile[i][(:, col)]) profile[0][(:, col)] = np.where(cond, profile[0][(:, col)], profile[i][(:, col)]) indices[0][(:, col)] = np.where(cond, 
indices[0][(:, col)], indices[i][(:, col)]) out[(:, 0)] = profile[0][(:, 0)] out[(:, 1:4)] = indices[0][(:, :)] threshold = 1e-05 if core.are_distances_too_small(out[(:, 0)], threshold=threshold): logger.warning(f'A large number of values are smaller than {threshold}.') logger.warning('For a self-join, try setting `ignore_trivial = True`.') return out<|docstring|>Compute the matrix profile with GPU-STOMP This is a convenience wrapper around the Numba `cuda.jit` `_gpu_stump` function which computes the matrix profile according to GPU-STOMP. Parameters ---------- T_A : ndarray The time series or sequence for which to compute the matrix profile m : int Window size T_B : (optional) ndarray The time series or sequence that contain your query subsequences of interest. Default is `None` which corresponds to a self-join. ignore_trivial : bool Set to `True` if this is a self-join. Otherwise, for AB-join, set this to `False`. Default is `True`. device_id : int or list The (GPU) device number to use. The default value is `0`. A list of valid device ids (int) may also be provided for parallel GPU-STUMP computation. A list of all valid device ids can be obtained by executing `[device.id for device in cuda.list_devices()]`. Returns ------- out : ndarray The first column consists of the matrix profile, the second column consists of the matrix profile indices, the third column consists of the left matrix profile indices, and the fourth column consists of the right matrix profile indices. Notes ----- `DOI: 10.1109/ICDM.2016.0085 <https://www.cs.ucr.edu/~eamonn/STOMP_GPU_final_submission_camera_ready.pdf>`__ See Table II, Figure 5, and Figure 6 Timeseries, T_B, will be annotated with the distance location (or index) of all its subsequences in another times series, T_A. Return: For every subsequence, Q, in T_B, you will get a distance and index for the closest subsequence in T_A. Thus, the array returned will have length T_B.shape[0]-m+1. 
Additionally, the left and right matrix profiles are also returned. Note: Unlike in the Table II where T_A.shape is expected to be equal to T_B.shape, this implementation is generalized so that the shapes of T_A and T_B can be different. In the case where T_A.shape == T_B.shape, then our algorithm reduces down to the same algorithm found in Table II. Additionally, unlike STAMP where the exclusion zone is m/2, the default exclusion zone for STOMP is m/4 (See Definition 3 and Figure 3). For self-joins, set `ignore_trivial = True` in order to avoid the trivial match. Note that left and right matrix profiles are only available for self-joins.<|endoftext|>
8eec27f90eaa21fbe13984bb836f3e7649bf6eee27f8f7074746193ce541b58e
@staticmethod def zone_salle(salle): 'Retourne la clé de la zone de la salle précisée.\n\n Paramètres à préciser :\n\n * salle : la salle dont on veut la zone\n\n Exemple d\'utilisation :\n\n # Si salle contient une salle (disons depart:1)\n zone = zone(salle)\n # zone contient maintenant "depart"\n\n ' return salle.nom_zone
Retourne la clé de la zone de la salle précisée. Paramètres à préciser : * salle : la salle dont on veut la zone Exemple d'utilisation : # Si salle contient une salle (disons depart:1) zone = zone(salle) # zone contient maintenant "depart"
src/primaires/scripting/fonctions/zone.py
zone_salle
vlegoff/tsunami
14
python
@staticmethod def zone_salle(salle): 'Retourne la clé de la zone de la salle précisée.\n\n Paramètres à préciser :\n\n * salle : la salle dont on veut la zone\n\n Exemple d\'utilisation :\n\n # Si salle contient une salle (disons depart:1)\n zone = zone(salle)\n # zone contient maintenant "depart"\n\n ' return salle.nom_zone
@staticmethod def zone_salle(salle): 'Retourne la clé de la zone de la salle précisée.\n\n Paramètres à préciser :\n\n * salle : la salle dont on veut la zone\n\n Exemple d\'utilisation :\n\n # Si salle contient une salle (disons depart:1)\n zone = zone(salle)\n # zone contient maintenant "depart"\n\n ' return salle.nom_zone<|docstring|>Retourne la clé de la zone de la salle précisée. Paramètres à préciser : * salle : la salle dont on veut la zone Exemple d'utilisation : # Si salle contient une salle (disons depart:1) zone = zone(salle) # zone contient maintenant "depart"<|endoftext|>
e77c955d716842ea85c702ccd0aaf6b9fab91d2be96365d81230804589dc3329
@staticmethod def zone(nom_zone): "Retourne toutes les salles d'une zone indiquée.\n\n Cette fonction retourne toutes les salles de la zone indiquée, sans\n aucun tri. Bien que le calcul ne soit pas gourmand en ressources,\n ne pas oublier que plusieurs centaines de salle peuvent être\n retournées.\n\n " nom_zone = nom_zone.lower() try: zone = importeur.salle.zones[nom_zone] except KeyError: raise ErreurExecution('zone {} inconnue'.format(repr(nom_zone))) return list(zone.salles)
Retourne toutes les salles d'une zone indiquée. Cette fonction retourne toutes les salles de la zone indiquée, sans aucun tri. Bien que le calcul ne soit pas gourmand en ressources, ne pas oublier que plusieurs centaines de salle peuvent être retournées.
src/primaires/scripting/fonctions/zone.py
zone
vlegoff/tsunami
14
python
@staticmethod def zone(nom_zone): "Retourne toutes les salles d'une zone indiquée.\n\n Cette fonction retourne toutes les salles de la zone indiquée, sans\n aucun tri. Bien que le calcul ne soit pas gourmand en ressources,\n ne pas oublier que plusieurs centaines de salle peuvent être\n retournées.\n\n " nom_zone = nom_zone.lower() try: zone = importeur.salle.zones[nom_zone] except KeyError: raise ErreurExecution('zone {} inconnue'.format(repr(nom_zone))) return list(zone.salles)
@staticmethod def zone(nom_zone): "Retourne toutes les salles d'une zone indiquée.\n\n Cette fonction retourne toutes les salles de la zone indiquée, sans\n aucun tri. Bien que le calcul ne soit pas gourmand en ressources,\n ne pas oublier que plusieurs centaines de salle peuvent être\n retournées.\n\n " nom_zone = nom_zone.lower() try: zone = importeur.salle.zones[nom_zone] except KeyError: raise ErreurExecution('zone {} inconnue'.format(repr(nom_zone))) return list(zone.salles)<|docstring|>Retourne toutes les salles d'une zone indiquée. Cette fonction retourne toutes les salles de la zone indiquée, sans aucun tri. Bien que le calcul ne soit pas gourmand en ressources, ne pas oublier que plusieurs centaines de salle peuvent être retournées.<|endoftext|>
2920cb86e002003ecd3ac932d136892846316f22140069bb6f1384ea59c3a147
@staticmethod def zone_filtre(nom_zone, mnemoniques): 'Retourne les salles filtrées d\'une zone.\n\n Le filtre opère sur les mnémoniques. Si on a par exeple les salles :\n zone:ch1, zone:ch2, zone:rt1, zone:rt2, zone:mag1\n L\'appel à zone("zone", "ch") retournera :\n zone:ch1, zone:ch2\n\n On peut utiliser plusieurs débuts de mnémonique, séparés par un pipe\n (|).\n L\'appel à zone("zone", "ch|mag") retournera :\n zone:ch1, zone:ch2, zone:mag1\n\n ' nom_zone = nom_zone.lower() mnemonics = mnemoniques.split('_b_') mnemonics = [m.lower() for m in mnemonics] try: zone = importeur.salle.zones[nom_zone] except KeyError: raise ErreurExecution('zone {} inconnue'.format(repr(nom_zone))) salles = list(zone.salles) r_salles = set() for m in mnemonics: t_salles = [s for s in salles if s.mnemonic.startswith(m)] for salle in t_salles: r_salles.add(salle) return list(r_salles)
Retourne les salles filtrées d'une zone. Le filtre opère sur les mnémoniques. Si on a par exeple les salles : zone:ch1, zone:ch2, zone:rt1, zone:rt2, zone:mag1 L'appel à zone("zone", "ch") retournera : zone:ch1, zone:ch2 On peut utiliser plusieurs débuts de mnémonique, séparés par un pipe (|). L'appel à zone("zone", "ch|mag") retournera : zone:ch1, zone:ch2, zone:mag1
src/primaires/scripting/fonctions/zone.py
zone_filtre
vlegoff/tsunami
14
python
@staticmethod def zone_filtre(nom_zone, mnemoniques): 'Retourne les salles filtrées d\'une zone.\n\n Le filtre opère sur les mnémoniques. Si on a par exeple les salles :\n zone:ch1, zone:ch2, zone:rt1, zone:rt2, zone:mag1\n L\'appel à zone("zone", "ch") retournera :\n zone:ch1, zone:ch2\n\n On peut utiliser plusieurs débuts de mnémonique, séparés par un pipe\n (|).\n L\'appel à zone("zone", "ch|mag") retournera :\n zone:ch1, zone:ch2, zone:mag1\n\n ' nom_zone = nom_zone.lower() mnemonics = mnemoniques.split('_b_') mnemonics = [m.lower() for m in mnemonics] try: zone = importeur.salle.zones[nom_zone] except KeyError: raise ErreurExecution('zone {} inconnue'.format(repr(nom_zone))) salles = list(zone.salles) r_salles = set() for m in mnemonics: t_salles = [s for s in salles if s.mnemonic.startswith(m)] for salle in t_salles: r_salles.add(salle) return list(r_salles)
@staticmethod def zone_filtre(nom_zone, mnemoniques): 'Retourne les salles filtrées d\'une zone.\n\n Le filtre opère sur les mnémoniques. Si on a par exeple les salles :\n zone:ch1, zone:ch2, zone:rt1, zone:rt2, zone:mag1\n L\'appel à zone("zone", "ch") retournera :\n zone:ch1, zone:ch2\n\n On peut utiliser plusieurs débuts de mnémonique, séparés par un pipe\n (|).\n L\'appel à zone("zone", "ch|mag") retournera :\n zone:ch1, zone:ch2, zone:mag1\n\n ' nom_zone = nom_zone.lower() mnemonics = mnemoniques.split('_b_') mnemonics = [m.lower() for m in mnemonics] try: zone = importeur.salle.zones[nom_zone] except KeyError: raise ErreurExecution('zone {} inconnue'.format(repr(nom_zone))) salles = list(zone.salles) r_salles = set() for m in mnemonics: t_salles = [s for s in salles if s.mnemonic.startswith(m)] for salle in t_salles: r_salles.add(salle) return list(r_salles)<|docstring|>Retourne les salles filtrées d'une zone. Le filtre opère sur les mnémoniques. Si on a par exeple les salles : zone:ch1, zone:ch2, zone:rt1, zone:rt2, zone:mag1 L'appel à zone("zone", "ch") retournera : zone:ch1, zone:ch2 On peut utiliser plusieurs débuts de mnémonique, séparés par un pipe (|). L'appel à zone("zone", "ch|mag") retournera : zone:ch1, zone:ch2, zone:mag1<|endoftext|>
506443b9750e464bf325f12128fc43267eb41c871279cca30144430e48f4ff9a
def new_theorem(a, doc, con): ' from amsthm.py ' name = str(a['name']) header = a['header'] star = (a['*modifier*'] == '*') parent = a['parent'] shared = a['shared'] style = doc.userdata.getPath('packages/amsthm/currentstyle') if star: thecounter = None elif (parent and (not shared)): con.newcounter(name, initial=0, resetby=parent) con.newcommand(('the' + name), 0, ('\\arabic{%s}.\\arabic{%s}' % (parent, name))) thecounter = name elif shared: thecounter = shared else: thecounter = name con.newcounter(name, initial=0) con.newcommand(('the' + name), 0, ('\\arabic{%s}' % name)) data = {'nodeName': 'thmenv', 'thmName': name, 'args': '[title]', 'counter': thecounter, 'caption': header, 'forcePars': True, 'style': style} th = type(name, (Environment,), data) return th
from amsthm.py
ColDoc/blob_inator.py
new_theorem
mennucc/ColDoc_project
0
python
def new_theorem(a, doc, con): ' ' name = str(a['name']) header = a['header'] star = (a['*modifier*'] == '*') parent = a['parent'] shared = a['shared'] style = doc.userdata.getPath('packages/amsthm/currentstyle') if star: thecounter = None elif (parent and (not shared)): con.newcounter(name, initial=0, resetby=parent) con.newcommand(('the' + name), 0, ('\\arabic{%s}.\\arabic{%s}' % (parent, name))) thecounter = name elif shared: thecounter = shared else: thecounter = name con.newcounter(name, initial=0) con.newcommand(('the' + name), 0, ('\\arabic{%s}' % name)) data = {'nodeName': 'thmenv', 'thmName': name, 'args': '[title]', 'counter': thecounter, 'caption': header, 'forcePars': True, 'style': style} th = type(name, (Environment,), data) return th
def new_theorem(a, doc, con): ' ' name = str(a['name']) header = a['header'] star = (a['*modifier*'] == '*') parent = a['parent'] shared = a['shared'] style = doc.userdata.getPath('packages/amsthm/currentstyle') if star: thecounter = None elif (parent and (not shared)): con.newcounter(name, initial=0, resetby=parent) con.newcommand(('the' + name), 0, ('\\arabic{%s}.\\arabic{%s}' % (parent, name))) thecounter = name elif shared: thecounter = shared else: thecounter = name con.newcounter(name, initial=0) con.newcommand(('the' + name), 0, ('\\arabic{%s}' % name)) data = {'nodeName': 'thmenv', 'thmName': name, 'args': '[title]', 'counter': thecounter, 'caption': header, 'forcePars': True, 'style': style} th = type(name, (Environment,), data) return th<|docstring|>from amsthm.py<|endoftext|>
419dde14a0a78bf2e7268d171f99d1427a5cd6fbc06d340f2ddd4636cd276724
def is_math_mode(o): ' Hints if the object `o` starts a math mode or non-math mode LaTex\n Warning: it is quite primitive' assert isinstance(o, (str, named_stream)) r = None if isinstance(o, str): e = o l = ('str:' + o) elif isinstance(o, named_stream): e = o.environ l = ('named_stream:' + e) if e.startswith('E_'): e = e[2:] if (e[(- 1)] == '*'): e = e[:(- 1)] if (e in ('math', 'displaymath', 'equation', 'subequations', 'eqnarray', 'split', 'multiline', 'array', 'gather', 'align', 'flalign')): r = True else: r = False elif (o in ('\\[', '\\(')): r = True else: r = False assert (r is not None) logger.debug('This is %s math: %s', ('' if r else 'not'), l) return r
Hints if the object `o` starts a math mode or non-math mode LaTex Warning: it is quite primitive
ColDoc/blob_inator.py
is_math_mode
mennucc/ColDoc_project
0
python
def is_math_mode(o): ' Hints if the object `o` starts a math mode or non-math mode LaTex\n Warning: it is quite primitive' assert isinstance(o, (str, named_stream)) r = None if isinstance(o, str): e = o l = ('str:' + o) elif isinstance(o, named_stream): e = o.environ l = ('named_stream:' + e) if e.startswith('E_'): e = e[2:] if (e[(- 1)] == '*'): e = e[:(- 1)] if (e in ('math', 'displaymath', 'equation', 'subequations', 'eqnarray', 'split', 'multiline', 'array', 'gather', 'align', 'flalign')): r = True else: r = False elif (o in ('\\[', '\\(')): r = True else: r = False assert (r is not None) logger.debug('This is %s math: %s', ( if r else 'not'), l) return r
def is_math_mode(o): ' Hints if the object `o` starts a math mode or non-math mode LaTex\n Warning: it is quite primitive' assert isinstance(o, (str, named_stream)) r = None if isinstance(o, str): e = o l = ('str:' + o) elif isinstance(o, named_stream): e = o.environ l = ('named_stream:' + e) if e.startswith('E_'): e = e[2:] if (e[(- 1)] == '*'): e = e[:(- 1)] if (e in ('math', 'displaymath', 'equation', 'subequations', 'eqnarray', 'split', 'multiline', 'array', 'gather', 'align', 'flalign')): r = True else: r = False elif (o in ('\\[', '\\(')): r = True else: r = False assert (r is not None) logger.debug('This is %s math: %s', ( if r else 'not'), l) return r<|docstring|>Hints if the object `o` starts a math mode or non-math mode LaTex Warning: it is quite primitive<|endoftext|>
4ca15bd8f90c39aee5026351cc7f21401870976566761b93f47606babebf4837
def main(args, metadata_class, coldoc=None):
    """Drive the blob_inator over `args.input_file`, splitting it into blobs.

    `coldoc` is the nickname of the ColDoc, or the Django class for it.
    Returns 0 on success, 1 on failure (blobs directory already used, or the
    blob_inator raised an exception).
    """
    if args.zip_sections:
        args.split_sections = True
    # normalize the yes/no/auto command-line spelling to True / False / 'auto'
    args.add_UUID = {'yes': True, 'y': True, 'no': False, 'n': False,
                     'a': 'auto', 'auto': 'auto'}[args.add_UUID]
    named_stream._default_rstrip = args.strip
    named_stream._default_write_UUID = args.add_UUID
    args.split_environment += args.private_environment
    verbose = args.verbose
    assert type(verbose) == int and verbose >= 0
    if verbose > 1:
        logging.getLogger().setLevel(logging.DEBUG)
    elif verbose:
        logging.getLogger().setLevel(logging.INFO)
    else:
        logging.getLogger().setLevel(logging.WARNING)
    if not os.path.exists(args.blobs_dir):
        os.mkdir(args.blobs_dir)
    assert os.path.isdir(args.blobs_dir), ' not a dir %r' % args.blobs_dir
    assert os.path.isfile(args.input_file)
    try:
        import pycountry
    except ImportError:  # was a bare `except:`; only a missing module is expected here
        logger.warning('Please install `pycountry` ')
        pycountry = None
    if pycountry and args.language is not None:
        l = args.language
        if len(l) == 2:
            L = pycountry.languages.get(alpha_2=l)
        # BUGFIX: this was a second independent `if`, so a valid 2-letter code
        # fell through to the `else` branch below and exited with an error
        elif len(l) == 3:
            L = pycountry.languages.get(alpha_3=l)
        else:
            # BUGFIX: the code was passed to print() as a second argument
            # instead of being %-formatted into the message
            print('Use a 2 or 3 letter code for the language, not %r' % (l,))
            sys.exit(2)
        if L is None:
            print('--language %r is not a recognized language code' % (l,))
            print(' Please use ISO_639-3 codes, see https://en.wikipedia.org/wiki/ISO_639-3')
            sys.exit(2)
        # canonicalize to the three-letter code
        args.language = l = L.alpha_3
        logger.info('Selected language: %s (%s)', l, L.name)
    mytex = TeX()
    mydocument = mytex.ownerDocument
    mycontext = mydocument.context
    if args.split_sections:
        # redefine sectioning commands as one-argument macros for plasTeX
        mycontext.newcommand('section', 1, '\\section{#1}')
        mycontext.newcommand('subsection', 1, '\\subsection{#1}')
    for name in args.metadata_command:
        # declare each metadata command as a one-argument plasTeX macro
        d = '\\' + name + '{#1}'
        n = 1
        newclass = type(name, (plasTeX.NewCommand,),
                        {'nargs': n, 'opt': None, 'definition': d})
        assert newclass.nargs == n
        mycontext.addGlobal(name, newclass)
    if args.EDB:
        # declare the EDB-specific theorem-like environments
        thecounter = 'thmCount'
        mycontext.newcounter(thecounter, initial=0)
        for name in ('wipExercise', 'extrastuff', 'delasol'):
            data = {'macroName': name, 'counter': thecounter,
                    'thehead': name, 'thename': name, 'labelable': True,
                    'forcePars': True, 'thestyle': 'plain'}
            th = type(name, (amsthm.theoremCommand,), data)
            mycontext.addGlobal(name, th)
            args.split_environment.append(name)
    # working subdirectories inside the blobs dir
    for j in ('UUID', 'SEC', 'tmp'):
        d = osjoin(args.blobs_dir, j)
        if not os.path.isdir(d):
            os.mkdir(d)
    args.cwd = os.getcwd()
    f = osjoin(args.blobs_dir, '.blob_inator-args.json')
    if os.path.exists(f):
        sys.stderr.write('Cannot reuse this same directory: %r\n' % (f,))
        return 1
    with open(f, 'w') as a:
        json.dump(args.__dict__, a, indent=2)
    logger.info('processing %r' % args.input_file)
    try:
        blob_inator(mytex, mydocument, mycontext, args, metadata_class, coldoc)
    except Exception:  # was a bare `except:`; do not swallow SystemExit/KeyboardInterrupt
        logger.exception('blob_inator killed by exception:')
        return 1
    else:
        logger.info('end of file')
    # rewrite the args file with keys renamed to orig_*
    # NOTE(review): presumably blob_inator_orig_keys lists args the run may
    # have modified — confirm against its definition
    f = osjoin(args.blobs_dir, '.blob_inator-args.json')
    D = copy.copy(args.__dict__)
    for k in blob_inator_orig_keys:
        if k in D:
            D['orig_' + k] = D.pop(k)
    with open(f, 'w') as a:
        json.dump(D, a, indent=2)
    return 0
def _find_unused_UUID(self):
    """Set `filename` and `metadata_filename`, using a new, unused UUID.

    Draws fresh UUIDs until one maps to a blob filename that does not exist
    yet; stores the result in `self._filename`, `self._dir` and `self._uuid`.
    """
    filename = None
    while not filename:
        u = new_uuid(blobs_dir=self._basepath)
        d = uuid_to_dir(u, blobs_dir=self._basepath, create=True)
        filename = osjoin(d, 'blob_' + self._lang + self._extension)
        if os.path.exists(osjoin(self._basepath, filename)):
            # BUGFIX: Logger.warn is deprecated, use Logger.warning
            logger.warning(' output exists %r, trying next UUID' % filename)
            filename = None
    # filename is kept relative to the blobs base path
    assert not os.path.isabs(filename)
    assert not os.path.exists(osjoin(self._basepath, filename))
    self._filename = filename
    self._dir = d
    self._uuid = u
@property
def symlink_dir(self):
    """Symlink (relative to `basepath`) that points at the directory where
    the content will be saved."""
    return self._symlink_dir
@symlink_dir.setter
def symlink_dir(self, symlink_dir):
    """Record the symlink path; it must be relative to `basepath`."""
    # absolute paths are never accepted here
    assert not os.path.isabs(symlink_dir)
    self._symlink_dir = symlink_dir
@property
def symlink_files(self):
    """A `set` of symlink names (relative to `basepath`) pointing to this blob."""
    return self._symlink_files
@symlink_files.setter
def symlink_files(self, symlink_file):
    """Unsupported: the set must not be replaced wholesale; use `symlink_file_add`."""
    raise NotImplementedError(' please use `symlink_file_add`')
def symlink_file_add(self, symlink_file):
    """Register `symlink_file` (relative to `basepath`) as a symlink name for
    this blob.

    Absolute paths, and paths containing a '..' component, are rejected with
    a warning instead of being added.
    """
    components = symlink_file.split(os.path.sep)
    if '..' in components:
        logger.warning(" will not create symlink with '..' in it: %r", symlink_file)
    elif os.path.isabs(symlink_file):
        logger.warning(' will not create absolute symlink: %r', symlink_file)
    else:
        self._symlink_files.add(symlink_file)
@property
def filename(self):
    """Filename, relative to `basepath`, where the content will be saved."""
    return self._filename
@filename.setter
def filename(self, filename):
    """Set the filename (relative to `basepath`) where the content will be saved;
    this changes also the metadata filename.
    Please use `self.symlink_dir` and not this call.
    """
    # BUGFIX: Logger.warn is deprecated, use Logger.warning
    logger.warning('Please do not use self.filename = %r, use self.symlink ' % (filename,))
    assert not os.path.isabs(filename)
    self._filename = filename
    # keep the cached directory consistent with the new filename
    self._dir = os.path.dirname(filename)
625610391594faf745123e637031e8c9922e654781a941c2110712ec759ddb7e
def add_metadata(self, T, E, braces=False): " The parameter `braces` dictates if `E` will be enclosed in {};\n `braces` may be `True`,`False` or `None` (which means 'autodetect')\n " assert (not self._was_written) assert isinstance(E, str) assert E assert (braces in (True, False, None)) if (T == 'uuid'): logger.error('In %r cannot change uuid from %r to %r', self, self._uuid, E) return E = E.translate({10: 32}) if ((braces is False) or ((E[0] == '{') and (E[(- 1)] == '}') and (braces is None))): self._metadata.add(T, E) else: self._metadata.add(T, (('{' + E) + '}')) if (T in ('environ', 'lang', 'extension')): a = getattr(self, ('_' + T), None) if (a != E): logger.debug('In %r, %r changed from %r to %r', self, T, a, E) setattr(self, ('_' + T), E)
The parameter `braces` dictates if `E` will be enclosed in {}; `braces` may be `True`,`False` or `None` (which means 'autodetect')
ColDoc/blob_inator.py
add_metadata
mennucc/ColDoc_project
0
python
def add_metadata(self, T, E, braces=False): " The parameter `braces` dictates if `E` will be enclosed in {};\n `braces` may be `True`,`False` or `None` (which means 'autodetect')\n " assert (not self._was_written) assert isinstance(E, str) assert E assert (braces in (True, False, None)) if (T == 'uuid'): logger.error('In %r cannot change uuid from %r to %r', self, self._uuid, E) return E = E.translate({10: 32}) if ((braces is False) or ((E[0] == '{') and (E[(- 1)] == '}') and (braces is None))): self._metadata.add(T, E) else: self._metadata.add(T, (('{' + E) + '}')) if (T in ('environ', 'lang', 'extension')): a = getattr(self, ('_' + T), None) if (a != E): logger.debug('In %r, %r changed from %r to %r', self, T, a, E) setattr(self, ('_' + T), E)
def add_metadata(self, T, E, braces=False): " The parameter `braces` dictates if `E` will be enclosed in {};\n `braces` may be `True`,`False` or `None` (which means 'autodetect')\n " assert (not self._was_written) assert isinstance(E, str) assert E assert (braces in (True, False, None)) if (T == 'uuid'): logger.error('In %r cannot change uuid from %r to %r', self, self._uuid, E) return E = E.translate({10: 32}) if ((braces is False) or ((E[0] == '{') and (E[(- 1)] == '}') and (braces is None))): self._metadata.add(T, E) else: self._metadata.add(T, (('{' + E) + '}')) if (T in ('environ', 'lang', 'extension')): a = getattr(self, ('_' + T), None) if (a != E): logger.debug('In %r, %r changed from %r to %r', self, T, a, E) setattr(self, ('_' + T), E)<|docstring|>The parameter `braces` dictates if `E` will be enclosed in {}; `braces` may be `True`,`False` or `None` (which means 'autodetect')<|endoftext|>
def rstrip(self):
    """Split the internal buffer into (initial_part, stripped_part).

    `stripped_part` is the maximal run of trailing lines that consist only
    of whitespace (per `self._re_spaces_`); `initial_part` is everything
    before it.
    """
    self.seek(0)
    lines = self.readlines()
    trailing = []
    while lines and lines[-1] and self._re_spaces_.match(lines[-1]) is not None:
        trailing.append(lines.pop())
    return ''.join(lines), ''.join(reversed(trailing))
def writeout(self, write_UUID=None, rstrip=None):
    """Writes the content of the file; returns the `filename` where the content
    was stored, relative to `basedir` (using the `symlink_dir` if provided).

    - If `write_UUID` is `True`, the UUID will be written at the beginning of
      the blob (but for `section` blobs: for those it is written by another
      part of the code)

    - If `write_UUID` is 'auto', the UUID will not be written in 'section'
      and in any other environ listed in `ColDoc_do_not_write_uuid_in`

    - If `write_UUID` is `False`, no UUID will be written.

    If `rstrip` is `True`, will use `self.rstrip` to strip away final lines
    of only whitespace.
    """
    if self.obliterated:
        # NOTE(review): extra positional arg with no %s placeholder in the
        # format string — logging will report a formatting error when this
        # record is emitted; probably meant
        # logger.warning('Will not write obliterated blob %r', self)
        logger.warning('Will not write obliterated blob', repr(self))
        return False
    # fall back to the class-level defaults when the caller does not decide
    if rstrip is None:
        rstrip = self._default_rstrip
    if write_UUID is None:
        write_UUID = self._default_write_UUID
    assert write_UUID in (True, False, 'auto')
    # resolve 'auto' into a concrete True/False
    if (self.environ == 'section') or ((write_UUID == 'auto') and (self.environ in self._do_not_write_uuid_in)):
        write_UUID = False
    elif write_UUID == 'auto':
        write_UUID = True
    if self._filename is None:
        # no target chosen yet: allocate a fresh UUID and filename
        self._find_unused_UUID()
    if self._was_written:
        logger.critical('file %r was already written ' % self._filename)
        return self._filename
    if self.closed:
        logger.error('file %r was closed before writeout' % self._filename)
    if self.grouping_depth:
        logger.warning('some grouping was not closed in %r' % self._filename)
    filename = osjoin(self._basepath, self._filename)
    if True:
        self.flush()
        logger.debug('writeout file %r ' % (self._filename,))
        z = self._open(filename, 'w')
        if write_UUID and self.uuid:
            # prepend the \uuid{...} marker to the blob content
            z.write('\\uuid{%s}%%\n' % (self.uuid,))
        if rstrip:
            (cnt, tail) = self.rstrip()
        else:
            cnt = self.getvalue()
        z.write(cnt)
        z.close()
        if len(cnt) == 0:
            logger.warning('empty blob %r' % self)
    # record metadata and persist it
    self._metadata.uuid = self._uuid
    if (self.environ[:2] == 'E_') and (self.environ[2:] in self._private):
        self._metadata.add('access', 'private')
    if self._authors:
        # NOTE(review): save() is called before adding the authors —
        # presumably so the metadata object exists first; confirm against
        # the metadata_class implementation
        self._metadata.save()
        for j in self._authors:
            self._metadata.add('author', j)
    self._metadata.blob_modification_time_update()
    self._metadata.save()
    r = self._filename
    self._was_written = True
    self.close()
    if self._symlink_dir:
        # point the symlink dir at the real UUID dir, and return a path
        # that goes through the symlink
        os_rel_symlink(self._dir, self._symlink_dir, basedir=self._basepath, force=True, target_is_directory=True)
        r = osjoin(self._symlink_dir, os.path.basename(filename))
    if self._symlink_files:
        for j in self._symlink_files:
            os_rel_symlink(r, j, basedir=self._basepath, force=True, target_is_directory=False)
    return r
@property
def math_mode(self):
    """Whether the stream is currently in TeX math mode.

    FIXME: this is not perfect — it does not account for \\text or \\parbox.
    """
    return self._math_mode
@property
def top(self):
    """The element currently on top of the stack."""
    logger.debug(repr(self))
    return self._stack[-1]
@property
def topstream(self):
    """The topmost stream held by the stack."""
    return self._topstream
@property
def topenv(self):
    """Environment of the top element; a plain string stands for itself."""
    tip = self._stack[-1]
    if isinstance(tip, named_stream):
        return tip.environ
    return tip
def pop(self, index=(-1), add_as_child=True, checknonempty=True):
    """Pops the topmost element, or the `index` element if given.

    If `add_as_child` and the popped element was a stream, write its UUID
    in the metadata of the parent (new topmost) stream.
    """
    o = self._stack.pop(index)
    logger.debug(repr(self))
    if isinstance(o, named_stream):
        # the topmost stream may have changed: recompute it before linking
        self._set_topstream(checknonempty=checknonempty)
        if (add_as_child and (self._topstream is not None) and isinstance(self._topstream, named_stream) and (not o.obliterated)):
            if o.uuid is None:
                logger.warning('Cannot add blob as child, uuid is None: %r', o)
            else:
                # record the popped blob as a child of the enclosing stream
                self._topstream.add_metadata('child_uuid', o.uuid)
    self._set_math_mode()
    return o
68c64cf261c1faec0ae1713c5a9acc99c62a1423168dcfc3e2937ef36bf2e210
def pop_str(self, warn=True, stopafter=None):
    """Pop string entries off the stack until a stream is reached, or just
    after popping the string equal to `stopafter`."""
    while self._stack:
        top = self._stack[-1]
        if not isinstance(top, str):
            # Stop at the first non-string (stream) entry.
            break
        if stopafter is not None and stopafter == top:
            self._stack.pop()
            break
        if warn:
            logger.warning(' environment was not closed: %r' % top)
        self._stack.pop()
    self._set_math_mode()
    logger.debug(repr(self))
pop strings, until a stream is reached or after popping `stopafter`
ColDoc/blob_inator.py
pop_str
mennucc/ColDoc_project
0
python
def pop_str(self, warn=True, stopafter=None): ' ' while self._stack: s = self._stack[(- 1)] if isinstance(s, str): if ((stopafter is not None) and (stopafter == s)): self._stack.pop() break if warn: logger.warning((' environment was not closed: %r' % s)) self._stack.pop() else: break self._set_math_mode() logger.debug(repr(self))
def pop_str(self, warn=True, stopafter=None): ' ' while self._stack: s = self._stack[(- 1)] if isinstance(s, str): if ((stopafter is not None) and (stopafter == s)): self._stack.pop() break if warn: logger.warning((' environment was not closed: %r' % s)) self._stack.pop() else: break self._set_math_mode() logger.debug(repr(self))<|docstring|>pop strings, until a stream is reached or after popping `stopafter`<|endoftext|>
dd1e54b86cba46f1e9d5fc1a0fb6478f656825d59afafb886a7d3951606c3b30
def pop_stream(self, add_as_child=True):
    """Pop the topmost stream, leaving non-stream stack entries in place."""
    last_stream = None
    # Find the index of the last (topmost) stream on the stack.
    for position, entry in enumerate(self._stack):
        if isinstance(entry, named_stream):
            last_stream = position
    assert last_stream is not None
    stream = self.pop(index=last_stream, add_as_child=add_as_child)
    self._set_math_mode()
    logger.debug(repr(self))
    return stream
pops the topmost stream, w/o touching the non-stream elements of the stack
ColDoc/blob_inator.py
pop_stream
mennucc/ColDoc_project
0
python
def pop_stream(self, add_as_child=True): ' ' t = None for (n, j) in enumerate(self._stack): if isinstance(j, named_stream): t = n assert (t is not None) O = self.pop(index=t, add_as_child=add_as_child) self._set_math_mode() logger.debug(repr(self)) return O
def pop_stream(self, add_as_child=True): ' ' t = None for (n, j) in enumerate(self._stack): if isinstance(j, named_stream): t = n assert (t is not None) O = self.pop(index=t, add_as_child=add_as_child) self._set_math_mode() logger.debug(repr(self)) return O<|docstring|>pops the topmost stream, w/o touching the non-stream elements of the stack<|endoftext|>
525d40037c378dc16eed89827ef2fe29897f2292b6157a8f194e14de3fd2f562
def stationarity_check(TS, column, plot_std=True):
    """Plot the rolling mean (and optionally the rolling standard deviation)
    of a series and print the results of the Dickey-Fuller stationarity test.

    TS: dataframe holding the time series
    column: column of TS to analyse
    plot_std: whether to also plot the rolling standard deviation
    """
    series = TS[column]
    roll_mean = series.rolling(window=8, center=False).mean()
    roll_std = series.rolling(window=8, center=False).std()

    # Augmented Dickey-Fuller test on the non-null observations.
    df_result = adfuller(series.dropna())

    plt.figure(figsize=(14, 8))
    plt.plot(series, color='blue', label='Original')
    plt.plot(roll_mean, color='red', label='Rolling Mean')
    if plot_std:
        plt.plot(roll_std, color='black', label='Rolling Std')
        plt.title('Rolling Mean & Standard Deviation for {}'.format(column))
    else:
        plt.title('Rolling Mean for {}'.format(column))
    plt.legend(loc='best')
    plt.show(block=False)

    print('Results of Dickey-Fuller Test:')
    labels = ['Test Statistic', 'p-value', '#Lags Used',
              'Number of Observations Used']
    summary = pd.Series(df_result[0:4], index=labels)
    for key, value in df_result[4].items():
        summary['Critical Value {}'.format(key)] = value
    print(summary)
Outputs a plot of the Rolling Mean and Standard Deviation and prints results of the Dickey-Fuller Test TS: Time Series, this is the dataframe from which you are pulling your information column: This is the column within the TS that you are interested in plot_std: optional to plot the standard deviation or not
src/visualize.py
stationarity_check
AdamBlomfield/timeseries_arima_zillowdata
4
python
def stationarity_check(TS, column, plot_std=True): 'Outputs a plot of the Rolling Mean and Standard Deviation and prints results of the Dickey-Fuller Test\n TS: Time Series, this is the dataframe from which you are pulling your information\n column: This is the column within the TS that you are interested in\n plot_std: optional to plot the standard deviation or not' rolmean = TS[column].rolling(window=8, center=False).mean() rolstd = TS[column].rolling(window=8, center=False).std() dftest = adfuller(TS[column].dropna()) fig = plt.figure(figsize=(14, 8)) orig = plt.plot(TS[column], color='blue', label='Original') mean = plt.plot(rolmean, color='red', label='Rolling Mean') if plot_std: std = plt.plot(rolstd, color='black', label='Rolling Std') plt.title('Rolling Mean & Standard Deviation for {}'.format(column)) else: plt.title('Rolling Mean for {}'.format(column)) plt.legend(loc='best') plt.show(block=False) print('Results of Dickey-Fuller Test:') dfoutput = pd.Series(dftest[0:4], index=['Test Statistic', 'p-value', '#Lags Used', 'Number of Observations Used']) for (key, value) in dftest[4].items(): dfoutput['Critical Value {}'.format(key)] = value print(dfoutput)
def stationarity_check(TS, column, plot_std=True): 'Outputs a plot of the Rolling Mean and Standard Deviation and prints results of the Dickey-Fuller Test\n TS: Time Series, this is the dataframe from which you are pulling your information\n column: This is the column within the TS that you are interested in\n plot_std: optional to plot the standard deviation or not' rolmean = TS[column].rolling(window=8, center=False).mean() rolstd = TS[column].rolling(window=8, center=False).std() dftest = adfuller(TS[column].dropna()) fig = plt.figure(figsize=(14, 8)) orig = plt.plot(TS[column], color='blue', label='Original') mean = plt.plot(rolmean, color='red', label='Rolling Mean') if plot_std: std = plt.plot(rolstd, color='black', label='Rolling Std') plt.title('Rolling Mean & Standard Deviation for {}'.format(column)) else: plt.title('Rolling Mean for {}'.format(column)) plt.legend(loc='best') plt.show(block=False) print('Results of Dickey-Fuller Test:') dfoutput = pd.Series(dftest[0:4], index=['Test Statistic', 'p-value', '#Lags Used', 'Number of Observations Used']) for (key, value) in dftest[4].items(): dfoutput['Critical Value {}'.format(key)] = value print(dfoutput)<|docstring|>Outputs a plot of the Rolling Mean and Standard Deviation and prints results of the Dickey-Fuller Test TS: Time Series, this is the dataframe from which you are pulling your information column: This is the column within the TS that you are interested in plot_std: optional to plot the standard deviation or not<|endoftext|>
f374972c547e28797b8d0f96cf1bfef6067d7153423a9aea1b852246e0442856
def plot_timeseries_model(data, column, res):
    """Plot the observed series alongside the model's forecast.

    data: dataframe to be used (indexed by date)
    column: the column of ``data`` to plot
    res: fitted results object exposing ``predict`` (e.g. a statsmodels
         ARIMA results instance)
    """
    # Forecast window: positional indices into the date index.
    date_start = 57
    date_end = 63
    forecast_col = '{}_forecast'.format(column)
    data[forecast_col] = res.predict(start=date_start, end=date_end,
                                     dynamic=False)

    # BUG FIX: DataFrame.plot returns the Axes; the original discarded it
    # and then called .set() on the DataFrame, which raises AttributeError.
    ax = data[[column, forecast_col]].plot(figsize=(16, 12))

    date_start_year = data.index[date_start].year
    date_start_month = data.index[date_start].month
    date_end_year = data.index[date_end].year
    date_end_month = data.index[date_end].month
    # BUG FIX: the original indexed ``data`` with the year/month integers
    # (KeyError) and labelled the zipcode with ``column[0]`` (first
    # character only); format the computed values and the column directly.
    ax.set(title="Median House Price in Chicago's {} Zipcode\n"
                 " Between {}-{} and {}-{}".format(
                     column, date_start_year, date_start_month,
                     date_end_year, date_end_month),
           xlabel='Date', ylabel='House Price in Dollars (USD)')
Function will plot the original data, alongside any data for validation and prediction data: dataframe to be used column: the column to be plotted in your dataframe res: results
src/visualize.py
plot_timeseries_model
AdamBlomfield/timeseries_arima_zillowdata
4
python
def plot_timeseries_model(data, column, res): 'Function will plot the original data, alongside any data for validation and prediction\n \n data: dataframe to be used\n column: the column to be plotted in your dataframe\n res: results\n ' column = column data = data date_start = 57 date_end = 63 data['{}_forecast'.format(column)] = res.predict(start=date_start, end=date_end, dynamic=False) ax = data[[column, '{}_forecast'.format(column)]] ax.plot(figsize=(16, 12)) date_start_year = data.index[date_start].year date_start_month = data.index[date_start].month date_end_year = data.index[date_end].year date_end_month = data.index[date_end].month ax.set(title="Median House Price in Chicago's {} Zipcode\n Between {}-{} and {}-{}".format(column[0], data[date_start_year], data[date_start_month], data[date_end_year], data[date_end_month]), xlabel='Date', ylabel='House Price in Dollars (USD)')
def plot_timeseries_model(data, column, res): 'Function will plot the original data, alongside any data for validation and prediction\n \n data: dataframe to be used\n column: the column to be plotted in your dataframe\n res: results\n ' column = column data = data date_start = 57 date_end = 63 data['{}_forecast'.format(column)] = res.predict(start=date_start, end=date_end, dynamic=False) ax = data[[column, '{}_forecast'.format(column)]] ax.plot(figsize=(16, 12)) date_start_year = data.index[date_start].year date_start_month = data.index[date_start].month date_end_year = data.index[date_end].year date_end_month = data.index[date_end].month ax.set(title="Median House Price in Chicago's {} Zipcode\n Between {}-{} and {}-{}".format(column[0], data[date_start_year], data[date_start_month], data[date_end_year], data[date_end_month]), xlabel='Date', ylabel='House Price in Dollars (USD)')<|docstring|>Function will plot the original data, alongside any data for validation and prediction data: dataframe to be used column: the column to be plotted in your dataframe res: results<|endoftext|>
a39e21706c7a81ab5e1f1055f80ce9851a4a3822cacae64929d2876999403007
def copy(self):
    """Shallow copy of the clip.

    The returned clip's ``mask`` and ``audio`` (when present) are shallow
    copies of the originals, so outplace transformations of the clip
    (``clip.resize``, ``clip.subclip``, ...) can build new clips cheaply
    without sharing those auxiliary clips.
    """
    newclip = copy(self)
    for attr in ('audio', 'mask'):
        # Detach the auxiliary clips so the copy does not share them.
        if hasattr(self, attr):
            setattr(newclip, attr, copy(getattr(self, attr)))
    return newclip
Shallow copy of the clip. Returns a shallow copy of the clip whose mask and audio will be shallow copies of the clip's mask and audio if they exist. This method is intensively used to produce new clips every time there is an outplace transformation of the clip (clip.resize, clip.subclip, etc.)
moviepy/Clip.py
copy
Baronsindo/moviepy
3
python
def copy(self): " Shallow copy of the clip. \n \n Returns a shallow copy of the clip whose mask and audio will\n be shallow copies of the clip's mask and audio if they exist.\n\n This method is intensively used to produce new clips every time\n there is an outplace transformation of the clip (clip.resize,\n clip.subclip, etc.)\n " newclip = copy(self) if hasattr(self, 'audio'): newclip.audio = copy(self.audio) if hasattr(self, 'mask'): newclip.mask = copy(self.mask) return newclip
def copy(self): " Shallow copy of the clip. \n \n Returns a shallow copy of the clip whose mask and audio will\n be shallow copies of the clip's mask and audio if they exist.\n\n This method is intensively used to produce new clips every time\n there is an outplace transformation of the clip (clip.resize,\n clip.subclip, etc.)\n " newclip = copy(self) if hasattr(self, 'audio'): newclip.audio = copy(self.audio) if hasattr(self, 'mask'): newclip.mask = copy(self.mask) return newclip<|docstring|>Shallow copy of the clip. Returns a shallow copy of the clip whose mask and audio will be shallow copies of the clip's mask and audio if they exist. This method is intensively used to produce new clips every time there is an outplace transformation of the clip (clip.resize, clip.subclip, etc.)<|endoftext|>
7de05a55615ce27e22567a76425cc1f1b2663d058cffe9a3f3653736f017916f
@convert_to_seconds(['t'])
def get_frame(self, t):
    """Return the frame of the clip at time ``t``: a numpy RGB picture for
    a video clip, or a (mono or stereo) sample value for a sound clip."""
    if not self.memoize:
        return self.make_frame(t)
    # One-entry cache: reuse the frame when the same time is asked twice.
    if t == self.memoized_t:
        return self.memoized_frame
    frame = self.make_frame(t)
    self.memoized_t = t
    self.memoized_frame = frame
    return frame
Gets a numpy array representing the RGB picture of the clip at time t or (mono or stereo) value for a sound clip
moviepy/Clip.py
get_frame
Baronsindo/moviepy
3
python
@convert_to_seconds(['t']) def get_frame(self, t): '\n Gets a numpy array representing the RGB picture of the clip at time t\n or (mono or stereo) value for a sound clip\n ' if self.memoize: if (t == self.memoized_t): return self.memoized_frame else: frame = self.make_frame(t) self.memoized_t = t self.memoized_frame = frame return frame else: return self.make_frame(t)
@convert_to_seconds(['t']) def get_frame(self, t): '\n Gets a numpy array representing the RGB picture of the clip at time t\n or (mono or stereo) value for a sound clip\n ' if self.memoize: if (t == self.memoized_t): return self.memoized_frame else: frame = self.make_frame(t) self.memoized_t = t self.memoized_frame = frame return frame else: return self.make_frame(t)<|docstring|>Gets a numpy array representing the RGB picture of the clip at time t or (mono or stereo) value for a sound clip<|endoftext|>
6fc02ad5731d7b474d82d4d9bec2f473332e4f0a89ae2e2b344acefd0e248dda
def fl(self, fun, apply_to=None, keep_duration=True):
    """General processing of a clip.

    Returns a new clip whose frames are ``fun(gf, t)``, where ``gf`` is the
    current clip's ``get_frame`` accessor (a function ``t -> frame``).

    Parameters
    ----------
    fun
        Function of signature ``(gf, t) -> frame`` producing the frame of
        the transformed clip at time ``t``.
    apply_to
        ``'mask'``, ``'audio'``, or ``['mask', 'audio']``: also apply the
        filter to those attached clips, when present.
    keep_duration
        Set to True if the transformation does not change the clip's
        ``duration``.

    Examples
    --------
    >>> fl = lambda gf,t : gf(t)[int(t):int(t)+50, :]
    >>> newclip = clip.fl(fl, apply_to='mask')
    """
    apply_to = [] if apply_to is None else apply_to
    if isinstance(apply_to, str):
        apply_to = [apply_to]

    newclip = self.set_make_frame(lambda t: fun(self.get_frame, t))
    if not keep_duration:
        # The timeline length is unknown after the transformation.
        newclip.duration = None
        newclip.end = None

    for attr in apply_to:
        attached = getattr(newclip, attr, None)
        if attached is not None:
            setattr(newclip, attr,
                    attached.fl(fun, keep_duration=keep_duration))
    return newclip
General processing of a clip. Returns a new Clip whose frames are a transformation (through function ``fun``) of the frames of the current clip. Parameters ----------- fun A function with signature (gf,t -> frame) where ``gf`` will represent the current clip's ``get_frame`` method, i.e. ``gf`` is a function (t->image). Parameter `t` is a time in seconds, `frame` is a picture (=Numpy array) which will be returned by the transformed clip (see examples below). apply_to Can be either ``'mask'``, or ``'audio'``, or ``['mask','audio']``. Specifies if the filter ``fl`` should also be applied to the audio or the mask of the clip, if any. keep_duration Set to True if the transformation does not change the ``duration`` of the clip. Examples -------- In the following ``newclip`` a 100 pixels-high clip whose video content scrolls from the top to the bottom of the frames of ``clip``. >>> fl = lambda gf,t : gf(t)[int(t):int(t)+50, :] >>> newclip = clip.fl(fl, apply_to='mask')
moviepy/Clip.py
fl
Baronsindo/moviepy
3
python
def fl(self, fun, apply_to=None, keep_duration=True): " General processing of a clip.\n\n Returns a new Clip whose frames are a transformation\n (through function ``fun``) of the frames of the current clip.\n\n Parameters\n -----------\n\n fun\n A function with signature (gf,t -> frame) where ``gf`` will\n represent the current clip's ``get_frame`` method,\n i.e. ``gf`` is a function (t->image). Parameter `t` is a time\n in seconds, `frame` is a picture (=Numpy array) which will be\n returned by the transformed clip (see examples below).\n\n apply_to\n Can be either ``'mask'``, or ``'audio'``, or\n ``['mask','audio']``.\n Specifies if the filter ``fl`` should also be applied to the\n audio or the mask of the clip, if any.\n\n keep_duration\n Set to True if the transformation does not change the\n ``duration`` of the clip.\n\n Examples\n --------\n\n In the following ``newclip`` a 100 pixels-high clip whose video\n content scrolls from the top to the bottom of the frames of\n ``clip``.\n\n >>> fl = lambda gf,t : gf(t)[int(t):int(t)+50, :]\n >>> newclip = clip.fl(fl, apply_to='mask')\n\n " if (apply_to is None): apply_to = [] newclip = self.set_make_frame((lambda t: fun(self.get_frame, t))) if (not keep_duration): newclip.duration = None newclip.end = None if isinstance(apply_to, str): apply_to = [apply_to] for attr in apply_to: if hasattr(newclip, attr): a = getattr(newclip, attr) if (a is not None): new_a = a.fl(fun, keep_duration=keep_duration) setattr(newclip, attr, new_a) return newclip
def fl(self, fun, apply_to=None, keep_duration=True): " General processing of a clip.\n\n Returns a new Clip whose frames are a transformation\n (through function ``fun``) of the frames of the current clip.\n\n Parameters\n -----------\n\n fun\n A function with signature (gf,t -> frame) where ``gf`` will\n represent the current clip's ``get_frame`` method,\n i.e. ``gf`` is a function (t->image). Parameter `t` is a time\n in seconds, `frame` is a picture (=Numpy array) which will be\n returned by the transformed clip (see examples below).\n\n apply_to\n Can be either ``'mask'``, or ``'audio'``, or\n ``['mask','audio']``.\n Specifies if the filter ``fl`` should also be applied to the\n audio or the mask of the clip, if any.\n\n keep_duration\n Set to True if the transformation does not change the\n ``duration`` of the clip.\n\n Examples\n --------\n\n In the following ``newclip`` a 100 pixels-high clip whose video\n content scrolls from the top to the bottom of the frames of\n ``clip``.\n\n >>> fl = lambda gf,t : gf(t)[int(t):int(t)+50, :]\n >>> newclip = clip.fl(fl, apply_to='mask')\n\n " if (apply_to is None): apply_to = [] newclip = self.set_make_frame((lambda t: fun(self.get_frame, t))) if (not keep_duration): newclip.duration = None newclip.end = None if isinstance(apply_to, str): apply_to = [apply_to] for attr in apply_to: if hasattr(newclip, attr): a = getattr(newclip, attr) if (a is not None): new_a = a.fl(fun, keep_duration=keep_duration) setattr(newclip, attr, new_a) return newclip<|docstring|>General processing of a clip. Returns a new Clip whose frames are a transformation (through function ``fun``) of the frames of the current clip. Parameters ----------- fun A function with signature (gf,t -> frame) where ``gf`` will represent the current clip's ``get_frame`` method, i.e. ``gf`` is a function (t->image). Parameter `t` is a time in seconds, `frame` is a picture (=Numpy array) which will be returned by the transformed clip (see examples below). 
apply_to Can be either ``'mask'``, or ``'audio'``, or ``['mask','audio']``. Specifies if the filter ``fl`` should also be applied to the audio or the mask of the clip, if any. keep_duration Set to True if the transformation does not change the ``duration`` of the clip. Examples -------- In the following ``newclip`` a 100 pixels-high clip whose video content scrolls from the top to the bottom of the frames of ``clip``. >>> fl = lambda gf,t : gf(t)[int(t):int(t)+50, :] >>> newclip = clip.fl(fl, apply_to='mask')<|endoftext|>
1f0a533b158f12fecd670f5e13cdccfc6d4f0c5b00e06b610a9f60fa089cc4f1
def fl_time(self, t_func, apply_to=None, keep_duration=False):
    """Return a clip playing the same content on a remapped timeline,
    where time ``t`` is replaced by ``t_func(t)``.

    Parameters
    ----------
    t_func:
        A function ``t -> new_t``.
    apply_to:
        'mask', 'audio', or ['mask','audio']: also remap the timeline of
        those attached clips, when present.
    keep_duration:
        ``False`` (default) if the transformation modifies the clip's
        ``duration``.

    Examples
    --------
    >>> # plays the clip (and its mask and sound) twice faster
    >>> newclip = clip.fl_time(lambda: 2*t, apply_to=['mask', 'audio'])
    >>>
    >>> # plays the clip starting at t=3, and backwards:
    >>> newclip = clip.fl_time(lambda: 3-t)
    """
    if apply_to is None:
        apply_to = []
    # Delegate to the generic frame filter with a time-remapping function.
    remap = lambda gf, t: gf(t_func(t))
    return self.fl(remap, apply_to, keep_duration=keep_duration)
Returns a Clip instance playing the content of the current clip but with a modified timeline, time ``t`` being replaced by another time `t_func(t)`. Parameters ----------- t_func: A function ``t-> new_t`` apply_to: Can be either 'mask', or 'audio', or ['mask','audio']. Specifies if the filter ``fl`` should also be applied to the audio or the mask of the clip, if any. keep_duration: ``False`` (default) if the transformation modifies the ``duration`` of the clip. Examples -------- >>> # plays the clip (and its mask and sound) twice faster >>> newclip = clip.fl_time(lambda: 2*t, apply_to=['mask', 'audio']) >>> >>> # plays the clip starting at t=3, and backwards: >>> newclip = clip.fl_time(lambda: 3-t)
moviepy/Clip.py
fl_time
Baronsindo/moviepy
3
python
def fl_time(self, t_func, apply_to=None, keep_duration=False): "\n Returns a Clip instance playing the content of the current clip\n but with a modified timeline, time ``t`` being replaced by another\n time `t_func(t)`.\n\n Parameters\n -----------\n\n t_func:\n A function ``t-> new_t``\n\n apply_to:\n Can be either 'mask', or 'audio', or ['mask','audio'].\n Specifies if the filter ``fl`` should also be applied to the\n audio or the mask of the clip, if any.\n\n keep_duration:\n ``False`` (default) if the transformation modifies the\n ``duration`` of the clip.\n\n Examples\n --------\n\n >>> # plays the clip (and its mask and sound) twice faster\n >>> newclip = clip.fl_time(lambda: 2*t, apply_to=['mask', 'audio'])\n >>>\n >>> # plays the clip starting at t=3, and backwards:\n >>> newclip = clip.fl_time(lambda: 3-t)\n\n " if (apply_to is None): apply_to = [] return self.fl((lambda gf, t: gf(t_func(t))), apply_to, keep_duration=keep_duration)
def fl_time(self, t_func, apply_to=None, keep_duration=False): "\n Returns a Clip instance playing the content of the current clip\n but with a modified timeline, time ``t`` being replaced by another\n time `t_func(t)`.\n\n Parameters\n -----------\n\n t_func:\n A function ``t-> new_t``\n\n apply_to:\n Can be either 'mask', or 'audio', or ['mask','audio'].\n Specifies if the filter ``fl`` should also be applied to the\n audio or the mask of the clip, if any.\n\n keep_duration:\n ``False`` (default) if the transformation modifies the\n ``duration`` of the clip.\n\n Examples\n --------\n\n >>> # plays the clip (and its mask and sound) twice faster\n >>> newclip = clip.fl_time(lambda: 2*t, apply_to=['mask', 'audio'])\n >>>\n >>> # plays the clip starting at t=3, and backwards:\n >>> newclip = clip.fl_time(lambda: 3-t)\n\n " if (apply_to is None): apply_to = [] return self.fl((lambda gf, t: gf(t_func(t))), apply_to, keep_duration=keep_duration)<|docstring|>Returns a Clip instance playing the content of the current clip but with a modified timeline, time ``t`` being replaced by another time `t_func(t)`. Parameters ----------- t_func: A function ``t-> new_t`` apply_to: Can be either 'mask', or 'audio', or ['mask','audio']. Specifies if the filter ``fl`` should also be applied to the audio or the mask of the clip, if any. keep_duration: ``False`` (default) if the transformation modifies the ``duration`` of the clip. Examples -------- >>> # plays the clip (and its mask and sound) twice faster >>> newclip = clip.fl_time(lambda: 2*t, apply_to=['mask', 'audio']) >>> >>> # plays the clip starting at t=3, and backwards: >>> newclip = clip.fl_time(lambda: 3-t)<|endoftext|>
7e131c360cc3e473cd32cb062205857bed885be8baa359040db321e484deaa7c
def fx(self, func, *args, **kwargs):
    """Return ``func(self, *args, **kwargs)``.

    This shortcut keeps an effect's name next to its parameters when
    effects are chained:

    >>> from moviepy.video.fx import volumex, resize, mirrorx
    >>> clip.fx( volumex, 0.5).fx( resize, 0.3).fx( mirrorx )
    >>> # Is equivalent, but clearer than
    >>> resize( volumex( mirrorx( clip ), 0.5), 0.3)
    """
    return func(self, *args, **kwargs)
Returns the result of ``func(self, *args, **kwargs)``. for instance >>> newclip = clip.fx(resize, 0.2, method='bilinear') is equivalent to >>> newclip = resize(clip, 0.2, method='bilinear') The motivation of fx is to keep the name of the effect near its parameters, when the effects are chained: >>> from moviepy.video.fx import volumex, resize, mirrorx >>> clip.fx( volumex, 0.5).fx( resize, 0.3).fx( mirrorx ) >>> # Is equivalent, but clearer than >>> resize( volumex( mirrorx( clip ), 0.5), 0.3)
moviepy/Clip.py
fx
Baronsindo/moviepy
3
python
def fx(self, func, *args, **kwargs): "\n\n Returns the result of ``func(self, *args, **kwargs)``.\n for instance\n\n >>> newclip = clip.fx(resize, 0.2, method='bilinear')\n\n is equivalent to\n\n >>> newclip = resize(clip, 0.2, method='bilinear')\n\n The motivation of fx is to keep the name of the effect near its\n parameters, when the effects are chained:\n\n >>> from moviepy.video.fx import volumex, resize, mirrorx\n >>> clip.fx( volumex, 0.5).fx( resize, 0.3).fx( mirrorx )\n >>> # Is equivalent, but clearer than\n >>> resize( volumex( mirrorx( clip ), 0.5), 0.3)\n\n " return func(self, *args, **kwargs)
def fx(self, func, *args, **kwargs): "\n\n Returns the result of ``func(self, *args, **kwargs)``.\n for instance\n\n >>> newclip = clip.fx(resize, 0.2, method='bilinear')\n\n is equivalent to\n\n >>> newclip = resize(clip, 0.2, method='bilinear')\n\n The motivation of fx is to keep the name of the effect near its\n parameters, when the effects are chained:\n\n >>> from moviepy.video.fx import volumex, resize, mirrorx\n >>> clip.fx( volumex, 0.5).fx( resize, 0.3).fx( mirrorx )\n >>> # Is equivalent, but clearer than\n >>> resize( volumex( mirrorx( clip ), 0.5), 0.3)\n\n " return func(self, *args, **kwargs)<|docstring|>Returns the result of ``func(self, *args, **kwargs)``. for instance >>> newclip = clip.fx(resize, 0.2, method='bilinear') is equivalent to >>> newclip = resize(clip, 0.2, method='bilinear') The motivation of fx is to keep the name of the effect near its parameters, when the effects are chained: >>> from moviepy.video.fx import volumex, resize, mirrorx >>> clip.fx( volumex, 0.5).fx( resize, 0.3).fx( mirrorx ) >>> # Is equivalent, but clearer than >>> resize( volumex( mirrorx( clip ), 0.5), 0.3)<|endoftext|>
7ad6570fdf49373b64ab4d263642f19e3f0c77c7ca6bc6ba804bed582ce01ffd
@apply_to_mask
@apply_to_audio
@convert_to_seconds(['t'])
@outplace
def set_start(self, t, change_end=True):
    """Return a copy of the clip with ``start`` set to ``t``.

    ``t`` can be expressed in seconds (15.35), in (min, sec), in
    (hour, min, sec), or as a string: '01:03:05.35'.

    When ``change_end`` is true and ``duration`` is known, ``end`` is moved
    to ``start + duration``; otherwise, when ``end`` is known, ``duration``
    becomes ``end - start``.  The change is also applied to the clip's
    ``audio`` and ``mask``, if they exist.
    """
    self.start = t
    if change_end and self.duration is not None:
        self.end = t + self.duration
    elif self.end is not None:
        self.duration = self.end - self.start
Returns a copy of the clip, with the ``start`` attribute set to ``t``, which can be expressed in seconds (15.35), in (min, sec), in (hour, min, sec), or as a string: '01:03:05.35'. If ``change_end=True`` and the clip has a ``duration`` attribute, the ``end`` attribute of the clip will be updated to ``start+duration``. If ``change_end=False`` and the clip has an ``end`` attribute, the ``duration`` attribute of the clip will be updated to ``end-start``. These changes are also applied to the ``audio`` and ``mask`` clips of the current clip, if they exist.
moviepy/Clip.py
set_start
Baronsindo/moviepy
3
python
@apply_to_mask @apply_to_audio @convert_to_seconds(['t']) @outplace def set_start(self, t, change_end=True): "\n Returns a copy of the clip, with the ``start`` attribute set\n to ``t``, which can be expressed in seconds (15.35), in (min, sec),\n in (hour, min, sec), or as a string: '01:03:05.35'.\n\n\n If ``change_end=True`` and the clip has a ``duration`` attribute,\n the ``end`` atrribute of the clip will be updated to\n ``start+duration``.\n\n If ``change_end=False`` and the clip has a ``end`` attribute,\n the ``duration`` attribute of the clip will be updated to\n ``end-start``\n\n These changes are also applied to the ``audio`` and ``mask``\n clips of the current clip, if they exist.\n " self.start = t if ((self.duration is not None) and change_end): self.end = (t + self.duration) elif (self.end is not None): self.duration = (self.end - self.start)
@apply_to_mask @apply_to_audio @convert_to_seconds(['t']) @outplace def set_start(self, t, change_end=True): "\n Returns a copy of the clip, with the ``start`` attribute set\n to ``t``, which can be expressed in seconds (15.35), in (min, sec),\n in (hour, min, sec), or as a string: '01:03:05.35'.\n\n\n If ``change_end=True`` and the clip has a ``duration`` attribute,\n the ``end`` atrribute of the clip will be updated to\n ``start+duration``.\n\n If ``change_end=False`` and the clip has a ``end`` attribute,\n the ``duration`` attribute of the clip will be updated to\n ``end-start``\n\n These changes are also applied to the ``audio`` and ``mask``\n clips of the current clip, if they exist.\n " self.start = t if ((self.duration is not None) and change_end): self.end = (t + self.duration) elif (self.end is not None): self.duration = (self.end - self.start)<|docstring|>Returns a copy of the clip, with the ``start`` attribute set to ``t``, which can be expressed in seconds (15.35), in (min, sec), in (hour, min, sec), or as a string: '01:03:05.35'. If ``change_end=True`` and the clip has a ``duration`` attribute, the ``end`` atrribute of the clip will be updated to ``start+duration``. If ``change_end=False`` and the clip has a ``end`` attribute, the ``duration`` attribute of the clip will be updated to ``end-start`` These changes are also applied to the ``audio`` and ``mask`` clips of the current clip, if they exist.<|endoftext|>
@apply_to_mask
@apply_to_audio
@convert_to_seconds(['t'])
@outplace
def set_end(self, t):
    """
    Returns a copy of the clip, with the ``end`` attribute set to
    ``t``, which can be expressed in seconds (15.35), in (min, sec),
    in (hour, min, sec), or as a string: '01:03:05.35'.

    Also sets the duration of the mask and audio, if any, of the
    returned clip (via the decorators).
    """
    self.end = t
    if self.end is None:
        # Clearing the end: leave start/duration untouched.
        return
    if self.start is None:
        if self.duration is not None:
            # BUGFIX: the original referenced the undefined name
            # ``newclip`` here (``t - newclip.duration``), raising
            # NameError whenever this branch ran; the intended value
            # is this clip's own duration.
            self.start = max(0, t - self.duration)
    else:
        self.duration = self.end - self.start
@apply_to_mask
@apply_to_audio
@convert_to_seconds(['t'])
@outplace
def set_duration(self, t, change_end=True):
    """
    Returns a copy of the clip whose ``duration`` attribute is set to
    ``t``, which can be expressed in seconds (15.35), in (min, sec),
    in (hour, min, sec), or as a string: '01:03:05.35'.

    Also sets the duration of the mask and audio, if any, of the
    returned clip (via the decorators).

    If ``change_end`` is False, the ``start`` attribute of the clip
    is recomputed from the (preset) ``end`` and the new duration.
    """
    self.duration = t
    if change_end:
        # Keep ``start`` fixed; move (or clear) ``end``.
        self.end = None if t is None else self.start + t
        return
    # Keep ``end`` fixed; move ``start`` instead.
    if self.duration is None:
        raise Exception('Cannot change clip start when newduration is None')
    self.start = self.end - t
@outplace
def set_make_frame(self, make_frame):
    """
    Sets a ``make_frame`` attribute for the clip.

    Useful for building arbitrary/complicated videoclips from a
    frame-generating function.
    """
    self.make_frame = make_frame
@outplace
def set_fps(self, fps):
    """
    Returns a copy of the clip with a new default frame rate, used by
    functions like ``write_videofile``, ``iterframe``, etc.
    """
    self.fps = fps
@outplace
def set_ismask(self, ismask):
    """Declares whether the clip is a mask or not (``ismask`` is a boolean)."""
    self.ismask = ismask
@outplace
def set_memoize(self, memoize):
    """Sets whether the clip should keep the last frame read in memory."""
    self.memoize = memoize
@convert_to_seconds(['t'])
def is_playing(self, t):
    """
    If ``t`` is a time, returns True iff ``t`` is between the start and
    the end of the clip. ``t`` can be expressed in seconds (15.35), in
    (min, sec), in (hour, min, sec), or as a string: '01:03:05.35'.

    If ``t`` is a numpy array, returns False if none of the times are
    in the clip, else returns a vector [b_1, b_2, b_3...] where b_i is
    true iff t_i is in the clip.
    """
    if not isinstance(t, np.ndarray):
        # Scalar case: inside [start, end), or unbounded when end is None.
        return (t >= self.start) and ((self.end is None) or (t < self.end))

    # Array case: short-circuit to a plain False when the whole array
    # lies entirely before the start or after the end.
    if (self.end is not None) and (t.min() >= self.end):
        return False
    if t.max() < self.start:
        return False

    # NOTE(review): the array branch is inclusive at ``end`` (<=) while
    # the scalar branch is exclusive (<); preserved as-is to keep
    # behavior identical — confirm whether this asymmetry is intended.
    inside = 1 * (t >= self.start)
    if self.end is not None:
        inside *= (t <= self.end)
    return inside