max_stars_repo_path
stringlengths
4
286
max_stars_repo_name
stringlengths
5
119
max_stars_count
int64
0
191k
id
stringlengths
1
7
content
stringlengths
6
1.03M
content_cleaned
stringlengths
6
1.03M
language
stringclasses
111 values
language_score
float64
0.03
1
comments
stringlengths
0
556k
edu_score
float64
0.32
5.03
edu_int_score
int64
0
5
scripts/update_dreqs/update_dreqs_0263.py
jonseddon/primavera-dmt
0
6624551
#!/usr/bin/env python """ update_dreqs_0263.py Create a retrieval request for data that's required for ESGF publication for EC-Earth Stream 1 highres-future. """ from __future__ import unicode_literals, division, absolute_import import argparse import datetime import logging.config import sys import django django.setup() from django.template.defaultfilters import filesizeformat from django.contrib.auth.models import User from pdata_app.models import RetrievalRequest, DataRequest from pdata_app.utils.common import get_request_size __version__ = '0.1.0b1' DEFAULT_LOG_LEVEL = logging.WARNING DEFAULT_LOG_FORMAT = '%(levelname)s: %(message)s' logger = logging.getLogger(__name__) def parse_args(): """ Parse command-line arguments """ parser = argparse.ArgumentParser(description='Create retrieval requests') parser.add_argument('-l', '--log-level', help='set logging level to one of ' 'debug, info, warn (the ' 'default), or error') parser.add_argument('-c', '--create', help='Create the retrieval request ' 'rather than just displaying ' 'the data volums', action='store_true') parser.add_argument('--version', action='version', version='%(prog)s {}'.format(__version__)) args = parser.parse_args() return args def main(args): """ Main entry point """ start_year = 1948 end_year = 2150 # data_reqs = DataRequest.objects.filter( # climate_model__short_name='EC-Earth3P', # experiment__short_name='highres-future', # rip_code='r1i1p2f1', # datafile__isnull=False # ).exclude( # variable_request__table_name__startswith='Prim' # ).exclude( # variable_request__dimensions__contains='alevhalf' # ).exclude( # variable_request__dimensions__contains='alevel' # ).distinct() # data_reqs = DataRequest.objects.filter( # climate_model__short_name='EC-Earth3P', # experiment__short_name='highres-future', # rip_code='r3i1p2f1', # 'r2i1p2f1', # datafile__isnull=False # ).exclude( # variable_request__table_name__startswith='Prim' # ).exclude( # variable_request__dimensions__contains='alevhalf' # 
).exclude( # variable_request__dimensions__contains='alevel' # ).distinct() data_reqs = DataRequest.objects.filter( climate_model__short_name='EC-Earth3P-HR', experiment__short_name='highres-future', rip_code='r1i1p2f1', variable_request__frequency__in=['6hr', '3hr'], datafile__isnull=False ).exclude( variable_request__table_name__startswith='Prim' ).exclude( variable_request__dimensions__contains='alevhalf' ).exclude( variable_request__dimensions__contains='alevel' ).distinct() # data_reqs = DataRequest.objects.filter( # climate_model__short_name='EC-Earth3P-HR', # experiment__short_name='highres-future', # rip_code='r3i1p2f1', # 'r2i1p2f1', # variable_request__frequency__in=['mon', 'day'], # datafile__isnull=False # ).exclude( # variable_request__table_name__startswith='Prim' # ).exclude( # variable_request__dimensions__contains='alevhalf' # ).exclude( # variable_request__dimensions__contains='alevel' # ).distinct() logger.debug('Total data volume: {} Volume to restore: {}'.format( filesizeformat(get_request_size(data_reqs, start_year, end_year)). 
replace('\xa0', ' '), filesizeformat(get_request_size(data_reqs, start_year, end_year, offline=True)).replace('\xa0', ' '), )) if args.create: jon = User.objects.get(username='jseddon') rr = RetrievalRequest.objects.create(requester=jon, start_year=start_year, end_year=end_year) time_zone = datetime.timezone(datetime.timedelta()) rr.date_created = datetime.datetime(2000, 1, 1, 0, 0, tzinfo=time_zone) rr.save() rr.data_request.add(*data_reqs) logger.debug('Retrieval request {} created.'.format(rr.id)) if __name__ == "__main__": cmd_args = parse_args() # determine the log level if cmd_args.log_level: try: log_level = getattr(logging, cmd_args.log_level.upper()) except AttributeError: logger.setLevel(logging.WARNING) logger.error('log-level must be one of: debug, info, warn or error') sys.exit(1) else: log_level = DEFAULT_LOG_LEVEL # configure the logger logging.config.dictConfig({ 'version': 1, 'disable_existing_loggers': False, 'formatters': { 'standard': { 'format': DEFAULT_LOG_FORMAT, }, }, 'handlers': { 'default': { 'level': log_level, 'class': 'logging.StreamHandler', 'formatter': 'standard' }, }, 'loggers': { '': { 'handlers': ['default'], 'level': log_level, 'propagate': True } } }) # run the code main(cmd_args)
#!/usr/bin/env python """ update_dreqs_0263.py Create a retrieval request for data that's required for ESGF publication for EC-Earth Stream 1 highres-future. """ from __future__ import unicode_literals, division, absolute_import import argparse import datetime import logging.config import sys import django django.setup() from django.template.defaultfilters import filesizeformat from django.contrib.auth.models import User from pdata_app.models import RetrievalRequest, DataRequest from pdata_app.utils.common import get_request_size __version__ = '0.1.0b1' DEFAULT_LOG_LEVEL = logging.WARNING DEFAULT_LOG_FORMAT = '%(levelname)s: %(message)s' logger = logging.getLogger(__name__) def parse_args(): """ Parse command-line arguments """ parser = argparse.ArgumentParser(description='Create retrieval requests') parser.add_argument('-l', '--log-level', help='set logging level to one of ' 'debug, info, warn (the ' 'default), or error') parser.add_argument('-c', '--create', help='Create the retrieval request ' 'rather than just displaying ' 'the data volums', action='store_true') parser.add_argument('--version', action='version', version='%(prog)s {}'.format(__version__)) args = parser.parse_args() return args def main(args): """ Main entry point """ start_year = 1948 end_year = 2150 # data_reqs = DataRequest.objects.filter( # climate_model__short_name='EC-Earth3P', # experiment__short_name='highres-future', # rip_code='r1i1p2f1', # datafile__isnull=False # ).exclude( # variable_request__table_name__startswith='Prim' # ).exclude( # variable_request__dimensions__contains='alevhalf' # ).exclude( # variable_request__dimensions__contains='alevel' # ).distinct() # data_reqs = DataRequest.objects.filter( # climate_model__short_name='EC-Earth3P', # experiment__short_name='highres-future', # rip_code='r3i1p2f1', # 'r2i1p2f1', # datafile__isnull=False # ).exclude( # variable_request__table_name__startswith='Prim' # ).exclude( # variable_request__dimensions__contains='alevhalf' # 
).exclude( # variable_request__dimensions__contains='alevel' # ).distinct() data_reqs = DataRequest.objects.filter( climate_model__short_name='EC-Earth3P-HR', experiment__short_name='highres-future', rip_code='r1i1p2f1', variable_request__frequency__in=['6hr', '3hr'], datafile__isnull=False ).exclude( variable_request__table_name__startswith='Prim' ).exclude( variable_request__dimensions__contains='alevhalf' ).exclude( variable_request__dimensions__contains='alevel' ).distinct() # data_reqs = DataRequest.objects.filter( # climate_model__short_name='EC-Earth3P-HR', # experiment__short_name='highres-future', # rip_code='r3i1p2f1', # 'r2i1p2f1', # variable_request__frequency__in=['mon', 'day'], # datafile__isnull=False # ).exclude( # variable_request__table_name__startswith='Prim' # ).exclude( # variable_request__dimensions__contains='alevhalf' # ).exclude( # variable_request__dimensions__contains='alevel' # ).distinct() logger.debug('Total data volume: {} Volume to restore: {}'.format( filesizeformat(get_request_size(data_reqs, start_year, end_year)). 
replace('\xa0', ' '), filesizeformat(get_request_size(data_reqs, start_year, end_year, offline=True)).replace('\xa0', ' '), )) if args.create: jon = User.objects.get(username='jseddon') rr = RetrievalRequest.objects.create(requester=jon, start_year=start_year, end_year=end_year) time_zone = datetime.timezone(datetime.timedelta()) rr.date_created = datetime.datetime(2000, 1, 1, 0, 0, tzinfo=time_zone) rr.save() rr.data_request.add(*data_reqs) logger.debug('Retrieval request {} created.'.format(rr.id)) if __name__ == "__main__": cmd_args = parse_args() # determine the log level if cmd_args.log_level: try: log_level = getattr(logging, cmd_args.log_level.upper()) except AttributeError: logger.setLevel(logging.WARNING) logger.error('log-level must be one of: debug, info, warn or error') sys.exit(1) else: log_level = DEFAULT_LOG_LEVEL # configure the logger logging.config.dictConfig({ 'version': 1, 'disable_existing_loggers': False, 'formatters': { 'standard': { 'format': DEFAULT_LOG_FORMAT, }, }, 'handlers': { 'default': { 'level': log_level, 'class': 'logging.StreamHandler', 'formatter': 'standard' }, }, 'loggers': { '': { 'handlers': ['default'], 'level': log_level, 'propagate': True } } }) # run the code main(cmd_args)
en
0.256195
#!/usr/bin/env python update_dreqs_0263.py Create a retrieval request for data that's required for ESGF publication for EC-Earth Stream 1 highres-future. Parse command-line arguments Main entry point # data_reqs = DataRequest.objects.filter( # climate_model__short_name='EC-Earth3P', # experiment__short_name='highres-future', # rip_code='r1i1p2f1', # datafile__isnull=False # ).exclude( # variable_request__table_name__startswith='Prim' # ).exclude( # variable_request__dimensions__contains='alevhalf' # ).exclude( # variable_request__dimensions__contains='alevel' # ).distinct() # data_reqs = DataRequest.objects.filter( # climate_model__short_name='EC-Earth3P', # experiment__short_name='highres-future', # rip_code='r3i1p2f1', # 'r2i1p2f1', # datafile__isnull=False # ).exclude( # variable_request__table_name__startswith='Prim' # ).exclude( # variable_request__dimensions__contains='alevhalf' # ).exclude( # variable_request__dimensions__contains='alevel' # ).distinct() # data_reqs = DataRequest.objects.filter( # climate_model__short_name='EC-Earth3P-HR', # experiment__short_name='highres-future', # rip_code='r3i1p2f1', # 'r2i1p2f1', # variable_request__frequency__in=['mon', 'day'], # datafile__isnull=False # ).exclude( # variable_request__table_name__startswith='Prim' # ).exclude( # variable_request__dimensions__contains='alevhalf' # ).exclude( # variable_request__dimensions__contains='alevel' # ).distinct() # determine the log level # configure the logger # run the code
2.530815
3
phonenumber_field/modelfields.py
mediapredict/django-phonenumber-field
0
6624552
<reponame>mediapredict/django-phonenumber-field<filename>phonenumber_field/modelfields.py<gh_stars>0 # -*- coding: utf-8 -*- from builtins import object from django.core import validators from django.db import models from django.utils.translation import ugettext_lazy as _ from phonenumber_field.validators import validate_international_phonenumber from phonenumber_field import formfields from phonenumber_field.phonenumber import PhoneNumber, to_python, string_types class PhoneNumberDescriptor(object): """ The descriptor for the phone number attribute on the model instance. Returns a PhoneNumber when accessed so you can do stuff like:: >>> instance.phone_number.as_international Assigns a phone number object on assignment so you can do:: >>> instance.phone_number = PhoneNumber(...) or >>> instance.phone_number = '+414204242' """ def __init__(self, field): self.field = field def __get__(self, instance=None, owner=None): if instance is None: raise AttributeError( "The '%s' attribute can only be accessed from %s instances." % (self.field.name, owner.__name__)) return instance.__dict__[self.field.name] def __set__(self, instance, value): instance.__dict__[self.field.name] = to_python(value) class PhoneNumberField(models.Field): attr_class = PhoneNumber descriptor_class = PhoneNumberDescriptor default_validators = [validate_international_phonenumber] description = _("Phone number") def __init__(self, *args, **kwargs): kwargs['max_length'] = kwargs.get('max_length', 128) super(PhoneNumberField, self).__init__(*args, **kwargs) self.validators.append(validators.MaxLengthValidator(self.max_length)) def get_internal_type(self): return "CharField" def get_prep_value(self, value): "Returns field's value prepared for saving into a database." 
if value is None or value == '': if not self.blank: return to_python(self.default) elif self.blank: return to_python(self.default) or '' value = to_python(value) if isinstance(value, string_types): # it is an invalid phone number return value return value.as_e164 def contribute_to_class(self, cls, name): super(PhoneNumberField, self).contribute_to_class(cls, name) setattr(cls, self.name, self.descriptor_class(self)) def formfield(self, **kwargs): defaults = { 'form_class': formfields.PhoneNumberField, } defaults.update(kwargs) return super(PhoneNumberField, self).formfield(**defaults) try: from south.modelsinspector import add_introspection_rules add_introspection_rules([ ( [PhoneNumberField], [], {}, ), ], ["^phonenumber_field\.modelfields\.PhoneNumberField"]) except ImportError: pass
# -*- coding: utf-8 -*- from builtins import object from django.core import validators from django.db import models from django.utils.translation import ugettext_lazy as _ from phonenumber_field.validators import validate_international_phonenumber from phonenumber_field import formfields from phonenumber_field.phonenumber import PhoneNumber, to_python, string_types class PhoneNumberDescriptor(object): """ The descriptor for the phone number attribute on the model instance. Returns a PhoneNumber when accessed so you can do stuff like:: >>> instance.phone_number.as_international Assigns a phone number object on assignment so you can do:: >>> instance.phone_number = PhoneNumber(...) or >>> instance.phone_number = '+414204242' """ def __init__(self, field): self.field = field def __get__(self, instance=None, owner=None): if instance is None: raise AttributeError( "The '%s' attribute can only be accessed from %s instances." % (self.field.name, owner.__name__)) return instance.__dict__[self.field.name] def __set__(self, instance, value): instance.__dict__[self.field.name] = to_python(value) class PhoneNumberField(models.Field): attr_class = PhoneNumber descriptor_class = PhoneNumberDescriptor default_validators = [validate_international_phonenumber] description = _("Phone number") def __init__(self, *args, **kwargs): kwargs['max_length'] = kwargs.get('max_length', 128) super(PhoneNumberField, self).__init__(*args, **kwargs) self.validators.append(validators.MaxLengthValidator(self.max_length)) def get_internal_type(self): return "CharField" def get_prep_value(self, value): "Returns field's value prepared for saving into a database." 
if value is None or value == '': if not self.blank: return to_python(self.default) elif self.blank: return to_python(self.default) or '' value = to_python(value) if isinstance(value, string_types): # it is an invalid phone number return value return value.as_e164 def contribute_to_class(self, cls, name): super(PhoneNumberField, self).contribute_to_class(cls, name) setattr(cls, self.name, self.descriptor_class(self)) def formfield(self, **kwargs): defaults = { 'form_class': formfields.PhoneNumberField, } defaults.update(kwargs) return super(PhoneNumberField, self).formfield(**defaults) try: from south.modelsinspector import add_introspection_rules add_introspection_rules([ ( [PhoneNumberField], [], {}, ), ], ["^phonenumber_field\.modelfields\.PhoneNumberField"]) except ImportError: pass
en
0.646003
# -*- coding: utf-8 -*- The descriptor for the phone number attribute on the model instance. Returns a PhoneNumber when accessed so you can do stuff like:: >>> instance.phone_number.as_international Assigns a phone number object on assignment so you can do:: >>> instance.phone_number = PhoneNumber(...) or >>> instance.phone_number = '+414204242' # it is an invalid phone number
2.417709
2
sdk/python/pulumi_azure_native/authorization/v20160901/outputs.py
pulumi-bot/pulumi-azure-native
31
6624553
<gh_stars>10-100 # coding=utf-8 # *** WARNING: this file was generated by the Pulumi SDK Generator. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union from ... import _utilities, _tables from ._enums import * __all__ = [ 'ManagementLockOwnerResponse', ] @pulumi.output_type class ManagementLockOwnerResponse(dict): """ Lock owner properties. """ def __init__(__self__, *, application_id: Optional[str] = None): """ Lock owner properties. :param str application_id: The application ID of the lock owner. """ if application_id is not None: pulumi.set(__self__, "application_id", application_id) @property @pulumi.getter(name="applicationId") def application_id(self) -> Optional[str]: """ The application ID of the lock owner. """ return pulumi.get(self, "application_id") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi SDK Generator. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union from ... import _utilities, _tables from ._enums import * __all__ = [ 'ManagementLockOwnerResponse', ] @pulumi.output_type class ManagementLockOwnerResponse(dict): """ Lock owner properties. """ def __init__(__self__, *, application_id: Optional[str] = None): """ Lock owner properties. :param str application_id: The application ID of the lock owner. """ if application_id is not None: pulumi.set(__self__, "application_id", application_id) @property @pulumi.getter(name="applicationId") def application_id(self) -> Optional[str]: """ The application ID of the lock owner. """ return pulumi.get(self, "application_id") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
en
0.948637
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi SDK Generator. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** Lock owner properties. Lock owner properties. :param str application_id: The application ID of the lock owner. The application ID of the lock owner.
1.714069
2
tests/diffex/diffexSrc/src/ccalnoir/ccalnoir/machine_learning/glmnet.py
genepattern/docker-aws-python36
0
6624554
import numbers import numpy as np import pandas as pd import rpy2.robjects as ro from rpy2.robjects import numpy2ri from rpy2.robjects.packages import importr from sklearn.base import BaseEstimator, ClassifierMixin, RegressorMixin from sklearn.utils import compute_class_weight glmnet = importr("glmnet") base = importr("base") dollar = base.__dict__["$"] stats = importr('stats') """ class GLMNet(BaseEstimator, RegressorMixin): # Todo: flesh out this class for non-CV fitting def __init__(self, alpha=0, l1_ratio=0.5): self.coef_ = None self.alpha = alpha self.l1_ratio = l1_ratio def fit(self, x, y, upper=None, lower=None): pass def predict(self, x): pass """ class GLMNetCV(BaseEstimator, RegressorMixin, ClassifierMixin): def __init__(self, lower=None, upper=None, loss_metric='mse'): self.coef_ = None self.model = None self.lower = lower self.upper = upper self.loss_metric = loss_metric def fit(self, x, y): self.model = glmnet_fit( np.array(x), np.array(y), self.reg_type, self.lower, self.upper, cv=True, loss_metric=self.loss_metric) self.lambda_min = np.array(dollar(self.model, 'lambda.min'))[0] self.coef_ = get_coeffs(self.model, lmda=self.lambda_min) cv_losses = get_values_from_glmnet_fit(self.model, 'cvm') self.loss = np.min(cv_losses) # Todo: some valid loss_metric arguments such as 'auc' are not losses, # and should be maximized, not minimized class GLMNetLinearRegressionCV(GLMNetCV): def __init__(self, lower=None, upper=None, loss_metric='mse'): super().__init__(lower=lower, upper=upper, loss_metric=loss_metric) self.reg_type = 'linear' def transform(self, x): return np.array(ro.r['predict']( self.model, newx=np.array(x), s=self.lambda_min)).T[0] class GLMNetLogisticRegressionCV(GLMNetCV): def __init__(self, lower=None, upper=None, loss_metric='mse'): super().__init__(lower=lower, upper=upper, loss_metric=loss_metric) self.reg_type = 'logistic' def predict(self, x): return np.array( list( map(int, ro.r['predict'](self.model, newx=np.array(x), s=self.lambda_min, 
type='class')))) def predict_proba(self, x): return np.array(ro.r['predict']( self.model, newx=np.array( x), s=self.lambda_min, type='response')).T[0] def get_coeffs(cvfit, lmda='min'): if not isinstance(lmda, numbers.Number): if isinstance(lmda, str): if lmda not in ['min', '1se']: raise ValueError( "{} not an accepted lmda; try 'min', '1se', or a number") else: lmda = get_values_from_glmnet_fit(cvfit, 'lambda.{}'.format(lmda))[0] else: raise ValueError("lmda must be a string or number") r = ro.r coeffs = np.array(r['as.matrix'](stats.coef(cvfit, s=lmda))) return coeffs[1:].T[0] def glmnet_cv(x, y, reg_type='linear', lower=None, upper=None): cvfit = glmnet_fit( x, y, reg_type=reg_type, lower=lower, upper=upper, cv=True) coeffs = get_coeffs(cvfit) return coeffs def glmnet_fit(x, y, reg_type='linear', lower=None, upper=None, cv=False, loss_metric='mse'): # Todo: better options for sample or class weighting fit_func = glmnet.cv_glmnet if cv else glmnet.glmnet lower = float('-inf') if lower is None else lower upper = float('inf') if upper is None else upper x_used = x.values if isinstance(x, pd.DataFrame) else x y_used = y.values if isinstance(y, pd.DataFrame) or isinstance( y, pd.Series) else y numpy2ri.activate() if reg_type == 'linear': fit = fit_func( x_used, np.atleast_2d(y_used), lower=lower, upper=upper, **{"type.measure": loss_metric}) else: class_weights = compute_class_weight('balanced', np.unique(y), y) sample_weights = [class_weights[int(y[i])] for i in range(len(y))] if reg_type == 'logistic': fit = fit_func( x_used, np.atleast_2d(y_used), family='binomial', lower=lower, upper=upper, weights=sample_weights, **{"type.measure": loss_metric}) elif reg_type == 'multinomial': fit = fit_func( x_used, np.atleast_2d(y_used), family='multinomial', lower=lower, upper=upper, weights=sample_weights, **{"type.measure": loss_metric}) else: raise ValueError( '{} is not a supported regression type; try "linear", "logistic", or "multinomial"' ) numpy2ri.deactivate() 
return fit def get_values_from_glmnet_fit(fit, field): names = list(fit.names) #print names if field not in names: raise ValueError("{} not a field of glmnet fit object".format(field)) return np.array(fit[names.index(field)]) def glmnet_rank(x, y, reg_type='linear', lower=None, upper=None): """ Returns indices of variables according to the order in which they appear in the model (most relevant first). There can be ties; it doesn't attempt to settle that, but returns them in arbitrary order It may never include some variables. I was hoping it would include them all at some regularization, but if they simply are not relevant, they don't get included... :param x: :param y: :param reg_type: :param positive: :return: """ fit = glmnet_fit( x, y, reg_type=reg_type, lower=lower, upper=upper, cv=False) r = ro.r dfs = get_values_from_glmnet_fit(fit, 'df') lambdas = get_values_from_glmnet_fit(fit, 'lambda') # why isn't it one var out per lambda? i thought that's what the path is about... df_change_idxs = [] cum_nonzero_idxs = [] for i in range(1, len(dfs)): if dfs[i] > dfs[i - 1]: df_change_idxs.append(i) for idx in df_change_idxs: coeffs = np.array(r['as.matrix'](stats.coef(fit, s=lambdas[idx]))) coeffs[cum_nonzero_idxs] = 0 nonzero_idxs = np.nonzero(coeffs)[0] #print nonzero_idxs cum_nonzero_idxs.extend(nonzero_idxs) if 0 in cum_nonzero_idxs: cum_nonzero_idxs.remove(0) # intercept return list(np.array(cum_nonzero_idxs) - 1)
import numbers import numpy as np import pandas as pd import rpy2.robjects as ro from rpy2.robjects import numpy2ri from rpy2.robjects.packages import importr from sklearn.base import BaseEstimator, ClassifierMixin, RegressorMixin from sklearn.utils import compute_class_weight glmnet = importr("glmnet") base = importr("base") dollar = base.__dict__["$"] stats = importr('stats') """ class GLMNet(BaseEstimator, RegressorMixin): # Todo: flesh out this class for non-CV fitting def __init__(self, alpha=0, l1_ratio=0.5): self.coef_ = None self.alpha = alpha self.l1_ratio = l1_ratio def fit(self, x, y, upper=None, lower=None): pass def predict(self, x): pass """ class GLMNetCV(BaseEstimator, RegressorMixin, ClassifierMixin): def __init__(self, lower=None, upper=None, loss_metric='mse'): self.coef_ = None self.model = None self.lower = lower self.upper = upper self.loss_metric = loss_metric def fit(self, x, y): self.model = glmnet_fit( np.array(x), np.array(y), self.reg_type, self.lower, self.upper, cv=True, loss_metric=self.loss_metric) self.lambda_min = np.array(dollar(self.model, 'lambda.min'))[0] self.coef_ = get_coeffs(self.model, lmda=self.lambda_min) cv_losses = get_values_from_glmnet_fit(self.model, 'cvm') self.loss = np.min(cv_losses) # Todo: some valid loss_metric arguments such as 'auc' are not losses, # and should be maximized, not minimized class GLMNetLinearRegressionCV(GLMNetCV): def __init__(self, lower=None, upper=None, loss_metric='mse'): super().__init__(lower=lower, upper=upper, loss_metric=loss_metric) self.reg_type = 'linear' def transform(self, x): return np.array(ro.r['predict']( self.model, newx=np.array(x), s=self.lambda_min)).T[0] class GLMNetLogisticRegressionCV(GLMNetCV): def __init__(self, lower=None, upper=None, loss_metric='mse'): super().__init__(lower=lower, upper=upper, loss_metric=loss_metric) self.reg_type = 'logistic' def predict(self, x): return np.array( list( map(int, ro.r['predict'](self.model, newx=np.array(x), s=self.lambda_min, 
type='class')))) def predict_proba(self, x): return np.array(ro.r['predict']( self.model, newx=np.array( x), s=self.lambda_min, type='response')).T[0] def get_coeffs(cvfit, lmda='min'): if not isinstance(lmda, numbers.Number): if isinstance(lmda, str): if lmda not in ['min', '1se']: raise ValueError( "{} not an accepted lmda; try 'min', '1se', or a number") else: lmda = get_values_from_glmnet_fit(cvfit, 'lambda.{}'.format(lmda))[0] else: raise ValueError("lmda must be a string or number") r = ro.r coeffs = np.array(r['as.matrix'](stats.coef(cvfit, s=lmda))) return coeffs[1:].T[0] def glmnet_cv(x, y, reg_type='linear', lower=None, upper=None): cvfit = glmnet_fit( x, y, reg_type=reg_type, lower=lower, upper=upper, cv=True) coeffs = get_coeffs(cvfit) return coeffs def glmnet_fit(x, y, reg_type='linear', lower=None, upper=None, cv=False, loss_metric='mse'): # Todo: better options for sample or class weighting fit_func = glmnet.cv_glmnet if cv else glmnet.glmnet lower = float('-inf') if lower is None else lower upper = float('inf') if upper is None else upper x_used = x.values if isinstance(x, pd.DataFrame) else x y_used = y.values if isinstance(y, pd.DataFrame) or isinstance( y, pd.Series) else y numpy2ri.activate() if reg_type == 'linear': fit = fit_func( x_used, np.atleast_2d(y_used), lower=lower, upper=upper, **{"type.measure": loss_metric}) else: class_weights = compute_class_weight('balanced', np.unique(y), y) sample_weights = [class_weights[int(y[i])] for i in range(len(y))] if reg_type == 'logistic': fit = fit_func( x_used, np.atleast_2d(y_used), family='binomial', lower=lower, upper=upper, weights=sample_weights, **{"type.measure": loss_metric}) elif reg_type == 'multinomial': fit = fit_func( x_used, np.atleast_2d(y_used), family='multinomial', lower=lower, upper=upper, weights=sample_weights, **{"type.measure": loss_metric}) else: raise ValueError( '{} is not a supported regression type; try "linear", "logistic", or "multinomial"' ) numpy2ri.deactivate() 
return fit def get_values_from_glmnet_fit(fit, field): names = list(fit.names) #print names if field not in names: raise ValueError("{} not a field of glmnet fit object".format(field)) return np.array(fit[names.index(field)]) def glmnet_rank(x, y, reg_type='linear', lower=None, upper=None): """ Returns indices of variables according to the order in which they appear in the model (most relevant first). There can be ties; it doesn't attempt to settle that, but returns them in arbitrary order It may never include some variables. I was hoping it would include them all at some regularization, but if they simply are not relevant, they don't get included... :param x: :param y: :param reg_type: :param positive: :return: """ fit = glmnet_fit( x, y, reg_type=reg_type, lower=lower, upper=upper, cv=False) r = ro.r dfs = get_values_from_glmnet_fit(fit, 'df') lambdas = get_values_from_glmnet_fit(fit, 'lambda') # why isn't it one var out per lambda? i thought that's what the path is about... df_change_idxs = [] cum_nonzero_idxs = [] for i in range(1, len(dfs)): if dfs[i] > dfs[i - 1]: df_change_idxs.append(i) for idx in df_change_idxs: coeffs = np.array(r['as.matrix'](stats.coef(fit, s=lambdas[idx]))) coeffs[cum_nonzero_idxs] = 0 nonzero_idxs = np.nonzero(coeffs)[0] #print nonzero_idxs cum_nonzero_idxs.extend(nonzero_idxs) if 0 in cum_nonzero_idxs: cum_nonzero_idxs.remove(0) # intercept return list(np.array(cum_nonzero_idxs) - 1)
en
0.895667
class GLMNet(BaseEstimator, RegressorMixin): # Todo: flesh out this class for non-CV fitting def __init__(self, alpha=0, l1_ratio=0.5): self.coef_ = None self.alpha = alpha self.l1_ratio = l1_ratio def fit(self, x, y, upper=None, lower=None): pass def predict(self, x): pass # Todo: some valid loss_metric arguments such as 'auc' are not losses, # and should be maximized, not minimized # Todo: better options for sample or class weighting #print names Returns indices of variables according to the order in which they appear in the model (most relevant first). There can be ties; it doesn't attempt to settle that, but returns them in arbitrary order It may never include some variables. I was hoping it would include them all at some regularization, but if they simply are not relevant, they don't get included... :param x: :param y: :param reg_type: :param positive: :return: # why isn't it one var out per lambda? i thought that's what the path is about... #print nonzero_idxs # intercept
2.434759
2
server/ema/views.py
eIGato/mahjong-portal
10
6624555
import datetime from django.shortcuts import render from rating.models import Rating, RatingResult from rating.utils import get_latest_rating_date def best_countries(request): rating = Rating.objects.get(type=Rating.EMA) today, rating_date = get_latest_rating_date(rating) ema_ratings = RatingResult.objects.filter(rating=rating, date=rating_date).prefetch_related( "player", "rating", "player__country" ) countries = _get_countries_data(ema_ratings) return render(request, "ema/best_countries.html", {"countries": countries}) def ema_quotas(request): """ More details: http://mahjong-europe.org/ranking/BasicsQuotas.html """ total_seats = 70 scores_required = 700 # European and World champions available_seats = total_seats - 2 ema_ratings = RatingResult.objects.filter(rating__type=Rating.EMA, date=datetime.date(2020, 1, 1)).prefetch_related( "player", "rating", "player__country" ) countries_data = _get_countries_data(ema_ratings) for data in countries_data: data["b_quota"] = 0 data["country_players"] = len(data["players_rating"]) data["country_required_rating_players"] = len( [x for x in data["players_rating"] if x["score"] >= scores_required] ) quotas = {} # First, seats will be given to all countries in descending order of ranking countries_data = sorted(countries_data, key=lambda x: x["country_rating"], reverse=True) for rank, data in enumerate(countries_data): country = data["country"] available_seats -= 1 quotas[country.code] = { "country": country, "rank": rank + 1, "base_seat": 1, "scores_700_seat": 0, "top_3_seats": 0, "b_part_seats": 0, } # After this, seats will be given to all countries with a player with >700 points, in # descending order of country ranking (part A3) for data in countries_data: country = data["country"] if data["country_required_rating_players"] == 0: continue available_seats -= 1 quotas[country.code]["scores_700_seat"] = 1 # Then seats will be given to the top 3 ranked countries in the EMA, in descending # order of country ranking (part A2) 
top_countries = 3 for data in countries_data[:top_countries]: country = data["country"] available_seats -= 1 quotas[country.code]["top_3_seats"] = 1 # Finally, any leftover seats will be distributed using part B of the quota formula. total_players = sum([data["country_players"] for data in countries_data]) total_required_rating_players = sum([data["country_required_rating_players"] for data in countries_data]) n = 0 while n < available_seats: n += 1 for data in countries_data: country_players = data["country_players"] country_required_rating_players = data["country_required_rating_players"] b1 = country_players / total_players b2 = country_required_rating_players / total_required_rating_players b3 = (b1 + b2) / 2 data["b_coefficient"] = b3 * n # Increase the B-quota of the country with the largest B3*N that is also smaller than its # current B-quota by 1 countries_data = sorted(countries_data, key=lambda x: x["b_coefficient"] - x["b_quota"], reverse=True) for data in countries_data: if data["b_quota"] <= data["b_coefficient"]: data["b_quota"] += 1 break total_quotas = 0 countries_data = sorted(countries_data, key=lambda x: x["country_rating"]) for data in countries_data: country = data["country"] quotas[country.code]["b_part_seats"] = data["b_quota"] quotas[country.code]["seats"] = sum( [ quotas[country.code]["scores_700_seat"], quotas[country.code]["base_seat"], quotas[country.code]["top_3_seats"], quotas[country.code]["b_part_seats"], ] ) total_quotas += quotas[country.code]["seats"] quotas = sorted(quotas.values(), key=lambda x: x["rank"]) return render(request, "ema/quotas.html", {"quotas": quotas, "total_quotas": total_quotas}) def _get_countries_data(ema_ratings): countries_temp = {} for rating in ema_ratings: country_code = rating.player.country.code if country_code not in countries_temp: countries_temp[country_code] = {"country": rating.player.country, "players_rating": []} countries_temp[country_code]["players_rating"].append({"player": rating.player, 
"score": rating.score}) countries = [] for data in countries_temp.values(): best_3 = sorted(data["players_rating"], key=lambda x: x["score"], reverse=True)[:3] countries.append( { "country": data["country"], "players_rating": data["players_rating"], "number_of_players": len(data["players_rating"]), "country_rating": sum([x["score"] for x in best_3]) / 3, "best_3": best_3, } ) countries = sorted(countries, key=lambda x: x["country_rating"], reverse=True) return countries
import datetime from django.shortcuts import render from rating.models import Rating, RatingResult from rating.utils import get_latest_rating_date def best_countries(request): rating = Rating.objects.get(type=Rating.EMA) today, rating_date = get_latest_rating_date(rating) ema_ratings = RatingResult.objects.filter(rating=rating, date=rating_date).prefetch_related( "player", "rating", "player__country" ) countries = _get_countries_data(ema_ratings) return render(request, "ema/best_countries.html", {"countries": countries}) def ema_quotas(request): """ More details: http://mahjong-europe.org/ranking/BasicsQuotas.html """ total_seats = 70 scores_required = 700 # European and World champions available_seats = total_seats - 2 ema_ratings = RatingResult.objects.filter(rating__type=Rating.EMA, date=datetime.date(2020, 1, 1)).prefetch_related( "player", "rating", "player__country" ) countries_data = _get_countries_data(ema_ratings) for data in countries_data: data["b_quota"] = 0 data["country_players"] = len(data["players_rating"]) data["country_required_rating_players"] = len( [x for x in data["players_rating"] if x["score"] >= scores_required] ) quotas = {} # First, seats will be given to all countries in descending order of ranking countries_data = sorted(countries_data, key=lambda x: x["country_rating"], reverse=True) for rank, data in enumerate(countries_data): country = data["country"] available_seats -= 1 quotas[country.code] = { "country": country, "rank": rank + 1, "base_seat": 1, "scores_700_seat": 0, "top_3_seats": 0, "b_part_seats": 0, } # After this, seats will be given to all countries with a player with >700 points, in # descending order of country ranking (part A3) for data in countries_data: country = data["country"] if data["country_required_rating_players"] == 0: continue available_seats -= 1 quotas[country.code]["scores_700_seat"] = 1 # Then seats will be given to the top 3 ranked countries in the EMA, in descending # order of country ranking (part A2) 
top_countries = 3 for data in countries_data[:top_countries]: country = data["country"] available_seats -= 1 quotas[country.code]["top_3_seats"] = 1 # Finally, any leftover seats will be distributed using part B of the quota formula. total_players = sum([data["country_players"] for data in countries_data]) total_required_rating_players = sum([data["country_required_rating_players"] for data in countries_data]) n = 0 while n < available_seats: n += 1 for data in countries_data: country_players = data["country_players"] country_required_rating_players = data["country_required_rating_players"] b1 = country_players / total_players b2 = country_required_rating_players / total_required_rating_players b3 = (b1 + b2) / 2 data["b_coefficient"] = b3 * n # Increase the B-quota of the country with the largest B3*N that is also smaller than its # current B-quota by 1 countries_data = sorted(countries_data, key=lambda x: x["b_coefficient"] - x["b_quota"], reverse=True) for data in countries_data: if data["b_quota"] <= data["b_coefficient"]: data["b_quota"] += 1 break total_quotas = 0 countries_data = sorted(countries_data, key=lambda x: x["country_rating"]) for data in countries_data: country = data["country"] quotas[country.code]["b_part_seats"] = data["b_quota"] quotas[country.code]["seats"] = sum( [ quotas[country.code]["scores_700_seat"], quotas[country.code]["base_seat"], quotas[country.code]["top_3_seats"], quotas[country.code]["b_part_seats"], ] ) total_quotas += quotas[country.code]["seats"] quotas = sorted(quotas.values(), key=lambda x: x["rank"]) return render(request, "ema/quotas.html", {"quotas": quotas, "total_quotas": total_quotas}) def _get_countries_data(ema_ratings): countries_temp = {} for rating in ema_ratings: country_code = rating.player.country.code if country_code not in countries_temp: countries_temp[country_code] = {"country": rating.player.country, "players_rating": []} countries_temp[country_code]["players_rating"].append({"player": rating.player, 
"score": rating.score}) countries = [] for data in countries_temp.values(): best_3 = sorted(data["players_rating"], key=lambda x: x["score"], reverse=True)[:3] countries.append( { "country": data["country"], "players_rating": data["players_rating"], "number_of_players": len(data["players_rating"]), "country_rating": sum([x["score"] for x in best_3]) / 3, "best_3": best_3, } ) countries = sorted(countries, key=lambda x: x["country_rating"], reverse=True) return countries
en
0.888914
More details: http://mahjong-europe.org/ranking/BasicsQuotas.html # European and World champions # First, seats will be given to all countries in descending order of ranking # After this, seats will be given to all countries with a player with >700 points, in # descending order of country ranking (part A3) # Then seats will be given to the top 3 ranked countries in the EMA, in descending # order of country ranking (part A2) # Finally, any leftover seats will be distributed using part B of the quota formula. # Increase the B-quota of the country with the largest B3*N that is also smaller than its # current B-quota by 1
2.331207
2
Exemplos/gerador_aleatorio.py
RCSM/workshop-cawen-2019
0
6624556
import random nums = [] for _ in range(21): nums.append(random.randint(0, 999)) print(nums)
import random nums = [] for _ in range(21): nums.append(random.randint(0, 999)) print(nums)
none
1
3.151911
3
tencent/callback_url.py
wangjinyu124419/long_audio_asr
0
6624557
import time import logging import logging.handlers import json import tornado.ioloop import tornado.web import tornado.httpserver from tornado.options import define,options define("port", default=8322, type=int, help="run server on the given port.") # 定义服务器监听端口选项 def setup_logging(level=logging.DEBUG, stream=True, stream_level=logging.DEBUG, *, save_file=True, filename=''): formatter = logging.Formatter( '[%(levelname)1.1s %(asctime)s.%(msecs)03d %(name)s:%(lineno)d] %(message)s', datefmt='%y%m%d %H:%M:%S', ) logging.getLogger().setLevel(level) if save_file: rotating_file_handler = logging.handlers.RotatingFileHandler( 'server.log' if not filename else filename, mode='a', maxBytes=100*1024*1024, backupCount=10, encoding='utf-8', ) rotating_file_handler.setFormatter(formatter) logging.getLogger().addHandler(rotating_file_handler) if stream: stream_handler = logging.StreamHandler() stream_handler.setFormatter(formatter) stream_handler.setLevel(stream_level) logging.getLogger().addHandler(stream_handler) setup_logging(filename='server.log') logger=logging.getLogger(__name__) class MainHandler(tornado.web.RequestHandler): #定义tornado的post方法 def post(self, *args, **kwargs): callback = {} try: response = self.request.body res_dict=json.loads(response.decode('utf-8')) text=res_dict['data']['text'] except Exception as err: logger.debug(type(err)) logger.exception('get_result_fail') logging.debug(type(err.__repr__())) # logger.debug('get_result_fail %r',err) callback['ret'] = -1 callback['msg'] = err.__repr__() else: logger.debug('res_dict:%r'%res_dict) fp=open('tencent.txt','a') fp.write(text+'\n') callback['ret']=0 callback['msg']='ok' self.write(callback) def make_app(): return tornado.web.Application( [(r"/", MainHandler), ], debug=True) if __name__ == "__main__": app = make_app() http_server = tornado.httpserver.HTTPServer(app) http_server.bind(options.port) logger.debug('监听端口:%d'%options.port) http_server.start(1) tornado.ioloop.IOLoop.current().start()
import time import logging import logging.handlers import json import tornado.ioloop import tornado.web import tornado.httpserver from tornado.options import define,options define("port", default=8322, type=int, help="run server on the given port.") # 定义服务器监听端口选项 def setup_logging(level=logging.DEBUG, stream=True, stream_level=logging.DEBUG, *, save_file=True, filename=''): formatter = logging.Formatter( '[%(levelname)1.1s %(asctime)s.%(msecs)03d %(name)s:%(lineno)d] %(message)s', datefmt='%y%m%d %H:%M:%S', ) logging.getLogger().setLevel(level) if save_file: rotating_file_handler = logging.handlers.RotatingFileHandler( 'server.log' if not filename else filename, mode='a', maxBytes=100*1024*1024, backupCount=10, encoding='utf-8', ) rotating_file_handler.setFormatter(formatter) logging.getLogger().addHandler(rotating_file_handler) if stream: stream_handler = logging.StreamHandler() stream_handler.setFormatter(formatter) stream_handler.setLevel(stream_level) logging.getLogger().addHandler(stream_handler) setup_logging(filename='server.log') logger=logging.getLogger(__name__) class MainHandler(tornado.web.RequestHandler): #定义tornado的post方法 def post(self, *args, **kwargs): callback = {} try: response = self.request.body res_dict=json.loads(response.decode('utf-8')) text=res_dict['data']['text'] except Exception as err: logger.debug(type(err)) logger.exception('get_result_fail') logging.debug(type(err.__repr__())) # logger.debug('get_result_fail %r',err) callback['ret'] = -1 callback['msg'] = err.__repr__() else: logger.debug('res_dict:%r'%res_dict) fp=open('tencent.txt','a') fp.write(text+'\n') callback['ret']=0 callback['msg']='ok' self.write(callback) def make_app(): return tornado.web.Application( [(r"/", MainHandler), ], debug=True) if __name__ == "__main__": app = make_app() http_server = tornado.httpserver.HTTPServer(app) http_server.bind(options.port) logger.debug('监听端口:%d'%options.port) http_server.start(1) tornado.ioloop.IOLoop.current().start()
zh
0.650421
# 定义服务器监听端口选项 #定义tornado的post方法 # logger.debug('get_result_fail %r',err)
2.451422
2
render.py
ziotom78/pytracer
2
6624558
# -*- encoding: utf-8 -*- # # The MIT License (MIT) # # Copyright © 2021 <NAME> # # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated # documentation files (the “Software”), to deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, # and to permit persons to whom the Software is furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all copies or substantial portions of # the Software. THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT # LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF # CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. from colors import Color, WHITE, BLACK from geometry import normalized_dot from pcg import PCG from ray import Ray from world import World class Renderer: """A class implementing a solver of the rendering equation. 
This is an abstract class; you should use a derived concrete class.""" def __init__(self, world: World, background_color: Color = BLACK): self.world = world self.background_color = background_color def __call__(self, ray: Ray) -> Color: """Estimate the radiance along a ray""" raise NotImplementedError("Unable to call Renderer.radiance, it is an abstract method") class OnOffRenderer(Renderer): """A on/off renderer This renderer is mostly useful for debugging purposes, as it is really fast, but it produces boring images.""" def __init__(self, world: World, background_color: Color = BLACK, color=WHITE): super().__init__(world, background_color) self.world = world self.color = color def __call__(self, ray: Ray) -> Color: return self.color if self.world.ray_intersection(ray) else self.background_color class FlatRenderer(Renderer): """A «flat» renderer This renderer estimates the solution of the rendering equation by neglecting any contribution of the light. It just uses the pigment of each surface to determine how to compute the final radiance.""" def __init__(self, world: World, background_color: Color = BLACK): super().__init__(world, background_color) def __call__(self, ray: Ray) -> Color: hit = self.world.ray_intersection(ray) if not hit: return self.background_color material = hit.material return (material.brdf.pigment.get_color(hit.surface_point) + material.emitted_radiance.get_color(hit.surface_point)) class PathTracer(Renderer): """A simple path-tracing renderer The algorithm implemented here allows the caller to tune number of rays thrown at each iteration, as well as the maximum depth. It implements Russian roulette, so in principle it will take a finite time to complete the calculation even if you set max_depth to `math.inf`. 
""" def __init__(self, world: World, background_color: Color = BLACK, pcg: PCG = PCG(), num_of_rays: int = 10, max_depth: int = 2, russian_roulette_limit=3): super().__init__(world, background_color) self.pcg = pcg self.num_of_rays = num_of_rays self.max_depth = max_depth self.russian_roulette_limit = russian_roulette_limit def __call__(self, ray: Ray) -> Color: if ray.depth > self.max_depth: return Color(0.0, 0.0, 0.0) hit_record = self.world.ray_intersection(ray) if not hit_record: return self.background_color hit_material = hit_record.material hit_color = hit_material.brdf.pigment.get_color(hit_record.surface_point) emitted_radiance = hit_material.emitted_radiance.get_color(hit_record.surface_point) hit_color_lum = max(hit_color.r, hit_color.g, hit_color.b) # Russian roulette if ray.depth >= self.russian_roulette_limit: q = max(0.05, 1 - hit_color_lum) if self.pcg.random_float() > q: # Keep the recursion going, but compensate for other potentially discarded rays hit_color *= 1.0 / (1.0 - q) else: # Terminate prematurely return emitted_radiance cum_radiance = Color(0.0, 0.0, 0.0) if hit_color_lum > 0.0: # Only do costly recursions if it's worth it for ray_index in range(self.num_of_rays): new_ray = hit_material.brdf.scatter_ray( pcg=self.pcg, incoming_dir=hit_record.ray.dir, interaction_point=hit_record.world_point, normal=hit_record.normal, depth=ray.depth + 1, ) # Recursive call new_radiance = self(new_ray) cum_radiance += hit_color * new_radiance return emitted_radiance + cum_radiance * (1.0 / self.num_of_rays) class PointLightRenderer(Renderer): """A simple point-light renderer This renderer is similar to what POV-Ray provides by default. 
""" def __init__(self, world: World, background_color: Color = BLACK, ambient_color: Color = Color(0.1, 0.1, 0.1)): super().__init__(world, background_color) self.ambient_color = ambient_color def __call__(self, ray: Ray) -> Color: hit_record = self.world.ray_intersection(ray) if not hit_record: return self.background_color hit_material = hit_record.material result_color = self.ambient_color for cur_light in self.world.point_lights: if self.world.is_point_visible(point = cur_light.position, observer_pos=hit_record.world_point): distance_vec = hit_record.world_point - cur_light.position distance = distance_vec.norm() in_dir = distance_vec * (1.0 / distance) cos_theta = max(0.0, normalized_dot(-ray.dir, hit_record.normal)) distance_factor = (cur_light.linear_radius / distance)**2 if (cur_light.linear_radius > 0) else 1.0 emitted_color = hit_material.emitted_radiance.get_color(hit_record.surface_point) brdf_color = hit_material.brdf.eval( normal=hit_record.normal, in_dir=in_dir, out_dir=-ray.dir, uv=hit_record.surface_point, ) result_color += (emitted_color + brdf_color) * cur_light.color * cos_theta * distance_factor return result_color
# -*- encoding: utf-8 -*- # # The MIT License (MIT) # # Copyright © 2021 <NAME> # # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated # documentation files (the “Software”), to deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, # and to permit persons to whom the Software is furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all copies or substantial portions of # the Software. THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT # LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF # CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. from colors import Color, WHITE, BLACK from geometry import normalized_dot from pcg import PCG from ray import Ray from world import World class Renderer: """A class implementing a solver of the rendering equation. 
This is an abstract class; you should use a derived concrete class.""" def __init__(self, world: World, background_color: Color = BLACK): self.world = world self.background_color = background_color def __call__(self, ray: Ray) -> Color: """Estimate the radiance along a ray""" raise NotImplementedError("Unable to call Renderer.radiance, it is an abstract method") class OnOffRenderer(Renderer): """A on/off renderer This renderer is mostly useful for debugging purposes, as it is really fast, but it produces boring images.""" def __init__(self, world: World, background_color: Color = BLACK, color=WHITE): super().__init__(world, background_color) self.world = world self.color = color def __call__(self, ray: Ray) -> Color: return self.color if self.world.ray_intersection(ray) else self.background_color class FlatRenderer(Renderer): """A «flat» renderer This renderer estimates the solution of the rendering equation by neglecting any contribution of the light. It just uses the pigment of each surface to determine how to compute the final radiance.""" def __init__(self, world: World, background_color: Color = BLACK): super().__init__(world, background_color) def __call__(self, ray: Ray) -> Color: hit = self.world.ray_intersection(ray) if not hit: return self.background_color material = hit.material return (material.brdf.pigment.get_color(hit.surface_point) + material.emitted_radiance.get_color(hit.surface_point)) class PathTracer(Renderer): """A simple path-tracing renderer The algorithm implemented here allows the caller to tune number of rays thrown at each iteration, as well as the maximum depth. It implements Russian roulette, so in principle it will take a finite time to complete the calculation even if you set max_depth to `math.inf`. 
""" def __init__(self, world: World, background_color: Color = BLACK, pcg: PCG = PCG(), num_of_rays: int = 10, max_depth: int = 2, russian_roulette_limit=3): super().__init__(world, background_color) self.pcg = pcg self.num_of_rays = num_of_rays self.max_depth = max_depth self.russian_roulette_limit = russian_roulette_limit def __call__(self, ray: Ray) -> Color: if ray.depth > self.max_depth: return Color(0.0, 0.0, 0.0) hit_record = self.world.ray_intersection(ray) if not hit_record: return self.background_color hit_material = hit_record.material hit_color = hit_material.brdf.pigment.get_color(hit_record.surface_point) emitted_radiance = hit_material.emitted_radiance.get_color(hit_record.surface_point) hit_color_lum = max(hit_color.r, hit_color.g, hit_color.b) # Russian roulette if ray.depth >= self.russian_roulette_limit: q = max(0.05, 1 - hit_color_lum) if self.pcg.random_float() > q: # Keep the recursion going, but compensate for other potentially discarded rays hit_color *= 1.0 / (1.0 - q) else: # Terminate prematurely return emitted_radiance cum_radiance = Color(0.0, 0.0, 0.0) if hit_color_lum > 0.0: # Only do costly recursions if it's worth it for ray_index in range(self.num_of_rays): new_ray = hit_material.brdf.scatter_ray( pcg=self.pcg, incoming_dir=hit_record.ray.dir, interaction_point=hit_record.world_point, normal=hit_record.normal, depth=ray.depth + 1, ) # Recursive call new_radiance = self(new_ray) cum_radiance += hit_color * new_radiance return emitted_radiance + cum_radiance * (1.0 / self.num_of_rays) class PointLightRenderer(Renderer): """A simple point-light renderer This renderer is similar to what POV-Ray provides by default. 
""" def __init__(self, world: World, background_color: Color = BLACK, ambient_color: Color = Color(0.1, 0.1, 0.1)): super().__init__(world, background_color) self.ambient_color = ambient_color def __call__(self, ray: Ray) -> Color: hit_record = self.world.ray_intersection(ray) if not hit_record: return self.background_color hit_material = hit_record.material result_color = self.ambient_color for cur_light in self.world.point_lights: if self.world.is_point_visible(point = cur_light.position, observer_pos=hit_record.world_point): distance_vec = hit_record.world_point - cur_light.position distance = distance_vec.norm() in_dir = distance_vec * (1.0 / distance) cos_theta = max(0.0, normalized_dot(-ray.dir, hit_record.normal)) distance_factor = (cur_light.linear_radius / distance)**2 if (cur_light.linear_radius > 0) else 1.0 emitted_color = hit_material.emitted_radiance.get_color(hit_record.surface_point) brdf_color = hit_material.brdf.eval( normal=hit_record.normal, in_dir=in_dir, out_dir=-ray.dir, uv=hit_record.surface_point, ) result_color += (emitted_color + brdf_color) * cur_light.color * cos_theta * distance_factor return result_color
en
0.824302
# -*- encoding: utf-8 -*- # # The MIT License (MIT) # # Copyright © 2021 <NAME> # # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated # documentation files (the “Software”), to deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, # and to permit persons to whom the Software is furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all copies or substantial portions of # the Software. THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT # LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF # CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. A class implementing a solver of the rendering equation. This is an abstract class; you should use a derived concrete class. Estimate the radiance along a ray A on/off renderer This renderer is mostly useful for debugging purposes, as it is really fast, but it produces boring images. A «flat» renderer This renderer estimates the solution of the rendering equation by neglecting any contribution of the light. It just uses the pigment of each surface to determine how to compute the final radiance. A simple path-tracing renderer The algorithm implemented here allows the caller to tune number of rays thrown at each iteration, as well as the maximum depth. It implements Russian roulette, so in principle it will take a finite time to complete the calculation even if you set max_depth to `math.inf`. 
# Russian roulette # Keep the recursion going, but compensate for other potentially discarded rays # Terminate prematurely # Only do costly recursions if it's worth it # Recursive call A simple point-light renderer This renderer is similar to what POV-Ray provides by default.
2.025115
2
tests/parsers/plist_plugins/appleaccount.py
bodik/plaso
0
6624559
<reponame>bodik/plaso<filename>tests/parsers/plist_plugins/appleaccount.py #!/usr/bin/env python3 # -*- coding: utf-8 -*- """Tests for the Apple account plist plugin.""" from __future__ import unicode_literals import unittest from plaso.formatters import plist # pylint: disable=unused-import from plaso.parsers.plist_plugins import appleaccount from tests.parsers.plist_plugins import test_lib class AppleAccountPluginTest(test_lib.PlistPluginTestCase): """Tests for the Apple account plist plugin.""" def testProcess(self): """Tests the Process function.""" plist_file = ( 'com.apple.coreservices.appleidauthenticationinfo.' 'ABC0ABC1-ABC0-ABC0-ABC0-ABC0ABC1ABC2.plist') plist_name = plist_file plugin = appleaccount.AppleAccountPlugin() storage_writer = self._ParsePlistFileWithPlugin( plugin, [plist_name], plist_name) self.assertEqual(storage_writer.number_of_warnings, 0) self.assertEqual(storage_writer.number_of_events, 3) # The order in which PlistParser generates events is nondeterministic # hence we sort the events. 
events = list(storage_writer.GetSortedEvents()) expected_timestamps = [1372106802000000, 1387980032000000, 1387980032000000] timestamps = sorted([event.timestamp for event in events]) self.assertEqual(timestamps, expected_timestamps) event = events[0] event_data = self._GetEventDataOfEvent(storage_writer, event) self.assertEqual(event_data.root, '/Accounts') self.assertEqual(event_data.key, '<EMAIL>') expected_description = ( 'Configured Apple account <EMAIL> (<NAME>)') self.assertEqual(event_data.desc, expected_description) expected_message = '/Accounts/<EMAIL> {0:s}'.format( expected_description) expected_short_message = '{0:s}...'.format(expected_message[:77]) self._TestGetMessageStrings(event, expected_message, expected_short_message) event = events[1] event_data = self._GetEventDataOfEvent(storage_writer, event) expected_description = ( 'Connected Apple account ' '<EMAIL> (<NAME>)') self.assertEqual(event_data.desc, expected_description) event = events[2] event_data = self._GetEventDataOfEvent(storage_writer, event) expected_description = ( 'Last validation Apple account ' '<EMAIL> (<NAME>)') self.assertEqual(event_data.desc, expected_description) if __name__ == '__main__': unittest.main()
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """Tests for the Apple account plist plugin.""" from __future__ import unicode_literals import unittest from plaso.formatters import plist # pylint: disable=unused-import from plaso.parsers.plist_plugins import appleaccount from tests.parsers.plist_plugins import test_lib class AppleAccountPluginTest(test_lib.PlistPluginTestCase): """Tests for the Apple account plist plugin.""" def testProcess(self): """Tests the Process function.""" plist_file = ( 'com.apple.coreservices.appleidauthenticationinfo.' 'ABC0ABC1-ABC0-ABC0-ABC0-ABC0ABC1ABC2.plist') plist_name = plist_file plugin = appleaccount.AppleAccountPlugin() storage_writer = self._ParsePlistFileWithPlugin( plugin, [plist_name], plist_name) self.assertEqual(storage_writer.number_of_warnings, 0) self.assertEqual(storage_writer.number_of_events, 3) # The order in which PlistParser generates events is nondeterministic # hence we sort the events. events = list(storage_writer.GetSortedEvents()) expected_timestamps = [1372106802000000, 1387980032000000, 1387980032000000] timestamps = sorted([event.timestamp for event in events]) self.assertEqual(timestamps, expected_timestamps) event = events[0] event_data = self._GetEventDataOfEvent(storage_writer, event) self.assertEqual(event_data.root, '/Accounts') self.assertEqual(event_data.key, '<EMAIL>') expected_description = ( 'Configured Apple account <EMAIL> (<NAME>)') self.assertEqual(event_data.desc, expected_description) expected_message = '/Accounts/<EMAIL> {0:s}'.format( expected_description) expected_short_message = '{0:s}...'.format(expected_message[:77]) self._TestGetMessageStrings(event, expected_message, expected_short_message) event = events[1] event_data = self._GetEventDataOfEvent(storage_writer, event) expected_description = ( 'Connected Apple account ' '<EMAIL> (<NAME>)') self.assertEqual(event_data.desc, expected_description) event = events[2] event_data = self._GetEventDataOfEvent(storage_writer, event) 
expected_description = ( 'Last validation Apple account ' '<EMAIL> (<NAME>)') self.assertEqual(event_data.desc, expected_description) if __name__ == '__main__': unittest.main()
en
0.725567
#!/usr/bin/env python3 # -*- coding: utf-8 -*- Tests for the Apple account plist plugin. # pylint: disable=unused-import Tests for the Apple account plist plugin. Tests the Process function. # The order in which PlistParser generates events is nondeterministic # hence we sort the events.
2.291354
2
martools/marttools.py
sourcery-ai-bot/predacogs
0
6624560
"""MartTools: bot usage statistics kept in memory and persisted to SQLite (apsw)."""
import asyncio
import time
import logging
from collections import Counter
from datetime import datetime
from typing import Union

import apsw
import discord
import lavalink
from redbot.cogs.audio.audio_dataclasses import Query
from redbot.core import Config, bank, commands
from redbot.core.bot import Red
from redbot.core.cog_manager import cog_data_path
from redbot.core.i18n import Translator, cog_i18n
from redbot.core.utils.chat_formatting import bold, box, humanize_number, humanize_timedelta

from .listeners import Listeners
from .statements import (
    CREATE_TABLE,
    CREATE_VERSION_TABLE,
    DROP_OLD_PERMA,
    DROP_OLD_TEMP,
    GET_EVENT_VALUE,
    INSERT_DO_NOTHING,
    SELECT_OLD,
    UPSERT,
    PRAGMA_journal_mode,
    PRAGMA_wal_autocheckpoint,
)
from .utils import events_names, threadexec

log = logging.getLogger("red.predacogs.martools")
_ = Translator("MartTools", __file__)


@cog_i18n(_)
class MartTools(Listeners, commands.Cog):
    """Multiple tools that are originally used on Martine."""

    __author__ = ["Predä", "Draper"]
    __version__ = "2.0.0"

    async def red_delete_data_for_user(self, **kwargs):
        """Nothing to delete."""
        return

    def __init__(self, bot: Red):
        self.bot = bot
        # Counters live in memory ("session" resets each load, "perma" is
        # persisted) and are flushed to the apsw database periodically.
        self._connection = apsw.Connection(str(cog_data_path(self) / "MartTools.db"))
        self.cursor = self._connection.cursor()
        self.cache = {"perma": Counter(), "session": Counter()}
        self.uptime = datetime.utcnow()
        self.init_task = self.bot.loop.create_task(self.initialize())
        self.dump_cache_task = self.bot.loop.create_task(self.__dump_cache_to_db())

    def cog_unload(self):
        # Stop background tasks, then flush the permanent counters one last
        # time before closing the connection.
        self.dump_cache_task.cancel()
        if self.init_task:
            self.init_task.cancel()
        for event_name, value in self.cache["perma"].items():
            threadexec(self.cursor.execute, UPSERT, (event_name, value))
        self._connection.close()
        del self.cache

    def format_help_for_context(self, ctx: commands.Context) -> str:
        """Thanks Sinbad!"""
        pre_processed = super().format_help_for_context(ctx)
        return f"{pre_processed}\n\nAuthors: {', '.join(self.__author__)}\nCog Version: {self.__version__}"

    async def initialize(self):
        """Prepare the database schema and migrate v1 data when present."""
        threadexec(self.cursor.execute, PRAGMA_journal_mode)
        threadexec(self.cursor.execute, PRAGMA_wal_autocheckpoint)
        threadexec(self.cursor.execute, CREATE_TABLE)
        threadexec(self.cursor.execute, CREATE_VERSION_TABLE)
        try:
            check_result = list(threadexec(self.cursor.execute, "SELECT * FROM bot_stats_perma"))
        except apsw.SQLError:
            # Legacy table does not exist: fresh install, nothing to migrate.
            await self.__populate_cache()
            return
        else:
            if check_result:
                await self.__migrate_data()
        threadexec(self.cursor.execute, INSERT_DO_NOTHING, ("creation_time", time.time()))
        await self.__populate_cache()

    async def __migrate_data(self):
        """Copy counters from the legacy v1 tables, then drop them."""
        for event_name in events_names:
            result = list(threadexec(self.cursor.execute, SELECT_OLD, {"event": event_name}))
            if not result:
                continue
            old_value = result[0][0]
            threadexec(self.cursor.execute, UPSERT, (event_name, old_value))
        old_value = list(
            threadexec(
                self.cursor.execute, SELECT_OLD, {"event": "creation_time", "guild_id": -1000}
            )
        )
        threadexec(
            self.cursor.execute,
            UPSERT,
            ("creation_time", old_value[0][0] if old_value else time.time()),
        )
        threadexec(self.cursor.execute, DROP_OLD_TEMP)
        threadexec(self.cursor.execute, DROP_OLD_PERMA)
        threadexec(
            self.cursor.execute,
            ("INSERT or IGNORE INTO version (version_num) VALUES (2)"),
        )

    async def __populate_cache(self):
        """Load the persisted counters into the in-memory "perma" cache."""
        for event_name in events_names:
            result = list(threadexec(self.cursor.execute, GET_EVENT_VALUE, {"event": event_name}))
            if not result:
                continue
            self.cache["perma"][event_name] = result[0][0]
        result = list(threadexec(self.cursor.execute, GET_EVENT_VALUE, {"event": "creation_time"}))
        self.cache["perma"]["creation_time"] = result[0][0] if result else time.time()

    async def __dump_cache_to_db(self):
        """Background task: flush the permanent counters every 5 minutes."""
        await self.bot.wait_until_red_ready()
        while True:
            await asyncio.sleep(300)
            try:
                for event_name, value in self.cache["perma"].items():
                    threadexec(self.cursor.execute, UPSERT, (event_name, value))
            except Exception:
                log.exception("Something went wrong in __dump_cache_to_db task:")

    def get_value(self, key: str, perma: bool = False, raw: bool = False) -> Union[int, str]:
        """Return a counter, humanized by default; ``raw=True`` gives the int."""
        if raw:
            return self.cache["perma" if perma else "session"][key]
        return humanize_number(self.cache["perma" if perma else "session"][key])

    def get_bot_uptime(self):
        """Return the session uptime as a humanized string."""
        delta = datetime.utcnow() - self.uptime
        return str(humanize_timedelta(timedelta=delta))

    def usage_counts_cpm(self, key: str, time: int = 60):
        """Return ``key``'s session rate per ``time`` seconds (default: per minute).

        NOTE: the ``time`` parameter shadows the ``time`` module; kept for
        backward compatibility with keyword callers.
        """
        delta = datetime.utcnow() - self.uptime
        minutes = delta.total_seconds() / time
        total = self.get_value(key, raw=True)
        return total / minutes

    @commands.command()
    @commands.guild_only()
    @commands.bot_has_permissions(embed_links=True)
    async def bankstats(self, ctx: commands.Context):
        """Show stats of the bank."""
        icon = self.bot.user.avatar_url_as(static_format="png")
        user_bal = await bank.get_balance(ctx.author)
        credits_name = await bank.get_currency_name(ctx.guild)
        pos = await bank.get_leaderboard_position(ctx.author)
        bank_name = await bank.get_bank_name(ctx.guild)
        bank_config = bank._config
        if await bank.is_global():
            all_accounts = len(await bank_config.all_users())
            accounts = await bank_config.all_users()
        else:
            all_accounts = len(await bank_config.all_members(ctx.guild))
            accounts = await bank_config.all_members(ctx.guild)
        member_account = await bank.get_account(ctx.author)
        created_at = str(member_account.created_at)
        # Epoch default means "no creation date recorded" - footer is skipped.
        no = "1970-01-01 00:00:00"
        overall = sum(value["balance"] for key, value in accounts.items())
        em = discord.Embed(color=await ctx.embed_colour())
        em.set_author(name=_("{} stats:").format(bank_name), icon_url=icon)
        em.add_field(
            name=_("{} stats:").format("Global" if await bank.is_global() else "Bank"),
            value=_(
                "Total accounts: **{all_accounts}**\nTotal amount: **{overall} {credits_name}**"
            ).format(
                all_accounts=all_accounts,
                overall=humanize_number(overall),
                credits_name=credits_name,
            ),
        )
        if pos is not None:
            percent = round((int(user_bal) / overall * 100), 3)
            em.add_field(
                name=_("Your stats:"),
                value=_(
                    "You have **{bal} {currency}**.\n"
                    "It's **{percent}%** of the {g}amount in the bank.\n"
                    "You are **{pos}/{all_accounts}** in the {g}leaderboard."
                ).format(
                    bal=humanize_number(user_bal),
                    currency=credits_name,
                    percent=percent,
                    g="global " if await bank.is_global() else "",
                    pos=humanize_number(pos),
                    all_accounts=humanize_number(all_accounts),
                ),
                inline=False,
            )
        if created_at != no:
            em.set_footer(text=_("Bank account created on: ") + str(created_at))
        await ctx.send(embed=em)

    @commands.command(aliases=["usagec"])
    async def usagecount(self, ctx: commands.Context):
        """
        Show the usage count of the bot.
        Commands processed, messages received, and music on servers.
        """
        msg = _(
            "**Commands processed:** `{commands_count}` commands. (`{cpm_commands:.2f}`/min)\n"
            "**Commands errors:** `{errors_count}` errors.\n"
            "**Messages received:** `{messages_read}` messages. (`{cpm_msgs:.2f}`/min)\n"
            "**Messages sent:** `{messages_sent}` messages. (`{cpm_msgs_sent:.2f}`/min)\n"
            "**Playing music on:** `{ll_players}` servers.\n"
            "**Tracks played:** `{tracks_played}` tracks. (`{cpm_tracks:.2f}`/min)\n\n"
            "**Servers joined:** `{guild_join}` servers. (`{cpm_guild_join:.2f}`/hour)\n"
            "**Servers left:** `{guild_leave}` servers. (`{cpm_guild_leave:.2f}`/hour)"
        ).format(
            commands_count=self.get_value("processed_commands"),
            cpm_commands=self.usage_counts_cpm("processed_commands"),
            errors_count=self.get_value("command_error"),
            messages_read=self.get_value("messages_read"),
            cpm_msgs=self.usage_counts_cpm("messages_read"),
            messages_sent=self.get_value("msg_sent"),
            cpm_msgs_sent=self.usage_counts_cpm("msg_sent"),
            ll_players="`{}/{}`".format(
                humanize_number(len(lavalink.active_players())),
                humanize_number(len(lavalink.all_players())),
            ),
            tracks_played=self.get_value("tracks_played"),
            cpm_tracks=self.usage_counts_cpm("tracks_played"),
            guild_join=self.get_value("guild_join"),
            cpm_guild_join=self.usage_counts_cpm("guild_join", 3600),
            guild_leave=self.get_value("guild_remove"),
            cpm_guild_leave=self.usage_counts_cpm("guild_remove", 3600),
        )
        if await ctx.embed_requested():
            em = discord.Embed(
                color=await ctx.embed_colour(),
                title=_("Usage count of {} since last restart:").format(self.bot.user.name),
                description=msg,
            )
            em.set_thumbnail(url=self.bot.user.avatar_url_as(static_format="png"))
            em.set_footer(text=_("Since {}").format(self.get_bot_uptime()))
            await ctx.send(embed=em)
        else:
            await ctx.send(
                _("Usage count of {} since last restart:\n").format(ctx.bot.user.name)
                + msg
                + _("\n\nSince {}").format(self.get_bot_uptime())
            )

    @commands.bot_has_permissions(embed_links=True)
    @commands.command(aliases=["advusagec"])
    async def advusagecount(self, ctx: commands.Context):
        """
        Permanent stats since first time that the cog has been loaded.
        """
        avatar = self.bot.user.avatar_url_as(static_format="png")
        delta = datetime.utcnow() - datetime.utcfromtimestamp(
            self.get_value("creation_time", perma=True, raw=True)
        )
        uptime = humanize_timedelta(timedelta=delta)
        ll_players = "{}/{}".format(
            humanize_number(len(lavalink.active_players())),
            humanize_number(len(lavalink.all_players())),
        )
        em = discord.Embed(
            title=_("Usage count of {}:").format(ctx.bot.user.name),
            color=await ctx.embed_colour(),
        )
        em.add_field(
            name=_("Message Stats"),
            value=box(
                _(
                    "Messages Read : {messages_read}\n"
                    "Messages Sent : {msg_sent}\n"
                    "Messages Deleted : {messages_deleted}\n"
                    "Messages Edited : {messages_edited}\n"
                    "DMs Received : {dms_received}\n"
                ).format_map(
                    {
                        "messages_read": self.get_value("messages_read", perma=True),
                        "msg_sent": self.get_value("msg_sent", perma=True),
                        "messages_deleted": self.get_value("messages_deleted", perma=True),
                        "messages_edited": self.get_value("messages_edited", perma=True),
                        "dms_received": self.get_value("dms_received", perma=True),
                    }
                ),
                lang="prolog",
            ),
            inline=False,
        )
        em.add_field(
            name=_("Commands Stats"),
            value=box(
                _(
                    "Commands Processed : {processed_commands}\n"
                    "Errors Occured : {command_error}\n"
                    "Sessions Resumed : {sessions_resumed}\n"
                ).format_map(
                    {
                        "processed_commands": self.get_value("processed_commands", perma=True),
                        "command_error": self.get_value("command_error", perma=True),
                        "sessions_resumed": self.get_value("sessions_resumed", perma=True),
                    }
                ),
                lang="prolog",
            ),
            inline=False,
        )
        em.add_field(
            name=_("Guild Stats"),
            value=box(
                _(
                    "Guilds Joined : {guild_join}\n"
                    "Guilds Left : {guild_remove}\n"
                ).format_map(
                    {
                        "guild_join": self.get_value("guild_join", perma=True),
                        "guild_remove": self.get_value("guild_remove", perma=True),
                    }
                ),
                lang="prolog",
            ),
            inline=False,
        )
        em.add_field(
            name=_("User Stats"),
            value=box(
                _(
                    "New Users : {new_members}\n"
                    "Left Users : {members_left}\n"
                    "Banned Users : {members_banned}\n"
                    "Unbanned Users : {members_unbanned}\n"
                ).format_map(
                    {
                        "new_members": self.get_value("new_members", perma=True),
                        "members_left": self.get_value("members_left", perma=True),
                        "members_banned": self.get_value("members_banned", perma=True),
                        "members_unbanned": self.get_value("members_unbanned", perma=True),
                    }
                ),
                lang="prolog",
            ),
            inline=False,
        )
        em.add_field(
            name=_("Role Stats"),
            value=box(
                _(
                    "Roles Added : {roles_added}\n"
                    "Roles Removed : {roles_removed}\n"
                    "Roles Updated : {roles_updated}\n"
                ).format_map(
                    {
                        "roles_added": self.get_value("roles_added", perma=True),
                        "roles_removed": self.get_value("roles_removed", perma=True),
                        "roles_updated": self.get_value("roles_updated", perma=True),
                    }
                ),
                lang="prolog",
            ),
            inline=False,
        )
        em.add_field(
            name=_("Emoji Stats"),
            value=box(
                _(
                    "Reacts Added : {reactions_added}\n"
                    "Reacts Removed : {reactions_removed}\n"
                    "Emoji Added : {emojis_added}\n"
                    "Emoji Removed : {emojis_removed}\n"
                    "Emoji Updated : {emojis_updated}\n"
                ).format_map(
                    {
                        "reactions_added": self.get_value("reactions_added", perma=True),
                        "reactions_removed": self.get_value("reactions_removed", perma=True),
                        "emojis_added": self.get_value("emojis_added", perma=True),
                        "emojis_removed": self.get_value("emojis_removed", perma=True),
                        "emojis_updated": self.get_value("emojis_updated", perma=True),
                    }
                ),
                lang="prolog",
            ),
            inline=False,
        )
        em.add_field(
            name=_("Audio Stats"),
            value=box(
                _(
                    "Users Who Joined VC : {users_joined_bot_music_room}\n"
                    "Tracks Played : {tracks_played}\n"
                    "Number Of Players : {ll_players}"
                ).format(
                    users_joined_bot_music_room=self.get_value(
                        "users_joined_bot_music_room", perma=True
                    ),
                    tracks_played=self.get_value("tracks_played", perma=True),
                    ll_players=ll_players,
                ),
                lang="prolog",
            ),
            inline=False,
        )
        if Query:
            em.add_field(
                name=_("Track Stats"),
                value=box(
                    _(
                        "Streams : {streams_played}\n"
                        "YouTube Streams : {yt_streams_played}\n"
                        "Twitch Streams : {ttv_streams_played}\n"
                        # Fix: this row previously reused {streams_played}, so
                        # the passed other_streams_played value was never shown.
                        "Other Streams : {other_streams_played}\n"
                        "YouTube Tracks : {youtube_tracks}\n"
                        "Soundcloud Tracks : {soundcloud_tracks}\n"
                        "Bandcamp Tracks : {bandcamp_tracks}\n"
                        "Vimeo Tracks : {vimeo_tracks}\n"
                        "Twitch Tracks : {twitch_tracks}\n"
                        "Other Tracks : {other_tracks}\n"
                    ).format(
                        streams_played=self.get_value("streams_played", perma=True),
                        yt_streams_played=self.get_value("yt_streams_played", perma=True),
                        ttv_streams_played=self.get_value("ttv_streams_played", perma=True),
                        other_streams_played=self.get_value("other_streams_played", perma=True),
                        youtube_tracks=self.get_value("youtube_tracks", perma=True),
                        soundcloud_tracks=self.get_value("soundcloud_tracks", perma=True),
                        bandcamp_tracks=self.get_value("bandcamp_tracks", perma=True),
                        vimeo_tracks=self.get_value("vimeo_tracks", perma=True),
                        twitch_tracks=self.get_value("twitch_tracks", perma=True),
                        other_tracks=self.get_value("other_tracks", perma=True),
                    ),
                    lang="prolog",
                ),
                inline=False,
            )
        em.set_thumbnail(url=avatar)
        em.set_footer(text=_("Since {}").format(uptime))
        await ctx.send(embed=em)

    @commands.command(aliases=["prefixes"])
    async def prefix(self, ctx: commands.Context):
        """Show all prefixes of the bot"""
        default_prefixes = await self.bot._config.prefix()
        try:
            guild_prefixes = await self.bot._config.guild(ctx.guild).prefix()
        except AttributeError:
            # ctx.guild is None in DMs.
            guild_prefixes = False
        bot_name = ctx.bot.user.name
        avatar = self.bot.user.avatar_url_as(static_format="png")

        if not guild_prefixes:
            to_send = [f"`\u200b{p}\u200b`" for p in default_prefixes]
            plural = _("Prefixes") if len(default_prefixes) >= 2 else _("Prefix")
            if await ctx.embed_requested():
                em = discord.Embed(
                    color=await ctx.embed_colour(),
                    title=_("{} of {}:").format(plural, bot_name),
                    description=" ".join(to_send),
                )
                em.set_thumbnail(url=avatar)
                await ctx.send(embed=em)
            else:
                await ctx.send(bold(_("{} of {}:\n")).format(plural, bot_name) + " ".join(to_send))
        else:
            to_send = [f"`\u200b{p}\u200b`" for p in guild_prefixes]
            # Fix: pluralize on the guild prefixes actually displayed, not the
            # default ones.
            plural = _("prefixes") if len(guild_prefixes) >= 2 else _("prefix")
            if await ctx.embed_requested():
                em = discord.Embed(
                    color=await ctx.embed_colour(),
                    title=_("Server {} of {}:").format(plural, bot_name),
                    description=" ".join(to_send),
                )
                em.set_thumbnail(url=avatar)
                await ctx.send(embed=em)
            else:
                # Fix: the template mixed a named field ("{name}") with
                # positional .format() arguments, raising KeyError at runtime.
                await ctx.send(
                    bold(_("Server {} of {}:\n")).format(plural, bot_name) + " ".join(to_send)
                )

    @commands.command(aliases=["serverc", "serversc"])
    async def servercount(self, ctx: commands.Context):
        """Send servers stats of the bot."""
        visible_users = sum(len(s.members) for s in self.bot.guilds)
        total_users = sum(s.member_count for s in self.bot.guilds)
        msg = _(
            "{name} is running on `{shard_count}` {shards}.\n"
            "Serving `{servs}` servers (`{channels}` channels).\n"
            "For a total of `{visible_users}` users (`{unique}` unique).\n"
            "(`{visible_users}` visible now, `{total_users}` total, `{percentage_chunked:.2f}%` chunked)"
        ).format(
            name=ctx.bot.user.name,
            shard_count=humanize_number(self.bot.shard_count),
            shards=_("shards") if self.bot.shard_count > 1 else _("shard"),
            servs=humanize_number(len(self.bot.guilds)),
            channels=humanize_number(sum(len(s.channels) for s in self.bot.guilds)),
            visible_users=humanize_number(visible_users),
            unique=humanize_number(len(self.bot.users)),
            total_users=humanize_number(total_users),
            percentage_chunked=visible_users / total_users * 100,
        )
        if await ctx.embed_requested():
            em = discord.Embed(color=await ctx.embed_colour(), description=msg)
            await ctx.send(embed=em)
        else:
            await ctx.send(msg)

    @commands.command(aliases=["servreg"])
    async def serversregions(self, ctx: commands.Context, sort: str = "guilds"):
        """
        Show total of regions where the bot is.

        You can also sort by number of users by using `[p]serversregions users`
        By default it sort by guilds.
        """
        regions_dict = {
            "vip-us-east": ":flag_us:" + _(" __VIP__ US East"),
            "vip-us-west": ":flag_us:" + _(" __VIP__ US West"),
            "vip-amsterdam": ":flag_nl:" + _(" __VIP__ Amsterdam"),
            "eu-west": ":flag_eu:" + _(" EU West"),
            "eu-central": ":flag_eu:" + _(" EU Central"),
            "europe": ":flag_eu:" + _(" Europe"),
            "london": ":flag_gb:" + _(" London"),
            "frankfurt": ":flag_de:" + _(" Frankfurt"),
            "amsterdam": ":flag_nl:" + _(" Amsterdam"),
            "us-west": ":flag_us:" + _(" US West"),
            "us-east": ":flag_us:" + _(" US East"),
            "us-south": ":flag_us:" + _(" US South"),
            "us-central": ":flag_us:" + _(" US Central"),
            "singapore": ":flag_sg:" + _(" Singapore"),
            "sydney": ":flag_au:" + _(" Sydney"),
            "brazil": ":flag_br:" + _(" Brazil"),
            "hongkong": ":flag_hk:" + _(" Hong Kong"),
            "russia": ":flag_ru:" + _(" Russia"),
            "japan": ":flag_jp:" + _(" Japan"),
            "southafrica": ":flag_za:" + _(" South Africa"),
            "india": ":flag_in:" + _(" India"),
            "dubai": ":flag_ae:" + _(" Dubai"),
            "south-korea": ":flag_kr:" + _(" South Korea"),
        }
        regions = {}
        for guild in self.bot.guilds:
            region = str(guild.region)
            if region not in regions:
                regions[region] = {"guilds": 0, "users": 0}
            regions[region]["users"] += guild.member_count
            regions[region]["guilds"] += 1

        def sort_keys(key: str):
            # Sort primarily by the requested metric, secondarily by the other.
            return (
                (key[1]["guilds"], key[1]["users"])
                if sort != "users"
                else (key[1]["users"], key[1]["guilds"])
            )

        regions_stats = dict(sorted(regions.items(), key=lambda x: sort_keys(x), reverse=True))
        msg = [
            _("{flag}: {guilds_len} and {users_len}").format(
                flag=regions_dict[region_name],
                guilds_len=(
                    f"`{humanize_number(values['guilds'])} {_('server') if values['guilds'] < 2 else _('servers')}`"
                ),
                users_len=(
                    f"`{humanize_number(values['users'])} {_('user') if values['users'] < 2 else _('users')}`"
                ),
            )
            for region_name, values in regions_stats.items()
        ]
        guilds_word = _("server") if len(self.bot.guilds) < 2 else _("servers")
        users_word = (
            _("user") if sum(k["users"] for k in regions_stats.values()) < 2 else _("users")
        )
        footer = _("For a total of {guilds} and {users}").format(
            guilds=f"{humanize_number(len(self.bot.guilds))} {guilds_word}",
            users=f"{humanize_number(sum(k['users'] for k in regions_stats.values()))} {users_word}",
        )
        if await ctx.embed_requested():
            em = discord.Embed(
                color=await ctx.embed_colour(),
                title=_("Servers regions stats:"),
                description="\n".join(msg),
            )
            em.set_footer(text=footer)
            await ctx.send(embed=em)
        else:
            msg = bold(_("Servers regions stats:\n\n")) + "\n".join(msg) + "\n" + bold(footer)
            await ctx.send(msg)
import asyncio import time import logging from collections import Counter from datetime import datetime from typing import Union import apsw import discord import lavalink from redbot.cogs.audio.audio_dataclasses import Query from redbot.core import Config, bank, commands from redbot.core.bot import Red from redbot.core.cog_manager import cog_data_path from redbot.core.i18n import Translator, cog_i18n from redbot.core.utils.chat_formatting import bold, box, humanize_number, humanize_timedelta from .listeners import Listeners from .statements import ( CREATE_TABLE, CREATE_VERSION_TABLE, DROP_OLD_PERMA, DROP_OLD_TEMP, GET_EVENT_VALUE, INSERT_DO_NOTHING, SELECT_OLD, UPSERT, PRAGMA_journal_mode, PRAGMA_wal_autocheckpoint, ) from .utils import events_names, threadexec log = logging.getLogger("red.predacogs.martools") _ = Translator("MartTools", __file__) @cog_i18n(_) class MartTools(Listeners, commands.Cog): """Multiple tools that are originally used on Martine.""" __author__ = ["Predä", "Draper"] __version__ = "2.0.0" async def red_delete_data_for_user(self, **kwargs): """Nothing to delete.""" return def __init__(self, bot: Red): self.bot = bot self._connection = apsw.Connection(str(cog_data_path(self) / "MartTools.db")) self.cursor = self._connection.cursor() self.cache = {"perma": Counter(), "session": Counter()} self.uptime = datetime.utcnow() self.init_task = self.bot.loop.create_task(self.initialize()) self.dump_cache_task = self.bot.loop.create_task(self.__dump_cache_to_db()) def cog_unload(self): self.dump_cache_task.cancel() if self.init_task: self.init_task.cancel() for event_name, value in self.cache["perma"].items(): threadexec(self.cursor.execute, UPSERT, (event_name, value)) self._connection.close() del self.cache def format_help_for_context(self, ctx: commands.Context) -> str: """Thanks Sinbad!""" pre_processed = super().format_help_for_context(ctx) return f"{pre_processed}\n\nAuthors: {', '.join(self.__author__)}\nCog Version: {self.__version__}" async 
def initialize(self): threadexec(self.cursor.execute, PRAGMA_journal_mode) threadexec(self.cursor.execute, PRAGMA_wal_autocheckpoint) threadexec(self.cursor.execute, CREATE_TABLE) threadexec(self.cursor.execute, CREATE_VERSION_TABLE) try: check_result = list(threadexec(self.cursor.execute, "SELECT * FROM bot_stats_perma")) except apsw.SQLError: await self.__populate_cache() return else: if check_result: await self.__migrate_data() threadexec(self.cursor.execute, INSERT_DO_NOTHING, ("creation_time", time.time())) await self.__populate_cache() async def __migrate_data(self): for event_name in events_names: result = list(threadexec(self.cursor.execute, SELECT_OLD, {"event": event_name})) if not result: continue old_value = result[0][0] threadexec(self.cursor.execute, UPSERT, (event_name, old_value)) old_value = list( threadexec( self.cursor.execute, SELECT_OLD, {"event": "creation_time", "guild_id": -1000} ) ) threadexec( self.cursor.execute, UPSERT, ("creation_time", old_value[0][0] if old_value else time.time()), ) threadexec(self.cursor.execute, DROP_OLD_TEMP) threadexec(self.cursor.execute, DROP_OLD_PERMA) threadexec( self.cursor.execute, ("INSERT or IGNORE INTO version (version_num) VALUES (2)"), ) async def __populate_cache(self): for event_name in events_names: result = list(threadexec(self.cursor.execute, GET_EVENT_VALUE, {"event": event_name})) if not result: continue self.cache["perma"][event_name] = result[0][0] result = list(threadexec(self.cursor.execute, GET_EVENT_VALUE, {"event": "creation_time"})) self.cache["perma"]["creation_time"] = result[0][0] if result else time.time() async def __dump_cache_to_db(self): await self.bot.wait_until_red_ready() while True: await asyncio.sleep(300) try: for event_name, value in self.cache["perma"].items(): threadexec(self.cursor.execute, UPSERT, (event_name, value)) except Exception: log.exception("Something went wrong in __dump_cache_to_db task:") def get_value(self, key: str, perma: bool = False, raw: bool = False) 
-> Union[int, str]: if raw: return self.cache["perma" if perma else "session"][key] return humanize_number(self.cache["perma" if perma else "session"][key]) def get_bot_uptime(self): delta = datetime.utcnow() - self.uptime return str(humanize_timedelta(timedelta=delta)) def usage_counts_cpm(self, key: str, time: int = 60): delta = datetime.utcnow() - self.uptime minutes = delta.total_seconds() / time total = self.get_value(key, raw=True) return total / minutes @commands.command() @commands.guild_only() @commands.bot_has_permissions(embed_links=True) async def bankstats(self, ctx: commands.Context): """Show stats of the bank.""" icon = self.bot.user.avatar_url_as(static_format="png") user_bal = await bank.get_balance(ctx.author) credits_name = await bank.get_currency_name(ctx.guild) pos = await bank.get_leaderboard_position(ctx.author) bank_name = await bank.get_bank_name(ctx.guild) bank_config = bank._config if await bank.is_global(): all_accounts = len(await bank_config.all_users()) accounts = await bank_config.all_users() else: all_accounts = len(await bank_config.all_members(ctx.guild)) accounts = await bank_config.all_members(ctx.guild) member_account = await bank.get_account(ctx.author) created_at = str(member_account.created_at) no = "1970-01-01 00:00:00" overall = sum(value["balance"] for key, value in accounts.items()) em = discord.Embed(color=await ctx.embed_colour()) em.set_author(name=_("{} stats:").format(bank_name), icon_url=icon) em.add_field( name=_("{} stats:").format("Global" if await bank.is_global() else "Bank"), value=_( "Total accounts: **{all_accounts}**\nTotal amount: **{overall} {credits_name}**" ).format( all_accounts=all_accounts, overall=humanize_number(overall), credits_name=credits_name, ), ) if pos is not None: percent = round((int(user_bal) / overall * 100), 3) em.add_field( name=_("Your stats:"), value=_( "You have **{bal} {currency}**.\n" "It's **{percent}%** of the {g}amount in the bank.\n" "You are **{pos}/{all_accounts}** in the 
{g}leaderboard." ).format( bal=humanize_number(user_bal), currency=credits_name, percent=percent, g="global " if await bank.is_global() else "", pos=humanize_number(pos), all_accounts=humanize_number(all_accounts), ), inline=False, ) if created_at != no: em.set_footer(text=_("Bank account created on: ") + str(created_at)) await ctx.send(embed=em) @commands.command(aliases=["usagec"]) async def usagecount(self, ctx: commands.Context): """ Show the usage count of the bot. Commands processed, messages received, and music on servers. """ msg = _( "**Commands processed:** `{commands_count}` commands. (`{cpm_commands:.2f}`/min)\n" "**Commands errors:** `{errors_count}` errors.\n" "**Messages received:** `{messages_read}` messages. (`{cpm_msgs:.2f}`/min)\n" "**Messages sent:** `{messages_sent}` messages. (`{cpm_msgs_sent:.2f}`/min)\n" "**Playing music on:** `{ll_players}` servers.\n" "**Tracks played:** `{tracks_played}` tracks. (`{cpm_tracks:.2f}`/min)\n\n" "**Servers joined:** `{guild_join}` servers. (`{cpm_guild_join:.2f}`/hour)\n" "**Servers left:** `{guild_leave}` servers. 
(`{cpm_guild_leave:.2f}`/hour)" ).format( commands_count=self.get_value("processed_commands"), cpm_commands=self.usage_counts_cpm("processed_commands"), errors_count=self.get_value("command_error"), messages_read=self.get_value("messages_read"), cpm_msgs=self.usage_counts_cpm("messages_read"), messages_sent=self.get_value("msg_sent"), cpm_msgs_sent=self.usage_counts_cpm("msg_sent"), ll_players="`{}/{}`".format( humanize_number(len(lavalink.active_players())), humanize_number(len(lavalink.all_players())), ), tracks_played=self.get_value("tracks_played"), cpm_tracks=self.usage_counts_cpm("tracks_played"), guild_join=self.get_value("guild_join"), cpm_guild_join=self.usage_counts_cpm("guild_join", 3600), guild_leave=self.get_value("guild_remove"), cpm_guild_leave=self.usage_counts_cpm("guild_remove", 3600), ) if await ctx.embed_requested(): em = discord.Embed( color=await ctx.embed_colour(), title=_("Usage count of {} since last restart:").format(self.bot.user.name), description=msg, ) em.set_thumbnail(url=self.bot.user.avatar_url_as(static_format="png")) em.set_footer(text=_("Since {}").format(self.get_bot_uptime())) await ctx.send(embed=em) else: await ctx.send( _("Usage count of {} since last restart:\n").format(ctx.bot.user.name) + msg + _("\n\nSince {}").format(self.get_bot_uptime()) ) @commands.bot_has_permissions(embed_links=True) @commands.command(aliases=["advusagec"]) async def advusagecount(self, ctx: commands.Context): """ Permanent stats since first time that the cog has been loaded. 
""" avatar = self.bot.user.avatar_url_as(static_format="png") delta = datetime.utcnow() - datetime.utcfromtimestamp( self.get_value("creation_time", perma=True, raw=True) ) uptime = humanize_timedelta(timedelta=delta) ll_players = "{}/{}".format( humanize_number(len(lavalink.active_players())), humanize_number(len(lavalink.all_players())), ) em = discord.Embed( title=_("Usage count of {}:").format(ctx.bot.user.name), color=await ctx.embed_colour(), ) em.add_field( name=_("Message Stats"), value=box( _( "Messages Read : {messages_read}\n" "Messages Sent : {msg_sent}\n" "Messages Deleted : {messages_deleted}\n" "Messages Edited : {messages_edited}\n" "DMs Received : {dms_received}\n" ).format_map( { "messages_read": self.get_value("messages_read", perma=True), "msg_sent": self.get_value("msg_sent", perma=True), "messages_deleted": self.get_value("messages_deleted", perma=True), "messages_edited": self.get_value("messages_edited", perma=True), "dms_received": self.get_value("dms_received", perma=True), } ), lang="prolog", ), inline=False, ) em.add_field( name=_("Commands Stats"), value=box( _( "Commands Processed : {processed_commands}\n" "Errors Occured : {command_error}\n" "Sessions Resumed : {sessions_resumed}\n" ).format_map( { "processed_commands": self.get_value("processed_commands", perma=True), "command_error": self.get_value("command_error", perma=True), "sessions_resumed": self.get_value("sessions_resumed", perma=True), } ), lang="prolog", ), inline=False, ) em.add_field( name=_("Guild Stats"), value=box( _( "Guilds Joined : {guild_join}\n" "Guilds Left : {guild_remove}\n" ).format_map( { "guild_join": self.get_value("guild_join", perma=True), "guild_remove": self.get_value("guild_remove", perma=True), } ), lang="prolog", ), inline=False, ) em.add_field( name=_("User Stats"), value=box( _( "New Users : {new_members}\n" "Left Users : {members_left}\n" "Banned Users : {members_banned}\n" "Unbanned Users : {members_unbanned}\n" ).format_map( { "new_members": 
self.get_value("new_members", perma=True), "members_left": self.get_value("members_left", perma=True), "members_banned": self.get_value("members_banned", perma=True), "members_unbanned": self.get_value("members_unbanned", perma=True), } ), lang="prolog", ), inline=False, ) em.add_field( name=_("Role Stats"), value=box( _( "Roles Added : {roles_added}\n" "Roles Removed : {roles_removed}\n" "Roles Updated : {roles_updated}\n" ).format_map( { "roles_added": self.get_value("roles_added", perma=True), "roles_removed": self.get_value("roles_removed", perma=True), "roles_updated": self.get_value("roles_updated", perma=True), } ), lang="prolog", ), inline=False, ) em.add_field( name=_("Emoji Stats"), value=box( _( "Reacts Added : {reactions_added}\n" "Reacts Removed : {reactions_removed}\n" "Emoji Added : {emojis_added}\n" "Emoji Removed : {emojis_removed}\n" "Emoji Updated : {emojis_updated}\n" ).format_map( { "reactions_added": self.get_value("reactions_added", perma=True), "reactions_removed": self.get_value("reactions_removed", perma=True), "emojis_added": self.get_value("emojis_added", perma=True), "emojis_removed": self.get_value("emojis_removed", perma=True), "emojis_updated": self.get_value("emojis_updated", perma=True), } ), lang="prolog", ), inline=False, ) em.add_field( name=_("Audio Stats"), value=box( _( "Users Who Joined VC : {users_joined_bot_music_room}\n" "Tracks Played : {tracks_played}\n" "Number Of Players : {ll_players}" ).format( users_joined_bot_music_room=self.get_value( "users_joined_bot_music_room", perma=True ), tracks_played=self.get_value("tracks_played", perma=True), ll_players=ll_players, ), lang="prolog", ), inline=False, ) if Query: em.add_field( name=_("Track Stats"), value=box( _( "Streams : {streams_played}\n" "YouTube Streams : {yt_streams_played}\n" "Twitch Streams : {ttv_streams_played}\n" "Other Streams : {streams_played}\n" "YouTube Tracks : {youtube_tracks}\n" "Soundcloud Tracks : {soundcloud_tracks}\n" "Bandcamp Tracks : 
{bandcamp_tracks}\n" "Vimeo Tracks : {vimeo_tracks}\n" "Twitch Tracks : {twitch_tracks}\n" "Other Tracks : {other_tracks}\n" ).format( streams_played=self.get_value("streams_played", perma=True), yt_streams_played=self.get_value("yt_streams_played", perma=True), ttv_streams_played=self.get_value("ttv_streams_played", perma=True), other_streams_played=self.get_value("other_streams_played", perma=True), youtube_tracks=self.get_value("youtube_tracks", perma=True), soundcloud_tracks=self.get_value("soundcloud_tracks", perma=True), bandcamp_tracks=self.get_value("bandcamp_tracks", perma=True), vimeo_tracks=self.get_value("vimeo_tracks", perma=True), twitch_tracks=self.get_value("twitch_tracks", perma=True), other_tracks=self.get_value("other_tracks", perma=True), ), lang="prolog", ), inline=False, ) em.set_thumbnail(url=avatar) em.set_footer(text=_("Since {}").format(uptime)) await ctx.send(embed=em) @commands.command(aliases=["prefixes"]) async def prefix(self, ctx: commands.Context): """Show all prefixes of the bot""" default_prefixes = await self.bot._config.prefix() try: guild_prefixes = await self.bot._config.guild(ctx.guild).prefix() except AttributeError: guild_prefixes = False bot_name = ctx.bot.user.name avatar = self.bot.user.avatar_url_as(static_format="png") if not guild_prefixes: to_send = [f"`\u200b{p}\u200b`" for p in default_prefixes] plural = _("Prefixes") if len(default_prefixes) >= 2 else _("Prefix") if await ctx.embed_requested(): em = discord.Embed( color=await ctx.embed_colour(), title=_("{} of {}:").format(plural, bot_name), description=" ".join(to_send), ) em.set_thumbnail(url=avatar) await ctx.send(embed=em) else: await ctx.send(bold(_("{} of {}:\n")).format(plural, bot_name) + " ".join(to_send)) else: to_send = [f"`\u200b{p}\u200b`" for p in guild_prefixes] plural = _("prefixes") if len(default_prefixes) >= 2 else _("prefix") if await ctx.embed_requested(): em = discord.Embed( color=await ctx.embed_colour(), title=_("Server {} of 
{}:").format(plural, bot_name), description=" ".join(to_send), ) em.set_thumbnail(url=avatar) await ctx.send(embed=em) else: await ctx.send( bold(_("Server {} of {name}:\n")).format(plural, bot_name) + " ".join(to_send) ) @commands.command(aliases=["serverc", "serversc"]) async def servercount(self, ctx: commands.Context): """Send servers stats of the bot.""" visible_users = sum(len(s.members) for s in self.bot.guilds) total_users = sum(s.member_count for s in self.bot.guilds) msg = _( "{name} is running on `{shard_count}` {shards}.\n" "Serving `{servs}` servers (`{channels}` channels).\n" "For a total of `{visible_users}` users (`{unique}` unique).\n" "(`{visible_users}` visible now, `{total_users}` total, `{percentage_chunked:.2f}%` chunked)" ).format( name=ctx.bot.user.name, shard_count=humanize_number(self.bot.shard_count), shards=_("shards") if self.bot.shard_count > 1 else _("shard"), servs=humanize_number(len(self.bot.guilds)), channels=humanize_number(sum(len(s.channels) for s in self.bot.guilds)), visible_users=humanize_number(visible_users), unique=humanize_number(len(self.bot.users)), total_users=humanize_number(total_users), percentage_chunked=visible_users / total_users * 100, ) if await ctx.embed_requested(): em = discord.Embed(color=await ctx.embed_colour(), description=msg) await ctx.send(embed=em) else: await ctx.send(msg) @commands.command(aliases=["servreg"]) async def serversregions(self, ctx: commands.Context, sort: str = "guilds"): """ Show total of regions where the bot is. You can also sort by number of users by using `[p]serversregions users` By default it sort by guilds. 
""" regions_dict = { "vip-us-east": ":flag_us:" + _(" __VIP__ US East"), "vip-us-west": ":flag_us:" + _(" __VIP__ US West"), "vip-amsterdam": ":flag_nl:" + _(" __VIP__ Amsterdam"), "eu-west": ":flag_eu:" + _(" EU West"), "eu-central": ":flag_eu:" + _(" EU Central"), "europe": ":flag_eu:" + _(" Europe"), "london": ":flag_gb:" + _(" London"), "frankfurt": ":flag_de:" + _(" Frankfurt"), "amsterdam": ":flag_nl:" + _(" Amsterdam"), "us-west": ":flag_us:" + _(" US West"), "us-east": ":flag_us:" + _(" US East"), "us-south": ":flag_us:" + _(" US South"), "us-central": ":flag_us:" + _(" US Central"), "singapore": ":flag_sg:" + _(" Singapore"), "sydney": ":flag_au:" + _(" Sydney"), "brazil": ":flag_br:" + _(" Brazil"), "hongkong": ":flag_hk:" + _(" Hong Kong"), "russia": ":flag_ru:" + _(" Russia"), "japan": ":flag_jp:" + _(" Japan"), "southafrica": ":flag_za:" + _(" South Africa"), "india": ":flag_in:" + _(" India"), "dubai": ":flag_ae:" + _(" Dubai"), "south-korea": ":flag_kr:" + _(" South Korea"), } regions = {} for guild in self.bot.guilds: region = str(guild.region) if region not in regions: regions[region] = {"guilds": 0, "users": 0} regions[region]["users"] += guild.member_count regions[region]["guilds"] += 1 def sort_keys(key: str): return ( (key[1]["guilds"], key[1]["users"]) if sort != "users" else (key[1]["users"], key[1]["guilds"]) ) regions_stats = dict(sorted(regions.items(), key=lambda x: sort_keys(x), reverse=True)) msg = [ _("{flag}: {guilds_len} and {users_len}").format( flag=regions_dict[region_name], guilds_len=( f"`{humanize_number(values['guilds'])} {_('server') if values['guilds'] < 2 else _('servers')}`" ), users_len=( f"`{humanize_number(values['users'])} {_('user') if values['users'] < 2 else _('users')}`" ), ) for region_name, values in regions_stats.items() ] guilds_word = _("server") if len(self.bot.guilds) < 2 else _("servers") users_word = ( _("user") if sum(k["users"] for k in regions_stats.values()) < 2 else _("users") ) footer = _("For a 
total of {guilds} and {users}").format( guilds=f"{humanize_number(len(self.bot.guilds))} {guilds_word}", users=f"{humanize_number(sum(k['users'] for k in regions_stats.values()))} {users_word}", ) if await ctx.embed_requested(): em = discord.Embed( color=await ctx.embed_colour(), title=_("Servers regions stats:"), description="\n".join(msg), ) em.set_footer(text=footer) await ctx.send(embed=em) else: msg = bold(_("Servers regions stats:\n\n")) + "\n".join(msg) + "\n" + bold(footer) await ctx.send(msg)
en
0.904051
Multiple tools that are originally used on Martine. Nothing to delete. Thanks Sinbad! Show stats of the bank. Show the usage count of the bot. Commands processed, messages received, and music on servers. Permanent stats since first time that the cog has been loaded. Show all prefixes of the bot Send servers stats of the bot. Show total of regions where the bot is. You can also sort by number of users by using `[p]serversregions users` By default it sort by guilds.
1.987974
2
Google-Playstore-/code.py
anajikadam17/ga-learner-dsmp-repo
0
6624561
<reponame>anajikadam17/ga-learner-dsmp-repo<filename>Google-Playstore-/code.py<gh_stars>0 # -------------- #Importing header files import pandas as pd import matplotlib.pyplot as plt import seaborn as sns #Code starts here data = pd.read_csv(path) data = data[data['Rating']<=5] sns.countplot('Rating', data=data) #Code ends here # -------------- # code starts here total_null = data.isnull().sum() percent_null = (total_null/data.isnull().count()) missing_data = pd.concat([total_null, percent_null], axis=1, keys=['Total','Percent']) print(missing_data.head()) data = data.dropna(axis = 0, how ='any') total_null_1 = data.isnull().sum() percent_null_1 = (total_null_1/data.isnull().count()) missing_data_1 = pd.concat([total_null_1, percent_null_1], axis=1, keys=['Total','Percent']) print(missing_data.head()) # code ends here # -------------- #Code starts here sns.catplot(x="Category",y="Rating",data = data, kind="box", height = 10) plt.xticks(rotation = '90') plt.title('Rating vs Category [BoxPlot]') #Code ends here # -------------- #Importing header files from sklearn.preprocessing import MinMaxScaler, LabelEncoder #Code starts here print(data['Installs'].value_counts()) # data['Installs'] = data['Installs'].str.extract('(\d+)', expand=False).astype(int) data['Installs']=data['Installs'].apply(lambda x: x.replace(',','')) data['Installs']=data['Installs'].apply(lambda x: x.replace('+','')).astype(int) le=LabelEncoder() data['Installs']=le.fit_transform(data['Installs']) sns.regplot(x="Installs", y="Rating", data=data) plt.title('Rating vs Installs [RegPlot]') plt.show() #Code ends here # -------------- #Code starts here print(data['Price'].value_counts()) data['Price']=data['Price'].apply(lambda x: x.replace('$','')).astype(float) le=LabelEncoder() data['Installs']=le.fit_transform(data['Installs']) sns.regplot(x="Price", y="Rating", data=data) plt.title('Rating vs Price [RegPlot]') plt.show() #Code ends here # -------------- #Code starts here 
print(data['Genres'].unique()) data['Genres'] = data['Genres'].str.split(';').str[0] gr_mean = data.groupby('Genres',as_index=False)['Rating'].mean() print(gr_mean.describe()) gr_mean = gr_mean.sort_values(['Rating']) #Code ends here # -------------- #Code starts here from datetime import datetime,date print(data['Last Updated']) data['Last Updated']=pd.to_datetime(data['Last Updated']) max_date=max(data['Last Updated']) data['Last Updated Days']=(max_date-data['Last Updated']).dt.days sns.regplot(x="Last Updated Days", y="Rating", data=data) plt.title('Rating vs "Last Updated Days" [RegPlot]') plt.show() #Code ends here
# -------------- #Importing header files import pandas as pd import matplotlib.pyplot as plt import seaborn as sns #Code starts here data = pd.read_csv(path) data = data[data['Rating']<=5] sns.countplot('Rating', data=data) #Code ends here # -------------- # code starts here total_null = data.isnull().sum() percent_null = (total_null/data.isnull().count()) missing_data = pd.concat([total_null, percent_null], axis=1, keys=['Total','Percent']) print(missing_data.head()) data = data.dropna(axis = 0, how ='any') total_null_1 = data.isnull().sum() percent_null_1 = (total_null_1/data.isnull().count()) missing_data_1 = pd.concat([total_null_1, percent_null_1], axis=1, keys=['Total','Percent']) print(missing_data.head()) # code ends here # -------------- #Code starts here sns.catplot(x="Category",y="Rating",data = data, kind="box", height = 10) plt.xticks(rotation = '90') plt.title('Rating vs Category [BoxPlot]') #Code ends here # -------------- #Importing header files from sklearn.preprocessing import MinMaxScaler, LabelEncoder #Code starts here print(data['Installs'].value_counts()) # data['Installs'] = data['Installs'].str.extract('(\d+)', expand=False).astype(int) data['Installs']=data['Installs'].apply(lambda x: x.replace(',','')) data['Installs']=data['Installs'].apply(lambda x: x.replace('+','')).astype(int) le=LabelEncoder() data['Installs']=le.fit_transform(data['Installs']) sns.regplot(x="Installs", y="Rating", data=data) plt.title('Rating vs Installs [RegPlot]') plt.show() #Code ends here # -------------- #Code starts here print(data['Price'].value_counts()) data['Price']=data['Price'].apply(lambda x: x.replace('$','')).astype(float) le=LabelEncoder() data['Installs']=le.fit_transform(data['Installs']) sns.regplot(x="Price", y="Rating", data=data) plt.title('Rating vs Price [RegPlot]') plt.show() #Code ends here # -------------- #Code starts here print(data['Genres'].unique()) data['Genres'] = data['Genres'].str.split(';').str[0] gr_mean = 
data.groupby('Genres',as_index=False)['Rating'].mean() print(gr_mean.describe()) gr_mean = gr_mean.sort_values(['Rating']) #Code ends here # -------------- #Code starts here from datetime import datetime,date print(data['Last Updated']) data['Last Updated']=pd.to_datetime(data['Last Updated']) max_date=max(data['Last Updated']) data['Last Updated Days']=(max_date-data['Last Updated']).dt.days sns.regplot(x="Last Updated Days", y="Rating", data=data) plt.title('Rating vs "Last Updated Days" [RegPlot]') plt.show() #Code ends here
en
0.525354
# -------------- #Importing header files #Code starts here #Code ends here # -------------- # code starts here # code ends here # -------------- #Code starts here #Code ends here # -------------- #Importing header files #Code starts here # data['Installs'] = data['Installs'].str.extract('(\d+)', expand=False).astype(int) #Code ends here # -------------- #Code starts here #Code ends here # -------------- #Code starts here #Code ends here # -------------- #Code starts here #Code ends here
2.972195
3
setup.py
marickmanrho/ToCM_reference_data
0
6624562
# Setup for the ToCM reference data package # # Written using https://packaging.python.org/tutorials/packaging-projects/ # Most importandly import setuptools from setuptools import setup import os # Use README.md as long description with open("README.md", "r") as file: long_description = file.read() # Use requirements.txt as requirements with open("requirements.txt", "r") as file: requirements = file.read() package_location = "tocm_reference_data" data_files = [] for root, dirs, files in os.walk(package_location + "/lib"): for file in files: if file.endswith(".json") or file.endswith(".csv"): data_files.append( os.path.relpath(os.path.join(root, file), package_location) ) setup( name="tocm_reference_data", version=0.4, url="https://github.com/marickmanrho/tocm_reference_data", license="MIT", author="<NAME>", author_email="<EMAIL>", description="Reference data used in my research", long_description=long_description, long_description_content_type="text/markdown", keywords=["scientific", "University of Groningen"], install_requires=requirements, packages=["tocm_reference_data"], entry_points={ "console_scripts": ["tocm_reference_data=tocm_reference_data.manage:manage"] }, include_package_data=True, package_data={"": data_files}, platforms="any", classifiers=[ "Programming Language :: Python :: 3", "Natural Language :: English", "Operating System :: OS Independent", "Intended Audience :: Science/Research", "Topic :: Scientific/Engineering", "Topic :: Scientific/Engineering :: Physics", ], )
# Setup for the ToCM reference data package # # Written using https://packaging.python.org/tutorials/packaging-projects/ # Most importandly import setuptools from setuptools import setup import os # Use README.md as long description with open("README.md", "r") as file: long_description = file.read() # Use requirements.txt as requirements with open("requirements.txt", "r") as file: requirements = file.read() package_location = "tocm_reference_data" data_files = [] for root, dirs, files in os.walk(package_location + "/lib"): for file in files: if file.endswith(".json") or file.endswith(".csv"): data_files.append( os.path.relpath(os.path.join(root, file), package_location) ) setup( name="tocm_reference_data", version=0.4, url="https://github.com/marickmanrho/tocm_reference_data", license="MIT", author="<NAME>", author_email="<EMAIL>", description="Reference data used in my research", long_description=long_description, long_description_content_type="text/markdown", keywords=["scientific", "University of Groningen"], install_requires=requirements, packages=["tocm_reference_data"], entry_points={ "console_scripts": ["tocm_reference_data=tocm_reference_data.manage:manage"] }, include_package_data=True, package_data={"": data_files}, platforms="any", classifiers=[ "Programming Language :: Python :: 3", "Natural Language :: English", "Operating System :: OS Independent", "Intended Audience :: Science/Research", "Topic :: Scientific/Engineering", "Topic :: Scientific/Engineering :: Physics", ], )
en
0.714187
# Setup for the ToCM reference data package # # Written using https://packaging.python.org/tutorials/packaging-projects/ # Most importandly import setuptools # Use README.md as long description # Use requirements.txt as requirements
1.988986
2
label_studio/utils/argparser.py
gauthamsuresh09/label-studio
0
6624563
import os from label_studio.utils.io import find_dir from label_studio.utils.misc import iter_config_templates def parse_input_args(): """ Combine args with json config :return: config dict """ import sys import argparse if len(sys.argv) == 1: print('\nQuick start usage: label-studio start my_project --init\n') available_templates = [os.path.basename(os.path.dirname(f)) for f in iter_config_templates()] def valid_filepath(filepath): path = os.path.abspath(os.path.expanduser(filepath)) if os.path.exists(path): return path raise FileNotFoundError(filepath) root_parser = argparse.ArgumentParser(add_help=False) root_parser.add_argument( '-b', '--no-browser', dest='no_browser', action='store_true', help='Do not open browser at label studio start') root_parser.add_argument( '-d', '--debug', dest='debug', action='store_true', help='Debug mode for Flask', default=None) root_parser.add_argument( '--force', dest='force', action='store_true', help='Force creation new resources if exist') root_parser.add_argument( '--root-dir', dest='root_dir', default='.', help='Projects root directory') root_parser.add_argument( '-v', '--verbose', dest='verbose', action='store_true', help='Increase output verbosity') root_parser.add_argument( '--template', dest='template', choices=available_templates, help='Choose from predefined project templates') root_parser.add_argument( '-c', '--config', dest='config_path', type=valid_filepath, help='Server config') root_parser.add_argument( '-l', '--label-config', dest='label_config', type=valid_filepath, help='Label config path') root_parser.add_argument( '-i', '--input-path', dest='input_path', type=valid_filepath, help='Input path to task file or directory with tasks') root_parser.add_argument( '--input-format', dest='input_format', choices=('json', 'json-dir', 'text', 'text-dir', 'image-dir', 'audio-dir'), default='json', help='Input tasks format. 
Unless you are using "json" or "json-dir" format, --label-config option is required') root_parser.add_argument( '-o', '--output-dir', dest='output_dir', type=valid_filepath, help='Output directory for completions') root_parser.add_argument( '--ml-backend-url', dest='ml_backend_url', help='Machine learning backend URL') root_parser.add_argument( '--ml-backend-name', dest='ml_backend_name', help='Machine learning backend name') root_parser.add_argument( '--sampling', dest='sampling', choices=['sequential', 'uniform'], default='uniform', help='Sampling type that defines tasks order' ) root_parser.add_argument( '-p', '--port', dest='port', default=8080, type=int, help='Server port') root_parser.add_argument( '--log-level', dest='log_level', choices=['DEBUG', 'INFO', 'WARNING', 'ERROR'], default=None, help='Logging level' ) parser = argparse.ArgumentParser(description='Label studio') subparsers = parser.add_subparsers(dest='command', help='Available commands') subparsers.required = True # init sub-command parser parser_init = subparsers.add_parser('init', help='Initialize Label Studio', parents=[root_parser]) parser_init.add_argument( 'project_name', help='Path to directory where project state will be initialized') # start sub-command parser parser_start = subparsers.add_parser('start', help='Start Label Studio server', parents=[root_parser]) parser_start.add_argument( 'project_name', help='Path to directory where project state has been initialized') parser_start.add_argument( '--init', dest='init', action='store_true', help='Initialize if project is not initialized yet') # start-multi-session sub-command parser parser_start_ms = subparsers.add_parser( 'start-multi-session', help='Start Label Studio server', parents=[root_parser]) args = parser.parse_args() if args.output_dir is not None: raise RuntimeError('"--output-dir" option is deprecated and has no effect.\n' 'All output results are saved to project_name/completions directory') label_config_explicitly_specified = 
hasattr(args, 'label_config') and args.label_config if args.template and not label_config_explicitly_specified: args.label_config = os.path.join(find_dir('examples'), args.template, 'config.xml') if not hasattr(args, 'label_config'): args.label_config = None return args
import os from label_studio.utils.io import find_dir from label_studio.utils.misc import iter_config_templates def parse_input_args(): """ Combine args with json config :return: config dict """ import sys import argparse if len(sys.argv) == 1: print('\nQuick start usage: label-studio start my_project --init\n') available_templates = [os.path.basename(os.path.dirname(f)) for f in iter_config_templates()] def valid_filepath(filepath): path = os.path.abspath(os.path.expanduser(filepath)) if os.path.exists(path): return path raise FileNotFoundError(filepath) root_parser = argparse.ArgumentParser(add_help=False) root_parser.add_argument( '-b', '--no-browser', dest='no_browser', action='store_true', help='Do not open browser at label studio start') root_parser.add_argument( '-d', '--debug', dest='debug', action='store_true', help='Debug mode for Flask', default=None) root_parser.add_argument( '--force', dest='force', action='store_true', help='Force creation new resources if exist') root_parser.add_argument( '--root-dir', dest='root_dir', default='.', help='Projects root directory') root_parser.add_argument( '-v', '--verbose', dest='verbose', action='store_true', help='Increase output verbosity') root_parser.add_argument( '--template', dest='template', choices=available_templates, help='Choose from predefined project templates') root_parser.add_argument( '-c', '--config', dest='config_path', type=valid_filepath, help='Server config') root_parser.add_argument( '-l', '--label-config', dest='label_config', type=valid_filepath, help='Label config path') root_parser.add_argument( '-i', '--input-path', dest='input_path', type=valid_filepath, help='Input path to task file or directory with tasks') root_parser.add_argument( '--input-format', dest='input_format', choices=('json', 'json-dir', 'text', 'text-dir', 'image-dir', 'audio-dir'), default='json', help='Input tasks format. 
Unless you are using "json" or "json-dir" format, --label-config option is required') root_parser.add_argument( '-o', '--output-dir', dest='output_dir', type=valid_filepath, help='Output directory for completions') root_parser.add_argument( '--ml-backend-url', dest='ml_backend_url', help='Machine learning backend URL') root_parser.add_argument( '--ml-backend-name', dest='ml_backend_name', help='Machine learning backend name') root_parser.add_argument( '--sampling', dest='sampling', choices=['sequential', 'uniform'], default='uniform', help='Sampling type that defines tasks order' ) root_parser.add_argument( '-p', '--port', dest='port', default=8080, type=int, help='Server port') root_parser.add_argument( '--log-level', dest='log_level', choices=['DEBUG', 'INFO', 'WARNING', 'ERROR'], default=None, help='Logging level' ) parser = argparse.ArgumentParser(description='Label studio') subparsers = parser.add_subparsers(dest='command', help='Available commands') subparsers.required = True # init sub-command parser parser_init = subparsers.add_parser('init', help='Initialize Label Studio', parents=[root_parser]) parser_init.add_argument( 'project_name', help='Path to directory where project state will be initialized') # start sub-command parser parser_start = subparsers.add_parser('start', help='Start Label Studio server', parents=[root_parser]) parser_start.add_argument( 'project_name', help='Path to directory where project state has been initialized') parser_start.add_argument( '--init', dest='init', action='store_true', help='Initialize if project is not initialized yet') # start-multi-session sub-command parser parser_start_ms = subparsers.add_parser( 'start-multi-session', help='Start Label Studio server', parents=[root_parser]) args = parser.parse_args() if args.output_dir is not None: raise RuntimeError('"--output-dir" option is deprecated and has no effect.\n' 'All output results are saved to project_name/completions directory') label_config_explicitly_specified = 
hasattr(args, 'label_config') and args.label_config if args.template and not label_config_explicitly_specified: args.label_config = os.path.join(find_dir('examples'), args.template, 'config.xml') if not hasattr(args, 'label_config'): args.label_config = None return args
en
0.362423
Combine args with json config :return: config dict # init sub-command parser # start sub-command parser # start-multi-session sub-command parser
2.107168
2
tridentstream/metadata/tag/handler.py
tridentstream/mediaserver
6
6624564
import logging from rest_framework import serializers from ...bases.metadata.linkingmetadata import LinkingMetadataHandlerPlugin from .filters import MetadataFilter from .models import ListingItemRelation, Tag logger = logging.getLogger(__name__) class TagSerializer(serializers.ModelSerializer): id = serializers.SerializerMethodField("get_metadata_identifier") type = serializers.SerializerMethodField("get_metadata_type") plugin_name = serializers.SerializerMethodField() class Meta: model = Tag fields = ("id", "type", "plugin_name", "tag_name") def get_metadata_identifier(self, obj): return obj.metadata.identifier def get_metadata_type(self, obj): return "metadata_%s" % obj.metadata.metadata_name def get_plugin_name(self, obj): return "tag" class TagMetadataHandlerPlugin(LinkingMetadataHandlerPlugin): plugin_name = "tag" priority = -10 prefetch_related = ["metadata"] serializer = TagSerializer model = Tag listing_item_relation_model = ListingItemRelation metadata_link_model = Tag metadata_embed_method = "relate" filter = MetadataFilter user_field = "user" __traits__ = ["tag"] def get_metadata(self, request, identifier): pass
import logging from rest_framework import serializers from ...bases.metadata.linkingmetadata import LinkingMetadataHandlerPlugin from .filters import MetadataFilter from .models import ListingItemRelation, Tag logger = logging.getLogger(__name__) class TagSerializer(serializers.ModelSerializer): id = serializers.SerializerMethodField("get_metadata_identifier") type = serializers.SerializerMethodField("get_metadata_type") plugin_name = serializers.SerializerMethodField() class Meta: model = Tag fields = ("id", "type", "plugin_name", "tag_name") def get_metadata_identifier(self, obj): return obj.metadata.identifier def get_metadata_type(self, obj): return "metadata_%s" % obj.metadata.metadata_name def get_plugin_name(self, obj): return "tag" class TagMetadataHandlerPlugin(LinkingMetadataHandlerPlugin): plugin_name = "tag" priority = -10 prefetch_related = ["metadata"] serializer = TagSerializer model = Tag listing_item_relation_model = ListingItemRelation metadata_link_model = Tag metadata_embed_method = "relate" filter = MetadataFilter user_field = "user" __traits__ = ["tag"] def get_metadata(self, request, identifier): pass
none
1
2.115383
2
wb_load_model.py
Tiago-Minuzzi/phd-proj
0
6624565
import os import sys import numpy as np import pandas as pd import tensorflow as tf from Bio import SeqIO from numpy import array from numpy import argmax from warnings import simplefilter from contextlib import redirect_stderr from sklearn.preprocessing import LabelEncoder from sklearn.preprocessing import OneHotEncoder # Hide warning messages from tensorflow.python.util import deprecation deprecation._PRINT_DEPRECATION_WARNINGS = False simplefilter(action='ignore', category=FutureWarning) os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' with redirect_stderr(open(os.devnull, "w")): from tensorflow.keras.models import load_model from keras.preprocessing.sequence import pad_sequences # Show full array and dataframe (without truncation) pd.set_option('display.max_rows', None) # Show full data frame (without truncation) np.set_printoptions(threshold=sys.maxsize) # Show full arrays (without truncation) # I/O files inp_arq = sys.argv[1] out_arq = sys.argv[2] if len(sys.argv) >= 3 else None # Check whether the element exists pad_ref = "fastas/pad_ref03.fasta" #tes_mdl = "models/model_var01.05.wb.hdf5" tes_mdl = "models/model_var01.05-01.wb.hdf5" # Pre-processing functions # Fasta to data frame def fasta_frame(fasta_file,label): identifiers = [] sequences = [] with open(fasta_file) as f_f: for seq_record in SeqIO.parse(f_f, 'fasta'): identifiers.append(seq_record.id) sequences.append(seq_record.seq.lower()) s1 = pd.Series(identifiers, name='ID') s2 = pd.Series(sequences, name='sequence') fasta_frame = pd.DataFrame(dict(ID=s1, sequence=s2)) fasta_frame['label']=label return(fasta_frame) # Sequence onehot encoding def ohe_fun(fasta_df): integer_encoder = LabelEncoder() one_hot_encoder = OneHotEncoder(categories='auto') input_features = [] for linha in fasta_df['sequence']: integer_encoded = integer_encoder.fit_transform(list(linha)) integer_encoded = np.array(integer_encoded).reshape(-1, 1) one_hot_encoded = one_hot_encoder.fit_transform(integer_encoded) 
input_features.append(one_hot_encoded.toarray()) input_features=pad_sequences(input_features, padding='post') input_features = np.stack(input_features) return(input_features) # Sequence flattening def flatten_sequence(pred_fasta_flat): dimensoes=pred_fasta_flat.shape n_samples=dimensoes[0] n_x=dimensoes[1] n_y=dimensoes[2] n_xy=(n_x * n_y) pred_fasta_flat=pred_fasta_flat.reshape(n_samples,n_xy) return(pred_fasta_flat) try: # Input pre-processing user_frame = fasta_frame(inp_arq,'?') pad_frame = fasta_frame(pad_ref,'?') u_f = [user_frame,pad_frame] user_df = pd.concat(u_f,ignore_index=True) # One hot encode and flat sequences user_df_ohe = ohe_fun(user_df) user_df_flat=flatten_sequence(user_df_ohe) # Remove pad_ref user_seq_to_pred = np.delete(user_df_flat, -1, 0) # Load CNN model and predict modelo = load_model(tes_mdl) res_prob = modelo.predict_proba(np.expand_dims(user_seq_to_pred, axis=2), batch_size = 2) res_label = modelo.predict_classes(np.expand_dims(user_seq_to_pred, axis=2), batch_size = 2) # Format results pd.set_option('display.float_format', lambda x: f'{x:.3f}') res_label_S = pd.Series(res_label) res_prob_0 = pd.Series((res_prob[:,0])*100) res_prob_1 = pd.Series((res_prob[:,1])*100) user_ids = pd.Series(user_df['ID'][:-1]) results_dict = { 'ID': user_ids, 'Not TE prob': res_prob_0, 'TE prob': res_prob_1, 'Classification': res_label_S } results_df = pd.DataFrame(results_dict) results_df.set_index('ID',inplace=True) results_df['Classification'].replace(0,'NT',inplace=True) results_df['Classification'].replace(1,'TE',inplace=True) # Show prediction results print("\n") print("# RESULTS") print(results_df) print("\n") # Write to file and show message if out_arq: results_df.to_csv(out_arq) print(f"File saved as {out_arq}") else: print("No output file created") except ValueError: print(f"\nThere is an invalid sequece in '{inp_arq}'.\n")
import os import sys import numpy as np import pandas as pd import tensorflow as tf from Bio import SeqIO from numpy import array from numpy import argmax from warnings import simplefilter from contextlib import redirect_stderr from sklearn.preprocessing import LabelEncoder from sklearn.preprocessing import OneHotEncoder # Hide warning messages from tensorflow.python.util import deprecation deprecation._PRINT_DEPRECATION_WARNINGS = False simplefilter(action='ignore', category=FutureWarning) os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' with redirect_stderr(open(os.devnull, "w")): from tensorflow.keras.models import load_model from keras.preprocessing.sequence import pad_sequences # Show full array and dataframe (without truncation) pd.set_option('display.max_rows', None) # Show full data frame (without truncation) np.set_printoptions(threshold=sys.maxsize) # Show full arrays (without truncation) # I/O files inp_arq = sys.argv[1] out_arq = sys.argv[2] if len(sys.argv) >= 3 else None # Check whether the element exists pad_ref = "fastas/pad_ref03.fasta" #tes_mdl = "models/model_var01.05.wb.hdf5" tes_mdl = "models/model_var01.05-01.wb.hdf5" # Pre-processing functions # Fasta to data frame def fasta_frame(fasta_file,label): identifiers = [] sequences = [] with open(fasta_file) as f_f: for seq_record in SeqIO.parse(f_f, 'fasta'): identifiers.append(seq_record.id) sequences.append(seq_record.seq.lower()) s1 = pd.Series(identifiers, name='ID') s2 = pd.Series(sequences, name='sequence') fasta_frame = pd.DataFrame(dict(ID=s1, sequence=s2)) fasta_frame['label']=label return(fasta_frame) # Sequence onehot encoding def ohe_fun(fasta_df): integer_encoder = LabelEncoder() one_hot_encoder = OneHotEncoder(categories='auto') input_features = [] for linha in fasta_df['sequence']: integer_encoded = integer_encoder.fit_transform(list(linha)) integer_encoded = np.array(integer_encoded).reshape(-1, 1) one_hot_encoded = one_hot_encoder.fit_transform(integer_encoded) 
input_features.append(one_hot_encoded.toarray()) input_features=pad_sequences(input_features, padding='post') input_features = np.stack(input_features) return(input_features) # Sequence flattening def flatten_sequence(pred_fasta_flat): dimensoes=pred_fasta_flat.shape n_samples=dimensoes[0] n_x=dimensoes[1] n_y=dimensoes[2] n_xy=(n_x * n_y) pred_fasta_flat=pred_fasta_flat.reshape(n_samples,n_xy) return(pred_fasta_flat) try: # Input pre-processing user_frame = fasta_frame(inp_arq,'?') pad_frame = fasta_frame(pad_ref,'?') u_f = [user_frame,pad_frame] user_df = pd.concat(u_f,ignore_index=True) # One hot encode and flat sequences user_df_ohe = ohe_fun(user_df) user_df_flat=flatten_sequence(user_df_ohe) # Remove pad_ref user_seq_to_pred = np.delete(user_df_flat, -1, 0) # Load CNN model and predict modelo = load_model(tes_mdl) res_prob = modelo.predict_proba(np.expand_dims(user_seq_to_pred, axis=2), batch_size = 2) res_label = modelo.predict_classes(np.expand_dims(user_seq_to_pred, axis=2), batch_size = 2) # Format results pd.set_option('display.float_format', lambda x: f'{x:.3f}') res_label_S = pd.Series(res_label) res_prob_0 = pd.Series((res_prob[:,0])*100) res_prob_1 = pd.Series((res_prob[:,1])*100) user_ids = pd.Series(user_df['ID'][:-1]) results_dict = { 'ID': user_ids, 'Not TE prob': res_prob_0, 'TE prob': res_prob_1, 'Classification': res_label_S } results_df = pd.DataFrame(results_dict) results_df.set_index('ID',inplace=True) results_df['Classification'].replace(0,'NT',inplace=True) results_df['Classification'].replace(1,'TE',inplace=True) # Show prediction results print("\n") print("# RESULTS") print(results_df) print("\n") # Write to file and show message if out_arq: results_df.to_csv(out_arq) print(f"File saved as {out_arq}") else: print("No output file created") except ValueError: print(f"\nThere is an invalid sequece in '{inp_arq}'.\n")
en
0.705455
# Hide warning messages # Show full array and dataframe (without truncation) # Show full data frame (without truncation) # Show full arrays (without truncation) # I/O files # Check whether the element exists #tes_mdl = "models/model_var01.05.wb.hdf5" # Pre-processing functions # Fasta to data frame # Sequence onehot encoding # Sequence flattening # Input pre-processing # One hot encode and flat sequences # Remove pad_ref # Load CNN model and predict # Format results # Show prediction results # Write to file and show message
2.229855
2
tests/async/test_dialog.py
tumregels/playwright-python
2
6624566
# Copyright (c) Microsoft Corporation. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import asyncio import pytest from playwright.async_api import Dialog, Page async def test_should_fire(page: Page, server): result = [] def on_dialog(dialog: Dialog): result.append(True) assert dialog.type == "alert" assert dialog.defaultValue == "" assert dialog.message == "yo" asyncio.create_task(dialog.accept()) page.on("dialog", on_dialog) await page.evaluate("alert('yo')") assert result async def test_should_allow_accepting_prompts(page: Page, server): result = [] def on_dialog(dialog: Dialog): result.append(True) assert dialog.type == "prompt" assert dialog.defaultValue == "yes." assert dialog.message == "question?" asyncio.create_task(dialog.accept("answer!")) page.on("dialog", on_dialog) assert await page.evaluate("prompt('question?', 'yes.')") == "answer!" 
assert result async def test_should_dismiss_the_prompt(page: Page, server): result = [] def on_dialog(dialog: Dialog): result.append(True) asyncio.create_task(dialog.dismiss()) page.on("dialog", on_dialog) assert await page.evaluate("prompt('question?')") is None assert result async def test_should_accept_the_confirm_prompt(page: Page, server): result = [] def on_dialog(dialog: Dialog): result.append(True) asyncio.create_task(dialog.accept()) page.on("dialog", on_dialog) assert await page.evaluate("confirm('boolean?')") is True assert result async def test_should_dismiss_the_confirm_prompt(page: Page, server): result = [] def on_dialog(dialog: Dialog): result.append(True) asyncio.create_task(dialog.dismiss()) page.on("dialog", on_dialog) assert await page.evaluate("confirm('boolean?')") is False assert result # TODO: Logger support not yet here # // it.fail(CHANNEL)('should log prompt actions', async({browser}) => { # // const messages = []; # // const context = await browser.newContext({ # // logger: { # // isEnabled: () => true, # // log: (name, severity, message) => messages.push(message), # // } # // }); # // const page = await context.newPage(); # // const promise = page.evaluate(() => confirm('01234567890123456789012345678901234567890123456789012345678901234567890123456789')); # // const dialog = await page.waitForEvent('dialog'); # // expect(messages.join()).toContain('confirm "0123456789012345678901234567890123456789012345678…" was shown'); # // await dialog.accept('123'); # // await promise; # // expect(messages.join()).toContain('confirm "0123456789012345678901234567890123456789012345678…" was accepted'); # // await context.close(); # // }); @pytest.mark.skip_browser("webkit") async def test_should_be_able_to_close_context_with_open_alert(browser): context = await browser.newContext() page = await context.newPage() alertFuture = asyncio.create_task(page.waitForEvent("dialog")) await page.evaluate("() => setTimeout(() => alert('hello'), 0)", None) await 
alertFuture await context.close()
# Copyright (c) Microsoft Corporation. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import asyncio import pytest from playwright.async_api import Dialog, Page async def test_should_fire(page: Page, server): result = [] def on_dialog(dialog: Dialog): result.append(True) assert dialog.type == "alert" assert dialog.defaultValue == "" assert dialog.message == "yo" asyncio.create_task(dialog.accept()) page.on("dialog", on_dialog) await page.evaluate("alert('yo')") assert result async def test_should_allow_accepting_prompts(page: Page, server): result = [] def on_dialog(dialog: Dialog): result.append(True) assert dialog.type == "prompt" assert dialog.defaultValue == "yes." assert dialog.message == "question?" asyncio.create_task(dialog.accept("answer!")) page.on("dialog", on_dialog) assert await page.evaluate("prompt('question?', 'yes.')") == "answer!" 
assert result async def test_should_dismiss_the_prompt(page: Page, server): result = [] def on_dialog(dialog: Dialog): result.append(True) asyncio.create_task(dialog.dismiss()) page.on("dialog", on_dialog) assert await page.evaluate("prompt('question?')") is None assert result async def test_should_accept_the_confirm_prompt(page: Page, server): result = [] def on_dialog(dialog: Dialog): result.append(True) asyncio.create_task(dialog.accept()) page.on("dialog", on_dialog) assert await page.evaluate("confirm('boolean?')") is True assert result async def test_should_dismiss_the_confirm_prompt(page: Page, server): result = [] def on_dialog(dialog: Dialog): result.append(True) asyncio.create_task(dialog.dismiss()) page.on("dialog", on_dialog) assert await page.evaluate("confirm('boolean?')") is False assert result # TODO: Logger support not yet here # // it.fail(CHANNEL)('should log prompt actions', async({browser}) => { # // const messages = []; # // const context = await browser.newContext({ # // logger: { # // isEnabled: () => true, # // log: (name, severity, message) => messages.push(message), # // } # // }); # // const page = await context.newPage(); # // const promise = page.evaluate(() => confirm('01234567890123456789012345678901234567890123456789012345678901234567890123456789')); # // const dialog = await page.waitForEvent('dialog'); # // expect(messages.join()).toContain('confirm "0123456789012345678901234567890123456789012345678…" was shown'); # // await dialog.accept('123'); # // await promise; # // expect(messages.join()).toContain('confirm "0123456789012345678901234567890123456789012345678…" was accepted'); # // await context.close(); # // }); @pytest.mark.skip_browser("webkit") async def test_should_be_able_to_close_context_with_open_alert(browser): context = await browser.newContext() page = await context.newPage() alertFuture = asyncio.create_task(page.waitForEvent("dialog")) await page.evaluate("() => setTimeout(() => alert('hello'), 0)", None) await 
alertFuture await context.close()
en
0.488724
# Copyright (c) Microsoft Corporation. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # TODO: Logger support not yet here # // it.fail(CHANNEL)('should log prompt actions', async({browser}) => { # // const messages = []; # // const context = await browser.newContext({ # // logger: { # // isEnabled: () => true, # // log: (name, severity, message) => messages.push(message), # // } # // }); # // const page = await context.newPage(); # // const promise = page.evaluate(() => confirm('01234567890123456789012345678901234567890123456789012345678901234567890123456789')); # // const dialog = await page.waitForEvent('dialog'); # // expect(messages.join()).toContain('confirm "0123456789012345678901234567890123456789012345678…" was shown'); # // await dialog.accept('123'); # // await promise; # // expect(messages.join()).toContain('confirm "0123456789012345678901234567890123456789012345678…" was accepted'); # // await context.close(); # // });
2.177548
2
Bite 1. Sum n numbers.py
Guznin/PyBites
1
6624567
""" Write a function that can sum up numbers: It should receive a list of n numbers. If no argument is provided, return sum of numbers 1..100. Look closely to the type of the function's default argument .. """ def sum_numbers(numbers=None): if numbers == None: return sum(range(101)) else: return sum(numbers)
""" Write a function that can sum up numbers: It should receive a list of n numbers. If no argument is provided, return sum of numbers 1..100. Look closely to the type of the function's default argument .. """ def sum_numbers(numbers=None): if numbers == None: return sum(range(101)) else: return sum(numbers)
en
0.719425
Write a function that can sum up numbers: It should receive a list of n numbers. If no argument is provided, return sum of numbers 1..100. Look closely to the type of the function's default argument ..
4.212989
4
123 Best Time to Buy and Sell Stock III.py
scorpionpd/LeetCode-all
0
6624568
<reponame>scorpionpd/LeetCode-all """ Say you have an array for which the ith element is the price of a given stock on day i. Design an algorithm to find the maximum profit. You may complete at most two transactions. Note: You may not engage in multiple transactions at the same time (ie, you must sell the stock before you buy again). """ __author__ = 'Danyang' class Solution: def maxProfit(self, prices): """ Algorithm: dp 1-D dp. Given an i, split the whole array into TWO parts: [0... i] and [i+1... n-1], it generates two max value based on i, max_profit[0... i] and max_profit[i+1... n-1] global_max_profit = max(max_profit[0... i] + max_profit[i+1... n-1]), for all i. :param prices: list of integers :return: integer """ if len(prices) <= 1: return 0 # O(n) using dp forward = [0 for _ in xrange(len(prices))] # forward[i] for 0..i lowest_buy_price = prices[0] for i in xrange(1, len(prices)): # if i==0: # forward[i] = 0 # else: forward[i] = max(forward[i-1], prices[i]-lowest_buy_price) lowest_buy_price = min(prices[i], lowest_buy_price) backward = [0 for _ in xrange(len(prices))] # backward[i] for i..len-1 highest_sell_price = prices[-1] for i in xrange(len(prices)-2, -1, -1): # if i==len(prices)-1: # backward[i] = 0 # else: backward[i] = max(backward[i+1], highest_sell_price-prices[i]) highest_sell_price = max(prices[i], highest_sell_price) max_profit = 0 for i in xrange(len(prices)): max_profit = max(max_profit, forward[i]+backward[i]) return max_profit def maxProfit_error(self, prices): """ 2 transactions you may not engage in multiple transactions at the same time (ie, you must sell the stock before you buy again). 
:param prices: list of integers :return: integer """ if len(prices) <= 1: return 0 delta_prices = [] for i in xrange(1, len(prices)): delta_prices.append(prices[i]-prices[i-1]) # O(n) max_profits = [0, 0] max_sub_array = 0 current_sub_array = 0 for j in xrange(len(delta_prices)): if current_sub_array+delta_prices[j] >= 0: current_sub_array += delta_prices[j] max_sub_array = max(max_sub_array, current_sub_array) else: # keep two 2 if max_sub_array > max_profits[0]: max_profits[1] = max_profits[0] max_profits[0] = max_sub_array elif max_sub_array > max_profits[1]: max_profits[1] = max_sub_array max_sub_array = 0 current_sub_array = 0 return sum(max_profits)
""" Say you have an array for which the ith element is the price of a given stock on day i. Design an algorithm to find the maximum profit. You may complete at most two transactions. Note: You may not engage in multiple transactions at the same time (ie, you must sell the stock before you buy again). """ __author__ = 'Danyang' class Solution: def maxProfit(self, prices): """ Algorithm: dp 1-D dp. Given an i, split the whole array into TWO parts: [0... i] and [i+1... n-1], it generates two max value based on i, max_profit[0... i] and max_profit[i+1... n-1] global_max_profit = max(max_profit[0... i] + max_profit[i+1... n-1]), for all i. :param prices: list of integers :return: integer """ if len(prices) <= 1: return 0 # O(n) using dp forward = [0 for _ in xrange(len(prices))] # forward[i] for 0..i lowest_buy_price = prices[0] for i in xrange(1, len(prices)): # if i==0: # forward[i] = 0 # else: forward[i] = max(forward[i-1], prices[i]-lowest_buy_price) lowest_buy_price = min(prices[i], lowest_buy_price) backward = [0 for _ in xrange(len(prices))] # backward[i] for i..len-1 highest_sell_price = prices[-1] for i in xrange(len(prices)-2, -1, -1): # if i==len(prices)-1: # backward[i] = 0 # else: backward[i] = max(backward[i+1], highest_sell_price-prices[i]) highest_sell_price = max(prices[i], highest_sell_price) max_profit = 0 for i in xrange(len(prices)): max_profit = max(max_profit, forward[i]+backward[i]) return max_profit def maxProfit_error(self, prices): """ 2 transactions you may not engage in multiple transactions at the same time (ie, you must sell the stock before you buy again). 
:param prices: list of integers :return: integer """ if len(prices) <= 1: return 0 delta_prices = [] for i in xrange(1, len(prices)): delta_prices.append(prices[i]-prices[i-1]) # O(n) max_profits = [0, 0] max_sub_array = 0 current_sub_array = 0 for j in xrange(len(delta_prices)): if current_sub_array+delta_prices[j] >= 0: current_sub_array += delta_prices[j] max_sub_array = max(max_sub_array, current_sub_array) else: # keep two 2 if max_sub_array > max_profits[0]: max_profits[1] = max_profits[0] max_profits[0] = max_sub_array elif max_sub_array > max_profits[1]: max_profits[1] = max_sub_array max_sub_array = 0 current_sub_array = 0 return sum(max_profits)
en
0.786767
Say you have an array for which the ith element is the price of a given stock on day i. Design an algorithm to find the maximum profit. You may complete at most two transactions. Note: You may not engage in multiple transactions at the same time (ie, you must sell the stock before you buy again). Algorithm: dp 1-D dp. Given an i, split the whole array into TWO parts: [0... i] and [i+1... n-1], it generates two max value based on i, max_profit[0... i] and max_profit[i+1... n-1] global_max_profit = max(max_profit[0... i] + max_profit[i+1... n-1]), for all i. :param prices: list of integers :return: integer # O(n) using dp # forward[i] for 0..i # if i==0: # forward[i] = 0 # else: # backward[i] for i..len-1 # if i==len(prices)-1: # backward[i] = 0 # else: 2 transactions you may not engage in multiple transactions at the same time (ie, you must sell the stock before you buy again). :param prices: list of integers :return: integer # O(n) # keep two 2
3.699475
4
decida-por-mim-main/main.py
RamonBomfim/decida-por-mim
0
6624569
<reponame>RamonBomfim/decida-por-mim from time import sleep import random class DecidaPorMim(): def __init__(self): self.respostas = [ 'Não faço ideia.', 'Só sei que nada sei.', 'Tá maluco?', 'Com certeza... não.', 'É claro que sim.', 'Tá pegando fogo bicho!!!', 'Oloko meu.', 'Vamoooos aplaudiiirr!', 'Loucura, loucura, loucura.', 'Robooooooooooooooooooo!', ] def Menu(self): print('* Seja muito bem-vindo ao programa "Decida por mim". *') print() print() self.iniciar = input( 'Espero que se divirta! (Digite I para iniciar) ').lower() if self.iniciar == 'i': self.Iniciar() elif self.iniciar != 'i': print('Até breve!') def Iniciar(self): self.DuvidaDoUsuario() print('Pensando...') sleep(3) self.resposta = self.EscolhendoAResposta() print(self.resposta) sleep(2) self.DesejaContinuar() def DesejaContinuar(self): self.deseja_continuar = input( 'Deseja fazer-me outra pergunta?(s/n) ').lower() if self.deseja_continuar == 's': self.Iniciar() elif self.deseja_continuar == 'n': print('Até mais!') while True: break else: print('Por favor, digite "s" para sim ou "n" para não.') self.DesejaContinuar() def DuvidaDoUsuario(self): self.duvida = input("Faça-me uma pergunta: ") def EscolhendoAResposta(self): return random.choice(self.respostas) start = DecidaPorMim() start.Menu()
from time import sleep import random class DecidaPorMim(): def __init__(self): self.respostas = [ 'Não faço ideia.', 'Só sei que nada sei.', 'Tá maluco?', 'Com certeza... não.', 'É claro que sim.', 'Tá pegando fogo bicho!!!', 'Oloko meu.', 'Vamoooos aplaudiiirr!', 'Loucura, loucura, loucura.', 'Robooooooooooooooooooo!', ] def Menu(self): print('* Seja muito bem-vindo ao programa "Decida por mim". *') print() print() self.iniciar = input( 'Espero que se divirta! (Digite I para iniciar) ').lower() if self.iniciar == 'i': self.Iniciar() elif self.iniciar != 'i': print('Até breve!') def Iniciar(self): self.DuvidaDoUsuario() print('Pensando...') sleep(3) self.resposta = self.EscolhendoAResposta() print(self.resposta) sleep(2) self.DesejaContinuar() def DesejaContinuar(self): self.deseja_continuar = input( 'Deseja fazer-me outra pergunta?(s/n) ').lower() if self.deseja_continuar == 's': self.Iniciar() elif self.deseja_continuar == 'n': print('Até mais!') while True: break else: print('Por favor, digite "s" para sim ou "n" para não.') self.DesejaContinuar() def DuvidaDoUsuario(self): self.duvida = input("Faça-me uma pergunta: ") def EscolhendoAResposta(self): return random.choice(self.respostas) start = DecidaPorMim() start.Menu()
none
1
3.963308
4
main.py
ananyahjha93/challenges-in-disentangling
12
6624570
<reponame>ananyahjha93/challenges-in-disentangling import argparse from training import training_procedure parser = argparse.ArgumentParser() # add arguments parser.add_argument('--cuda', type=bool, default=True, help="run the following code on a GPU") parser.add_argument('--batch_size', type=int, default=64, help="batch size for training") parser.add_argument('--image_size', type=int, default=28, help="height and width of the image") parser.add_argument('--num_channels', type=int, default=1, help="number of channels in the image") parser.add_argument('--initial_learning_rate', type=float, default=0.0001, help="starting learning rate") parser.add_argument('--nv_dim', type=int, default=16, help="dimension of varying factor latent space") parser.add_argument('--nc_dim', type=int, default=16, help="dimension of common factor latent space") parser.add_argument('--num_classes', type=int, default=10, help="number of classes on which the data set trained") # arguments to control per iteration training of architecture parser.add_argument('--train_discriminator', type=bool, default=True, help="flag for discriminator training") parser.add_argument('--train_generator', type=bool, default=True, help="flag for generator training") parser.add_argument('--train_auto_encoder', type=bool, default=True, help="train the auto-encoder part") parser.add_argument('--generator_times', type=int, default=2, help="number of times the generator is run") parser.add_argument('--discriminator_times', type=int, default=1, help="number of times the discriminator is run") parser.add_argument( '--discriminator_limiting_accuracy', type=float, default=0.8, help="acc. 
at which discriminator is stopped training" ) parser.add_argument('--beta_1', type=float, default=0.5, help="default beta_1 val for adam") parser.add_argument('--beta_2', type=float, default=0.999, help="default beta_2 val for adam") # loss function coefficient parser.add_argument('--gen_coef', type=float, default=1., help="coefficient for the adversarial loss term") parser.add_argument('--disc_coef', type=float, default=1., help="coefficient for the adversarial loss term") parser.add_argument('--decorr_coef', type=float, default=1., help="coefficient for decorrelation loss term") # paths to save models parser.add_argument('--encoder_save', type=str, default='encoder', help="model save for encoder") parser.add_argument('--decoder_save', type=str, default='decoder', help="model save for decoder") parser.add_argument('--discriminator_save', type=str, default='discriminator', help="model save for discriminator") parser.add_argument('--log_file', type=str, default='log.txt', help="text file to save training logs") parser.add_argument('--load_saved', type=bool, default=False, help="flag to indicate if a saved model will be loaded") parser.add_argument('--start_epoch', type=int, default=0, help="flag to set the starting epoch for training") parser.add_argument('--end_epoch', type=int, default=50, help="flag to indicate the final epoch of training") FLAGS = parser.parse_args() if __name__ == '__main__': training_procedure(FLAGS)
import argparse from training import training_procedure parser = argparse.ArgumentParser() # add arguments parser.add_argument('--cuda', type=bool, default=True, help="run the following code on a GPU") parser.add_argument('--batch_size', type=int, default=64, help="batch size for training") parser.add_argument('--image_size', type=int, default=28, help="height and width of the image") parser.add_argument('--num_channels', type=int, default=1, help="number of channels in the image") parser.add_argument('--initial_learning_rate', type=float, default=0.0001, help="starting learning rate") parser.add_argument('--nv_dim', type=int, default=16, help="dimension of varying factor latent space") parser.add_argument('--nc_dim', type=int, default=16, help="dimension of common factor latent space") parser.add_argument('--num_classes', type=int, default=10, help="number of classes on which the data set trained") # arguments to control per iteration training of architecture parser.add_argument('--train_discriminator', type=bool, default=True, help="flag for discriminator training") parser.add_argument('--train_generator', type=bool, default=True, help="flag for generator training") parser.add_argument('--train_auto_encoder', type=bool, default=True, help="train the auto-encoder part") parser.add_argument('--generator_times', type=int, default=2, help="number of times the generator is run") parser.add_argument('--discriminator_times', type=int, default=1, help="number of times the discriminator is run") parser.add_argument( '--discriminator_limiting_accuracy', type=float, default=0.8, help="acc. 
at which discriminator is stopped training" ) parser.add_argument('--beta_1', type=float, default=0.5, help="default beta_1 val for adam") parser.add_argument('--beta_2', type=float, default=0.999, help="default beta_2 val for adam") # loss function coefficient parser.add_argument('--gen_coef', type=float, default=1., help="coefficient for the adversarial loss term") parser.add_argument('--disc_coef', type=float, default=1., help="coefficient for the adversarial loss term") parser.add_argument('--decorr_coef', type=float, default=1., help="coefficient for decorrelation loss term") # paths to save models parser.add_argument('--encoder_save', type=str, default='encoder', help="model save for encoder") parser.add_argument('--decoder_save', type=str, default='decoder', help="model save for decoder") parser.add_argument('--discriminator_save', type=str, default='discriminator', help="model save for discriminator") parser.add_argument('--log_file', type=str, default='log.txt', help="text file to save training logs") parser.add_argument('--load_saved', type=bool, default=False, help="flag to indicate if a saved model will be loaded") parser.add_argument('--start_epoch', type=int, default=0, help="flag to set the starting epoch for training") parser.add_argument('--end_epoch', type=int, default=50, help="flag to indicate the final epoch of training") FLAGS = parser.parse_args() if __name__ == '__main__': training_procedure(FLAGS)
en
0.637208
# add arguments # arguments to control per iteration training of architecture # loss function coefficient # paths to save models
2.72939
3
authors/apps/articles/tests/test_bookmarks.py
andela/Ah-backend-valkyrie
0
6624571
from django.test import TestCase from rest_framework.reverse import reverse from rest_framework import status from authors.apps.authentication.tests.base import BaseTestMethods from authors.apps.articles.models import Article from authors.apps.authentication.models import User class BookmarkTestCase(BaseTestMethods): def test_bookmark_article(self): # bookmark article response = self.bookmark_article() self.assertEqual(response.status_code, status.HTTP_201_CREATED ) self.assertEqual(response.data['article']['slug'], "test-article-today") def test_bookmark_article_with_wrong_slug(self): # create article self.create_article() article_slug = "whatsup" url = reverse("articles:bookmark-articles", args=[article_slug]) self.client.credentials( HTTP_AUTHORIZATION='Bearer ' + self.get_user_token()) response = self.client.post(url, data=self.article, format='json') self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND ) self.assertEqual(response.data['detail'], "Not found.") def test_user_cannot_bookmark_again(self): # create article article = self.create_article() article_slug = article.data['slug'] url = reverse("articles:bookmark-articles", args=[article_slug]) self.client.credentials( HTTP_AUTHORIZATION='Bearer ' + self.get_user_token()) response = self.client.post(url, data=self.article, format='json') url = reverse("articles:bookmark-articles", args=[article_slug]) self.client.credentials( HTTP_AUTHORIZATION='Bearer ' + self.get_user_token()) response = self.client.post(url, data=self.article, format='json') self.assertEqual(response.status_code, status.HTTP_406_NOT_ACCEPTABLE ) self.assertEqual(response.data['message'], "You cannot bookmark the same article twice") def test_unbookmark_article(self): # bookmark article response = self.bookmark_article() bookmark_id = response.data['id'] article_slug = self.create_article().data['slug'] # unbookmark article url = reverse("articles:unbookmark-articles", args=[article_slug, bookmark_id ]) 
self.client.credentials( HTTP_AUTHORIZATION='Bearer ' + self.get_user_token()) response = self.client.delete(url, data=self.article, format='json') self.assertEqual(response.status_code, status.HTTP_200_OK ) self.assertEqual(response.data.get('status'), "unbookmarked") def test_unbookmark_not_existant_bookmark(self): # bookmark article self.bookmark_article() bookmark_id = 10 article_slug = self.create_article().data['slug'] # unbookmark article url = reverse("articles:unbookmark-articles", args=[article_slug, bookmark_id ]) self.client.credentials( HTTP_AUTHORIZATION='Bearer ' + self.get_user_token()) response = self.client.delete(url, data=self.article, format='json') self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND ) def test_unbookmark_not_existant_article(self): # bookmark article response = self.bookmark_article() bookmark_id = response.data['id'] article_slug = "whatsup" # unbookmark article url = reverse("articles:unbookmark-articles", args=[article_slug, bookmark_id ]) self.client.credentials( HTTP_AUTHORIZATION='Bearer ' + self.get_user_token()) response = self.client.delete(url, data=self.article, format='json') self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND ) self.assertEqual(response.data['detail'], "Not found.") def test_get_bookmarks(self): article = self.create_article() article_slug = article.data['slug'] # bookmark article url = reverse("articles:bookmark-articles", args=[article_slug]) self.client.credentials( HTTP_AUTHORIZATION='Bearer ' + self.get_user_token()) response = self.client.post(url, data=self.article, format='json') # get article url = reverse("articles:bookmark-articles", args=[article_slug]) self.client.credentials( HTTP_AUTHORIZATION='Bearer ' + self.get_user_token()) response = self.client.get(url, data=self.article, format='json') self.assertEqual(response.status_code, status.HTTP_200_OK ) self.assertEqual(response.data['results'][0]['article']['slug'], "test-article-today") def 
test_get_bookmark(self): article = self.create_article() article_slug = article.data['slug'] # bookmark article url = reverse("articles:bookmark-articles", args=[article_slug]) self.client.credentials( HTTP_AUTHORIZATION='Bearer ' + self.get_user_token()) response = self.client.post(url, data=self.article, format='json') bookmark_id = response.data['id'] # get article url = reverse("articles:get-bookmark-articles", args=[bookmark_id]) self.client.credentials( HTTP_AUTHORIZATION='Bearer ' + self.get_user_token()) response = self.client.get(url, data=self.article, format='json') self.assertEqual(response.status_code, status.HTTP_200_OK ) self.assertEqual(response.data['article']['slug'], "test-article-today")
from django.test import TestCase from rest_framework.reverse import reverse from rest_framework import status from authors.apps.authentication.tests.base import BaseTestMethods from authors.apps.articles.models import Article from authors.apps.authentication.models import User class BookmarkTestCase(BaseTestMethods): def test_bookmark_article(self): # bookmark article response = self.bookmark_article() self.assertEqual(response.status_code, status.HTTP_201_CREATED ) self.assertEqual(response.data['article']['slug'], "test-article-today") def test_bookmark_article_with_wrong_slug(self): # create article self.create_article() article_slug = "whatsup" url = reverse("articles:bookmark-articles", args=[article_slug]) self.client.credentials( HTTP_AUTHORIZATION='Bearer ' + self.get_user_token()) response = self.client.post(url, data=self.article, format='json') self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND ) self.assertEqual(response.data['detail'], "Not found.") def test_user_cannot_bookmark_again(self): # create article article = self.create_article() article_slug = article.data['slug'] url = reverse("articles:bookmark-articles", args=[article_slug]) self.client.credentials( HTTP_AUTHORIZATION='Bearer ' + self.get_user_token()) response = self.client.post(url, data=self.article, format='json') url = reverse("articles:bookmark-articles", args=[article_slug]) self.client.credentials( HTTP_AUTHORIZATION='Bearer ' + self.get_user_token()) response = self.client.post(url, data=self.article, format='json') self.assertEqual(response.status_code, status.HTTP_406_NOT_ACCEPTABLE ) self.assertEqual(response.data['message'], "You cannot bookmark the same article twice") def test_unbookmark_article(self): # bookmark article response = self.bookmark_article() bookmark_id = response.data['id'] article_slug = self.create_article().data['slug'] # unbookmark article url = reverse("articles:unbookmark-articles", args=[article_slug, bookmark_id ]) 
self.client.credentials( HTTP_AUTHORIZATION='Bearer ' + self.get_user_token()) response = self.client.delete(url, data=self.article, format='json') self.assertEqual(response.status_code, status.HTTP_200_OK ) self.assertEqual(response.data.get('status'), "unbookmarked") def test_unbookmark_not_existant_bookmark(self): # bookmark article self.bookmark_article() bookmark_id = 10 article_slug = self.create_article().data['slug'] # unbookmark article url = reverse("articles:unbookmark-articles", args=[article_slug, bookmark_id ]) self.client.credentials( HTTP_AUTHORIZATION='Bearer ' + self.get_user_token()) response = self.client.delete(url, data=self.article, format='json') self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND ) def test_unbookmark_not_existant_article(self): # bookmark article response = self.bookmark_article() bookmark_id = response.data['id'] article_slug = "whatsup" # unbookmark article url = reverse("articles:unbookmark-articles", args=[article_slug, bookmark_id ]) self.client.credentials( HTTP_AUTHORIZATION='Bearer ' + self.get_user_token()) response = self.client.delete(url, data=self.article, format='json') self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND ) self.assertEqual(response.data['detail'], "Not found.") def test_get_bookmarks(self): article = self.create_article() article_slug = article.data['slug'] # bookmark article url = reverse("articles:bookmark-articles", args=[article_slug]) self.client.credentials( HTTP_AUTHORIZATION='Bearer ' + self.get_user_token()) response = self.client.post(url, data=self.article, format='json') # get article url = reverse("articles:bookmark-articles", args=[article_slug]) self.client.credentials( HTTP_AUTHORIZATION='Bearer ' + self.get_user_token()) response = self.client.get(url, data=self.article, format='json') self.assertEqual(response.status_code, status.HTTP_200_OK ) self.assertEqual(response.data['results'][0]['article']['slug'], "test-article-today") def 
test_get_bookmark(self): article = self.create_article() article_slug = article.data['slug'] # bookmark article url = reverse("articles:bookmark-articles", args=[article_slug]) self.client.credentials( HTTP_AUTHORIZATION='Bearer ' + self.get_user_token()) response = self.client.post(url, data=self.article, format='json') bookmark_id = response.data['id'] # get article url = reverse("articles:get-bookmark-articles", args=[bookmark_id]) self.client.credentials( HTTP_AUTHORIZATION='Bearer ' + self.get_user_token()) response = self.client.get(url, data=self.article, format='json') self.assertEqual(response.status_code, status.HTTP_200_OK ) self.assertEqual(response.data['article']['slug'], "test-article-today")
en
0.548345
# bookmark article # create article # create article # bookmark article # unbookmark article # bookmark article # unbookmark article # bookmark article # unbookmark article # bookmark article # get article # bookmark article # get article
2.328284
2
tests/test_realtime.py
xqdzn/pyzatt
69
6624572
<reponame>xqdzn/pyzatt<gh_stars>10-100 #!/usr/bin/env python import pytest import pyzatt.misc as misc import pyzatt.pyzatt as pyzatt import pyzatt.zkmodules.defs as DEFS """ Test script to test/show parsing functions of the realtime spec/lib. WARNING: Apply this test to devices that aren't under current use, if a deployed device is used, remember to upload the data to the device(Sync) using the ZKAccess software, that will overwrite any changes made by the script. Author: <NAME> <<EMAIL>> """ @pytest.mark.skip(reason="manual test") @pytest.mark.manual def test_realtime(parse_options): assert parse_options, "Invalid run settings" opts = parse_options ip_address = opts['ip-address'] # set the ip address of the device to test machine_port = 4370 z = pyzatt.ZKSS() misc.print_header("TEST OF REALTIME FUNCTIONS") # connection misc.print_header("1.Realtime Test") z.connect_net(ip_address, machine_port) # read user ids z.disable_device() z.read_all_user_id() z.enable_device() # enable the report of rt packets z.enable_realtime() misc.print_info("Ready to receive events from the machine") try: while True: # wait for event z.recv_event() ev = z.get_last_event() # process the event print("\n"+"#"*50) print("Received event") if ev == DEFS.EF_ALARM: print("EF_ALARM:") alarm_code = z.parse_alarm_type() # check alarm source if alarm_code == 0x3A: # misoperation print("Misoperation alarm!") elif alarm_code == 0x37: # tamper print("Tampering alarm!") elif alarm_code == 0x35: # exit button print("Exit button pressed!") elif alarm_code == 0x54: # door is closing print("Door is closing") elif alarm_code == 0xffffffff: # duress alarm durr_type = z.parse_duress_alarm()[0] if durr_type == 0x20: print("Duress alarm!") print("User index: %s, matching type: %i" % tuple(z.parse_duress_alarm()[1:])) elif durr_type == 0x22: print("Passback alarm!") else: print("Unknown duress alarm") else: print("Unknown alarm") elif ev == DEFS.EF_ATTLOG: print("EF_ATTLOG: New attendance entry") print("User 
id: %s, verify type %i, date: %s" % tuple(z.parse_event_attlog())) elif ev == DEFS.EF_FINGER: print("EF_FINGER: Finger placed on reader") elif ev == DEFS.EF_ENROLLUSER: print("EF_ENROLLUSER: Enrolled user") elif ev == DEFS.EF_ENROLLFINGER: print("EF_ENROLLFINGER: Enroll finger finished") print("Successful: %s, user ID: %s, finger index: %s, " "size fp template: %i" % tuple(z.parse_event_enroll_fp())) elif ev == DEFS.EF_BUTTON: print("EF_BUTTON: Pressed button") elif ev == DEFS.EF_UNLOCK: print("EF_UNLOCK: Unlock event") elif ev == DEFS.EF_VERIFY: print("EF_VERIFY: Verified user") user_sn = z.parse_verify_event() if user_sn == 0xffffffff: user_id = '-1' else: user_id = z.users[user_sn].user_id print("User id: %s" % user_id) elif ev == DEFS.EF_FPFTR: print("EF_FPFTR: ") print("Score: %i" % z.parse_score_fp_event()) else: print("Unknown event:") misc.print_hex(z.get_last_packet()) except KeyboardInterrupt: misc.print_info("\nExiting...") z.disconnect() if __name__ == "__main__": print("running manual test") opts = {'ip-address': "192.168.1.201"} test_realtime(opts)
#!/usr/bin/env python import pytest import pyzatt.misc as misc import pyzatt.pyzatt as pyzatt import pyzatt.zkmodules.defs as DEFS """ Test script to test/show parsing functions of the realtime spec/lib. WARNING: Apply this test to devices that aren't under current use, if a deployed device is used, remember to upload the data to the device(Sync) using the ZKAccess software, that will overwrite any changes made by the script. Author: <NAME> <<EMAIL>> """ @pytest.mark.skip(reason="manual test") @pytest.mark.manual def test_realtime(parse_options): assert parse_options, "Invalid run settings" opts = parse_options ip_address = opts['ip-address'] # set the ip address of the device to test machine_port = 4370 z = pyzatt.ZKSS() misc.print_header("TEST OF REALTIME FUNCTIONS") # connection misc.print_header("1.Realtime Test") z.connect_net(ip_address, machine_port) # read user ids z.disable_device() z.read_all_user_id() z.enable_device() # enable the report of rt packets z.enable_realtime() misc.print_info("Ready to receive events from the machine") try: while True: # wait for event z.recv_event() ev = z.get_last_event() # process the event print("\n"+"#"*50) print("Received event") if ev == DEFS.EF_ALARM: print("EF_ALARM:") alarm_code = z.parse_alarm_type() # check alarm source if alarm_code == 0x3A: # misoperation print("Misoperation alarm!") elif alarm_code == 0x37: # tamper print("Tampering alarm!") elif alarm_code == 0x35: # exit button print("Exit button pressed!") elif alarm_code == 0x54: # door is closing print("Door is closing") elif alarm_code == 0xffffffff: # duress alarm durr_type = z.parse_duress_alarm()[0] if durr_type == 0x20: print("Duress alarm!") print("User index: %s, matching type: %i" % tuple(z.parse_duress_alarm()[1:])) elif durr_type == 0x22: print("Passback alarm!") else: print("Unknown duress alarm") else: print("Unknown alarm") elif ev == DEFS.EF_ATTLOG: print("EF_ATTLOG: New attendance entry") print("User id: %s, verify type %i, date: %s" % 
tuple(z.parse_event_attlog())) elif ev == DEFS.EF_FINGER: print("EF_FINGER: Finger placed on reader") elif ev == DEFS.EF_ENROLLUSER: print("EF_ENROLLUSER: Enrolled user") elif ev == DEFS.EF_ENROLLFINGER: print("EF_ENROLLFINGER: Enroll finger finished") print("Successful: %s, user ID: %s, finger index: %s, " "size fp template: %i" % tuple(z.parse_event_enroll_fp())) elif ev == DEFS.EF_BUTTON: print("EF_BUTTON: Pressed button") elif ev == DEFS.EF_UNLOCK: print("EF_UNLOCK: Unlock event") elif ev == DEFS.EF_VERIFY: print("EF_VERIFY: Verified user") user_sn = z.parse_verify_event() if user_sn == 0xffffffff: user_id = '-1' else: user_id = z.users[user_sn].user_id print("User id: %s" % user_id) elif ev == DEFS.EF_FPFTR: print("EF_FPFTR: ") print("Score: %i" % z.parse_score_fp_event()) else: print("Unknown event:") misc.print_hex(z.get_last_packet()) except KeyboardInterrupt: misc.print_info("\nExiting...") z.disconnect() if __name__ == "__main__": print("running manual test") opts = {'ip-address': "192.168.1.201"} test_realtime(opts)
en
0.839684
#!/usr/bin/env python Test script to test/show parsing functions of the realtime spec/lib. WARNING: Apply this test to devices that aren't under current use, if a deployed device is used, remember to upload the data to the device(Sync) using the ZKAccess software, that will overwrite any changes made by the script. Author: <NAME> <<EMAIL>> # set the ip address of the device to test # connection # read user ids # enable the report of rt packets # wait for event # process the event # check alarm source # misoperation # tamper # exit button # door is closing # duress alarm
2.272666
2
robot-server/tests/runs/test_run_data_manager.py
Opentrons/protocol_framework
0
6624573
<reponame>Opentrons/protocol_framework<filename>robot-server/tests/runs/test_run_data_manager.py """Tests for RunDataManager.""" from typing import Optional import pytest from datetime import datetime from decoy import Decoy, matchers from opentrons.types import DeckSlotName from opentrons.protocol_runner import ProtocolRunResult from opentrons.protocol_engine import ( EngineStatus, StateSummary, commands, types as pe_types, CommandSlice, CurrentCommand, ErrorOccurrence, LoadedLabware, LoadedPipette, LoadedModule, LabwareOffset, ) from robot_server.protocols import ProtocolResource from robot_server.runs.engine_store import EngineStore, EngineConflictError from robot_server.runs.run_data_manager import RunDataManager, RunNotCurrentError from robot_server.runs.run_models import Run from robot_server.runs.run_store import ( RunStore, RunResource, RunNotFoundError, CommandNotFoundError, ) from robot_server.service.task_runner import TaskRunner @pytest.fixture def mock_engine_store(decoy: Decoy) -> EngineStore: """Get a mock EngineStore.""" mock = decoy.mock(cls=EngineStore) decoy.when(mock.current_run_id).then_return(None) return mock @pytest.fixture def mock_run_store(decoy: Decoy) -> RunStore: """Get a mock RunStore.""" return decoy.mock(cls=RunStore) @pytest.fixture() def mock_task_runner(decoy: Decoy) -> TaskRunner: """Get a mock background TaskRunner.""" return decoy.mock(cls=TaskRunner) @pytest.fixture def engine_state_summary() -> StateSummary: """Get a StateSummary value object.""" return StateSummary( status=EngineStatus.IDLE, errors=[ErrorOccurrence.construct(id="some-error-id")], # type: ignore[call-arg] labware=[LoadedLabware.construct(id="some-labware-id")], # type: ignore[call-arg] labwareOffsets=[LabwareOffset.construct(id="some-labware-offset-id")], # type: ignore[call-arg] pipettes=[LoadedPipette.construct(id="some-pipette-id")], # type: ignore[call-arg] modules=[LoadedModule.construct(id="some-module-id")], # type: ignore[call-arg] ) @pytest.fixture 
def run_resource() -> RunResource: """Get a StateSummary value object.""" return RunResource( run_id="hello from the other side", protocol_id=None, created_at=datetime(year=2022, month=2, day=2), actions=[], ) @pytest.fixture def run_command() -> commands.Command: """Get a ProtocolEngine Command value object.""" return commands.WaitForResume( id="command-id", key="command-key", createdAt=datetime(year=2021, month=1, day=1), status=commands.CommandStatus.SUCCEEDED, params=commands.WaitForResumeParams(message="Hello"), ) @pytest.fixture def subject( mock_engine_store: EngineStore, mock_run_store: RunStore, mock_task_runner: TaskRunner, ) -> RunDataManager: """Get a RunDataManager test subject.""" return RunDataManager( engine_store=mock_engine_store, run_store=mock_run_store, task_runner=mock_task_runner, ) async def test_create( decoy: Decoy, mock_engine_store: EngineStore, mock_run_store: RunStore, subject: RunDataManager, engine_state_summary: StateSummary, run_resource: RunResource, ) -> None: """It should create an engine and a persisted run resource.""" run_id = "hello world" created_at = datetime(year=2021, month=1, day=1) decoy.when( await mock_engine_store.create(run_id=run_id, labware_offsets=[], protocol=None) ).then_return(engine_state_summary) decoy.when( mock_run_store.insert( run_id=run_id, protocol_id=None, created_at=created_at, ) ).then_return(run_resource) result = await subject.create( run_id=run_id, created_at=created_at, labware_offsets=[], protocol=None, ) assert result == Run( id=run_resource.run_id, protocolId=run_resource.protocol_id, createdAt=run_resource.created_at, current=True, actions=run_resource.actions, status=engine_state_summary.status, errors=engine_state_summary.errors, labware=engine_state_summary.labware, labwareOffsets=engine_state_summary.labwareOffsets, pipettes=engine_state_summary.pipettes, modules=engine_state_summary.modules, ) async def test_create_with_options( decoy: Decoy, mock_engine_store: EngineStore, 
mock_run_store: RunStore, subject: RunDataManager, engine_state_summary: StateSummary, run_resource: RunResource, ) -> None: """It should handle creation with a protocol and labware offsets.""" run_id = "hello world" created_at = datetime(year=2021, month=1, day=1) protocol = ProtocolResource( protocol_id="protocol-id", created_at=datetime(year=2022, month=2, day=2), source=None, # type: ignore[arg-type] protocol_key=None, ) labware_offset = pe_types.LabwareOffsetCreate( definitionUri="namespace/load_name/version", location=pe_types.LabwareOffsetLocation(slotName=DeckSlotName.SLOT_5), vector=pe_types.LabwareOffsetVector(x=1, y=2, z=3), ) decoy.when( await mock_engine_store.create( run_id=run_id, labware_offsets=[labware_offset], protocol=protocol, ) ).then_return(engine_state_summary) decoy.when( mock_run_store.insert( run_id=run_id, protocol_id="protocol-id", created_at=created_at, ) ).then_return(run_resource) result = await subject.create( run_id=run_id, created_at=created_at, labware_offsets=[labware_offset], protocol=protocol, ) assert result == Run( id=run_resource.run_id, protocolId=run_resource.protocol_id, createdAt=run_resource.created_at, current=True, actions=run_resource.actions, status=engine_state_summary.status, errors=engine_state_summary.errors, labware=engine_state_summary.labware, labwareOffsets=engine_state_summary.labwareOffsets, pipettes=engine_state_summary.pipettes, modules=engine_state_summary.modules, ) async def test_create_engine_error( decoy: Decoy, mock_engine_store: EngineStore, mock_run_store: RunStore, subject: RunDataManager, ) -> None: """It should not create a resource if engine creation fails.""" run_id = "hello world" created_at = datetime(year=2021, month=1, day=1) decoy.when( await mock_engine_store.create(run_id, labware_offsets=[], protocol=None) ).then_raise(EngineConflictError("oh no")) with pytest.raises(EngineConflictError): await subject.create( run_id=run_id, created_at=created_at, labware_offsets=[], protocol=None, 
) decoy.verify( mock_run_store.insert( run_id=run_id, created_at=matchers.Anything(), protocol_id=matchers.Anything(), ), times=0, ) async def test_get_current_run( decoy: Decoy, mock_engine_store: EngineStore, mock_run_store: RunStore, subject: RunDataManager, engine_state_summary: StateSummary, run_resource: RunResource, ) -> None: """It should get the current run from the engine.""" run_id = "hello world" decoy.when(mock_run_store.get(run_id=run_id)).then_return(run_resource) decoy.when(mock_engine_store.current_run_id).then_return(run_id) decoy.when(mock_engine_store.engine.state_view.get_summary()).then_return( engine_state_summary ) result = subject.get(run_id=run_id) assert result == Run( current=True, id=run_resource.run_id, protocolId=run_resource.protocol_id, createdAt=run_resource.created_at, actions=run_resource.actions, status=engine_state_summary.status, errors=engine_state_summary.errors, labware=engine_state_summary.labware, labwareOffsets=engine_state_summary.labwareOffsets, pipettes=engine_state_summary.pipettes, modules=engine_state_summary.modules, ) assert subject.current_run_id == run_id async def test_get_historical_run( decoy: Decoy, mock_engine_store: EngineStore, mock_run_store: RunStore, subject: RunDataManager, engine_state_summary: StateSummary, run_resource: RunResource, ) -> None: """It should get a historical run from the store.""" run_id = "hello world" decoy.when(mock_run_store.get(run_id=run_id)).then_return(run_resource) decoy.when(mock_run_store.get_state_summary(run_id=run_id)).then_return( engine_state_summary ) decoy.when(mock_engine_store.current_run_id).then_return("some other id") result = subject.get(run_id=run_id) assert result == Run( current=False, id=run_resource.run_id, protocolId=run_resource.protocol_id, createdAt=run_resource.created_at, actions=run_resource.actions, status=engine_state_summary.status, errors=engine_state_summary.errors, labware=engine_state_summary.labware, 
labwareOffsets=engine_state_summary.labwareOffsets, pipettes=engine_state_summary.pipettes, modules=engine_state_summary.modules, ) async def test_get_historical_run_no_data( decoy: Decoy, mock_engine_store: EngineStore, mock_run_store: RunStore, subject: RunDataManager, run_resource: RunResource, ) -> None: """It should get a historical run from the store.""" run_id = "hello world" decoy.when(mock_run_store.get(run_id=run_id)).then_return(run_resource) decoy.when(mock_run_store.get_state_summary(run_id=run_id)).then_return(None) decoy.when(mock_engine_store.current_run_id).then_return("some other id") result = subject.get(run_id=run_id) assert result == Run( current=False, id=run_resource.run_id, protocolId=run_resource.protocol_id, createdAt=run_resource.created_at, actions=run_resource.actions, status=EngineStatus.STOPPED, errors=[], labware=[], labwareOffsets=[], pipettes=[], modules=[], ) async def test_get_all_runs( decoy: Decoy, mock_engine_store: EngineStore, mock_run_store: RunStore, subject: RunDataManager, ) -> None: """It should get all runs, including current and historical.""" current_run_data = StateSummary( status=EngineStatus.IDLE, errors=[ErrorOccurrence.construct(id="current-error-id")], # type: ignore[call-arg] labware=[LoadedLabware.construct(id="current-labware-id")], # type: ignore[call-arg] labwareOffsets=[LabwareOffset.construct(id="current-labware-offset-id")], # type: ignore[call-arg] pipettes=[LoadedPipette.construct(id="current-pipette-id")], # type: ignore[call-arg] modules=[LoadedModule.construct(id="current-module-id")], # type: ignore[call-arg] ) historical_run_data = StateSummary( status=EngineStatus.STOPPED, errors=[ErrorOccurrence.construct(id="old-error-id")], # type: ignore[call-arg] labware=[LoadedLabware.construct(id="old-labware-id")], # type: ignore[call-arg] labwareOffsets=[LabwareOffset.construct(id="old-labware-offset-id")], # type: ignore[call-arg] pipettes=[LoadedPipette.construct(id="old-pipette-id")], # type: 
ignore[call-arg] modules=[LoadedModule.construct(id="old-module-id")], # type: ignore[call-arg] ) current_run_resource = RunResource( run_id="current-run", protocol_id=None, created_at=datetime(year=2022, month=2, day=2), actions=[], ) historical_run_resource = RunResource( run_id="historical-run", protocol_id=None, created_at=datetime(year=2023, month=3, day=3), actions=[], ) decoy.when(mock_engine_store.current_run_id).then_return("current-run") decoy.when(mock_engine_store.engine.state_view.get_summary()).then_return( current_run_data ) decoy.when(mock_run_store.get_state_summary("historical-run")).then_return( historical_run_data ) decoy.when(mock_run_store.get_all()).then_return( [historical_run_resource, current_run_resource] ) result = subject.get_all() assert result == [ Run( current=False, id=historical_run_resource.run_id, protocolId=historical_run_resource.protocol_id, createdAt=historical_run_resource.created_at, actions=historical_run_resource.actions, status=historical_run_data.status, errors=historical_run_data.errors, labware=historical_run_data.labware, labwareOffsets=historical_run_data.labwareOffsets, pipettes=historical_run_data.pipettes, modules=historical_run_data.modules, ), Run( current=True, id=current_run_resource.run_id, protocolId=current_run_resource.protocol_id, createdAt=current_run_resource.created_at, actions=current_run_resource.actions, status=current_run_data.status, errors=current_run_data.errors, labware=current_run_data.labware, labwareOffsets=current_run_data.labwareOffsets, pipettes=current_run_data.pipettes, modules=current_run_data.modules, ), ] async def test_delete_current_run( decoy: Decoy, mock_engine_store: EngineStore, mock_run_store: RunStore, subject: RunDataManager, ) -> None: """It should delete the current run from the engine.""" run_id = "hello world" decoy.when(mock_engine_store.current_run_id).then_return(run_id) await subject.delete(run_id=run_id) decoy.verify(await mock_engine_store.clear(), times=1) 
decoy.verify(mock_run_store.remove(run_id=run_id), times=0) async def test_delete_historical_run( decoy: Decoy, mock_engine_store: EngineStore, mock_run_store: RunStore, subject: RunDataManager, ) -> None: """It should delete a historical run from the store.""" run_id = "hello world" decoy.when(mock_engine_store.current_run_id).then_return("some other id") await subject.delete(run_id=run_id) decoy.verify(await mock_engine_store.clear(), times=0) decoy.verify(mock_run_store.remove(run_id=run_id), times=1) async def test_update_current( decoy: Decoy, engine_state_summary: StateSummary, run_resource: RunResource, run_command: commands.Command, mock_engine_store: EngineStore, mock_run_store: RunStore, subject: RunDataManager, ) -> None: """It should persist the current run and clear the engine on current=false.""" run_id = "hello world" decoy.when(mock_engine_store.current_run_id).then_return(run_id) decoy.when(await mock_engine_store.clear()).then_return( ProtocolRunResult(commands=[run_command], state_summary=engine_state_summary) ) decoy.when( mock_run_store.update_run_state( run_id=run_id, summary=engine_state_summary, commands=[run_command], ) ).then_return(run_resource) result = await subject.update(run_id=run_id, current=False) assert result == Run( current=False, id=run_resource.run_id, protocolId=run_resource.protocol_id, createdAt=run_resource.created_at, actions=run_resource.actions, status=engine_state_summary.status, errors=engine_state_summary.errors, labware=engine_state_summary.labware, labwareOffsets=engine_state_summary.labwareOffsets, pipettes=engine_state_summary.pipettes, modules=engine_state_summary.modules, ) @pytest.mark.parametrize("current", [None, True]) async def test_update_current_noop( decoy: Decoy, engine_state_summary: StateSummary, run_resource: RunResource, run_command: commands.Command, mock_engine_store: EngineStore, mock_run_store: RunStore, subject: RunDataManager, current: Optional[bool], ) -> None: """It should noop on 
current=None and current=True.""" run_id = "hello world" decoy.when(mock_engine_store.current_run_id).then_return(run_id) decoy.when(mock_engine_store.engine.state_view.get_summary()).then_return( engine_state_summary ) decoy.when(mock_run_store.get(run_id=run_id)).then_return(run_resource) result = await subject.update(run_id=run_id, current=current) decoy.verify(await mock_engine_store.clear(), times=0) decoy.verify( mock_run_store.update_run_state( run_id=run_id, summary=matchers.Anything(), commands=matchers.Anything(), ), times=0, ) assert result == Run( current=True, id=run_resource.run_id, protocolId=run_resource.protocol_id, createdAt=run_resource.created_at, actions=run_resource.actions, status=engine_state_summary.status, errors=engine_state_summary.errors, labware=engine_state_summary.labware, labwareOffsets=engine_state_summary.labwareOffsets, pipettes=engine_state_summary.pipettes, modules=engine_state_summary.modules, ) async def test_update_current_not_allowed( decoy: Decoy, engine_state_summary: StateSummary, run_resource: RunResource, run_command: commands.Command, mock_engine_store: EngineStore, mock_run_store: RunStore, subject: RunDataManager, ) -> None: """It should noop on current=None.""" run_id = "hello world" decoy.when(mock_engine_store.current_run_id).then_return("some other id") with pytest.raises(RunNotCurrentError): await subject.update(run_id=run_id, current=False) async def test_create_archives_existing( decoy: Decoy, engine_state_summary: StateSummary, run_resource: RunResource, run_command: commands.Command, mock_engine_store: EngineStore, mock_run_store: RunStore, subject: RunDataManager, ) -> None: """It should persist the previously current run when a new run is created.""" run_id_old = "hello world" run_id_new = "hello is it me you're looking for" decoy.when(mock_engine_store.current_run_id).then_return(run_id_old) decoy.when(await mock_engine_store.clear()).then_return( ProtocolRunResult(commands=[run_command], 
state_summary=engine_state_summary) ) decoy.when( await mock_engine_store.create( run_id=run_id_new, labware_offsets=[], protocol=None, ) ).then_return(engine_state_summary) decoy.when( mock_run_store.insert( run_id=run_id_new, created_at=datetime(year=2021, month=1, day=1), protocol_id=None, ) ).then_return(run_resource) await subject.create( run_id=run_id_new, created_at=datetime(year=2021, month=1, day=1), labware_offsets=[], protocol=None, ) decoy.verify( mock_run_store.update_run_state( run_id=run_id_old, summary=engine_state_summary, commands=[run_command], ) ) def test_get_commands_slice_from_db( decoy: Decoy, subject: RunDataManager, mock_run_store: RunStore, run_command: commands.Command, ) -> None: """Should get a sliced command list from run store.""" expected_commands_result = [ commands.WaitForResume( id="command-id-2", key="command-key", createdAt=datetime(year=2021, month=1, day=1), status=commands.CommandStatus.SUCCEEDED, params=commands.WaitForResumeParams(message="Hello"), ), run_command, ] expected_command_slice = CommandSlice( commands=expected_commands_result, cursor=1, total_length=3 ) decoy.when( mock_run_store.get_commands_slice(run_id="run_id", cursor=1, length=2) ).then_return(expected_command_slice) result = subject.get_commands_slice(run_id="run_id", cursor=1, length=2) assert expected_command_slice == result def test_get_commands_slice_current_run( decoy: Decoy, subject: RunDataManager, mock_engine_store: EngineStore, run_command: commands.Command, ) -> None: """Should get a sliced command list from engine store.""" expected_commands_result = [ commands.WaitForResume( id="command-id-2", key="command-key", createdAt=datetime(year=2021, month=1, day=1), status=commands.CommandStatus.SUCCEEDED, params=commands.WaitForResumeParams(message="Hello"), ), run_command, ] expected_command_slice = CommandSlice( commands=expected_commands_result, cursor=1, total_length=3 ) decoy.when(mock_engine_store.current_run_id).then_return("run-id") 
decoy.when( mock_engine_store.engine.state_view.commands.get_slice(1, 2) ).then_return(expected_command_slice) result = subject.get_commands_slice("run-id", 1, 2) assert expected_command_slice == result def test_get_commands_slice_from_db_run_not_found( decoy: Decoy, subject: RunDataManager, mock_run_store: RunStore ) -> None: """Should get a sliced command list from run store.""" decoy.when( mock_run_store.get_commands_slice(run_id="run-id", cursor=1, length=2) ).then_raise(RunNotFoundError(run_id="run-id")) with pytest.raises(RunNotFoundError): subject.get_commands_slice(run_id="run-id", cursor=1, length=2) def test_get_current_command( decoy: Decoy, subject: RunDataManager, mock_run_store: RunStore, mock_engine_store: EngineStore, run_command: commands.Command, ) -> None: """Should get current command from engine store.""" expected_current = CurrentCommand( command_id=run_command.id, command_key=run_command.key, created_at=run_command.createdAt, index=0, ) decoy.when(mock_engine_store.current_run_id).then_return("run-id") decoy.when(mock_engine_store.engine.state_view.commands.get_current()).then_return( expected_current ) result = subject.get_current_command("run-id") assert result == expected_current def test_get_current_command_not_current_run( decoy: Decoy, subject: RunDataManager, mock_run_store: RunStore, mock_engine_store: EngineStore, ) -> None: """Should return None because the run is not current.""" decoy.when(mock_engine_store.current_run_id).then_return("not-run-id") result = subject.get_current_command("run-id") assert result is None def test_get_command_from_engine( decoy: Decoy, subject: RunDataManager, mock_run_store: RunStore, mock_engine_store: EngineStore, run_command: commands.Command, ) -> None: """Should get command by id from engine store.""" decoy.when(mock_engine_store.current_run_id).then_return("run-id") decoy.when( mock_engine_store.engine.state_view.commands.get("command-id") ).then_return(run_command) result = 
subject.get_command("run-id", "command-id") assert result == run_command def test_get_command_from_db( decoy: Decoy, subject: RunDataManager, mock_run_store: RunStore, mock_engine_store: EngineStore, run_command: commands.Command, ) -> None: """Should get command by id from engine store.""" decoy.when(mock_engine_store.current_run_id).then_return("not-run-id") decoy.when( mock_run_store.get_command(run_id="run-id", command_id="command-id") ).then_return(run_command) result = subject.get_command("run-id", "command-id") assert result == run_command def test_get_command_from_db_run_not_found( decoy: Decoy, subject: RunDataManager, mock_run_store: RunStore, mock_engine_store: EngineStore, run_command: commands.Command, ) -> None: """Should get command by id from engine store.""" decoy.when(mock_engine_store.current_run_id).then_return("not-run-id") decoy.when( mock_run_store.get_command(run_id="run-id", command_id="command-id") ).then_raise(RunNotFoundError("run-id")) with pytest.raises(RunNotFoundError): subject.get_command("run-id", "command-id") def test_get_command_from_db_command_not_found( decoy: Decoy, subject: RunDataManager, mock_run_store: RunStore, mock_engine_store: EngineStore, run_command: commands.Command, ) -> None: """Should get command by id from engine store.""" decoy.when(mock_engine_store.current_run_id).then_return("not-run-id") decoy.when( mock_run_store.get_command(run_id="run-id", command_id="command-id") ).then_raise(CommandNotFoundError(command_id="command-id")) with pytest.raises(CommandNotFoundError): subject.get_command("run-id", "command-id")
"""Tests for RunDataManager.""" from typing import Optional import pytest from datetime import datetime from decoy import Decoy, matchers from opentrons.types import DeckSlotName from opentrons.protocol_runner import ProtocolRunResult from opentrons.protocol_engine import ( EngineStatus, StateSummary, commands, types as pe_types, CommandSlice, CurrentCommand, ErrorOccurrence, LoadedLabware, LoadedPipette, LoadedModule, LabwareOffset, ) from robot_server.protocols import ProtocolResource from robot_server.runs.engine_store import EngineStore, EngineConflictError from robot_server.runs.run_data_manager import RunDataManager, RunNotCurrentError from robot_server.runs.run_models import Run from robot_server.runs.run_store import ( RunStore, RunResource, RunNotFoundError, CommandNotFoundError, ) from robot_server.service.task_runner import TaskRunner @pytest.fixture def mock_engine_store(decoy: Decoy) -> EngineStore: """Get a mock EngineStore.""" mock = decoy.mock(cls=EngineStore) decoy.when(mock.current_run_id).then_return(None) return mock @pytest.fixture def mock_run_store(decoy: Decoy) -> RunStore: """Get a mock RunStore.""" return decoy.mock(cls=RunStore) @pytest.fixture() def mock_task_runner(decoy: Decoy) -> TaskRunner: """Get a mock background TaskRunner.""" return decoy.mock(cls=TaskRunner) @pytest.fixture def engine_state_summary() -> StateSummary: """Get a StateSummary value object.""" return StateSummary( status=EngineStatus.IDLE, errors=[ErrorOccurrence.construct(id="some-error-id")], # type: ignore[call-arg] labware=[LoadedLabware.construct(id="some-labware-id")], # type: ignore[call-arg] labwareOffsets=[LabwareOffset.construct(id="some-labware-offset-id")], # type: ignore[call-arg] pipettes=[LoadedPipette.construct(id="some-pipette-id")], # type: ignore[call-arg] modules=[LoadedModule.construct(id="some-module-id")], # type: ignore[call-arg] ) @pytest.fixture def run_resource() -> RunResource: """Get a StateSummary value object.""" return RunResource( 
run_id="hello from the other side", protocol_id=None, created_at=datetime(year=2022, month=2, day=2), actions=[], ) @pytest.fixture def run_command() -> commands.Command: """Get a ProtocolEngine Command value object.""" return commands.WaitForResume( id="command-id", key="command-key", createdAt=datetime(year=2021, month=1, day=1), status=commands.CommandStatus.SUCCEEDED, params=commands.WaitForResumeParams(message="Hello"), ) @pytest.fixture def subject( mock_engine_store: EngineStore, mock_run_store: RunStore, mock_task_runner: TaskRunner, ) -> RunDataManager: """Get a RunDataManager test subject.""" return RunDataManager( engine_store=mock_engine_store, run_store=mock_run_store, task_runner=mock_task_runner, ) async def test_create( decoy: Decoy, mock_engine_store: EngineStore, mock_run_store: RunStore, subject: RunDataManager, engine_state_summary: StateSummary, run_resource: RunResource, ) -> None: """It should create an engine and a persisted run resource.""" run_id = "hello world" created_at = datetime(year=2021, month=1, day=1) decoy.when( await mock_engine_store.create(run_id=run_id, labware_offsets=[], protocol=None) ).then_return(engine_state_summary) decoy.when( mock_run_store.insert( run_id=run_id, protocol_id=None, created_at=created_at, ) ).then_return(run_resource) result = await subject.create( run_id=run_id, created_at=created_at, labware_offsets=[], protocol=None, ) assert result == Run( id=run_resource.run_id, protocolId=run_resource.protocol_id, createdAt=run_resource.created_at, current=True, actions=run_resource.actions, status=engine_state_summary.status, errors=engine_state_summary.errors, labware=engine_state_summary.labware, labwareOffsets=engine_state_summary.labwareOffsets, pipettes=engine_state_summary.pipettes, modules=engine_state_summary.modules, ) async def test_create_with_options( decoy: Decoy, mock_engine_store: EngineStore, mock_run_store: RunStore, subject: RunDataManager, engine_state_summary: StateSummary, run_resource: 
RunResource, ) -> None: """It should handle creation with a protocol and labware offsets.""" run_id = "hello world" created_at = datetime(year=2021, month=1, day=1) protocol = ProtocolResource( protocol_id="protocol-id", created_at=datetime(year=2022, month=2, day=2), source=None, # type: ignore[arg-type] protocol_key=None, ) labware_offset = pe_types.LabwareOffsetCreate( definitionUri="namespace/load_name/version", location=pe_types.LabwareOffsetLocation(slotName=DeckSlotName.SLOT_5), vector=pe_types.LabwareOffsetVector(x=1, y=2, z=3), ) decoy.when( await mock_engine_store.create( run_id=run_id, labware_offsets=[labware_offset], protocol=protocol, ) ).then_return(engine_state_summary) decoy.when( mock_run_store.insert( run_id=run_id, protocol_id="protocol-id", created_at=created_at, ) ).then_return(run_resource) result = await subject.create( run_id=run_id, created_at=created_at, labware_offsets=[labware_offset], protocol=protocol, ) assert result == Run( id=run_resource.run_id, protocolId=run_resource.protocol_id, createdAt=run_resource.created_at, current=True, actions=run_resource.actions, status=engine_state_summary.status, errors=engine_state_summary.errors, labware=engine_state_summary.labware, labwareOffsets=engine_state_summary.labwareOffsets, pipettes=engine_state_summary.pipettes, modules=engine_state_summary.modules, ) async def test_create_engine_error( decoy: Decoy, mock_engine_store: EngineStore, mock_run_store: RunStore, subject: RunDataManager, ) -> None: """It should not create a resource if engine creation fails.""" run_id = "hello world" created_at = datetime(year=2021, month=1, day=1) decoy.when( await mock_engine_store.create(run_id, labware_offsets=[], protocol=None) ).then_raise(EngineConflictError("oh no")) with pytest.raises(EngineConflictError): await subject.create( run_id=run_id, created_at=created_at, labware_offsets=[], protocol=None, ) decoy.verify( mock_run_store.insert( run_id=run_id, created_at=matchers.Anything(), 
protocol_id=matchers.Anything(), ), times=0, ) async def test_get_current_run( decoy: Decoy, mock_engine_store: EngineStore, mock_run_store: RunStore, subject: RunDataManager, engine_state_summary: StateSummary, run_resource: RunResource, ) -> None: """It should get the current run from the engine.""" run_id = "hello world" decoy.when(mock_run_store.get(run_id=run_id)).then_return(run_resource) decoy.when(mock_engine_store.current_run_id).then_return(run_id) decoy.when(mock_engine_store.engine.state_view.get_summary()).then_return( engine_state_summary ) result = subject.get(run_id=run_id) assert result == Run( current=True, id=run_resource.run_id, protocolId=run_resource.protocol_id, createdAt=run_resource.created_at, actions=run_resource.actions, status=engine_state_summary.status, errors=engine_state_summary.errors, labware=engine_state_summary.labware, labwareOffsets=engine_state_summary.labwareOffsets, pipettes=engine_state_summary.pipettes, modules=engine_state_summary.modules, ) assert subject.current_run_id == run_id async def test_get_historical_run( decoy: Decoy, mock_engine_store: EngineStore, mock_run_store: RunStore, subject: RunDataManager, engine_state_summary: StateSummary, run_resource: RunResource, ) -> None: """It should get a historical run from the store.""" run_id = "hello world" decoy.when(mock_run_store.get(run_id=run_id)).then_return(run_resource) decoy.when(mock_run_store.get_state_summary(run_id=run_id)).then_return( engine_state_summary ) decoy.when(mock_engine_store.current_run_id).then_return("some other id") result = subject.get(run_id=run_id) assert result == Run( current=False, id=run_resource.run_id, protocolId=run_resource.protocol_id, createdAt=run_resource.created_at, actions=run_resource.actions, status=engine_state_summary.status, errors=engine_state_summary.errors, labware=engine_state_summary.labware, labwareOffsets=engine_state_summary.labwareOffsets, pipettes=engine_state_summary.pipettes, 
modules=engine_state_summary.modules, ) async def test_get_historical_run_no_data( decoy: Decoy, mock_engine_store: EngineStore, mock_run_store: RunStore, subject: RunDataManager, run_resource: RunResource, ) -> None: """It should get a historical run from the store.""" run_id = "hello world" decoy.when(mock_run_store.get(run_id=run_id)).then_return(run_resource) decoy.when(mock_run_store.get_state_summary(run_id=run_id)).then_return(None) decoy.when(mock_engine_store.current_run_id).then_return("some other id") result = subject.get(run_id=run_id) assert result == Run( current=False, id=run_resource.run_id, protocolId=run_resource.protocol_id, createdAt=run_resource.created_at, actions=run_resource.actions, status=EngineStatus.STOPPED, errors=[], labware=[], labwareOffsets=[], pipettes=[], modules=[], ) async def test_get_all_runs( decoy: Decoy, mock_engine_store: EngineStore, mock_run_store: RunStore, subject: RunDataManager, ) -> None: """It should get all runs, including current and historical.""" current_run_data = StateSummary( status=EngineStatus.IDLE, errors=[ErrorOccurrence.construct(id="current-error-id")], # type: ignore[call-arg] labware=[LoadedLabware.construct(id="current-labware-id")], # type: ignore[call-arg] labwareOffsets=[LabwareOffset.construct(id="current-labware-offset-id")], # type: ignore[call-arg] pipettes=[LoadedPipette.construct(id="current-pipette-id")], # type: ignore[call-arg] modules=[LoadedModule.construct(id="current-module-id")], # type: ignore[call-arg] ) historical_run_data = StateSummary( status=EngineStatus.STOPPED, errors=[ErrorOccurrence.construct(id="old-error-id")], # type: ignore[call-arg] labware=[LoadedLabware.construct(id="old-labware-id")], # type: ignore[call-arg] labwareOffsets=[LabwareOffset.construct(id="old-labware-offset-id")], # type: ignore[call-arg] pipettes=[LoadedPipette.construct(id="old-pipette-id")], # type: ignore[call-arg] modules=[LoadedModule.construct(id="old-module-id")], # type: ignore[call-arg] ) 
current_run_resource = RunResource( run_id="current-run", protocol_id=None, created_at=datetime(year=2022, month=2, day=2), actions=[], ) historical_run_resource = RunResource( run_id="historical-run", protocol_id=None, created_at=datetime(year=2023, month=3, day=3), actions=[], ) decoy.when(mock_engine_store.current_run_id).then_return("current-run") decoy.when(mock_engine_store.engine.state_view.get_summary()).then_return( current_run_data ) decoy.when(mock_run_store.get_state_summary("historical-run")).then_return( historical_run_data ) decoy.when(mock_run_store.get_all()).then_return( [historical_run_resource, current_run_resource] ) result = subject.get_all() assert result == [ Run( current=False, id=historical_run_resource.run_id, protocolId=historical_run_resource.protocol_id, createdAt=historical_run_resource.created_at, actions=historical_run_resource.actions, status=historical_run_data.status, errors=historical_run_data.errors, labware=historical_run_data.labware, labwareOffsets=historical_run_data.labwareOffsets, pipettes=historical_run_data.pipettes, modules=historical_run_data.modules, ), Run( current=True, id=current_run_resource.run_id, protocolId=current_run_resource.protocol_id, createdAt=current_run_resource.created_at, actions=current_run_resource.actions, status=current_run_data.status, errors=current_run_data.errors, labware=current_run_data.labware, labwareOffsets=current_run_data.labwareOffsets, pipettes=current_run_data.pipettes, modules=current_run_data.modules, ), ] async def test_delete_current_run( decoy: Decoy, mock_engine_store: EngineStore, mock_run_store: RunStore, subject: RunDataManager, ) -> None: """It should delete the current run from the engine.""" run_id = "hello world" decoy.when(mock_engine_store.current_run_id).then_return(run_id) await subject.delete(run_id=run_id) decoy.verify(await mock_engine_store.clear(), times=1) decoy.verify(mock_run_store.remove(run_id=run_id), times=0) async def test_delete_historical_run( decoy: 
Decoy, mock_engine_store: EngineStore, mock_run_store: RunStore, subject: RunDataManager, ) -> None: """It should delete a historical run from the store.""" run_id = "hello world" decoy.when(mock_engine_store.current_run_id).then_return("some other id") await subject.delete(run_id=run_id) decoy.verify(await mock_engine_store.clear(), times=0) decoy.verify(mock_run_store.remove(run_id=run_id), times=1) async def test_update_current( decoy: Decoy, engine_state_summary: StateSummary, run_resource: RunResource, run_command: commands.Command, mock_engine_store: EngineStore, mock_run_store: RunStore, subject: RunDataManager, ) -> None: """It should persist the current run and clear the engine on current=false.""" run_id = "hello world" decoy.when(mock_engine_store.current_run_id).then_return(run_id) decoy.when(await mock_engine_store.clear()).then_return( ProtocolRunResult(commands=[run_command], state_summary=engine_state_summary) ) decoy.when( mock_run_store.update_run_state( run_id=run_id, summary=engine_state_summary, commands=[run_command], ) ).then_return(run_resource) result = await subject.update(run_id=run_id, current=False) assert result == Run( current=False, id=run_resource.run_id, protocolId=run_resource.protocol_id, createdAt=run_resource.created_at, actions=run_resource.actions, status=engine_state_summary.status, errors=engine_state_summary.errors, labware=engine_state_summary.labware, labwareOffsets=engine_state_summary.labwareOffsets, pipettes=engine_state_summary.pipettes, modules=engine_state_summary.modules, ) @pytest.mark.parametrize("current", [None, True]) async def test_update_current_noop( decoy: Decoy, engine_state_summary: StateSummary, run_resource: RunResource, run_command: commands.Command, mock_engine_store: EngineStore, mock_run_store: RunStore, subject: RunDataManager, current: Optional[bool], ) -> None: """It should noop on current=None and current=True.""" run_id = "hello world" 
decoy.when(mock_engine_store.current_run_id).then_return(run_id) decoy.when(mock_engine_store.engine.state_view.get_summary()).then_return( engine_state_summary ) decoy.when(mock_run_store.get(run_id=run_id)).then_return(run_resource) result = await subject.update(run_id=run_id, current=current) decoy.verify(await mock_engine_store.clear(), times=0) decoy.verify( mock_run_store.update_run_state( run_id=run_id, summary=matchers.Anything(), commands=matchers.Anything(), ), times=0, ) assert result == Run( current=True, id=run_resource.run_id, protocolId=run_resource.protocol_id, createdAt=run_resource.created_at, actions=run_resource.actions, status=engine_state_summary.status, errors=engine_state_summary.errors, labware=engine_state_summary.labware, labwareOffsets=engine_state_summary.labwareOffsets, pipettes=engine_state_summary.pipettes, modules=engine_state_summary.modules, ) async def test_update_current_not_allowed( decoy: Decoy, engine_state_summary: StateSummary, run_resource: RunResource, run_command: commands.Command, mock_engine_store: EngineStore, mock_run_store: RunStore, subject: RunDataManager, ) -> None: """It should noop on current=None.""" run_id = "hello world" decoy.when(mock_engine_store.current_run_id).then_return("some other id") with pytest.raises(RunNotCurrentError): await subject.update(run_id=run_id, current=False) async def test_create_archives_existing( decoy: Decoy, engine_state_summary: StateSummary, run_resource: RunResource, run_command: commands.Command, mock_engine_store: EngineStore, mock_run_store: RunStore, subject: RunDataManager, ) -> None: """It should persist the previously current run when a new run is created.""" run_id_old = "hello world" run_id_new = "hello is it me you're looking for" decoy.when(mock_engine_store.current_run_id).then_return(run_id_old) decoy.when(await mock_engine_store.clear()).then_return( ProtocolRunResult(commands=[run_command], state_summary=engine_state_summary) ) decoy.when( await 
mock_engine_store.create( run_id=run_id_new, labware_offsets=[], protocol=None, ) ).then_return(engine_state_summary) decoy.when( mock_run_store.insert( run_id=run_id_new, created_at=datetime(year=2021, month=1, day=1), protocol_id=None, ) ).then_return(run_resource) await subject.create( run_id=run_id_new, created_at=datetime(year=2021, month=1, day=1), labware_offsets=[], protocol=None, ) decoy.verify( mock_run_store.update_run_state( run_id=run_id_old, summary=engine_state_summary, commands=[run_command], ) ) def test_get_commands_slice_from_db( decoy: Decoy, subject: RunDataManager, mock_run_store: RunStore, run_command: commands.Command, ) -> None: """Should get a sliced command list from run store.""" expected_commands_result = [ commands.WaitForResume( id="command-id-2", key="command-key", createdAt=datetime(year=2021, month=1, day=1), status=commands.CommandStatus.SUCCEEDED, params=commands.WaitForResumeParams(message="Hello"), ), run_command, ] expected_command_slice = CommandSlice( commands=expected_commands_result, cursor=1, total_length=3 ) decoy.when( mock_run_store.get_commands_slice(run_id="run_id", cursor=1, length=2) ).then_return(expected_command_slice) result = subject.get_commands_slice(run_id="run_id", cursor=1, length=2) assert expected_command_slice == result def test_get_commands_slice_current_run( decoy: Decoy, subject: RunDataManager, mock_engine_store: EngineStore, run_command: commands.Command, ) -> None: """Should get a sliced command list from engine store.""" expected_commands_result = [ commands.WaitForResume( id="command-id-2", key="command-key", createdAt=datetime(year=2021, month=1, day=1), status=commands.CommandStatus.SUCCEEDED, params=commands.WaitForResumeParams(message="Hello"), ), run_command, ] expected_command_slice = CommandSlice( commands=expected_commands_result, cursor=1, total_length=3 ) decoy.when(mock_engine_store.current_run_id).then_return("run-id") decoy.when( 
mock_engine_store.engine.state_view.commands.get_slice(1, 2) ).then_return(expected_command_slice) result = subject.get_commands_slice("run-id", 1, 2) assert expected_command_slice == result def test_get_commands_slice_from_db_run_not_found( decoy: Decoy, subject: RunDataManager, mock_run_store: RunStore ) -> None: """Should get a sliced command list from run store.""" decoy.when( mock_run_store.get_commands_slice(run_id="run-id", cursor=1, length=2) ).then_raise(RunNotFoundError(run_id="run-id")) with pytest.raises(RunNotFoundError): subject.get_commands_slice(run_id="run-id", cursor=1, length=2) def test_get_current_command( decoy: Decoy, subject: RunDataManager, mock_run_store: RunStore, mock_engine_store: EngineStore, run_command: commands.Command, ) -> None: """Should get current command from engine store.""" expected_current = CurrentCommand( command_id=run_command.id, command_key=run_command.key, created_at=run_command.createdAt, index=0, ) decoy.when(mock_engine_store.current_run_id).then_return("run-id") decoy.when(mock_engine_store.engine.state_view.commands.get_current()).then_return( expected_current ) result = subject.get_current_command("run-id") assert result == expected_current def test_get_current_command_not_current_run( decoy: Decoy, subject: RunDataManager, mock_run_store: RunStore, mock_engine_store: EngineStore, ) -> None: """Should return None because the run is not current.""" decoy.when(mock_engine_store.current_run_id).then_return("not-run-id") result = subject.get_current_command("run-id") assert result is None def test_get_command_from_engine( decoy: Decoy, subject: RunDataManager, mock_run_store: RunStore, mock_engine_store: EngineStore, run_command: commands.Command, ) -> None: """Should get command by id from engine store.""" decoy.when(mock_engine_store.current_run_id).then_return("run-id") decoy.when( mock_engine_store.engine.state_view.commands.get("command-id") ).then_return(run_command) result = subject.get_command("run-id", 
"command-id") assert result == run_command def test_get_command_from_db( decoy: Decoy, subject: RunDataManager, mock_run_store: RunStore, mock_engine_store: EngineStore, run_command: commands.Command, ) -> None: """Should get command by id from engine store.""" decoy.when(mock_engine_store.current_run_id).then_return("not-run-id") decoy.when( mock_run_store.get_command(run_id="run-id", command_id="command-id") ).then_return(run_command) result = subject.get_command("run-id", "command-id") assert result == run_command def test_get_command_from_db_run_not_found( decoy: Decoy, subject: RunDataManager, mock_run_store: RunStore, mock_engine_store: EngineStore, run_command: commands.Command, ) -> None: """Should get command by id from engine store.""" decoy.when(mock_engine_store.current_run_id).then_return("not-run-id") decoy.when( mock_run_store.get_command(run_id="run-id", command_id="command-id") ).then_raise(RunNotFoundError("run-id")) with pytest.raises(RunNotFoundError): subject.get_command("run-id", "command-id") def test_get_command_from_db_command_not_found( decoy: Decoy, subject: RunDataManager, mock_run_store: RunStore, mock_engine_store: EngineStore, run_command: commands.Command, ) -> None: """Should get command by id from engine store.""" decoy.when(mock_engine_store.current_run_id).then_return("not-run-id") decoy.when( mock_run_store.get_command(run_id="run-id", command_id="command-id") ).then_raise(CommandNotFoundError(command_id="command-id")) with pytest.raises(CommandNotFoundError): subject.get_command("run-id", "command-id")
en
0.822096
Tests for RunDataManager. Get a mock EngineStore. Get a mock RunStore. Get a mock background TaskRunner. Get a StateSummary value object. # type: ignore[call-arg] # type: ignore[call-arg] # type: ignore[call-arg] # type: ignore[call-arg] # type: ignore[call-arg] Get a StateSummary value object. Get a ProtocolEngine Command value object. Get a RunDataManager test subject. It should create an engine and a persisted run resource. It should handle creation with a protocol and labware offsets. # type: ignore[arg-type] It should not create a resource if engine creation fails. It should get the current run from the engine. It should get a historical run from the store. It should get a historical run from the store. It should get all runs, including current and historical. # type: ignore[call-arg] # type: ignore[call-arg] # type: ignore[call-arg] # type: ignore[call-arg] # type: ignore[call-arg] # type: ignore[call-arg] # type: ignore[call-arg] # type: ignore[call-arg] # type: ignore[call-arg] # type: ignore[call-arg] It should delete the current run from the engine. It should delete a historical run from the store. It should persist the current run and clear the engine on current=false. It should noop on current=None and current=True. It should noop on current=None. It should persist the previously current run when a new run is created. Should get a sliced command list from run store. Should get a sliced command list from engine store. Should get a sliced command list from run store. Should get current command from engine store. Should return None because the run is not current. Should get command by id from engine store. Should get command by id from engine store. Should get command by id from engine store. Should get command by id from engine store.
2.286645
2
eos_potential.py
sotzee/quaryonic_eos
1
6624574
<filename>eos_potential.py #!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Tue Jun 2 15:11:14 2020 @author: sotzee """ import numpy as np from sympy import symbols, diff,lambdify import unitconvert from config import saturation_density class Fermions(object): ns=saturation_density def __init__(self,args): self.name, self.m, self.g=args #m in unit MeV, g is degenracy in spin or isospin... def set_mass(self,mass): self.m=mass def chi(self,x): return self.g*(x*(1+x**2)**0.5*(2*x**2+1)-np.log(x+(1+x**2)**0.5))/(16*np.pi**2) def phi(self,x): #x=kF/m demensionless return self.g*(x*(1+x**2)**0.5*(2*x**2-3)+3*np.log(x+(1+x**2)**0.5))/(48*np.pi**2) def psi(self,x): return self.g*(4*x**5/(1+x**2)**0.5-3*x*(1+x**2)**0.5*(2*x**2-3)-9*np.log(x+(1+x**2)**0.5))/(72*np.pi**2) def eosDensity_from_x(self,x,x0=0): return unitconvert.toMevfm(self.m**4*(self.chi(x)-self.chi(x0)),'mev4') def eosPressure_from_x(self,x,x0=0): return unitconvert.toMevfm(self.m**4*(self.phi(x)-self.phi(x0)),'mev4') def eosN3d2Edn2_from_x(self,x,x0=0): return unitconvert.toMevfm(self.m**4*(self.psi(x)-self.psi(x0)),'mev4') def eosCs2(self,x): return (2*self.eosPressure_from_x(x)+self.eosN3d2Edn2_from_x(x))/(self.eosDensity_from_x(x)+self.eosPressure_from_x(x)) def eosBaryonDensity_from_x(self,x,x0=0): return unitconvert.toMevfm(self.g*((x*self.m)**3-(x0*self.m)**3)/(6*np.pi**2),'mev4') def eosChempo_from_x(self,x): return self.m*(x**2+1)**0.5 def eosX_from_n(self,n): return np.sign(n)*np.abs(unitconvert.toMev4(n,'mevfm')*(6*np.pi**2/(self.g*self.m**3)))**(1/3) class Potential_single(object): ns=saturation_density def __init__(self,args,sym_list,mean_potential_expr): self.args=args args_sym_list=sym_list[:-1] mean_potential_expr_subs=mean_potential_expr.subs(zip(args_sym_list,args)) self.mean_potential_E=lambdify(sym_list[-1],mean_potential_expr_subs) self.mean_potential_dEdn=lambdify(sym_list[-1],diff(mean_potential_expr_subs,sym_list[-1])) 
self.mean_potential_d2Edn2=lambdify(sym_list[-1],diff(mean_potential_expr_subs,sym_list[-1],2)) self.E=self.mean_potential_E(self.ns) self.L=3*self.ns*self.mean_potential_dEdn(self.ns) self.K=9*self.ns**2*self.mean_potential_d2Edn2(self.ns) def eosDensity_from_n(self,n): return n*self.mean_potential_E(n) def eosPressure_from_n(self,n): return n**2*self.mean_potential_dEdn(n) def eosChempo_from_n(self,n): return (self.eosDensity_from_n(n)+self.eosPressure_from_n(n))/n crust_core_density=0.4*saturation_density #Not used in quarkyonic EOS proton=Fermions(['proton',unitconvert.m_p_MeV,2]) neutron=Fermions(['neutron',unitconvert.m_n_MeV,2]) n0ns=np.array([0.4*saturation_density,saturation_density]) xs_p_sym=proton.eosX_from_n(n0ns/2) xs_n_sym=neutron.eosX_from_n(n0ns/2) xs_pnm=neutron.eosX_from_n(n0ns) E_kin_sym=(proton.eosDensity_from_x(xs_p_sym)+neutron.eosDensity_from_x(xs_n_sym))/n0ns L_kin_sym=3*(proton.eosPressure_from_x(xs_p_sym)+neutron.eosPressure_from_x(xs_n_sym))/n0ns K_kin_sym=9*(proton.eosN3d2Edn2_from_x(xs_p_sym)+neutron.eosN3d2Edn2_from_x(xs_n_sym))/n0ns ELK_kin_sym=np.array([E_kin_sym,L_kin_sym,K_kin_sym]) E_kin_pnm=neutron.eosDensity_from_x(xs_pnm)/n0ns L_kin_pnm=3*neutron.eosPressure_from_x(xs_pnm)/n0ns K_kin_pnm=9*neutron.eosN3d2Edn2_from_x(xs_pnm)/n0ns ELK_kin_pnm=np.array([E_kin_pnm,L_kin_pnm,K_kin_pnm]) def V_Lattimer(n_s,a,b,gamma,n): return a*(n/n_s)+b*(n/n_s)**gamma def fit_lattimer_pnm(para,ELgamma): Potential_Lattimer_pnm=Potential_single(np.concatenate((para,[ELgamma[2]])),syms_Lattimer,V_Lattimer_expr) EL_potential_pnm=np.array([Potential_Lattimer_pnm.E,Potential_Lattimer_pnm.L]) return ELK_kin_pnm[:2,1]+EL_potential_pnm-np.array(ELgamma[:2]) sym_a, sym_b, sym_d, sym_gamma, sym_alpha, sym_beta, sym_n= symbols('a b d gamma alpha beta n', real=True) syms_Lattimer=[sym_a, sym_b, sym_gamma, sym_n] V_Lattimer_expr=V_Lattimer(saturation_density, sym_a, sym_b, sym_gamma, sym_n)
<filename>eos_potential.py #!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Tue Jun 2 15:11:14 2020 @author: sotzee """ import numpy as np from sympy import symbols, diff,lambdify import unitconvert from config import saturation_density class Fermions(object): ns=saturation_density def __init__(self,args): self.name, self.m, self.g=args #m in unit MeV, g is degenracy in spin or isospin... def set_mass(self,mass): self.m=mass def chi(self,x): return self.g*(x*(1+x**2)**0.5*(2*x**2+1)-np.log(x+(1+x**2)**0.5))/(16*np.pi**2) def phi(self,x): #x=kF/m demensionless return self.g*(x*(1+x**2)**0.5*(2*x**2-3)+3*np.log(x+(1+x**2)**0.5))/(48*np.pi**2) def psi(self,x): return self.g*(4*x**5/(1+x**2)**0.5-3*x*(1+x**2)**0.5*(2*x**2-3)-9*np.log(x+(1+x**2)**0.5))/(72*np.pi**2) def eosDensity_from_x(self,x,x0=0): return unitconvert.toMevfm(self.m**4*(self.chi(x)-self.chi(x0)),'mev4') def eosPressure_from_x(self,x,x0=0): return unitconvert.toMevfm(self.m**4*(self.phi(x)-self.phi(x0)),'mev4') def eosN3d2Edn2_from_x(self,x,x0=0): return unitconvert.toMevfm(self.m**4*(self.psi(x)-self.psi(x0)),'mev4') def eosCs2(self,x): return (2*self.eosPressure_from_x(x)+self.eosN3d2Edn2_from_x(x))/(self.eosDensity_from_x(x)+self.eosPressure_from_x(x)) def eosBaryonDensity_from_x(self,x,x0=0): return unitconvert.toMevfm(self.g*((x*self.m)**3-(x0*self.m)**3)/(6*np.pi**2),'mev4') def eosChempo_from_x(self,x): return self.m*(x**2+1)**0.5 def eosX_from_n(self,n): return np.sign(n)*np.abs(unitconvert.toMev4(n,'mevfm')*(6*np.pi**2/(self.g*self.m**3)))**(1/3) class Potential_single(object): ns=saturation_density def __init__(self,args,sym_list,mean_potential_expr): self.args=args args_sym_list=sym_list[:-1] mean_potential_expr_subs=mean_potential_expr.subs(zip(args_sym_list,args)) self.mean_potential_E=lambdify(sym_list[-1],mean_potential_expr_subs) self.mean_potential_dEdn=lambdify(sym_list[-1],diff(mean_potential_expr_subs,sym_list[-1])) 
self.mean_potential_d2Edn2=lambdify(sym_list[-1],diff(mean_potential_expr_subs,sym_list[-1],2)) self.E=self.mean_potential_E(self.ns) self.L=3*self.ns*self.mean_potential_dEdn(self.ns) self.K=9*self.ns**2*self.mean_potential_d2Edn2(self.ns) def eosDensity_from_n(self,n): return n*self.mean_potential_E(n) def eosPressure_from_n(self,n): return n**2*self.mean_potential_dEdn(n) def eosChempo_from_n(self,n): return (self.eosDensity_from_n(n)+self.eosPressure_from_n(n))/n crust_core_density=0.4*saturation_density #Not used in quarkyonic EOS proton=Fermions(['proton',unitconvert.m_p_MeV,2]) neutron=Fermions(['neutron',unitconvert.m_n_MeV,2]) n0ns=np.array([0.4*saturation_density,saturation_density]) xs_p_sym=proton.eosX_from_n(n0ns/2) xs_n_sym=neutron.eosX_from_n(n0ns/2) xs_pnm=neutron.eosX_from_n(n0ns) E_kin_sym=(proton.eosDensity_from_x(xs_p_sym)+neutron.eosDensity_from_x(xs_n_sym))/n0ns L_kin_sym=3*(proton.eosPressure_from_x(xs_p_sym)+neutron.eosPressure_from_x(xs_n_sym))/n0ns K_kin_sym=9*(proton.eosN3d2Edn2_from_x(xs_p_sym)+neutron.eosN3d2Edn2_from_x(xs_n_sym))/n0ns ELK_kin_sym=np.array([E_kin_sym,L_kin_sym,K_kin_sym]) E_kin_pnm=neutron.eosDensity_from_x(xs_pnm)/n0ns L_kin_pnm=3*neutron.eosPressure_from_x(xs_pnm)/n0ns K_kin_pnm=9*neutron.eosN3d2Edn2_from_x(xs_pnm)/n0ns ELK_kin_pnm=np.array([E_kin_pnm,L_kin_pnm,K_kin_pnm]) def V_Lattimer(n_s,a,b,gamma,n): return a*(n/n_s)+b*(n/n_s)**gamma def fit_lattimer_pnm(para,ELgamma): Potential_Lattimer_pnm=Potential_single(np.concatenate((para,[ELgamma[2]])),syms_Lattimer,V_Lattimer_expr) EL_potential_pnm=np.array([Potential_Lattimer_pnm.E,Potential_Lattimer_pnm.L]) return ELK_kin_pnm[:2,1]+EL_potential_pnm-np.array(ELgamma[:2]) sym_a, sym_b, sym_d, sym_gamma, sym_alpha, sym_beta, sym_n= symbols('a b d gamma alpha beta n', real=True) syms_Lattimer=[sym_a, sym_b, sym_gamma, sym_n] V_Lattimer_expr=V_Lattimer(saturation_density, sym_a, sym_b, sym_gamma, sym_n)
en
0.469129
#!/usr/bin/env python3 # -*- coding: utf-8 -*- Created on Tue Jun 2 15:11:14 2020 @author: sotzee #m in unit MeV, g is degenracy in spin or isospin... #x=kF/m demensionless #Not used in quarkyonic EOS
2.588003
3
bank2ynab/bank_process.py
Kukkerem/bank2ynab
0
6624575
#!/usr/bin/env python3 # # bank2ynab.py # # Searches specified folder or default download folder for exported # bank transaction file (.csv format) & adjusts format for YNAB import # Please see here for details: https://github.com/torbengb/bank2ynab # # MIT License: https://github.com/torbengb/bank2ynab/blob/master/LICENSE # # DISCLAIMER: Please use at your own risk. This tool is neither officially # supported by YNAB (the company) nor by YNAB (the software) in any way. # Use of this tool could introduce problems into your budget that YNAB, # through its official support channels, will not be able to troubleshoot # or fix. See also the full MIT licence. # # # don't edit below here unless you know what you're doing! from os.path import abspath, join, dirname, basename import os import importlib import re from datetime import datetime import logging import b2y_utilities # configure our logger logging.basicConfig(format="%(levelname)s: %(message)s", level=logging.INFO) # Classes doing the actual work class B2YBank(object): """Object parsing and outputting data for a specific bank. 
This can be subclassed to handle formats requiring special handling, overriding any of get_files(), read_data() or write_data().""" def __init__(self, config_object): """ :param config_object: dict containing config parameters """ self.name = config_object.get("bank_name", "DEFAULT") self.config = config_object def get_files(self): """find the transaction file :return: list of matching files found """ ext = self.config["ext"] file_pattern = self.config["input_filename"] prefix = self.config["fixed_prefix"] regex_active = self.config["regex"] files = list() missing_dir = False try_path = self.config["path"] path = "" if file_pattern != "": try: path = b2y_utilities.find_directory(try_path) except FileNotFoundError: missing_dir = True path = b2y_utilities.find_directory("") path = abspath(path) try: directory_list = os.listdir(path) except FileNotFoundError: directory_list = os.listdir(".") if regex_active is True: files = [ join(path, f) for f in directory_list if f.endswith(ext) if re.match(file_pattern + r".*\.", f) if prefix not in f ] else: files = [ join(path, f) for f in directory_list if f.endswith(ext) if f.startswith(file_pattern) if prefix not in f ] if not files and missing_dir: s = ( "\nFormat: {}\n\nError: Can't find download path: {}" "\nTrying default path instead:\t {}" ) logging.error(s.format(self.name, try_path, path)) return files def read_data(self, file_path): """extract data from given transaction file :param file_path: path to file :return: list of cleaned data rows """ delim = self.config["input_delimiter"] output_columns = self.config["output_columns"] header_rows = int(self.config["header_rows"]) footer_rows = int(self.config["footer_rows"]) cd_flags = self.config["cd_flags"] date_format = self.config["date_format"] fill_memo = self.config["payee_to_memo"] output_data = [] # give plugins a chance to pre-process the file self._preprocess_file(file_path) # get total number of rows in transaction file using a generator with 
b2y_utilities.EncodingCsvReader( file_path, delimiter=delim ) as row_count_reader: row_count = sum(1 for row in row_count_reader) with b2y_utilities.EncodingCsvReader( file_path, delimiter=delim ) as transaction_reader: # make each row of our new transaction file for line, row in enumerate(transaction_reader): # skip header & footer rows if header_rows <= line <= (row_count - footer_rows): # skip blank rows if len(row) == 0: continue # process Inflow or Outflow flags row = self._cd_flag_process(row, cd_flags) # fix the date format row = self._fix_date(row, date_format) # create our output_row fixed_row = self._fix_row(row) # convert negative inflows to standard outflows fixed_row = self._fix_outflow(fixed_row) # convert positive outflows to standard inflows fixed_row = self._fix_inflow(fixed_row) # fill in blank memo fields fixed_row = self._auto_memo(fixed_row, fill_memo) # convert decimal point fixed_row = self._fix_decimal_point(fixed_row) # remove extra characters in the inflow and outflow fixed_row = self._clean_monetary_values(fixed_row) # check our row isn't a null transaction if self._valid_row(fixed_row) is True: output_data.append(fixed_row) # add in column headers line_count = len(output_data) logging.info("Parsed {} lines".format(line_count)) if line_count > 0: output_data.insert(0, output_columns) return output_data def _preprocess_file(self, file_path): """ exists solely to be used by plugins for pre-processing a file that otherwise can be read normally (e.g. 
weird format) :param file_path: path to file """ # intentionally empty - the plugins can use this function return def _fix_row(self, row): """ rearrange a row of our file to match expected output format, optionally combining multiple input columns into a single output column :param row: list of values :return: list of values in correct output format """ output = [] for header in self.config["output_columns"]: # find all input columns with data for this output column indices = filter( lambda i: self.config["input_columns"][i] == header, range(len(self.config["input_columns"])), ) # fetch data from those input columns if they are not empty, # and merge them cell_parts = [] for i in indices: try: if row[i].lstrip(): cell_parts.append(row[i].lstrip()) except IndexError: pass cell = " ".join(cell_parts) output.append(cell) return output def _fix_outflow(self, row): """ convert negative inflow into positive outflow :param row: list of values :return: list of values with corrected outflow column """ inflow_index = self.config["output_columns"].index("Inflow") outflow_index = self.config["output_columns"].index("Outflow") inflow = row[inflow_index] if inflow.startswith("-"): row[inflow_index] = "" row[outflow_index] = inflow[1:] return row def _fix_inflow(self, row): """ convert positive outflow into inflow :param row: list of values :return: list of values with corrected outflow column """ inflow_index = self.config["output_columns"].index("Inflow") outflow_index = self.config["output_columns"].index("Outflow") outflow = row[outflow_index] if outflow.startswith("+"): row[outflow_index] = "" row[inflow_index] = outflow[1:] return row def _fix_decimal_point(self, row): """ convert , to . in inflow and outflow strings then remove every instance of . 
except last one :param row: list of values """ inflow_index = self.config["output_columns"].index("Inflow") outflow_index = self.config["output_columns"].index("Outflow") inflow = row[inflow_index].replace(",", ".") outflow = row[outflow_index].replace(",", ".") dot_count = inflow.count(".") - 1 row[inflow_index] = inflow.replace(".", "", dot_count) dot_count = outflow.count(".") - 1 row[outflow_index] = outflow.replace(".", "", dot_count) return row def _clean_monetary_values(self, row): """ remove any characters from inflow or outflow strings except digits and '.' :param row: list of values """ inflow_index = self.config["output_columns"].index("Inflow") outflow_index = self.config["output_columns"].index("Outflow") row[inflow_index] = re.sub(r"[^\d\.]", "", row[inflow_index]) row[outflow_index] = re.sub(r"[^\d\.]", "", row[outflow_index]) return row def _valid_row(self, row): """if our row doesn't have an inflow, outflow or a valid date, mark as invalid :param row: list of values """ inflow_index = self.config["output_columns"].index("Inflow") outflow_index = self.config["output_columns"].index("Outflow") if row[inflow_index] == "" and row[outflow_index] == "": return False # check that date matches YYYY-MM-DD format date_index = self.config["output_columns"].index("Date") if not re.fullmatch(r"\d{4}-\d{2}-\d{2}", row[date_index]): return False return True def _auto_memo(self, row, fill_memo): """auto fill empty memo field with payee info :param row: list of values :param fill_memo: boolean """ if fill_memo: payee_index = self.config["output_columns"].index("Payee") memo_index = self.config["output_columns"].index("Memo") if row[memo_index] == "": row[memo_index] = row[payee_index] return row def _fix_date(self, row, date_format): """fix date format when required convert date to YYYY-MM-DD :param row: list of values :param date_format: date format string """ if not (date_format): return row date_col = self.config["input_columns"].index("Date") try: if 
row[date_col] == "": return row # parse our date according to provided formatting string input_date = datetime.strptime(row[date_col].strip(), date_format) # do our actual date processing output_date = datetime.strftime(input_date, "%Y-%m-%d") row[date_col] = output_date except (ValueError, IndexError): pass return row def _cd_flag_process(self, row, cd_flags): """fix rows where inflow or outflow is indicated by a flag in a separate column :param row: list of values :param cd_flags: list of parameters for applying indicators """ if len(cd_flags) == 3: indicator_col = int(cd_flags[0]) outflow_flag = cd_flags[2] inflow_col = self.config["input_columns"].index("Inflow") # if this row is indicated to be outflow, make inflow negative if row[indicator_col] == outflow_flag: row[inflow_col] = "-" + row[inflow_col] return row def write_data(self, filename, data): """write out the new CSV file :param filename: path to output file :param data: cleaned data ready to output """ target_dir = dirname(filename) target_fname = basename(filename)[:-4] new_filename = "{}{}.csv".format( self.config["fixed_prefix"], target_fname ) while os.path.isfile(new_filename): counter = 1 new_filename = "{}{}_{}.csv".format( self.config["fixed_prefix"], target_fname, counter ) counter += 1 target_filename = join(target_dir, new_filename) logging.info("Writing output file: {}".format(target_filename)) with b2y_utilities.EncodingCsvWriter(target_filename) as writer: for row in data: writer.writerow(row) return target_filename def build_bank(bank_config): """ Factory method loading the correct class for a given configuration. """ plugin_module = bank_config.get("plugin", None) if plugin_module: p_mod = importlib.import_module("plugins.{}".format(plugin_module)) if not hasattr(p_mod, "build_bank"): s = ( "The specified plugin {}.py".format(plugin_module) + "does not contain the required " "build_bank(config) method." 
) raise ImportError(s) bank = p_mod.build_bank(bank_config) return bank else: return B2YBank(bank_config) class Bank2Ynab(object): """Main program instance, responsible for gathering configuration, creating the right object for each bank, and triggering elaboration.""" def __init__(self, config_object): self.banks = [] self.transaction_data = {} for section in config_object.sections(): bank_config = b2y_utilities.fix_conf_params(config_object, section) bank_object = build_bank(bank_config) self.banks.append(bank_object) def run(self): """ Main program flow """ # initialize variables for summary: files_processed = 0 # process account for each config file for bank in self.banks: # find all applicable files files = bank.get_files() bank_name = bank.name for src_file in files: logging.info( "\nParsing input file: {} (format: {})".format( src_file, bank_name ) ) # increment for the summary: files_processed += 1 # create cleaned csv for each file output = bank.read_data(src_file) if output != []: bank.write_data(src_file, output) # save transaction data for each bank to object self.transaction_data[bank_name] = output # delete original csv file if bank.config["delete_original"] is True: logging.info( "Removing input file: {}".format(src_file) ) os.remove(src_file) else: logging.info( "No output data from this file for this bank." ) logging.info("\nDone! {} files processed.\n".format(files_processed))
#!/usr/bin/env python3 # # bank2ynab.py # # Searches specified folder or default download folder for exported # bank transaction file (.csv format) & adjusts format for YNAB import # Please see here for details: https://github.com/torbengb/bank2ynab # # MIT License: https://github.com/torbengb/bank2ynab/blob/master/LICENSE # # DISCLAIMER: Please use at your own risk. This tool is neither officially # supported by YNAB (the company) nor by YNAB (the software) in any way. # Use of this tool could introduce problems into your budget that YNAB, # through its official support channels, will not be able to troubleshoot # or fix. See also the full MIT licence. # # # don't edit below here unless you know what you're doing! from os.path import abspath, join, dirname, basename import os import importlib import re from datetime import datetime import logging import b2y_utilities # configure our logger logging.basicConfig(format="%(levelname)s: %(message)s", level=logging.INFO) # Classes doing the actual work class B2YBank(object): """Object parsing and outputting data for a specific bank. 
This can be subclassed to handle formats requiring special handling, overriding any of get_files(), read_data() or write_data().""" def __init__(self, config_object): """ :param config_object: dict containing config parameters """ self.name = config_object.get("bank_name", "DEFAULT") self.config = config_object def get_files(self): """find the transaction file :return: list of matching files found """ ext = self.config["ext"] file_pattern = self.config["input_filename"] prefix = self.config["fixed_prefix"] regex_active = self.config["regex"] files = list() missing_dir = False try_path = self.config["path"] path = "" if file_pattern != "": try: path = b2y_utilities.find_directory(try_path) except FileNotFoundError: missing_dir = True path = b2y_utilities.find_directory("") path = abspath(path) try: directory_list = os.listdir(path) except FileNotFoundError: directory_list = os.listdir(".") if regex_active is True: files = [ join(path, f) for f in directory_list if f.endswith(ext) if re.match(file_pattern + r".*\.", f) if prefix not in f ] else: files = [ join(path, f) for f in directory_list if f.endswith(ext) if f.startswith(file_pattern) if prefix not in f ] if not files and missing_dir: s = ( "\nFormat: {}\n\nError: Can't find download path: {}" "\nTrying default path instead:\t {}" ) logging.error(s.format(self.name, try_path, path)) return files def read_data(self, file_path): """extract data from given transaction file :param file_path: path to file :return: list of cleaned data rows """ delim = self.config["input_delimiter"] output_columns = self.config["output_columns"] header_rows = int(self.config["header_rows"]) footer_rows = int(self.config["footer_rows"]) cd_flags = self.config["cd_flags"] date_format = self.config["date_format"] fill_memo = self.config["payee_to_memo"] output_data = [] # give plugins a chance to pre-process the file self._preprocess_file(file_path) # get total number of rows in transaction file using a generator with 
b2y_utilities.EncodingCsvReader( file_path, delimiter=delim ) as row_count_reader: row_count = sum(1 for row in row_count_reader) with b2y_utilities.EncodingCsvReader( file_path, delimiter=delim ) as transaction_reader: # make each row of our new transaction file for line, row in enumerate(transaction_reader): # skip header & footer rows if header_rows <= line <= (row_count - footer_rows): # skip blank rows if len(row) == 0: continue # process Inflow or Outflow flags row = self._cd_flag_process(row, cd_flags) # fix the date format row = self._fix_date(row, date_format) # create our output_row fixed_row = self._fix_row(row) # convert negative inflows to standard outflows fixed_row = self._fix_outflow(fixed_row) # convert positive outflows to standard inflows fixed_row = self._fix_inflow(fixed_row) # fill in blank memo fields fixed_row = self._auto_memo(fixed_row, fill_memo) # convert decimal point fixed_row = self._fix_decimal_point(fixed_row) # remove extra characters in the inflow and outflow fixed_row = self._clean_monetary_values(fixed_row) # check our row isn't a null transaction if self._valid_row(fixed_row) is True: output_data.append(fixed_row) # add in column headers line_count = len(output_data) logging.info("Parsed {} lines".format(line_count)) if line_count > 0: output_data.insert(0, output_columns) return output_data def _preprocess_file(self, file_path): """ exists solely to be used by plugins for pre-processing a file that otherwise can be read normally (e.g. 
weird format) :param file_path: path to file """ # intentionally empty - the plugins can use this function return def _fix_row(self, row): """ rearrange a row of our file to match expected output format, optionally combining multiple input columns into a single output column :param row: list of values :return: list of values in correct output format """ output = [] for header in self.config["output_columns"]: # find all input columns with data for this output column indices = filter( lambda i: self.config["input_columns"][i] == header, range(len(self.config["input_columns"])), ) # fetch data from those input columns if they are not empty, # and merge them cell_parts = [] for i in indices: try: if row[i].lstrip(): cell_parts.append(row[i].lstrip()) except IndexError: pass cell = " ".join(cell_parts) output.append(cell) return output def _fix_outflow(self, row): """ convert negative inflow into positive outflow :param row: list of values :return: list of values with corrected outflow column """ inflow_index = self.config["output_columns"].index("Inflow") outflow_index = self.config["output_columns"].index("Outflow") inflow = row[inflow_index] if inflow.startswith("-"): row[inflow_index] = "" row[outflow_index] = inflow[1:] return row def _fix_inflow(self, row): """ convert positive outflow into inflow :param row: list of values :return: list of values with corrected outflow column """ inflow_index = self.config["output_columns"].index("Inflow") outflow_index = self.config["output_columns"].index("Outflow") outflow = row[outflow_index] if outflow.startswith("+"): row[outflow_index] = "" row[inflow_index] = outflow[1:] return row def _fix_decimal_point(self, row): """ convert , to . in inflow and outflow strings then remove every instance of . 
except last one :param row: list of values """ inflow_index = self.config["output_columns"].index("Inflow") outflow_index = self.config["output_columns"].index("Outflow") inflow = row[inflow_index].replace(",", ".") outflow = row[outflow_index].replace(",", ".") dot_count = inflow.count(".") - 1 row[inflow_index] = inflow.replace(".", "", dot_count) dot_count = outflow.count(".") - 1 row[outflow_index] = outflow.replace(".", "", dot_count) return row def _clean_monetary_values(self, row): """ remove any characters from inflow or outflow strings except digits and '.' :param row: list of values """ inflow_index = self.config["output_columns"].index("Inflow") outflow_index = self.config["output_columns"].index("Outflow") row[inflow_index] = re.sub(r"[^\d\.]", "", row[inflow_index]) row[outflow_index] = re.sub(r"[^\d\.]", "", row[outflow_index]) return row def _valid_row(self, row): """if our row doesn't have an inflow, outflow or a valid date, mark as invalid :param row: list of values """ inflow_index = self.config["output_columns"].index("Inflow") outflow_index = self.config["output_columns"].index("Outflow") if row[inflow_index] == "" and row[outflow_index] == "": return False # check that date matches YYYY-MM-DD format date_index = self.config["output_columns"].index("Date") if not re.fullmatch(r"\d{4}-\d{2}-\d{2}", row[date_index]): return False return True def _auto_memo(self, row, fill_memo): """auto fill empty memo field with payee info :param row: list of values :param fill_memo: boolean """ if fill_memo: payee_index = self.config["output_columns"].index("Payee") memo_index = self.config["output_columns"].index("Memo") if row[memo_index] == "": row[memo_index] = row[payee_index] return row def _fix_date(self, row, date_format): """fix date format when required convert date to YYYY-MM-DD :param row: list of values :param date_format: date format string """ if not (date_format): return row date_col = self.config["input_columns"].index("Date") try: if 
row[date_col] == "": return row # parse our date according to provided formatting string input_date = datetime.strptime(row[date_col].strip(), date_format) # do our actual date processing output_date = datetime.strftime(input_date, "%Y-%m-%d") row[date_col] = output_date except (ValueError, IndexError): pass return row def _cd_flag_process(self, row, cd_flags): """fix rows where inflow or outflow is indicated by a flag in a separate column :param row: list of values :param cd_flags: list of parameters for applying indicators """ if len(cd_flags) == 3: indicator_col = int(cd_flags[0]) outflow_flag = cd_flags[2] inflow_col = self.config["input_columns"].index("Inflow") # if this row is indicated to be outflow, make inflow negative if row[indicator_col] == outflow_flag: row[inflow_col] = "-" + row[inflow_col] return row def write_data(self, filename, data): """write out the new CSV file :param filename: path to output file :param data: cleaned data ready to output """ target_dir = dirname(filename) target_fname = basename(filename)[:-4] new_filename = "{}{}.csv".format( self.config["fixed_prefix"], target_fname ) while os.path.isfile(new_filename): counter = 1 new_filename = "{}{}_{}.csv".format( self.config["fixed_prefix"], target_fname, counter ) counter += 1 target_filename = join(target_dir, new_filename) logging.info("Writing output file: {}".format(target_filename)) with b2y_utilities.EncodingCsvWriter(target_filename) as writer: for row in data: writer.writerow(row) return target_filename def build_bank(bank_config): """ Factory method loading the correct class for a given configuration. """ plugin_module = bank_config.get("plugin", None) if plugin_module: p_mod = importlib.import_module("plugins.{}".format(plugin_module)) if not hasattr(p_mod, "build_bank"): s = ( "The specified plugin {}.py".format(plugin_module) + "does not contain the required " "build_bank(config) method." 
) raise ImportError(s) bank = p_mod.build_bank(bank_config) return bank else: return B2YBank(bank_config) class Bank2Ynab(object): """Main program instance, responsible for gathering configuration, creating the right object for each bank, and triggering elaboration.""" def __init__(self, config_object): self.banks = [] self.transaction_data = {} for section in config_object.sections(): bank_config = b2y_utilities.fix_conf_params(config_object, section) bank_object = build_bank(bank_config) self.banks.append(bank_object) def run(self): """ Main program flow """ # initialize variables for summary: files_processed = 0 # process account for each config file for bank in self.banks: # find all applicable files files = bank.get_files() bank_name = bank.name for src_file in files: logging.info( "\nParsing input file: {} (format: {})".format( src_file, bank_name ) ) # increment for the summary: files_processed += 1 # create cleaned csv for each file output = bank.read_data(src_file) if output != []: bank.write_data(src_file, output) # save transaction data for each bank to object self.transaction_data[bank_name] = output # delete original csv file if bank.config["delete_original"] is True: logging.info( "Removing input file: {}".format(src_file) ) os.remove(src_file) else: logging.info( "No output data from this file for this bank." ) logging.info("\nDone! {} files processed.\n".format(files_processed))
en
0.735597
#!/usr/bin/env python3 # # bank2ynab.py # # Searches specified folder or default download folder for exported # bank transaction file (.csv format) & adjusts format for YNAB import # Please see here for details: https://github.com/torbengb/bank2ynab # # MIT License: https://github.com/torbengb/bank2ynab/blob/master/LICENSE # # DISCLAIMER: Please use at your own risk. This tool is neither officially # supported by YNAB (the company) nor by YNAB (the software) in any way. # Use of this tool could introduce problems into your budget that YNAB, # through its official support channels, will not be able to troubleshoot # or fix. See also the full MIT licence. # # # don't edit below here unless you know what you're doing! # configure our logger # Classes doing the actual work Object parsing and outputting data for a specific bank. This can be subclassed to handle formats requiring special handling, overriding any of get_files(), read_data() or write_data(). :param config_object: dict containing config parameters find the transaction file :return: list of matching files found extract data from given transaction file :param file_path: path to file :return: list of cleaned data rows # give plugins a chance to pre-process the file # get total number of rows in transaction file using a generator # make each row of our new transaction file # skip header & footer rows # skip blank rows # process Inflow or Outflow flags # fix the date format # create our output_row # convert negative inflows to standard outflows # convert positive outflows to standard inflows # fill in blank memo fields # convert decimal point # remove extra characters in the inflow and outflow # check our row isn't a null transaction # add in column headers exists solely to be used by plugins for pre-processing a file that otherwise can be read normally (e.g. 
weird format) :param file_path: path to file # intentionally empty - the plugins can use this function rearrange a row of our file to match expected output format, optionally combining multiple input columns into a single output column :param row: list of values :return: list of values in correct output format # find all input columns with data for this output column # fetch data from those input columns if they are not empty, # and merge them convert negative inflow into positive outflow :param row: list of values :return: list of values with corrected outflow column convert positive outflow into inflow :param row: list of values :return: list of values with corrected outflow column convert , to . in inflow and outflow strings then remove every instance of . except last one :param row: list of values remove any characters from inflow or outflow strings except digits and '.' :param row: list of values if our row doesn't have an inflow, outflow or a valid date, mark as invalid :param row: list of values # check that date matches YYYY-MM-DD format auto fill empty memo field with payee info :param row: list of values :param fill_memo: boolean fix date format when required convert date to YYYY-MM-DD :param row: list of values :param date_format: date format string # parse our date according to provided formatting string # do our actual date processing fix rows where inflow or outflow is indicated by a flag in a separate column :param row: list of values :param cd_flags: list of parameters for applying indicators # if this row is indicated to be outflow, make inflow negative write out the new CSV file :param filename: path to output file :param data: cleaned data ready to output Factory method loading the correct class for a given configuration. Main program instance, responsible for gathering configuration, creating the right object for each bank, and triggering elaboration. 
Main program flow # initialize variables for summary: # process account for each config file # find all applicable files # increment for the summary: # create cleaned csv for each file # save transaction data for each bank to object # delete original csv file
2.726023
3
advance/fasle1-4.py
naeimnb/pythonexersices
0
6624576
def is_capitalized(in_string): part1, part2 = in_string[0], in_string[1:len(in_string)] # print(part1, part2) if part1.isupper() == True and part2.islower() == True: return 1 else: return 0 index_words = [] _in = [] _in = input().split(".") cursor = 0 for sentence in _in: word_list_of_sentence = sentence.split() for i in range(1, len(word_list_of_sentence)): word = word_list_of_sentence[i] if(word[len(word) - 1] == ","): word = word[0:len(word) - 1] if is_capitalized(word): index_words.append((str(i + cursor + 1), word)) cursor = cursor + len(word_list_of_sentence) if len(index_words) == 0: print("None") else: for element in index_words: print(element[0] + ":" + element[1])
def is_capitalized(in_string): part1, part2 = in_string[0], in_string[1:len(in_string)] # print(part1, part2) if part1.isupper() == True and part2.islower() == True: return 1 else: return 0 index_words = [] _in = [] _in = input().split(".") cursor = 0 for sentence in _in: word_list_of_sentence = sentence.split() for i in range(1, len(word_list_of_sentence)): word = word_list_of_sentence[i] if(word[len(word) - 1] == ","): word = word[0:len(word) - 1] if is_capitalized(word): index_words.append((str(i + cursor + 1), word)) cursor = cursor + len(word_list_of_sentence) if len(index_words) == 0: print("None") else: for element in index_words: print(element[0] + ":" + element[1])
en
0.099692
# print(part1, part2)
3.966763
4
old_versions/travel.old.py
Pebecko/Reknamorcen
0
6624577
<filename>old_versions/travel.old.py from fight import * class RoomTypes: def get_coordinates(self): if player.last_direction is "North": player.y -= 1 elif player.last_direction is "East": player.x += 1 elif player.last_direction is "South": player.y += 1 elif player.last_direction is "West": player.x -= 1 return def room_pattern_one(self, last_dir_old1, last_dir_new1): while True: if player.last_direction is None: slow_print("Můžete jít pouze rovně, zmáčkněte enter až budete připraveni.\n") elif player.last_direction is "{}".format(last_dir_old1): slow_print("Můžete jít pouze zpět, zmáčkněte enter až budete připraveni.\n") direction_choice = base_options() if direction_choice != "skip": break player.last_direction = "{}".format(last_dir_new1) return self.get_coordinates() def room_pattern_two(self, msg1, msg2, msg3, pl_opt1, pl_opt2, pl_opt3, pl_opt4, pl_opt5, pl_opt6, last_dir_old1, last_dir_old2, last_dir_new1, last_dir_new2): while True: if player.last_direction is None: slow_print("{}".format(msg1)) direction_choice = base_options() if direction_choice is "{}".format(pl_opt1): player.last_direction = "{}".format(last_dir_new1) break elif direction_choice is "{}".format(pl_opt2): player.last_direction = "{}".format(last_dir_new2) break elif direction_choice != "skip": wrong_input(0) elif player.last_direction is "{}".format(last_dir_old1): slow_print("{}".format(msg2)) direction_choice = base_options() if direction_choice is "{}".format(pl_opt3): player.last_direction = "{}".format(last_dir_new1) break elif direction_choice is "{}".format(pl_opt4): player.last_direction = "{}".format(last_dir_new2) break elif direction_choice != "skip": wrong_input(0) elif player.last_direction is "{}".format(last_dir_old2): slow_print("{}".format(msg3)) direction_choice = base_options() if direction_choice is "{}".format(pl_opt5): player.last_direction = "{}".format(last_dir_new2) break elif direction_choice is "{}".format(pl_opt6): player.last_direction = 
"{}".format(last_dir_new1) break elif direction_choice != "skip": wrong_input(0) return self.get_coordinates() def room_pattern_three(self, msg1, msg2, msg3, msg4, pl_opt1, pl_opt2, pl_opt3, pl_opt4, pl_opt5, pl_opt6, pl_opt7, pl_opt8, pl_opt9, pl_opt10, pl_opt11, pl_opt12, last_dir_old1, last_dir_old2, last_dir_old3, last_dir_new1, last_dir_new2, last_dir_new3): while True: if player.last_direction is None: slow_print(msg1) direction_choice = base_options() if direction_choice is pl_opt1: player.last_direction = last_dir_new1 break elif direction_choice is pl_opt2: player.last_direction = last_dir_new2 break elif direction_choice is pl_opt3: player.last_direction = last_dir_new3 break elif direction_choice != "skip": wrong_input(0) elif player.last_direction is last_dir_old1: slow_print(msg2) direction_choice = base_options() if direction_choice is pl_opt4: player.last_direction = last_dir_new1 break elif direction_choice is pl_opt5: player.last_direction = last_dir_new2 break elif direction_choice is pl_opt6: player.last_direction = last_dir_new3 break elif direction_choice != "skip": wrong_input(0) elif player.last_direction is last_dir_old2: slow_print(msg3) direction_choice = base_options() if direction_choice is pl_opt7: player.last_direction = last_dir_new2 break elif direction_choice is pl_opt8: player.last_direction = last_dir_new1 break elif direction_choice is pl_opt9: player.last_direction = last_dir_new3 break elif direction_choice != "skip": wrong_input(0) elif player.last_direction is last_dir_old3: slow_print(msg4) direction_choice = base_options() if direction_choice is pl_opt10: player.last_direction = last_dir_new3 break elif direction_choice is pl_opt11: player.last_direction = last_dir_new2 break elif direction_choice is pl_opt12: player.last_direction = last_dir_new1 break elif direction_choice != "skip": wrong_input(0) return self.get_coordinates() def room_type_n(self): last_dir_old1 = "South" last_dir_new1 = "North" return 
self.room_pattern_one(last_dir_old1, last_dir_new1) def room_type_e(self): last_dir_old1 = "West" last_dir_new1 = "East" return self.room_pattern_one(last_dir_old1, last_dir_new1) def room_type_s(self): last_dir_old1 = "North" last_dir_new1 = "South" return self.room_pattern_one(last_dir_old1, last_dir_new1) def room_type_w(self): last_dir_old1 = "East" last_dir_new1 = "West" return self.room_pattern_one(last_dir_old1, last_dir_new1) def room_type_ne(self): msg1 = "Můžete jít [r]ovně, nebo v[p]ravo.\n" pl_opt1 = "r" last_dir_new1 = "North" pl_opt2 = "p" last_dir_new2 = "East" last_dir_old1 = "South" msg2 = "Můžete jít [z]pět, nebo v[l]evo.\n" pl_opt3 = "z" pl_opt4 = "l" last_dir_old2 = "West" msg3 = "Můžete jít [z]pět, nebo v[p]ravo.\n" pl_opt5 = "z" pl_opt6 = "p" return self.room_pattern_two(msg1, msg2, msg3, pl_opt1, pl_opt2, pl_opt3, pl_opt4, pl_opt5, pl_opt6, last_dir_old1, last_dir_old2, last_dir_new1, last_dir_new2) def room_type_ns(self): msg1 = "Můžete jít [r]ovně, nebo v[z]ad.\n" pl_opt1 = "r" last_dir_new1 = "North" pl_opt2 = "z" last_dir_new2 = "South" last_dir_old1 = "South" msg2 = "Můžete jít [z]pět, nebo [r]ovně.\n" pl_opt3 = "z" pl_opt4 = "r" last_dir_old2 = "North" msg3 = "Můžete jít [z]pět, nebo [r]ovně.\n" pl_opt5 = "z" pl_opt6 = "r" return self.room_pattern_two(msg1, msg2, msg3, pl_opt1, pl_opt2, pl_opt3, pl_opt4, pl_opt5, pl_opt6, last_dir_old1, last_dir_old2, last_dir_new1, last_dir_new2) def room_type_nw(self): msg1 = "Můžete jít [r]ovně, nebo v[l]evo.\n" pl_opt1 = "r" last_dir_new1 = "North" pl_opt2 = "l" last_dir_new2 = "West" last_dir_old1 = "South" msg2 = "Můžete jít [z]pět, nebo v[p]ravo.\n" pl_opt3 = "z" pl_opt4 = "p" last_dir_old2 = "East" msg3 = "Můžete jít [z]pět, nebo v[l]evo.\n" pl_opt5 = "z" pl_opt6 = "l" return self.room_pattern_two(msg1, msg2, msg3, pl_opt1, pl_opt2, pl_opt3, pl_opt4, pl_opt5, pl_opt6, last_dir_old1, last_dir_old2, last_dir_new1, last_dir_new2) def room_type_es(self): msg1 = "Můžete jít [r]ovně, nebo v[p]ravo.\n" 
pl_opt1 = "r" last_dir_new1 = "East" pl_opt2 = "p" last_dir_new2 = "South" last_dir_old1 = "West" msg2 = "Můžete jít [z]pět, nebo v[l]evo.\n" pl_opt3 = "z" pl_opt4 = "l" last_dir_old2 = "North" msg3 = "Můžete jít [z]pět, nebo v[p]ravo.\n" pl_opt5 = "z" pl_opt6 = "p" return self.room_pattern_two(msg1, msg2, msg3, pl_opt1, pl_opt2, pl_opt3, pl_opt4, pl_opt5, pl_opt6, last_dir_old1, last_dir_old2, last_dir_new1, last_dir_new2) def room_type_ew(self): msg1 = "Můžete jít [r]ovně, nebo v[z]ad.\n" pl_opt1 = "r" last_dir_new1 = "East" pl_opt2 = "z" last_dir_new2 = "West" last_dir_old1 = "West" msg2 = "Můžete jít [z]pět, nebo [r]ovně.\n" pl_opt3 = "z" pl_opt4 = "r" last_dir_old2 = "East" msg3 = "Můžete jít [z]pět, nebo [r]ovně.\n" pl_opt5 = "z" pl_opt6 = "r" return self.room_pattern_two(msg1, msg2, msg3, pl_opt1, pl_opt2, pl_opt3, pl_opt4, pl_opt5, pl_opt6, last_dir_old1, last_dir_old2, last_dir_new1, last_dir_new2) def room_type_sw(self): msg1 = "Můžete jít [r]ovně, nebo v[p]ravo.\n" pl_opt1 = "r" last_dir_new1 = "South" pl_opt2 = "p" last_dir_new2 = "West" last_dir_old1 = "North" msg2 = "Můžete jít [z]pět, nebo v[l]evo.\n" pl_opt3 = "z" pl_opt4 = "l" last_dir_old2 = "East" msg3 = "Můžete jít [z]pět, nebo v[p]ravo.\n" pl_opt5 = "z" pl_opt6 = "p" return self.room_pattern_two(msg1, msg2, msg3, pl_opt1, pl_opt2, pl_opt3, pl_opt4, pl_opt5, pl_opt6, last_dir_old1, last_dir_old2, last_dir_new1, last_dir_new2) def room_type_nes(self): msg1 = "Můžete jít [r]ovně, v[p]ravo, nebo v[z]ad.\n" pl_opt1 = "r" # 1 last_dir_new1 = "North" pl_opt2 = "p" # 2 last_dir_new2 = "East" pl_opt3 = "z" # 3 last_dir_new3 = "South" last_dir_old1 = "South" msg2 = "Můžete jít [z]pět, v[l]evo, nebo [r]ovně.\n" pl_opt4 = "z" # 1 pl_opt5 = "l" # 2 pl_opt6 = "r" # 3 last_dir_old2 = "West" msg3 = "Můžete jít [z]pět, v[p]ravo, nebo v[l]evo.\n" pl_opt7 = "z" # 2 pl_opt8 = "p" # 1 pl_opt9 = "l" # 3 last_dir_old3 = "North" msg4 = "Můžete jít [z]pět, [r]ovně, nebo v[p]ravo.\n" pl_opt10 = "z" # 3 pl_opt11 = "p" # 
2 pl_opt12 = "r" # 1 return self.room_pattern_three(msg1, msg2, msg3, msg4, pl_opt1, pl_opt2, pl_opt3, pl_opt4, pl_opt5, pl_opt6, pl_opt7, pl_opt8, pl_opt9, pl_opt10, pl_opt11, pl_opt12, last_dir_old1, last_dir_old2, last_dir_old3, last_dir_new1, last_dir_new2, last_dir_new3) def room_type_new(self): msg1 = "Můžete jít [r]ovně, v[p]ravo, nebo v[l]evo.\n" pl_opt1 = "r" # 1 last_dir_new1 = "North" pl_opt2 = "p" # 2 last_dir_new2 = "East" pl_opt3 = "l" # 3 last_dir_new3 = "West" last_dir_old1 = "South" msg2 = "Můžete jít [z]pět, v[l]evo, nebo v[p]ravo.\n" pl_opt4 = "z" # 1 pl_opt5 = "l" # 2 pl_opt6 = "p" # 3 last_dir_old2 = "West" msg3 = "Můžete jít [z]pět, v[p]ravo, nebo [r]ovně.\n" pl_opt7 = "z" # 2 pl_opt8 = "p" # 1 pl_opt9 = "r" # 3 last_dir_old3 = "East" msg4 = "Můžete jít [z]pět, [r]ovně, nebo v[l]evo.\n" pl_opt10 = "z" # 3 pl_opt11 = "r" # 2 pl_opt12 = "l" # 1 return self.room_pattern_three(msg1, msg2, msg3, msg4, pl_opt1, pl_opt2, pl_opt3, pl_opt4, pl_opt5, pl_opt6, pl_opt7, pl_opt8, pl_opt9, pl_opt10, pl_opt11, pl_opt12, last_dir_old1, last_dir_old2, last_dir_old3, last_dir_new1, last_dir_new2, last_dir_new3) def room_type_nsw(self): msg1 = "Můžete jít [r]ovně, v[l]evo, nebo v[z]ad.\n" pl_opt1 = "r" # 1 last_dir_new1 = "North" pl_opt2 = "l" # 2 last_dir_new2 = "West" pl_opt3 = "z" # 3 last_dir_new3 = "South" last_dir_old1 = "South" msg2 = "Můžete jít [z]pět, v[p]ravo, nebo [r]ovně.\n" pl_opt4 = "z" # 1 pl_opt5 = "p" # 2 pl_opt6 = "r" # 3 last_dir_old2 = "North" msg3 = "Můžete jít [z]pět, [r]ovně, nebo v[l]evo.\n" pl_opt7 = "l" # 2 pl_opt8 = "r" # 1 pl_opt9 = "z" # 3 last_dir_old3 = "East" msg4 = "Můžete jít [z]pět, v[p]ravo, nebo v[l]evo.\n" pl_opt10 = "p" # 3 pl_opt11 = "z" # 2 pl_opt12 = "l" # 1 return self.room_pattern_three(msg1, msg2, msg3, msg4, pl_opt1, pl_opt2, pl_opt3, pl_opt4, pl_opt5, pl_opt6, pl_opt7, pl_opt8, pl_opt9, pl_opt10, pl_opt11, pl_opt12, last_dir_old1, last_dir_old2, last_dir_old3, last_dir_new1, last_dir_new2, last_dir_new3) def 
room_type_esw(self): msg1 = "Můžete jít [r]ovně, v[p]ravo, nebo v[z]ad.\n" pl_opt1 = "r" # 1 last_dir_new1 = "East" pl_opt2 = "p" # 2 last_dir_new2 = "South" pl_opt3 = "z" # 3 last_dir_new3 = "West" last_dir_old1 = "West" msg2 = "Můžete jít [z]pět, v[l]evo, nebo [r]ovně.\n" pl_opt4 = "z" # 1 pl_opt5 = "l" # 2 pl_opt6 = "r" # 3 last_dir_old2 = "North" msg3 = "Můžete jít [z]pět, [r]ovně, nebo v[l]evo.\n" pl_opt7 = "z" # 2 pl_opt8 = "p" # 1 pl_opt9 = "l" # 3 last_dir_old3 = "East" msg4 = "Můžete jít [z]pět, v[p]ravo, nebo [r]ovně.\n" pl_opt10 = "z" # 3 pl_opt11 = "p" # 2 pl_opt12 = "r" # 1 return self.room_pattern_three(msg1, msg2, msg3, msg4, pl_opt1, pl_opt2, pl_opt3, pl_opt4, pl_opt5, pl_opt6, pl_opt7, pl_opt8, pl_opt9, pl_opt10, pl_opt11, pl_opt12, last_dir_old1, last_dir_old2, last_dir_old3, last_dir_new1, last_dir_new2, last_dir_new3) def room_type_nesw(self): while True: if player.last_direction is None: slow_print("Můžete jít [r]ovně, v[p]ravo, v[l]evo, nebo v[z]ad.\n") direction_choice = input() if direction_choice is "r": player.last_direction = "North" break elif direction_choice is "p": player.last_direction = "East" break elif direction_choice is "z": player.last_direction = "South" break elif direction_choice is "l": player.last_direction = "West" break else: wrong_input(0) elif player.last_direction is "North": slow_print("Můžete jít [z]pět, v[l]evo, v[p]ravo, nebo [r]ovně.\n") direction_choice = input() if direction_choice is "z": player.last_direction = "South" break elif direction_choice is "l": player.last_direction = "West" break elif direction_choice is "r": player.last_direction = "North" break elif direction_choice is "p": player.last_direction = "East" break else: wrong_input(0) elif player.last_direction is "East": slow_print("Můžete jít [z]pět, v[l]evo, v[p]ravo, nebo [r]ovně.\n") direction_choice = input() if direction_choice is "z": player.last_direction = "West" break elif direction_choice is "l": player.last_direction = "North" break elif 
direction_choice is "r": player.last_direction = "East" break elif direction_choice is "p": player.last_direction = "South" break else: wrong_input(0) elif player.last_direction is "South": slow_print("Můžete jít [z]pět, v[l]evo, v[p]ravo, nebo [r]ovně.\n") direction_choice = input() if direction_choice is "z": player.last_direction = "North" break elif direction_choice is "l": player.last_direction = "East" break elif direction_choice is "r": player.last_direction = "South" break elif direction_choice is "p": player.last_direction = "West" break else: wrong_input(0) elif player.last_direction is "West": slow_print("Můžete jít [z]pět, v[l]evo, v[p]ravo, nebo [r]ovně.\n") direction_choice = input() if direction_choice is "z": player.last_direction = "East" break elif direction_choice is "l": player.last_direction = "South" break elif direction_choice is "r": player.last_direction = "West" break elif direction_choice is "p": player.last_direction = "North" break else: wrong_input(0) return self.get_coordinates() class Rooms: fighting = Fight() r_types = RoomTypes() room_six_health_potions = True room_two_fight = False # 1 room_four_fight = True # 2 room_six_fight = True # 2 room_seven_fight = False # 1 room_eight_fight = True # 3 room_twelve_fight = False # 1 def room_one_spawn_one(self): self.r_types.room_type_n() player.last_fight = False return def room_two(self): if self.room_two_fight is True: self.fighting.main_(1) self.room_two_fight = False player.last_fight = True self.r_types.room_type_sw() return def room_three(self): player.last_fight = False self.r_types.room_type_nesw() return def room_four(self): if self.room_four_fight is True: self.fighting.main_(2) self.room_four_fight = False player.last_fight = True self.r_types.room_type_ns() return def room_five(self): player.last_fight = False self.r_types.room_type_ew() return def room_six(self): if self.room_six_fight is True: self.fighting.main_(2) self.room_six_fight = False player.last_fight = True if 
self.room_six_health_potions is True: while True: slow_print("Na zemi leží 2 léčící lektvary. Chcete si je [v]zít, nebo [n]e?\n") potion_choice = base_options() if potion_choice == "v": player.health_potions += 2 self.room_six_health_potions = False break elif potion_choice == "n": break elif potion_choice != "skip": wrong_input(0) self.r_types.room_type_e() return def room_seven(self): if self.room_seven_fight is True: self.fighting.main_(1) self.room_seven_fight = False player.last_fight = True self.r_types.room_type_ns() return def room_eight(self): if self.room_eight_fight is True: self.fighting.main_(3) self.room_eight_fight = False player.last_fight = True self.r_types.room_type_nsw() return def room_nine(self): player.last_fight = False self.r_types.room_type_n() return def room_ten_exit(self): player.last_fight = False slow_print("Vstoupili jste do další místnosti, vidíte před sebou schody vedoucí směrem dolů...\n") time.sleep(2) shutdown() self.r_types.room_type_e() return def room_eleven(self): player.last_fight = False self.r_types.room_type_ns() return def room_twelve(self): if self.room_twelve_fight is True: self.fighting.main_(1) self.room_twelve_fight = False player.last_fight = True self.r_types.room_type_sw() return def room_thirteen_spawn_two(self): player.last_fight = False self.r_types.room_type_e() return class RoomChanging: rooms = Rooms() def room_picking(self): while True: if player.x == 0 and player.y == 2: self.rooms.room_six() elif player.x == 1 and player.y == 2: self.rooms.room_five() elif player.x == 2 and player.y == 2: self.rooms.room_three() elif player.x == 2 and player.y == 1: self.rooms.room_four() elif player.x == 2 and player.y == 0: self.rooms.room_eleven() elif player.x == 2 and player.y == -1: self.rooms.room_twelve() elif player.x == 1 and player.y == -1: self.rooms.room_thirteen_spawn_two() elif player.x == 3 and player.y == 2: self.rooms.room_two() elif player.x == 3 and player.y == 3: self.rooms.room_one_spawn_one() elif 
player.x == 2 and player.y == 3: self.rooms.room_seven() elif player.x == 2 and player.y == 4: self.rooms.room_eight() elif player.x == 2 and player.y == 5: self.rooms.room_nine() elif player.x == 1 and player.y == 4: self.rooms.room_ten_exit() else: # control part print("Na x - {} a y - {} nic není.".format(player.x, player.y)) path = RoomChanging() path.room_picking() input()
<filename>old_versions/travel.old.py from fight import * class RoomTypes: def get_coordinates(self): if player.last_direction is "North": player.y -= 1 elif player.last_direction is "East": player.x += 1 elif player.last_direction is "South": player.y += 1 elif player.last_direction is "West": player.x -= 1 return def room_pattern_one(self, last_dir_old1, last_dir_new1): while True: if player.last_direction is None: slow_print("Můžete jít pouze rovně, zmáčkněte enter až budete připraveni.\n") elif player.last_direction is "{}".format(last_dir_old1): slow_print("Můžete jít pouze zpět, zmáčkněte enter až budete připraveni.\n") direction_choice = base_options() if direction_choice != "skip": break player.last_direction = "{}".format(last_dir_new1) return self.get_coordinates() def room_pattern_two(self, msg1, msg2, msg3, pl_opt1, pl_opt2, pl_opt3, pl_opt4, pl_opt5, pl_opt6, last_dir_old1, last_dir_old2, last_dir_new1, last_dir_new2): while True: if player.last_direction is None: slow_print("{}".format(msg1)) direction_choice = base_options() if direction_choice is "{}".format(pl_opt1): player.last_direction = "{}".format(last_dir_new1) break elif direction_choice is "{}".format(pl_opt2): player.last_direction = "{}".format(last_dir_new2) break elif direction_choice != "skip": wrong_input(0) elif player.last_direction is "{}".format(last_dir_old1): slow_print("{}".format(msg2)) direction_choice = base_options() if direction_choice is "{}".format(pl_opt3): player.last_direction = "{}".format(last_dir_new1) break elif direction_choice is "{}".format(pl_opt4): player.last_direction = "{}".format(last_dir_new2) break elif direction_choice != "skip": wrong_input(0) elif player.last_direction is "{}".format(last_dir_old2): slow_print("{}".format(msg3)) direction_choice = base_options() if direction_choice is "{}".format(pl_opt5): player.last_direction = "{}".format(last_dir_new2) break elif direction_choice is "{}".format(pl_opt6): player.last_direction = 
"{}".format(last_dir_new1) break elif direction_choice != "skip": wrong_input(0) return self.get_coordinates() def room_pattern_three(self, msg1, msg2, msg3, msg4, pl_opt1, pl_opt2, pl_opt3, pl_opt4, pl_opt5, pl_opt6, pl_opt7, pl_opt8, pl_opt9, pl_opt10, pl_opt11, pl_opt12, last_dir_old1, last_dir_old2, last_dir_old3, last_dir_new1, last_dir_new2, last_dir_new3): while True: if player.last_direction is None: slow_print(msg1) direction_choice = base_options() if direction_choice is pl_opt1: player.last_direction = last_dir_new1 break elif direction_choice is pl_opt2: player.last_direction = last_dir_new2 break elif direction_choice is pl_opt3: player.last_direction = last_dir_new3 break elif direction_choice != "skip": wrong_input(0) elif player.last_direction is last_dir_old1: slow_print(msg2) direction_choice = base_options() if direction_choice is pl_opt4: player.last_direction = last_dir_new1 break elif direction_choice is pl_opt5: player.last_direction = last_dir_new2 break elif direction_choice is pl_opt6: player.last_direction = last_dir_new3 break elif direction_choice != "skip": wrong_input(0) elif player.last_direction is last_dir_old2: slow_print(msg3) direction_choice = base_options() if direction_choice is pl_opt7: player.last_direction = last_dir_new2 break elif direction_choice is pl_opt8: player.last_direction = last_dir_new1 break elif direction_choice is pl_opt9: player.last_direction = last_dir_new3 break elif direction_choice != "skip": wrong_input(0) elif player.last_direction is last_dir_old3: slow_print(msg4) direction_choice = base_options() if direction_choice is pl_opt10: player.last_direction = last_dir_new3 break elif direction_choice is pl_opt11: player.last_direction = last_dir_new2 break elif direction_choice is pl_opt12: player.last_direction = last_dir_new1 break elif direction_choice != "skip": wrong_input(0) return self.get_coordinates() def room_type_n(self): last_dir_old1 = "South" last_dir_new1 = "North" return 
self.room_pattern_one(last_dir_old1, last_dir_new1) def room_type_e(self): last_dir_old1 = "West" last_dir_new1 = "East" return self.room_pattern_one(last_dir_old1, last_dir_new1) def room_type_s(self): last_dir_old1 = "North" last_dir_new1 = "South" return self.room_pattern_one(last_dir_old1, last_dir_new1) def room_type_w(self): last_dir_old1 = "East" last_dir_new1 = "West" return self.room_pattern_one(last_dir_old1, last_dir_new1) def room_type_ne(self): msg1 = "Můžete jít [r]ovně, nebo v[p]ravo.\n" pl_opt1 = "r" last_dir_new1 = "North" pl_opt2 = "p" last_dir_new2 = "East" last_dir_old1 = "South" msg2 = "Můžete jít [z]pět, nebo v[l]evo.\n" pl_opt3 = "z" pl_opt4 = "l" last_dir_old2 = "West" msg3 = "Můžete jít [z]pět, nebo v[p]ravo.\n" pl_opt5 = "z" pl_opt6 = "p" return self.room_pattern_two(msg1, msg2, msg3, pl_opt1, pl_opt2, pl_opt3, pl_opt4, pl_opt5, pl_opt6, last_dir_old1, last_dir_old2, last_dir_new1, last_dir_new2) def room_type_ns(self): msg1 = "Můžete jít [r]ovně, nebo v[z]ad.\n" pl_opt1 = "r" last_dir_new1 = "North" pl_opt2 = "z" last_dir_new2 = "South" last_dir_old1 = "South" msg2 = "Můžete jít [z]pět, nebo [r]ovně.\n" pl_opt3 = "z" pl_opt4 = "r" last_dir_old2 = "North" msg3 = "Můžete jít [z]pět, nebo [r]ovně.\n" pl_opt5 = "z" pl_opt6 = "r" return self.room_pattern_two(msg1, msg2, msg3, pl_opt1, pl_opt2, pl_opt3, pl_opt4, pl_opt5, pl_opt6, last_dir_old1, last_dir_old2, last_dir_new1, last_dir_new2) def room_type_nw(self): msg1 = "Můžete jít [r]ovně, nebo v[l]evo.\n" pl_opt1 = "r" last_dir_new1 = "North" pl_opt2 = "l" last_dir_new2 = "West" last_dir_old1 = "South" msg2 = "Můžete jít [z]pět, nebo v[p]ravo.\n" pl_opt3 = "z" pl_opt4 = "p" last_dir_old2 = "East" msg3 = "Můžete jít [z]pět, nebo v[l]evo.\n" pl_opt5 = "z" pl_opt6 = "l" return self.room_pattern_two(msg1, msg2, msg3, pl_opt1, pl_opt2, pl_opt3, pl_opt4, pl_opt5, pl_opt6, last_dir_old1, last_dir_old2, last_dir_new1, last_dir_new2) def room_type_es(self): msg1 = "Můžete jít [r]ovně, nebo v[p]ravo.\n" 
pl_opt1 = "r" last_dir_new1 = "East" pl_opt2 = "p" last_dir_new2 = "South" last_dir_old1 = "West" msg2 = "Můžete jít [z]pět, nebo v[l]evo.\n" pl_opt3 = "z" pl_opt4 = "l" last_dir_old2 = "North" msg3 = "Můžete jít [z]pět, nebo v[p]ravo.\n" pl_opt5 = "z" pl_opt6 = "p" return self.room_pattern_two(msg1, msg2, msg3, pl_opt1, pl_opt2, pl_opt3, pl_opt4, pl_opt5, pl_opt6, last_dir_old1, last_dir_old2, last_dir_new1, last_dir_new2) def room_type_ew(self): msg1 = "Můžete jít [r]ovně, nebo v[z]ad.\n" pl_opt1 = "r" last_dir_new1 = "East" pl_opt2 = "z" last_dir_new2 = "West" last_dir_old1 = "West" msg2 = "Můžete jít [z]pět, nebo [r]ovně.\n" pl_opt3 = "z" pl_opt4 = "r" last_dir_old2 = "East" msg3 = "Můžete jít [z]pět, nebo [r]ovně.\n" pl_opt5 = "z" pl_opt6 = "r" return self.room_pattern_two(msg1, msg2, msg3, pl_opt1, pl_opt2, pl_opt3, pl_opt4, pl_opt5, pl_opt6, last_dir_old1, last_dir_old2, last_dir_new1, last_dir_new2) def room_type_sw(self): msg1 = "Můžete jít [r]ovně, nebo v[p]ravo.\n" pl_opt1 = "r" last_dir_new1 = "South" pl_opt2 = "p" last_dir_new2 = "West" last_dir_old1 = "North" msg2 = "Můžete jít [z]pět, nebo v[l]evo.\n" pl_opt3 = "z" pl_opt4 = "l" last_dir_old2 = "East" msg3 = "Můžete jít [z]pět, nebo v[p]ravo.\n" pl_opt5 = "z" pl_opt6 = "p" return self.room_pattern_two(msg1, msg2, msg3, pl_opt1, pl_opt2, pl_opt3, pl_opt4, pl_opt5, pl_opt6, last_dir_old1, last_dir_old2, last_dir_new1, last_dir_new2) def room_type_nes(self): msg1 = "Můžete jít [r]ovně, v[p]ravo, nebo v[z]ad.\n" pl_opt1 = "r" # 1 last_dir_new1 = "North" pl_opt2 = "p" # 2 last_dir_new2 = "East" pl_opt3 = "z" # 3 last_dir_new3 = "South" last_dir_old1 = "South" msg2 = "Můžete jít [z]pět, v[l]evo, nebo [r]ovně.\n" pl_opt4 = "z" # 1 pl_opt5 = "l" # 2 pl_opt6 = "r" # 3 last_dir_old2 = "West" msg3 = "Můžete jít [z]pět, v[p]ravo, nebo v[l]evo.\n" pl_opt7 = "z" # 2 pl_opt8 = "p" # 1 pl_opt9 = "l" # 3 last_dir_old3 = "North" msg4 = "Můžete jít [z]pět, [r]ovně, nebo v[p]ravo.\n" pl_opt10 = "z" # 3 pl_opt11 = "p" # 
2 pl_opt12 = "r" # 1 return self.room_pattern_three(msg1, msg2, msg3, msg4, pl_opt1, pl_opt2, pl_opt3, pl_opt4, pl_opt5, pl_opt6, pl_opt7, pl_opt8, pl_opt9, pl_opt10, pl_opt11, pl_opt12, last_dir_old1, last_dir_old2, last_dir_old3, last_dir_new1, last_dir_new2, last_dir_new3) def room_type_new(self): msg1 = "Můžete jít [r]ovně, v[p]ravo, nebo v[l]evo.\n" pl_opt1 = "r" # 1 last_dir_new1 = "North" pl_opt2 = "p" # 2 last_dir_new2 = "East" pl_opt3 = "l" # 3 last_dir_new3 = "West" last_dir_old1 = "South" msg2 = "Můžete jít [z]pět, v[l]evo, nebo v[p]ravo.\n" pl_opt4 = "z" # 1 pl_opt5 = "l" # 2 pl_opt6 = "p" # 3 last_dir_old2 = "West" msg3 = "Můžete jít [z]pět, v[p]ravo, nebo [r]ovně.\n" pl_opt7 = "z" # 2 pl_opt8 = "p" # 1 pl_opt9 = "r" # 3 last_dir_old3 = "East" msg4 = "Můžete jít [z]pět, [r]ovně, nebo v[l]evo.\n" pl_opt10 = "z" # 3 pl_opt11 = "r" # 2 pl_opt12 = "l" # 1 return self.room_pattern_three(msg1, msg2, msg3, msg4, pl_opt1, pl_opt2, pl_opt3, pl_opt4, pl_opt5, pl_opt6, pl_opt7, pl_opt8, pl_opt9, pl_opt10, pl_opt11, pl_opt12, last_dir_old1, last_dir_old2, last_dir_old3, last_dir_new1, last_dir_new2, last_dir_new3) def room_type_nsw(self): msg1 = "Můžete jít [r]ovně, v[l]evo, nebo v[z]ad.\n" pl_opt1 = "r" # 1 last_dir_new1 = "North" pl_opt2 = "l" # 2 last_dir_new2 = "West" pl_opt3 = "z" # 3 last_dir_new3 = "South" last_dir_old1 = "South" msg2 = "Můžete jít [z]pět, v[p]ravo, nebo [r]ovně.\n" pl_opt4 = "z" # 1 pl_opt5 = "p" # 2 pl_opt6 = "r" # 3 last_dir_old2 = "North" msg3 = "Můžete jít [z]pět, [r]ovně, nebo v[l]evo.\n" pl_opt7 = "l" # 2 pl_opt8 = "r" # 1 pl_opt9 = "z" # 3 last_dir_old3 = "East" msg4 = "Můžete jít [z]pět, v[p]ravo, nebo v[l]evo.\n" pl_opt10 = "p" # 3 pl_opt11 = "z" # 2 pl_opt12 = "l" # 1 return self.room_pattern_three(msg1, msg2, msg3, msg4, pl_opt1, pl_opt2, pl_opt3, pl_opt4, pl_opt5, pl_opt6, pl_opt7, pl_opt8, pl_opt9, pl_opt10, pl_opt11, pl_opt12, last_dir_old1, last_dir_old2, last_dir_old3, last_dir_new1, last_dir_new2, last_dir_new3) def 
room_type_esw(self): msg1 = "Můžete jít [r]ovně, v[p]ravo, nebo v[z]ad.\n" pl_opt1 = "r" # 1 last_dir_new1 = "East" pl_opt2 = "p" # 2 last_dir_new2 = "South" pl_opt3 = "z" # 3 last_dir_new3 = "West" last_dir_old1 = "West" msg2 = "Můžete jít [z]pět, v[l]evo, nebo [r]ovně.\n" pl_opt4 = "z" # 1 pl_opt5 = "l" # 2 pl_opt6 = "r" # 3 last_dir_old2 = "North" msg3 = "Můžete jít [z]pět, [r]ovně, nebo v[l]evo.\n" pl_opt7 = "z" # 2 pl_opt8 = "p" # 1 pl_opt9 = "l" # 3 last_dir_old3 = "East" msg4 = "Můžete jít [z]pět, v[p]ravo, nebo [r]ovně.\n" pl_opt10 = "z" # 3 pl_opt11 = "p" # 2 pl_opt12 = "r" # 1 return self.room_pattern_three(msg1, msg2, msg3, msg4, pl_opt1, pl_opt2, pl_opt3, pl_opt4, pl_opt5, pl_opt6, pl_opt7, pl_opt8, pl_opt9, pl_opt10, pl_opt11, pl_opt12, last_dir_old1, last_dir_old2, last_dir_old3, last_dir_new1, last_dir_new2, last_dir_new3) def room_type_nesw(self): while True: if player.last_direction is None: slow_print("Můžete jít [r]ovně, v[p]ravo, v[l]evo, nebo v[z]ad.\n") direction_choice = input() if direction_choice is "r": player.last_direction = "North" break elif direction_choice is "p": player.last_direction = "East" break elif direction_choice is "z": player.last_direction = "South" break elif direction_choice is "l": player.last_direction = "West" break else: wrong_input(0) elif player.last_direction is "North": slow_print("Můžete jít [z]pět, v[l]evo, v[p]ravo, nebo [r]ovně.\n") direction_choice = input() if direction_choice is "z": player.last_direction = "South" break elif direction_choice is "l": player.last_direction = "West" break elif direction_choice is "r": player.last_direction = "North" break elif direction_choice is "p": player.last_direction = "East" break else: wrong_input(0) elif player.last_direction is "East": slow_print("Můžete jít [z]pět, v[l]evo, v[p]ravo, nebo [r]ovně.\n") direction_choice = input() if direction_choice is "z": player.last_direction = "West" break elif direction_choice is "l": player.last_direction = "North" break elif 
direction_choice is "r": player.last_direction = "East" break elif direction_choice is "p": player.last_direction = "South" break else: wrong_input(0) elif player.last_direction is "South": slow_print("Můžete jít [z]pět, v[l]evo, v[p]ravo, nebo [r]ovně.\n") direction_choice = input() if direction_choice is "z": player.last_direction = "North" break elif direction_choice is "l": player.last_direction = "East" break elif direction_choice is "r": player.last_direction = "South" break elif direction_choice is "p": player.last_direction = "West" break else: wrong_input(0) elif player.last_direction is "West": slow_print("Můžete jít [z]pět, v[l]evo, v[p]ravo, nebo [r]ovně.\n") direction_choice = input() if direction_choice is "z": player.last_direction = "East" break elif direction_choice is "l": player.last_direction = "South" break elif direction_choice is "r": player.last_direction = "West" break elif direction_choice is "p": player.last_direction = "North" break else: wrong_input(0) return self.get_coordinates() class Rooms: fighting = Fight() r_types = RoomTypes() room_six_health_potions = True room_two_fight = False # 1 room_four_fight = True # 2 room_six_fight = True # 2 room_seven_fight = False # 1 room_eight_fight = True # 3 room_twelve_fight = False # 1 def room_one_spawn_one(self): self.r_types.room_type_n() player.last_fight = False return def room_two(self): if self.room_two_fight is True: self.fighting.main_(1) self.room_two_fight = False player.last_fight = True self.r_types.room_type_sw() return def room_three(self): player.last_fight = False self.r_types.room_type_nesw() return def room_four(self): if self.room_four_fight is True: self.fighting.main_(2) self.room_four_fight = False player.last_fight = True self.r_types.room_type_ns() return def room_five(self): player.last_fight = False self.r_types.room_type_ew() return def room_six(self): if self.room_six_fight is True: self.fighting.main_(2) self.room_six_fight = False player.last_fight = True if 
self.room_six_health_potions is True: while True: slow_print("Na zemi leží 2 léčící lektvary. Chcete si je [v]zít, nebo [n]e?\n") potion_choice = base_options() if potion_choice == "v": player.health_potions += 2 self.room_six_health_potions = False break elif potion_choice == "n": break elif potion_choice != "skip": wrong_input(0) self.r_types.room_type_e() return def room_seven(self): if self.room_seven_fight is True: self.fighting.main_(1) self.room_seven_fight = False player.last_fight = True self.r_types.room_type_ns() return def room_eight(self): if self.room_eight_fight is True: self.fighting.main_(3) self.room_eight_fight = False player.last_fight = True self.r_types.room_type_nsw() return def room_nine(self): player.last_fight = False self.r_types.room_type_n() return def room_ten_exit(self): player.last_fight = False slow_print("Vstoupili jste do další místnosti, vidíte před sebou schody vedoucí směrem dolů...\n") time.sleep(2) shutdown() self.r_types.room_type_e() return def room_eleven(self): player.last_fight = False self.r_types.room_type_ns() return def room_twelve(self): if self.room_twelve_fight is True: self.fighting.main_(1) self.room_twelve_fight = False player.last_fight = True self.r_types.room_type_sw() return def room_thirteen_spawn_two(self): player.last_fight = False self.r_types.room_type_e() return class RoomChanging: rooms = Rooms() def room_picking(self): while True: if player.x == 0 and player.y == 2: self.rooms.room_six() elif player.x == 1 and player.y == 2: self.rooms.room_five() elif player.x == 2 and player.y == 2: self.rooms.room_three() elif player.x == 2 and player.y == 1: self.rooms.room_four() elif player.x == 2 and player.y == 0: self.rooms.room_eleven() elif player.x == 2 and player.y == -1: self.rooms.room_twelve() elif player.x == 1 and player.y == -1: self.rooms.room_thirteen_spawn_two() elif player.x == 3 and player.y == 2: self.rooms.room_two() elif player.x == 3 and player.y == 3: self.rooms.room_one_spawn_one() elif 
player.x == 2 and player.y == 3: self.rooms.room_seven() elif player.x == 2 and player.y == 4: self.rooms.room_eight() elif player.x == 2 and player.y == 5: self.rooms.room_nine() elif player.x == 1 and player.y == 4: self.rooms.room_ten_exit() else: # control part print("Na x - {} a y - {} nic není.".format(player.x, player.y)) path = RoomChanging() path.room_picking() input()
en
0.360159
# 1 # 2 # 3 # 1 # 2 # 3 # 2 # 1 # 3 # 3 # 2 # 1 # 1 # 2 # 3 # 1 # 2 # 3 # 2 # 1 # 3 # 3 # 2 # 1 # 1 # 2 # 3 # 1 # 2 # 3 # 2 # 1 # 3 # 3 # 2 # 1 # 1 # 2 # 3 # 1 # 2 # 3 # 2 # 1 # 3 # 3 # 2 # 1 # 1 # 2 # 2 # 1 # 3 # 1 # control part
2.74231
3
lib/modes/mode_youtube.py
okonomichiyaki/parrot.py
80
6624578
<reponame>okonomichiyaki/parrot.py from lib.detection_strategies import single_tap_detection, loud_detection, medium_detection, percentage_detection import threading import numpy as np import pyautogui from pyautogui import press, hotkey, click, scroll, typewrite, moveRel, moveTo, position from time import sleep from subprocess import call from lib.system_toggles import toggle_eyetracker, turn_on_sound, mute_sound, toggle_speechrec import os class YoutubeMode: def __init__(self, modeSwitcher): self.mode = "regular" self.modeSwitcher = modeSwitcher def start( self ): turn_on_sound() moveTo( 500, 500 ) click() press('space') press('space') moveTo(500, 2000) press('f') def handle_input( self, dataDicts ): if( percentage_detection(dataDicts, "whistle", 90 ) or percentage_detection(dataDicts, "bell", 90 ) ): self.modeSwitcher.switchMode('browse') def exit( self ): self.mode = "regular" mute_sound() press('space') press('esc')
from lib.detection_strategies import single_tap_detection, loud_detection, medium_detection, percentage_detection import threading import numpy as np import pyautogui from pyautogui import press, hotkey, click, scroll, typewrite, moveRel, moveTo, position from time import sleep from subprocess import call from lib.system_toggles import toggle_eyetracker, turn_on_sound, mute_sound, toggle_speechrec import os class YoutubeMode: def __init__(self, modeSwitcher): self.mode = "regular" self.modeSwitcher = modeSwitcher def start( self ): turn_on_sound() moveTo( 500, 500 ) click() press('space') press('space') moveTo(500, 2000) press('f') def handle_input( self, dataDicts ): if( percentage_detection(dataDicts, "whistle", 90 ) or percentage_detection(dataDicts, "bell", 90 ) ): self.modeSwitcher.switchMode('browse') def exit( self ): self.mode = "regular" mute_sound() press('space') press('esc')
none
1
2.456192
2
python/paddle/fluid/tests/unittests/test_sequence_expand.py
javakian/Paddle
0
6624579
<filename>python/paddle/fluid/tests/unittests/test_sequence_expand.py # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import print_function import unittest import numpy as np from op_test import OpTest class TestSequenceExpand(OpTest): def set_data(self): x_data = np.random.uniform(0.1, 1, [3, 1]).astype('float32') y_data = np.random.uniform(0.1, 1, [8, 1]).astype('float32') y_lod = [[1, 3, 4]] self.inputs = {'X': x_data, 'Y': (y_data, y_lod)} def compute(self): x = self.inputs['X'] x_data, x_lod = x if type(x) == tuple else (x, None) y_data, y_lod = self.inputs['Y'] if hasattr(self, 'attrs'): ref_level = self.attrs['ref_level'] else: ref_level = len(y_lod) - 1 out = np.zeros(shape=((0, ) + x_data.shape[1:]), dtype=x_data.dtype) if x_lod is None: # x_idx = [i for i in xrange(x_data.shape[0] + 1)] x_idx = [1] * x_data.shape[0] else: x_idx = x_lod[0] out_lod = [[]] offset = 0 for i in range(len(y_lod[ref_level])): repeat_num = y_lod[ref_level][i] x_len = x_idx[i] if repeat_num > 0: x_sub = x_data[offset:(offset + x_len), :] stacked_x_sub = x_sub for r in range(repeat_num - 1): stacked_x_sub = np.vstack((stacked_x_sub, x_sub)) out = np.vstack((out, stacked_x_sub)) if x_lod is not None: for j in range(repeat_num): out_lod[0].append(x_len) offset += x_len if x_lod is None: self.outputs = {'Out': out} else: self.outputs = {'Out': (out, out_lod)} def setUp(self): self.op_type = 'sequence_expand' 
self.set_data() self.compute() def test_check_output(self): self.check_output(check_dygraph=False) def test_check_grad(self): self.check_grad(["X"], "Out", check_dygraph=False) class TestSequenceExpandCase1(TestSequenceExpand): def set_data(self): x_data = np.random.uniform(0.1, 1, [5, 1]).astype('float32') y_data = np.random.uniform(0.1, 1, [13, 1]).astype('float32') y_lod = [[2, 3], [2, 2, 3, 3, 3]] self.inputs = {'X': x_data, 'Y': (y_data, y_lod)} self.attrs = {'ref_level': 1} class TestSequenceExpandCase2(TestSequenceExpand): def set_data(self): x_data = np.random.uniform(0.1, 1, [1, 2, 2]).astype('float32') x_lod = [[1]] y_data = np.random.uniform(0.1, 1, [2, 2, 2]).astype('float32') y_lod = [[2], [1, 1]] self.inputs = {'X': (x_data, x_lod), 'Y': (y_data, y_lod)} self.attrs = {'ref_level': 0} class TestSequenceExpandCase3(TestSequenceExpand): def set_data(self): x_data = np.random.uniform(0.1, 1, [4, 1]).astype('float32') x_lod = [[1, 1, 1, 1]] y_data = np.random.uniform(0.1, 1, [8, 1]).astype('float32') y_lod = [[2, 2, 2, 2]] self.inputs = {'X': (x_data, x_lod), 'Y': (y_data, y_lod)} class TestSequenceExpandCase4(TestSequenceExpand): def set_data(self): data = np.random.uniform(0.1, 1, [5 * 2, 1]) x_data = np.array(data).reshape([5, 2]).astype('float32') x_lod = [[2, 3]] y_data = np.random.uniform(0.1, 1, [5, 1]).astype('float32') y_lod = [[2], [2, 3]] self.inputs = {'X': (x_data, x_lod), 'Y': (y_data, y_lod)} class TestSequenceExpandCase5(TestSequenceExpand): def set_data(self): x_data = np.random.uniform(0.1, 1, [6, 1]).astype('float32') y_data = np.random.uniform(0.1, 1, [13, 1]).astype('float32') y_lod = [[2, 4], [2, 2, 3, 0, 3, 3]] self.inputs = {'X': x_data, 'Y': (y_data, y_lod)} self.attrs = {'ref_level': 1} class TestSequenceExpandCase6(TestSequenceExpand): def set_data(self): x_data = np.random.uniform(0.1, 1, [4, 1]).astype('float32') x_lod = [[1, 1, 0, 1, 1]] y_data = np.random.uniform(0.1, 1, [8, 1]).astype('float32') y_lod = [[0, 2, 4, 2, 0]] 
self.inputs = {'X': (x_data, x_lod), 'Y': (y_data, y_lod)} if __name__ == '__main__': unittest.main()
<filename>python/paddle/fluid/tests/unittests/test_sequence_expand.py # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import print_function import unittest import numpy as np from op_test import OpTest class TestSequenceExpand(OpTest): def set_data(self): x_data = np.random.uniform(0.1, 1, [3, 1]).astype('float32') y_data = np.random.uniform(0.1, 1, [8, 1]).astype('float32') y_lod = [[1, 3, 4]] self.inputs = {'X': x_data, 'Y': (y_data, y_lod)} def compute(self): x = self.inputs['X'] x_data, x_lod = x if type(x) == tuple else (x, None) y_data, y_lod = self.inputs['Y'] if hasattr(self, 'attrs'): ref_level = self.attrs['ref_level'] else: ref_level = len(y_lod) - 1 out = np.zeros(shape=((0, ) + x_data.shape[1:]), dtype=x_data.dtype) if x_lod is None: # x_idx = [i for i in xrange(x_data.shape[0] + 1)] x_idx = [1] * x_data.shape[0] else: x_idx = x_lod[0] out_lod = [[]] offset = 0 for i in range(len(y_lod[ref_level])): repeat_num = y_lod[ref_level][i] x_len = x_idx[i] if repeat_num > 0: x_sub = x_data[offset:(offset + x_len), :] stacked_x_sub = x_sub for r in range(repeat_num - 1): stacked_x_sub = np.vstack((stacked_x_sub, x_sub)) out = np.vstack((out, stacked_x_sub)) if x_lod is not None: for j in range(repeat_num): out_lod[0].append(x_len) offset += x_len if x_lod is None: self.outputs = {'Out': out} else: self.outputs = {'Out': (out, out_lod)} def setUp(self): self.op_type = 'sequence_expand' 
self.set_data() self.compute() def test_check_output(self): self.check_output(check_dygraph=False) def test_check_grad(self): self.check_grad(["X"], "Out", check_dygraph=False) class TestSequenceExpandCase1(TestSequenceExpand): def set_data(self): x_data = np.random.uniform(0.1, 1, [5, 1]).astype('float32') y_data = np.random.uniform(0.1, 1, [13, 1]).astype('float32') y_lod = [[2, 3], [2, 2, 3, 3, 3]] self.inputs = {'X': x_data, 'Y': (y_data, y_lod)} self.attrs = {'ref_level': 1} class TestSequenceExpandCase2(TestSequenceExpand): def set_data(self): x_data = np.random.uniform(0.1, 1, [1, 2, 2]).astype('float32') x_lod = [[1]] y_data = np.random.uniform(0.1, 1, [2, 2, 2]).astype('float32') y_lod = [[2], [1, 1]] self.inputs = {'X': (x_data, x_lod), 'Y': (y_data, y_lod)} self.attrs = {'ref_level': 0} class TestSequenceExpandCase3(TestSequenceExpand): def set_data(self): x_data = np.random.uniform(0.1, 1, [4, 1]).astype('float32') x_lod = [[1, 1, 1, 1]] y_data = np.random.uniform(0.1, 1, [8, 1]).astype('float32') y_lod = [[2, 2, 2, 2]] self.inputs = {'X': (x_data, x_lod), 'Y': (y_data, y_lod)} class TestSequenceExpandCase4(TestSequenceExpand): def set_data(self): data = np.random.uniform(0.1, 1, [5 * 2, 1]) x_data = np.array(data).reshape([5, 2]).astype('float32') x_lod = [[2, 3]] y_data = np.random.uniform(0.1, 1, [5, 1]).astype('float32') y_lod = [[2], [2, 3]] self.inputs = {'X': (x_data, x_lod), 'Y': (y_data, y_lod)} class TestSequenceExpandCase5(TestSequenceExpand): def set_data(self): x_data = np.random.uniform(0.1, 1, [6, 1]).astype('float32') y_data = np.random.uniform(0.1, 1, [13, 1]).astype('float32') y_lod = [[2, 4], [2, 2, 3, 0, 3, 3]] self.inputs = {'X': x_data, 'Y': (y_data, y_lod)} self.attrs = {'ref_level': 1} class TestSequenceExpandCase6(TestSequenceExpand): def set_data(self): x_data = np.random.uniform(0.1, 1, [4, 1]).astype('float32') x_lod = [[1, 1, 0, 1, 1]] y_data = np.random.uniform(0.1, 1, [8, 1]).astype('float32') y_lod = [[0, 2, 4, 2, 0]] 
self.inputs = {'X': (x_data, x_lod), 'Y': (y_data, y_lod)} if __name__ == '__main__': unittest.main()
en
0.846132
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # x_idx = [i for i in xrange(x_data.shape[0] + 1)]
2.401684
2
recip/util/Config.py
anthonybuckle/Reciprocity-Core
0
6624580
from recip.util import DataType import json import os class Config: with open('recip/config/config.json', 'r') as configFile: config = json.load(configFile) def setKeyValue(key, value): Config.config[key] = value def getIntValue(key, base=10): return DataType.asInt(Config.config[key], base) def getBoolValue(key): return DataType.asBool(Config.config[key]) def getDecimalValue(key): return DataType.asDecimal(Config.config[key]) def getBytesValue(key, hexStr=True): if hexStr: return DataType.fromHex(Config.config[key]) else: return DataType.serialize(Config.config[key]) def getFilePath(directoryKey, fileKey): directory = getValue(directoryKey) file = getValue(fileKey) if not os.path.exists(directory): os.makedirs(directory) return os.path.join(directory, file) def getValues(key): values = getValue(key) return values.split(',') def getValue(key): return Config.config[key]
from recip.util import DataType import json import os class Config: with open('recip/config/config.json', 'r') as configFile: config = json.load(configFile) def setKeyValue(key, value): Config.config[key] = value def getIntValue(key, base=10): return DataType.asInt(Config.config[key], base) def getBoolValue(key): return DataType.asBool(Config.config[key]) def getDecimalValue(key): return DataType.asDecimal(Config.config[key]) def getBytesValue(key, hexStr=True): if hexStr: return DataType.fromHex(Config.config[key]) else: return DataType.serialize(Config.config[key]) def getFilePath(directoryKey, fileKey): directory = getValue(directoryKey) file = getValue(fileKey) if not os.path.exists(directory): os.makedirs(directory) return os.path.join(directory, file) def getValues(key): values = getValue(key) return values.split(',') def getValue(key): return Config.config[key]
none
1
2.670625
3
tensorflow-client/python/mnist.py
fwz-fpga/inference-client
20
6624581
<gh_stars>10-100 #!/usr/bin/env python import sys import numpy as np import tensorflow as tf import requests from tensorflow_serving.apis import predict_pb2 from google.protobuf.json_format import MessageToDict def sendRequest(url): request = predict_pb2.PredictRequest() response = predict_pb2.PredictResponse() request.model_spec.name = 'mnist' request.model_spec.signature_name = 'predict_images' array = np.random.ranf(784).reshape(1,784).astype(np.float32) request.inputs['images'].CopyFrom( tf.make_tensor_proto(array)) data = request.SerializeToString() data_type = "application/proto" #!!! Add Appcode here token = "<PASSWORD>_CODE" headers = { # !!! set content type 'content-type': data_type, # !!! replace your token 'Authorization': "AppCode " + token } res = requests.post(url, data, headers=headers, verify=False) if (res.status_code == 200 and res.headers['Content-Type'] == data_type): # print res.content response.ParseFromString(res.content) print(MessageToDict(response)) else: # handle error msg print(res.headers['X-Ddy-Error-Message']) print(res.content) if __name__ == '__main__': url = sys.argv[1] sendRequest(url)
#!/usr/bin/env python import sys import numpy as np import tensorflow as tf import requests from tensorflow_serving.apis import predict_pb2 from google.protobuf.json_format import MessageToDict def sendRequest(url): request = predict_pb2.PredictRequest() response = predict_pb2.PredictResponse() request.model_spec.name = 'mnist' request.model_spec.signature_name = 'predict_images' array = np.random.ranf(784).reshape(1,784).astype(np.float32) request.inputs['images'].CopyFrom( tf.make_tensor_proto(array)) data = request.SerializeToString() data_type = "application/proto" #!!! Add Appcode here token = "<PASSWORD>_CODE" headers = { # !!! set content type 'content-type': data_type, # !!! replace your token 'Authorization': "AppCode " + token } res = requests.post(url, data, headers=headers, verify=False) if (res.status_code == 200 and res.headers['Content-Type'] == data_type): # print res.content response.ParseFromString(res.content) print(MessageToDict(response)) else: # handle error msg print(res.headers['X-Ddy-Error-Message']) print(res.content) if __name__ == '__main__': url = sys.argv[1] sendRequest(url)
en
0.192514
#!/usr/bin/env python #!!! Add Appcode here # !!! set content type # !!! replace your token # print res.content # handle error msg
2.570224
3
flexget/plugins/plugin_include.py
tvcsantos/Flexget
0
6624582
from __future__ import unicode_literals, division, absolute_import import logging import os import yaml from flexget import plugin from flexget.config_schema import one_or_more, process_config from flexget.event import event from flexget.utils.tools import MergeException, merge_dict_from_to log = logging.getLogger('include') class PluginInclude(object): """ Include configuration from another yaml file. Example:: include: series.yml File content must be valid for a task configuration """ schema = one_or_more({'type': 'string'}) @plugin.priority(254) def on_task_start(self, task, config): if not config: return files = config if isinstance(config, basestring): files = [config] for name in files: name = os.path.expanduser(name) if not os.path.isabs(name): name = os.path.join(task.manager.config_base, name) include = yaml.load(file(name)) errors = process_config(include, plugin.plugin_schemas(context='task')) if errors: log.error('Included file %s has invalid config:' % name) for error in errors: log.error('[%s] %s', error.json_pointer, error.message) task.abort('Invalid config in included file %s' % name) log.debug('Merging %s into task %s' % (name, task.name)) # merge try: merge_dict_from_to(include, task.config) except MergeException: raise plugin.PluginError('Failed to merge include file to task %s, incompatible datatypes' % task.name) @event('plugin.register') def register_plugin(): plugin.register(PluginInclude, 'include', api_ver=2, builtin=True)
from __future__ import unicode_literals, division, absolute_import import logging import os import yaml from flexget import plugin from flexget.config_schema import one_or_more, process_config from flexget.event import event from flexget.utils.tools import MergeException, merge_dict_from_to log = logging.getLogger('include') class PluginInclude(object): """ Include configuration from another yaml file. Example:: include: series.yml File content must be valid for a task configuration """ schema = one_or_more({'type': 'string'}) @plugin.priority(254) def on_task_start(self, task, config): if not config: return files = config if isinstance(config, basestring): files = [config] for name in files: name = os.path.expanduser(name) if not os.path.isabs(name): name = os.path.join(task.manager.config_base, name) include = yaml.load(file(name)) errors = process_config(include, plugin.plugin_schemas(context='task')) if errors: log.error('Included file %s has invalid config:' % name) for error in errors: log.error('[%s] %s', error.json_pointer, error.message) task.abort('Invalid config in included file %s' % name) log.debug('Merging %s into task %s' % (name, task.name)) # merge try: merge_dict_from_to(include, task.config) except MergeException: raise plugin.PluginError('Failed to merge include file to task %s, incompatible datatypes' % task.name) @event('plugin.register') def register_plugin(): plugin.register(PluginInclude, 'include', api_ver=2, builtin=True)
en
0.75432
Include configuration from another yaml file. Example:: include: series.yml File content must be valid for a task configuration # merge
2.130388
2
hdfs/scripts/configure-hadoop.py
reynoldsm88/uzumaki
0
6624583
<reponame>reynoldsm88/uzumaki import os import argparse hdfs_config_dir = os.environ[ "HADOOP_PREFIX" ] + "/etc/hadoop" def write_core_site( hostname ): core_site_template = read_file_as_string( "/tmp/core-site-template.xml" ) core_site_config = core_site_template.replace("${hostname}", hostname ) write_file( hdfs_config_dir + "/core-site.xml", core_site_config ) def write_hdfs_site( hostname ): core_site_template = read_file_as_string( "/tmp/hdfs-site-template.xml" ) core_site_config = core_site_template.replace( "${hostname}", hostname ) write_file( hdfs_config_dir + "/hdfs-site.xml", core_site_config ) def read_file_as_string( filename ): file = open( filename ) return file.read() def write_file( path, content ): file = open( path, "w" ) file.write( content ) file.close() def configure_hdfs(): parser = argparse.ArgumentParser() parser.add_argument( "-hostname" ) args = parser.parse_args() hostname = args.hostname write_core_site( hostname ) write_hdfs_site( hostname ) if __name__ == "__main__": configure_hdfs()
import os import argparse hdfs_config_dir = os.environ[ "HADOOP_PREFIX" ] + "/etc/hadoop" def write_core_site( hostname ): core_site_template = read_file_as_string( "/tmp/core-site-template.xml" ) core_site_config = core_site_template.replace("${hostname}", hostname ) write_file( hdfs_config_dir + "/core-site.xml", core_site_config ) def write_hdfs_site( hostname ): core_site_template = read_file_as_string( "/tmp/hdfs-site-template.xml" ) core_site_config = core_site_template.replace( "${hostname}", hostname ) write_file( hdfs_config_dir + "/hdfs-site.xml", core_site_config ) def read_file_as_string( filename ): file = open( filename ) return file.read() def write_file( path, content ): file = open( path, "w" ) file.write( content ) file.close() def configure_hdfs(): parser = argparse.ArgumentParser() parser.add_argument( "-hostname" ) args = parser.parse_args() hostname = args.hostname write_core_site( hostname ) write_hdfs_site( hostname ) if __name__ == "__main__": configure_hdfs()
none
1
2.615505
3
smore/models/box.py
isabella232/smore
78
6624584
# Copyright 2021 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import absolute_import from __future__ import division from __future__ import print_function import logging import numpy as np import torch import torch.nn as nn import torch.nn.functional as F import time import pdb from smore.models.kg_reasoning import KGReasoning from smore.common.modules import Identity from smore.common.embedding.sparse_embed import SparseEmbedding from smore.common.torchext.ext_ops import box_dist_in, box_dist_out class BoxOffsetIntersection(nn.Module): def __init__(self, dim): super(BoxOffsetIntersection, self).__init__() self.dim = dim self.layers = nn.Parameter(torch.zeros(self.dim*2+2, self.dim)) nn.init.xavier_uniform_(self.layers[:self.dim*2, :]) def forward(self, embeddings): w1, w2, b1, b2 = torch.split(self.layers, [self.dim, self.dim, 1, 1], dim=0) layer1_act = F.relu(F.linear(embeddings, w1, b1.view(-1))) layer1_mean = torch.mean(layer1_act, dim=0) gate = torch.sigmoid(F.linear(layer1_mean, w2, b2.view(-1))) offset, _ = torch.min(embeddings, dim=0) return offset * gate class CenterIntersection(nn.Module): def __init__(self, dim): super(CenterIntersection, self).__init__() self.dim = dim self.layers = nn.Parameter(torch.zeros(self.dim*2+2, self.dim)) nn.init.xavier_uniform_(self.layers[:self.dim*2, :]) def forward(self, embeddings): w1, w2, b1, b2 = torch.split(self.layers, [self.dim, self.dim, 1, 1], dim=0) layer1_act = F.relu(F.linear(embeddings, w1, 
b1.view(-1))) # (num_conj, dim) attention = F.softmax(F.linear(layer1_act, w2, b2.view(-1)), dim=0) # (num_conj, dim) embedding = torch.sum(attention * embeddings, dim=0) return embedding class BoxReasoning(KGReasoning): def __init__(self, nentity, nrelation, hidden_dim, gamma, optim_mode, batch_size, test_batch_size=1, sparse_embeddings=None, sparse_device='gpu', use_cuda=False, query_name_dict=None, box_mode=None,logit_impl='native'): super(BoxReasoning, self).__init__(nentity=nentity, nrelation=nrelation, hidden_dim=hidden_dim, gamma=gamma, optim_mode=optim_mode, batch_size=batch_size, test_batch_size=test_batch_size, sparse_embeddings=sparse_embeddings, sparse_device=sparse_device, use_cuda=use_cuda, query_name_dict=query_name_dict, logit_impl=logit_impl) self.geo = 'box' self.entity_embedding = SparseEmbedding(nentity, self.entity_dim) activation, cen = box_mode self.cen = cen # hyperparameter that balances the in-box distance and the out-box distance if activation == 'none': self.func = Identity elif activation == 'relu': self.func = F.relu elif activation == 'softplus': self.func = F.softplus self.offset_embedding = SparseEmbedding(nrelation, self.entity_dim) self.center_net = CenterIntersection(self.entity_dim) self.offset_net = BoxOffsetIntersection(self.entity_dim) self.num_embedding_component = 2 self.init_params() def named_sparse_embeddings(self): list_sparse = super(BoxReasoning, self).named_sparse_embeddings() if 'r' in self.sparse_embeddings: list_sparse.append(("offset_embedding", self.offset_embedding)) return list_sparse def named_dense_embedding_params(self): pgen = super(BoxReasoning, self).named_dense_embedding_params() for name, param in pgen: yield name, param if 'r' not in self.sparse_embeddings: for name, param in self.offset_embedding.named_parameters(): yield name, param def to_device(self, device): super(BoxReasoning, self).to_device(device) self.center_net = self.center_net.to(device) self.offset_net = self.offset_net.to(device) 
self.zero_offset_tensor = torch.zeros([self.batch_size, 1, self.entity_dim]).to(device) self.empty_logit_tensor = torch.tensor([]).to(device) if 'r' not in self.sparse_embeddings or self.sparse_device == 'gpu': self.offset_embedding = self.offset_embedding.cuda(device) def init_params(self): super(BoxReasoning, self).init_params() self.offset_embedding.init_params(0, self.embedding_range) def share_memory(self): super(BoxReasoning, self).share_memory() self.center_net.share_memory() self.offset_net.share_memory() self.offset_embedding.share_memory() def relation_projection(self, cur_embedding, relation_ids): relation_embedding = self.relation_embedding(relation_ids).unsqueeze(1) offset_embedding = self.offset_embedding(relation_ids).unsqueeze(1) return [cur_embedding[0] + relation_embedding, cur_embedding[1] + self.func(offset_embedding)] def retrieve_embedding(self, entity_ids): ''' Retrieve the entity embeddings given the entity indices Params: entity_ids: a list of entities indices ''' embedding = self.entity_embedding(entity_ids) offset_embedding = torch.zeros_like(embedding).to(embedding.device) return [embedding.unsqueeze(1), offset_embedding.unsqueeze(1)] def intersection_between_stacked_embedding(self, stacked_embedding_list): embedding, offset_embedding = torch.chunk(stacked_embedding_list, 2, dim=-1) embedding = self.center_net(embedding) # [32, 6, 16] offset_embedding = self.offset_net(offset_embedding) return [embedding, offset_embedding] def native_cal_logit(self, entity_embedding, entity_feat, query_embedding): assert entity_feat is None query_center_embedding, query_offset_embedding = query_embedding delta = (entity_embedding.unsqueeze(1) - query_center_embedding).abs() distance_out = F.relu(delta - query_offset_embedding) distance_in = torch.min(delta, query_offset_embedding) logit = self.gamma - torch.norm(distance_out, p=1, dim=-1) - self.cen * torch.norm(distance_in, p=1, dim=-1) logit = torch.max(logit, dim=1)[0] return logit def 
custom_cal_logit(self, entity_embedding, entity_feat, query_embedding): assert entity_feat is None query_center_embedding, query_offset_embedding = query_embedding d1 = box_dist_out(entity_embedding, query_center_embedding, query_offset_embedding) d2 = box_dist_in(entity_embedding, query_center_embedding, query_offset_embedding) logit = self.gamma - d1 - self.cen * d2 logit = torch.max(logit, dim=1)[0] return logit
# Copyright 2021 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import absolute_import from __future__ import division from __future__ import print_function import logging import numpy as np import torch import torch.nn as nn import torch.nn.functional as F import time import pdb from smore.models.kg_reasoning import KGReasoning from smore.common.modules import Identity from smore.common.embedding.sparse_embed import SparseEmbedding from smore.common.torchext.ext_ops import box_dist_in, box_dist_out class BoxOffsetIntersection(nn.Module): def __init__(self, dim): super(BoxOffsetIntersection, self).__init__() self.dim = dim self.layers = nn.Parameter(torch.zeros(self.dim*2+2, self.dim)) nn.init.xavier_uniform_(self.layers[:self.dim*2, :]) def forward(self, embeddings): w1, w2, b1, b2 = torch.split(self.layers, [self.dim, self.dim, 1, 1], dim=0) layer1_act = F.relu(F.linear(embeddings, w1, b1.view(-1))) layer1_mean = torch.mean(layer1_act, dim=0) gate = torch.sigmoid(F.linear(layer1_mean, w2, b2.view(-1))) offset, _ = torch.min(embeddings, dim=0) return offset * gate class CenterIntersection(nn.Module): def __init__(self, dim): super(CenterIntersection, self).__init__() self.dim = dim self.layers = nn.Parameter(torch.zeros(self.dim*2+2, self.dim)) nn.init.xavier_uniform_(self.layers[:self.dim*2, :]) def forward(self, embeddings): w1, w2, b1, b2 = torch.split(self.layers, [self.dim, self.dim, 1, 1], dim=0) layer1_act = F.relu(F.linear(embeddings, w1, 
b1.view(-1))) # (num_conj, dim) attention = F.softmax(F.linear(layer1_act, w2, b2.view(-1)), dim=0) # (num_conj, dim) embedding = torch.sum(attention * embeddings, dim=0) return embedding class BoxReasoning(KGReasoning): def __init__(self, nentity, nrelation, hidden_dim, gamma, optim_mode, batch_size, test_batch_size=1, sparse_embeddings=None, sparse_device='gpu', use_cuda=False, query_name_dict=None, box_mode=None,logit_impl='native'): super(BoxReasoning, self).__init__(nentity=nentity, nrelation=nrelation, hidden_dim=hidden_dim, gamma=gamma, optim_mode=optim_mode, batch_size=batch_size, test_batch_size=test_batch_size, sparse_embeddings=sparse_embeddings, sparse_device=sparse_device, use_cuda=use_cuda, query_name_dict=query_name_dict, logit_impl=logit_impl) self.geo = 'box' self.entity_embedding = SparseEmbedding(nentity, self.entity_dim) activation, cen = box_mode self.cen = cen # hyperparameter that balances the in-box distance and the out-box distance if activation == 'none': self.func = Identity elif activation == 'relu': self.func = F.relu elif activation == 'softplus': self.func = F.softplus self.offset_embedding = SparseEmbedding(nrelation, self.entity_dim) self.center_net = CenterIntersection(self.entity_dim) self.offset_net = BoxOffsetIntersection(self.entity_dim) self.num_embedding_component = 2 self.init_params() def named_sparse_embeddings(self): list_sparse = super(BoxReasoning, self).named_sparse_embeddings() if 'r' in self.sparse_embeddings: list_sparse.append(("offset_embedding", self.offset_embedding)) return list_sparse def named_dense_embedding_params(self): pgen = super(BoxReasoning, self).named_dense_embedding_params() for name, param in pgen: yield name, param if 'r' not in self.sparse_embeddings: for name, param in self.offset_embedding.named_parameters(): yield name, param def to_device(self, device): super(BoxReasoning, self).to_device(device) self.center_net = self.center_net.to(device) self.offset_net = self.offset_net.to(device) 
self.zero_offset_tensor = torch.zeros([self.batch_size, 1, self.entity_dim]).to(device) self.empty_logit_tensor = torch.tensor([]).to(device) if 'r' not in self.sparse_embeddings or self.sparse_device == 'gpu': self.offset_embedding = self.offset_embedding.cuda(device) def init_params(self): super(BoxReasoning, self).init_params() self.offset_embedding.init_params(0, self.embedding_range) def share_memory(self): super(BoxReasoning, self).share_memory() self.center_net.share_memory() self.offset_net.share_memory() self.offset_embedding.share_memory() def relation_projection(self, cur_embedding, relation_ids): relation_embedding = self.relation_embedding(relation_ids).unsqueeze(1) offset_embedding = self.offset_embedding(relation_ids).unsqueeze(1) return [cur_embedding[0] + relation_embedding, cur_embedding[1] + self.func(offset_embedding)] def retrieve_embedding(self, entity_ids): ''' Retrieve the entity embeddings given the entity indices Params: entity_ids: a list of entities indices ''' embedding = self.entity_embedding(entity_ids) offset_embedding = torch.zeros_like(embedding).to(embedding.device) return [embedding.unsqueeze(1), offset_embedding.unsqueeze(1)] def intersection_between_stacked_embedding(self, stacked_embedding_list): embedding, offset_embedding = torch.chunk(stacked_embedding_list, 2, dim=-1) embedding = self.center_net(embedding) # [32, 6, 16] offset_embedding = self.offset_net(offset_embedding) return [embedding, offset_embedding] def native_cal_logit(self, entity_embedding, entity_feat, query_embedding): assert entity_feat is None query_center_embedding, query_offset_embedding = query_embedding delta = (entity_embedding.unsqueeze(1) - query_center_embedding).abs() distance_out = F.relu(delta - query_offset_embedding) distance_in = torch.min(delta, query_offset_embedding) logit = self.gamma - torch.norm(distance_out, p=1, dim=-1) - self.cen * torch.norm(distance_in, p=1, dim=-1) logit = torch.max(logit, dim=1)[0] return logit def 
custom_cal_logit(self, entity_embedding, entity_feat, query_embedding): assert entity_feat is None query_center_embedding, query_offset_embedding = query_embedding d1 = box_dist_out(entity_embedding, query_center_embedding, query_offset_embedding) d2 = box_dist_in(entity_embedding, query_center_embedding, query_offset_embedding) logit = self.gamma - d1 - self.cen * d2 logit = torch.max(logit, dim=1)[0] return logit
en
0.818863
# Copyright 2021 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # (num_conj, dim) # (num_conj, dim) # hyperparameter that balances the in-box distance and the out-box distance Retrieve the entity embeddings given the entity indices Params: entity_ids: a list of entities indices # [32, 6, 16]
1.800499
2
SetShowAnswer.py
Colin-Fredericks/hx-util
0
6624585
<filename>SetShowAnswer.py<gh_stars>0 # import XML libraries import xml.etree.ElementTree as ET import sys import os import argparse instructions = """ To use: python3 SetShowAnswer.py show_answer_value path/to/problem/folder show_answer_value can be one of the usual edX set: Always Answered Attempted Closed Finished CorrectOrPastDue PastDue Never It can also be delete or default, in which case all show_answer values are removed and the course-wide default takes over. Options: -h Print help message and exit. Last update: March 15th 2018 """ # Here are all the options for show_answer values: allAnswerValues = [ 'always', 'answered', 'attempted', 'closed', 'finished', 'correctorpastdue', 'pastdue', 'never' ] parser = argparse.ArgumentParser(usage=instructions, add_help=False) parser.add_argument('-h', '--help', action='store_true') parser.add_argument('answerSetting', default='finished') parser.add_argument('directory', default='.') args = parser.parse_args() if args.help: sys.exit(instructions) answerSetting = args.answerSetting.lower() if not os.path.exists(args.directory): sys.exit('Directory not found: ' + args.directory) numfiles = 0 # Walk through the problems folder for dirpath, dirnames, filenames in os.walk(args.directory): for eachfile in filenames: # Get the XML for each file tree = ET.parse(os.path.join(dirpath, eachfile)) root = tree.getroot() # If this isn't a problem file, skip it. if root.tag != 'problem': continue # Set the showanswer value if answerSetting in allAnswerValues: root.set('showanswer', answerSetting) elif answerSetting == 'default' or answerSetting == 'delete': try: del root.attrib['showanswer'] except: pass else: sys.exit('Invalid showanswer setting.') # Save the file tree.write(os.path.join(dirpath, eachfile), encoding='UTF-8', xml_declaration=False) numfiles += 1 if numfiles == 0: print('No files found - wrong or empty directory?') else: print('Show Answer options set for ' + str(numfiles) + ' files.')
<filename>SetShowAnswer.py<gh_stars>0 # import XML libraries import xml.etree.ElementTree as ET import sys import os import argparse instructions = """ To use: python3 SetShowAnswer.py show_answer_value path/to/problem/folder show_answer_value can be one of the usual edX set: Always Answered Attempted Closed Finished CorrectOrPastDue PastDue Never It can also be delete or default, in which case all show_answer values are removed and the course-wide default takes over. Options: -h Print help message and exit. Last update: March 15th 2018 """ # Here are all the options for show_answer values: allAnswerValues = [ 'always', 'answered', 'attempted', 'closed', 'finished', 'correctorpastdue', 'pastdue', 'never' ] parser = argparse.ArgumentParser(usage=instructions, add_help=False) parser.add_argument('-h', '--help', action='store_true') parser.add_argument('answerSetting', default='finished') parser.add_argument('directory', default='.') args = parser.parse_args() if args.help: sys.exit(instructions) answerSetting = args.answerSetting.lower() if not os.path.exists(args.directory): sys.exit('Directory not found: ' + args.directory) numfiles = 0 # Walk through the problems folder for dirpath, dirnames, filenames in os.walk(args.directory): for eachfile in filenames: # Get the XML for each file tree = ET.parse(os.path.join(dirpath, eachfile)) root = tree.getroot() # If this isn't a problem file, skip it. if root.tag != 'problem': continue # Set the showanswer value if answerSetting in allAnswerValues: root.set('showanswer', answerSetting) elif answerSetting == 'default' or answerSetting == 'delete': try: del root.attrib['showanswer'] except: pass else: sys.exit('Invalid showanswer setting.') # Save the file tree.write(os.path.join(dirpath, eachfile), encoding='UTF-8', xml_declaration=False) numfiles += 1 if numfiles == 0: print('No files found - wrong or empty directory?') else: print('Show Answer options set for ' + str(numfiles) + ' files.')
en
0.808292
# import XML libraries To use: python3 SetShowAnswer.py show_answer_value path/to/problem/folder show_answer_value can be one of the usual edX set: Always Answered Attempted Closed Finished CorrectOrPastDue PastDue Never It can also be delete or default, in which case all show_answer values are removed and the course-wide default takes over. Options: -h Print help message and exit. Last update: March 15th 2018 # Here are all the options for show_answer values: # Walk through the problems folder # Get the XML for each file # If this isn't a problem file, skip it. # Set the showanswer value # Save the file
3.249408
3
py/trash/008-6_lgb0.py
KazukiOnodera/Microsoft-Malware-Prediction
24
6624586
<reponame>KazukiOnodera/Microsoft-Malware-Prediction #!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Wed Feb 20 09:29:01 2019 @author: Kazuki """ import numpy as np import pandas as pd import os, gc from glob import glob from tqdm import tqdm import sys sys.path.append(f'/home/{os.environ.get("USER")}/PythonLibrary') import lgbextension as ex import lightgbm as lgb from multiprocessing import cpu_count from sklearn.metrics import roc_auc_score import utils , utils_cat utils.start(__file__) #============================================================================== SEED = np.random.randint(9999) print('SEED:', SEED) DROP = [ ] NFOLD = 5 LOOP = 1 param = { 'objective': 'binary', 'metric': 'auc', 'learning_rate': 0.05, 'max_depth': -1, 'num_leaves': 2**6 -1, 'max_bin': 127, 'min_child_weight': 10, 'min_data_in_leaf': 150, 'reg_lambda': 0.5, # L2 regularization term on weights. 'reg_alpha': 0.5, # L1 regularization term on weights. 'colsample_bytree': 0.9, 'subsample': 0.7, # 'nthread': 32, 'nthread': cpu_count(), 'bagging_freq': 1, 'verbose':-1, } NROUND = 500 ESR = 50 VERBOSE_EVAL = 25 feature_size = 30 file_tr = '../data/f008/train_f008_0.f' file_te = '../data/f008/test_f008_0.f' outpath_tr = '../data/train_f008_0.f' outpath_te = '../data/test_f008_0.f' # ============================================================================= # load # ============================================================================= X_train = pd.read_feather(file_tr).sample(frac=0.5, random_state=SEED) y_train = utils.load_target().sample(frac=0.5, random_state=SEED)['HasDetections'] if len(DROP)>0: X_train.drop(DROP, axis=1, inplace=True) if X_train.columns.duplicated().sum()>0: raise Exception(f'duplicated!: { X_train.columns[X_train.columns.duplicated()] }') print('no dup :) ') print(f'X_train.shape {X_train.shape}') #print(f'X_valid.shape {X_valid.shape}') gc.collect() CAT = list( set(X_train.columns)&set(utils_cat.ALL)) print(f'CAT: {CAT}') # 
============================================================================= # hold out # ============================================================================= dtrain = lgb.Dataset(X_train, y_train.values, categorical_feature=CAT, free_raw_data=False) #dvalid = lgb.Dataset(X_valid, y_valid.values, # categorical_feature=CAT, # free_raw_data=False) gc.collect() model = lgb.train(params=param, train_set=dtrain, num_boost_round=NROUND, # valid_sets=[dtrain, dvalid], # valid_names=['train','valid'], # feval=ex.eval_auc, categorical_feature=CAT, # early_stopping_rounds=ESR, verbose_eval=VERBOSE_EVAL) imp = ex.getImp(model) imp['split'] /= imp['split'].max() imp['gain'] /= imp['gain'].max() imp['total'] = imp['split'] + imp['gain'] imp.sort_values('total', ascending=False, inplace=True) imp.reset_index(drop=True, inplace=True) imp.to_csv(f'LOG/imp_{__file__}.csv', index=False) # ============================================================================= # # ============================================================================= #imp = pd.read_csv('LOG/imp_005-1_agg_each_lgb_0.py.csv') COL = imp.head(feature_size).feature.tolist() X_train = pd.read_feather(file_tr)[COL] X_train.to_feather(outpath_tr) del X_train; gc.collect() X_test = pd.read_feather(file_te)[COL] X_test.to_feather(outpath_te) #============================================================================== utils.end(__file__) #utils.stop_instance()
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Wed Feb 20 09:29:01 2019 @author: Kazuki """ import numpy as np import pandas as pd import os, gc from glob import glob from tqdm import tqdm import sys sys.path.append(f'/home/{os.environ.get("USER")}/PythonLibrary') import lgbextension as ex import lightgbm as lgb from multiprocessing import cpu_count from sklearn.metrics import roc_auc_score import utils , utils_cat utils.start(__file__) #============================================================================== SEED = np.random.randint(9999) print('SEED:', SEED) DROP = [ ] NFOLD = 5 LOOP = 1 param = { 'objective': 'binary', 'metric': 'auc', 'learning_rate': 0.05, 'max_depth': -1, 'num_leaves': 2**6 -1, 'max_bin': 127, 'min_child_weight': 10, 'min_data_in_leaf': 150, 'reg_lambda': 0.5, # L2 regularization term on weights. 'reg_alpha': 0.5, # L1 regularization term on weights. 'colsample_bytree': 0.9, 'subsample': 0.7, # 'nthread': 32, 'nthread': cpu_count(), 'bagging_freq': 1, 'verbose':-1, } NROUND = 500 ESR = 50 VERBOSE_EVAL = 25 feature_size = 30 file_tr = '../data/f008/train_f008_0.f' file_te = '../data/f008/test_f008_0.f' outpath_tr = '../data/train_f008_0.f' outpath_te = '../data/test_f008_0.f' # ============================================================================= # load # ============================================================================= X_train = pd.read_feather(file_tr).sample(frac=0.5, random_state=SEED) y_train = utils.load_target().sample(frac=0.5, random_state=SEED)['HasDetections'] if len(DROP)>0: X_train.drop(DROP, axis=1, inplace=True) if X_train.columns.duplicated().sum()>0: raise Exception(f'duplicated!: { X_train.columns[X_train.columns.duplicated()] }') print('no dup :) ') print(f'X_train.shape {X_train.shape}') #print(f'X_valid.shape {X_valid.shape}') gc.collect() CAT = list( set(X_train.columns)&set(utils_cat.ALL)) print(f'CAT: {CAT}') # 
============================================================================= # hold out # ============================================================================= dtrain = lgb.Dataset(X_train, y_train.values, categorical_feature=CAT, free_raw_data=False) #dvalid = lgb.Dataset(X_valid, y_valid.values, # categorical_feature=CAT, # free_raw_data=False) gc.collect() model = lgb.train(params=param, train_set=dtrain, num_boost_round=NROUND, # valid_sets=[dtrain, dvalid], # valid_names=['train','valid'], # feval=ex.eval_auc, categorical_feature=CAT, # early_stopping_rounds=ESR, verbose_eval=VERBOSE_EVAL) imp = ex.getImp(model) imp['split'] /= imp['split'].max() imp['gain'] /= imp['gain'].max() imp['total'] = imp['split'] + imp['gain'] imp.sort_values('total', ascending=False, inplace=True) imp.reset_index(drop=True, inplace=True) imp.to_csv(f'LOG/imp_{__file__}.csv', index=False) # ============================================================================= # # ============================================================================= #imp = pd.read_csv('LOG/imp_005-1_agg_each_lgb_0.py.csv') COL = imp.head(feature_size).feature.tolist() X_train = pd.read_feather(file_tr)[COL] X_train.to_feather(outpath_tr) del X_train; gc.collect() X_test = pd.read_feather(file_te)[COL] X_test.to_feather(outpath_te) #============================================================================== utils.end(__file__) #utils.stop_instance()
en
0.394219
#!/usr/bin/env python3 # -*- coding: utf-8 -*- Created on Wed Feb 20 09:29:01 2019 @author: Kazuki #============================================================================== # L2 regularization term on weights. # L1 regularization term on weights. # 'nthread': 32, # ============================================================================= # load # ============================================================================= #print(f'X_valid.shape {X_valid.shape}') # ============================================================================= # hold out # ============================================================================= #dvalid = lgb.Dataset(X_valid, y_valid.values, # categorical_feature=CAT, # free_raw_data=False) # valid_sets=[dtrain, dvalid], # valid_names=['train','valid'], # feval=ex.eval_auc, # early_stopping_rounds=ESR, # ============================================================================= # # ============================================================================= #imp = pd.read_csv('LOG/imp_005-1_agg_each_lgb_0.py.csv') #============================================================================== #utils.stop_instance()
1.751443
2
SAI/flexsai/p4/backend/output_stage/P4_api_SAI.py
bocon13/stratum-sonic
0
6624587
import os from subprocess import call, Popen, PIPE import re import os import sys import json from datetime import datetime from shutil import copy2 from glob import glob import P4_aux as aux def api_set_lib_paths(lib, template_dir): lib.h_template_path = os.path.join(template_dir, 'sai_lib_template.h') lib.name = lib.name.split('lib')[-1] lib.inc_path = os.path.join(lib.output_path, 'sai_inc') lib.h_path = os.path.join(lib.inc_path, 'sai' + lib.name + '.h') lib.src_path = os.path.join(lib.output_path, 'sai_src') lib.c_path = os.path.join(lib.src_path, 'libsai_'+lib.name +'.c') #---------------------------------------------------------------------------- # header files gen def sai_write_table_id_enum(lib): enum_txt ='' for table in lib.tables: enum_txt += '/** SAI extension table {name} in pipe {pipe}*/\n'.format(name = table.cname,pipe=table.flexPipe) enum_txt += ' SAI_{pipe}_{name},\n\n'.format(name = table.cname.upper(),pipe=table.flexPipe.upper()) return enum_txt def create_header(brief='', type='', flags='', objects='', condition='', params_out=[], params_in=[], params_inout=[], return_='', tabs=0, isvlan=0, default=''): hdr = ' '*tabs + '/**\n' + ' '*tabs + ' * @brief %s\n' % brief if (type or flags or objects or condition or params_out or params_in or return_): hdr += ' '*tabs + ' *\n' if type: hdr += ' '*tabs + ' * @type %s\n' % type if flags: hdr += ' '*tabs + ' * @flags %s\n' % flags if default: hdr += ' '*tabs + ' * @default %s\n' % default if isvlan == 1: hdr += ' '*tabs + ' * @isvlan false\n' if isvlan == 2: hdr += ' '*tabs + ' * @isvlan true\n' if objects: hdr += ' '*tabs + ' * @objects %s\n' % objects if condition: hdr += ' '*tabs + ' * @condition %s\n' % condition for param in params_in: hdr += ' '*tabs + ' * @param[in] %s\n' % param for param in params_out: hdr += ' '*tabs + ' * @param[out] %s\n' % param for param in params_inout: hdr += ' '*tabs + ' * @param[inout] %s\n' % param if return_: hdr += ' '*tabs + ' *\n' + ' '*tabs + ' * @return %s\n' % 
return_ hdr += ' '*tabs + ' */\n' return hdr def sai_create_action_type_enum(table): enum_txt = create_header(brief='Attribute data for #SAI_%s_ENTRY_ATTR_ACTION' % table.cname.upper()) enum_txt+='typedef enum _sai_%s_entry_action_t\n{\n'%table.cname.lower() # enum_txt+=' SAI_%s_ENTRY_ACTION_NO_ACTION,\n\n' % table.cname.upper() for action_name,action_id in zip(table.cactions,table.action_ids): # enum_txt+=' /** upon table entry hit, invoke action %s */\n'%action_name # if action_name != 'NoAction': enum_txt+=' SAI_%s_ENTRY_ACTION_%s,\n\n' % (table.cname.upper(), action_name.upper()) enum_txt+='} sai_%s_entry_action_t;\n\n'%table.cname return enum_txt def get_sai_key(lib, key): sai_key_dict = lib.sai_keys[key] if 'sai_object_type' in sai_key_dict: sai_object_type = sai_key_dict['sai_object_type'] else: sai_object_type = '' return sai_key_dict['sai_name'], sai_key_dict['sai_type'], sai_object_type def sai_write_table_action_enum(lib): enum_txt='' for table in lib.tables: enum_txt+=sai_create_action_type_enum(table) return enum_txt def get_action_def(lib, action_id): for action_def in lib.p4_action_def: if action_def['id'] == action_id: return action_def return None def sai_write_table_attr(lib): enum_txt='' for table in lib.tables: attr_prefix = 'SAI_%s_ENTRY_ATTR' % table.cname.upper() enum_txt += create_header(brief=('Attribute ID for %s' % table.cname)) enum_txt += 'typedef enum _sai_%s_entry_attr_t\n{\n' % (table.cname) enum_txt += create_header(brief='Start of attributes', tabs=1) enum_txt += ' %s_START,\n\n' % attr_prefix enum_txt += create_header(brief = 'Action', type='sai_%s_entry_action_t' % table.cname, flags='MANDATORY_ON_CREATE | CREATE_ONLY', tabs=1) enum_txt += ' %s_ACTION = %s_START,\n\n' % (attr_prefix, attr_prefix) if ('ternary' in table.key_types): enum_txt += create_header(brief = 'Rule priority in table', type='sai_uint32_t', flags='MANDATORY_ON_CREATE | CREATE_ONLY', tabs=1) enum_txt += ' %s_PRIORITY,\n\n' % attr_prefix for key_type, key in 
zip(table.key_types,table.key_fields): sai_key_name, sai_key_type, sai_object_type = get_sai_key(lib, key) isvlan = 0 if sai_key_type == 'sai_uint16_t': isvlan = 1 #TODO check for 2 if key_type == 'exact': enum_txt += create_header(brief = 'Matched key %s' % sai_key_name, type=sai_key_type, objects=sai_object_type, flags='MANDATORY_ON_CREATE | CREATE_ONLY', tabs=1, isvlan=isvlan) enum_txt += ' %s_%s,\n\n' % (attr_prefix, sai_key_name.upper()) if key_type == 'ternary': enum_txt += create_header(brief = 'Matched key %s (key)' % sai_key_name, type=sai_key_type, objects=sai_object_type, flags='MANDATORY_ON_CREATE | CREATE_ONLY', tabs=1, isvlan=isvlan) enum_txt += ' %s_%s_KEY,\n\n' % (attr_prefix, sai_key_name.upper()) enum_txt += create_header(brief = 'Matched key %s (mask)' % sai_key_name, type=sai_key_type, objects=sai_object_type, flags='MANDATORY_ON_CREATE | CREATE_ONLY', tabs=1, isvlan=isvlan) enum_txt += ' %s_%s_MASK,\n\n' % (attr_prefix, sai_key_name.upper()) if ('exact' in table.key_types): enum_txt += create_header(brief = 'Is default entry', type='bool', default='false', flags='CREATE_ONLY', tabs=1) enum_txt += ' %s_IS_DEFAULT,\n\n' % attr_prefix for action_name, action_id in zip(table.cactions, table.action_ids): action_def = get_action_def(lib, action_id) if action_def['primitives']: for primitive in action_def['primitives']: op = primitive['op'] # if op == 'hit_counter': # TODO Counter # enum_txt += create_header(brief='Action %s hit counter' % action_name, flags='CREATE_AND_SET', type='sai_object_id_t', objects='SAI_OBJECT_TYPE_COUNTER', tabs=1) # enum_txt += ' %s_%s_COUNTER,\n\n' % (attr_prefix, action_name.upper()) sai_action = lib.sai_actions[op] if 'sai_params' in sai_action: for sai_param in sai_action['sai_params']: isvlan = 0 # 0 - no tag, 1 - tag false, 2 - tag true if sai_param['type'] == 'sai_uint16_t': isvlan = 1 # TODO: add check if needs vlan true enum_txt += create_header(brief='Action %s parameter %s' % (action_name, sai_param['name']), 
type=sai_param['type'], objects=sai_param['object_type'], condition='%s_ACTION == SAI_%s_ENTRY_ACTION_%s' % (attr_prefix, table.cname.upper(), action_name.upper()), flags='MANDATORY_ON_CREATE | CREATE_ONLY', tabs=1, isvlan=isvlan) enum_txt += ' %s_%s,\n\n' % (attr_prefix, sai_param['name'].upper()) enum_txt += create_header(brief='End of attributes', tabs=1) enum_txt += ' %s_END,\n\n' % attr_prefix enum_txt += ' /** Custom range base value */\n' enum_txt += ' %s_CUSTOM_RANGE_START = 0x10000000,\n\n' % attr_prefix enum_txt += ' /** End of custom range base */\n' enum_txt += ' %s_CUSTOM_RANGE_END,\n\n' % attr_prefix enum_txt += '} sai_%s_entry_attr_t;\n\n' % (table.cname) return enum_txt def sai_write_table_api_fn(lib): enum_txt = '' for table in lib.tables: enum_txt += ' sai_create_%s_entry_fn create_%s_entry;\n' % (table.cname, table.cname) enum_txt += ' sai_remove_%s_entry_fn remove_%s_entry;\n' % (table.cname, table.cname) enum_txt += ' sai_set_%s_entry_attribute_fn set_%s_entry_attribute;\n' % (table.cname, table.cname) enum_txt += ' sai_get_%s_entry_attribute_fn get_%s_entry_attribute;\n' % (table.cname, table.cname) enum_txt += ' sai_get_%s_stats_fn get_%s_stats;\n' % (lib.name, lib.name) enum_txt += ' sai_clear_%s_stats_fn clear_%s_stats;\n' % (lib.name, lib.name) return enum_txt def sai_write_table_fn_def(lib): enum_txt = '' for table in lib.tables: enum_txt += create_header(brief='Create %s_entry' % table.cname, params_out=['entry_id Entry id'], params_in=['switch_id Switch id', 'attr_count Number of attributes', 'attr_list Array of attributes'], return_='#SAI_STATUS_SUCCESS on success Failure status code on error') enum_txt += 'typedef sai_status_t(*sai_create_%s_entry_fn)(\n _Out_ sai_object_id_t *entry_id,\n _In_ sai_object_id_t switch_id,\n _In_ uint32_t attr_count,\n _In_ const sai_attribute_t *attr_list);\n\n' % table.cname enum_txt += create_header(brief='Remove %s_entry' % table.cname, params_in=['entry_id Entry id'], return_='#SAI_STATUS_SUCCESS on 
success Failure status code on error') enum_txt += 'typedef sai_status_t(*sai_remove_%s_entry_fn)(\n _In_ sai_object_id_t entry_id);\n\n' % table.cname enum_txt += create_header(brief='Set attribute for %s_entry' % table.cname, params_in=['entry_id Entry id', 'attr Attribute'], return_='#SAI_STATUS_SUCCESS on success Failure status code on error') enum_txt += 'typedef sai_status_t(*sai_set_%s_entry_attribute_fn)(\n _In_ sai_object_id_t entry_id,\n _In_ const sai_attribute_t *attr);\n\n' % table.cname enum_txt += create_header(brief='Get attribute for %s_entry' % table.cname, params_inout=['attr_list Array of attributes'], params_in=['entry_id Entry id', 'attr_count Number of attributes'], return_='#SAI_STATUS_SUCCESS on success Failure status code on error') enum_txt += 'typedef sai_status_t(*sai_get_%s_entry_attribute_fn)(\n _In_ sai_object_id_t entry_id,\n _In_ uint32_t attr_count,\n _Inout_ sai_attribute_t *attr_list);\n\n' % table.cname # Stats enum_txt += create_header(brief='Get statistics counters.', params_out=['counters Array of resulting counter values.'], params_in=['entry_id Entry id', 'number_of_counters Number of counters in the array', 'counter_ids Specifies the array of counter ids'], return_='#SAI_STATUS_SUCCESS on success Failure status code on error') enum_txt += 'typedef sai_status_t(*sai_get_%s_stats_fn)(\n _In_ sai_object_id_t entry_id,\n _In_ uint32_t number_of_counters,\n _In_ const sai_%s_stat_t *counter_ids,\n _Out_ uint64_t *counters);\n\n' % (lib.name, lib.name) enum_txt += create_header(brief='Clear statistics counters.', params_in=['entry_id Entry id', 'number_of_counters Number of counters in the array', 'counter_ids Specifies the array of counter ids'], return_='#SAI_STATUS_SUCCESS on success Failure status code on error') enum_txt += 'typedef sai_status_t(*sai_clear_%s_stats_fn)(\n _In_ sai_object_id_t entry_id,\n _In_ uint32_t number_of_counters,\n _In_ const sai_%s_stat_t *counter_ids);\n\n' % (lib.name, lib.name) return enum_txt 
def sai_write_object_type(lib, obj_num): c_code = '' for table in lib.tables: c_code += ' SAI_OBJECT_TYPE_%s_ENTRY = %d,\n' % (table.cname.upper(), obj_num) obj_num += 1 c_code += ' SAI_OBJECT_TYPE_MAX = %d,\n' % obj_num return c_code def sai_write_api_initialize(lib): if_list = '' for pipe in lib.flexPipes: if any(table.flexPipe == pipe for table in lib.tables): # check if pipe is empty if_list += 'sai_object_list_t %s_if_list, ' % pipe if_list = if_list[:-2] c_code = 'sai_status_t sai_ext_api_initialize(%s);\n' % if_list c_code += 'sai_status_t sai_ext_api_uninitialize(%s);\n' % if_list return c_code def sai_write_stats_def(lib): c_code = create_header(brief = 'Counter IDs in sai_get_%s_stats() call' % (lib.name)) c_code += 'typedef enum _sai_%s_stat_t\n{\n' % lib.name for table in lib.tables: c_code += ' SAI_%s_STAT_%s_HIT_PACKETS,\n' % (lib.name.upper(), table.cname.upper()) c_code += ' SAI_%s_STAT_%s_HIT_OCTETS,\n' % (lib.name.upper(), table.cname.upper()) c_code+='} sai_%s_stat_t;\n' % lib.name return c_code def sai_create_lib_headers(lib, template_dir): # sai|filename|.h with open(lib.h_template_path,'r') as t, open (lib.h_path,'w') as o: lines = t.readlines() for line in lines: line = line.replace('|FILENAME|',lib.name.upper()) line = line.replace('|filename|',lib.name.lower()) if '__PER_TABLE_ACTION_ENUM__' in line: line = sai_write_table_action_enum(lib) elif '__PER_TABLE_ATTR_ENUM__' in line: line = sai_write_table_attr(lib) elif '__STATS_DEF__' in line: line = sai_write_stats_def(lib) elif '__PER_TABLE_FN_DEF__' in line: line = sai_write_table_fn_def(lib) elif '__PER_TABLE_API_FN__' in line: line = sai_write_table_api_fn(lib) elif '__EXT_API_INITIALIZE__' in line: line = sai_write_api_initialize(lib) o.write(line) # sai.h with open(os.path.join(template_dir,'sai.h'),'r') as t, open (os.path.join(lib.inc_path, 'sai.h'),'w') as o: lines = t.readlines() for line in lines: line = line.replace('|FILENAME|',lib.name.upper()) line = 
line.replace('|filename|',lib.name.lower()) o.write(line) #saitypes.h with open(os.path.join(template_dir,'saitypes.h'),'r') as t, open (os.path.join(lib.inc_path, 'saitypes.h'),'w') as o: lines = t.readlines() for line in lines: if '___SAI_OBJECT_TYPE___' in line: obj_num = int(line.split('___')[-1]) line = sai_write_object_type(lib, obj_num) o.write(line) print('created lib header file') def sai_create_header(template_path, output_path, lib_name): with open(template_path,'r') as t, open (output_path,'w') as o: lines = t.readlines() for line in lines: line = line.replace('|FILENAME|',lib_name.upper()) line = line.replace('|filename|',lib_name.lower()) o.write(line) print('created ext header file') def sai_write_object_type_enum(lib): enum_txt = '' for table in lib.tables: enum_txt += ' SAI_OJECT_TYPE_%s_ENTRY,\n' % table.cname.upper() return enum_txt def sai_write_lib_api(lib): enum_txt = '' for table in lib.tables: enum_txt += ' mlnx_create_%s_entry,\n' % table.cname.lower() enum_txt += ' mlnx_remove_%s_entry,\n' % table.cname.lower() enum_txt += ' mlnx_set_%s_entry_attribute,\n' % table.cname.lower() enum_txt += ' mlnx_get_%s_entry_attribute,\n' % table.cname.lower() return enum_txt def sai_write_create_destroy_pipes(lib, cmd): enum_txt = '' for pipe in lib.flexPipes: if any(table.flexPipe == pipe for table in lib.tables): # check if pipe is empty enum_txt += ' rc = fx_pipe_%s(fx_handle, FX_%s, (void *)port_list, num_of_ports);\n if (rc) {\n printf("Error - rc:%%d\\n", rc);\n return rc;\n }\n' % (cmd, pipe.upper()) return enum_txt def add_attribute(table_name, attribute_name, attribute_type, attr_key, attr_mask = ''): c_code = ' %s %s_%s;\n' % (attribute_type, table_name, attribute_name) if attr_mask != '': c_code += ' %s %s_%s_mask;\n' % (attribute_type, table_name, attribute_name) c_code += ' if (SAI_STATUS_SUCCESS ==\n' c_code += ' (sai_status =\n' c_code += ' find_attrib_in_list(attr_count, attr_list, SAI_TABLE_%s_ENTRY_ATTR_%s, &attr, &attr_idx)))\n' % 
(table_name.upper(), attribute_name.upper()) c_code += ' {\n' if attr_key == 'attr->oid': c_code += ("abvd" "asdf" ) else: c_code += ' %s_%s = %s;\n' % (table_name, attribute_name, attr_key) if attr_mask != '': c_code += ' %s_%s_mask = %s;\n' % (attribute_type, table_name, attribute_name, attr_mask) c_code += ' }\n' c_code += ' else\n' c_code += ' {\n' c_code += ' MLNX_SAI_LOG_ERR(\"Did not recieve mandatory %s attribute\\n\");\n' % attribute_name c_code += ' return SAI_STATUS_INVALID_PARAMETER;\n' c_code += ' }\n' return c_code def get_attr_exact(sai_key_type, sai_key_sdk_type): if sai_key_type == 'sai_object_id_t': attr_type = sai_key_sdk_type attr_key = 'attr->oid' if sai_key_type == 'sai_ip_address_t': attr_type = 'uint32_t' attr_key = 'ntohl((uint32_t) attr->ipaddr.addr.ip4);' if sai_key_type == 'sai_uint16_t': attr_type = 'uint16_t' attr_key = 'attr->u16' if sai_key_type == 'sai_uint32_t': attr_type = 'uint32_t' attr_key = 'attr->u32' return attr_type, attr_key def get_attr_ternary(sai_key_type, sai_key_sdk_type): if sai_key_type == 'sai_object_id_t': attr_type = sai_key_sdk_type attr_key = 'attr->oid' if sai_key_type == 'sai_ip_address_t': attr_type = 'uint32_t' attr_key = 'ntohl((uint32_t) attr->ipaddr.addr.ip4);' if sai_key_type == 'sai_uint16_t': attr_type = 'uint16_t' attr_key = 'attr->u16' if sai_key_type == 'sai_uint32_t': attr_type = 'uint32_t' attr_key = 'attr->u32' return attr_type, attr_key, attr_mask def sai_get_attribute_values(lib, table): c_code = '' name = table.cname.split('table_')[-1] c_code += add_attribute(name, 'action', 'flextrum_action_id_t', 'attr->s32') if ('ternary' in table.key_types): # need offset attribute c_code += add_attribute(name, 'priority', 'uint32_t', 'attr->u32') for key_type, key in zip(table.key_types,table.key_fields): sai_key_name, sai_key_type, sai_key_sdk_type, sai_object_type = get_sai_key(lib, key) if key_type == 'exact': attr_type, attr_key = get_attr_exact(sai_key_type, sai_key_sdk_type) c_code += 
add_attribute(name, sai_key_name, attr_type, attr_key, '') # if key_type == 'ternary': # c_code += add_attribute(name, sai_key_name, get_attr_value_str(sai_key_type), key_type) return c_code def create_outputs(lib): # TODO - take paths in \ out of lib for all paths print('\n==================================\nCreating SAI extention header file\n==================================') template_dir = os.path.join(lib.backend_path,'output_stage','SAI_templates') template_path = os.path.join(template_dir, 'sai_template.h') api_set_lib_paths(lib, template_dir) sai_create_lib_headers(lib, template_dir) sys.exit(0)
import os from subprocess import call, Popen, PIPE import re import os import sys import json from datetime import datetime from shutil import copy2 from glob import glob import P4_aux as aux def api_set_lib_paths(lib, template_dir): lib.h_template_path = os.path.join(template_dir, 'sai_lib_template.h') lib.name = lib.name.split('lib')[-1] lib.inc_path = os.path.join(lib.output_path, 'sai_inc') lib.h_path = os.path.join(lib.inc_path, 'sai' + lib.name + '.h') lib.src_path = os.path.join(lib.output_path, 'sai_src') lib.c_path = os.path.join(lib.src_path, 'libsai_'+lib.name +'.c') #---------------------------------------------------------------------------- # header files gen def sai_write_table_id_enum(lib): enum_txt ='' for table in lib.tables: enum_txt += '/** SAI extension table {name} in pipe {pipe}*/\n'.format(name = table.cname,pipe=table.flexPipe) enum_txt += ' SAI_{pipe}_{name},\n\n'.format(name = table.cname.upper(),pipe=table.flexPipe.upper()) return enum_txt def create_header(brief='', type='', flags='', objects='', condition='', params_out=[], params_in=[], params_inout=[], return_='', tabs=0, isvlan=0, default=''): hdr = ' '*tabs + '/**\n' + ' '*tabs + ' * @brief %s\n' % brief if (type or flags or objects or condition or params_out or params_in or return_): hdr += ' '*tabs + ' *\n' if type: hdr += ' '*tabs + ' * @type %s\n' % type if flags: hdr += ' '*tabs + ' * @flags %s\n' % flags if default: hdr += ' '*tabs + ' * @default %s\n' % default if isvlan == 1: hdr += ' '*tabs + ' * @isvlan false\n' if isvlan == 2: hdr += ' '*tabs + ' * @isvlan true\n' if objects: hdr += ' '*tabs + ' * @objects %s\n' % objects if condition: hdr += ' '*tabs + ' * @condition %s\n' % condition for param in params_in: hdr += ' '*tabs + ' * @param[in] %s\n' % param for param in params_out: hdr += ' '*tabs + ' * @param[out] %s\n' % param for param in params_inout: hdr += ' '*tabs + ' * @param[inout] %s\n' % param if return_: hdr += ' '*tabs + ' *\n' + ' '*tabs + ' * @return %s\n' % 
return_ hdr += ' '*tabs + ' */\n' return hdr def sai_create_action_type_enum(table): enum_txt = create_header(brief='Attribute data for #SAI_%s_ENTRY_ATTR_ACTION' % table.cname.upper()) enum_txt+='typedef enum _sai_%s_entry_action_t\n{\n'%table.cname.lower() # enum_txt+=' SAI_%s_ENTRY_ACTION_NO_ACTION,\n\n' % table.cname.upper() for action_name,action_id in zip(table.cactions,table.action_ids): # enum_txt+=' /** upon table entry hit, invoke action %s */\n'%action_name # if action_name != 'NoAction': enum_txt+=' SAI_%s_ENTRY_ACTION_%s,\n\n' % (table.cname.upper(), action_name.upper()) enum_txt+='} sai_%s_entry_action_t;\n\n'%table.cname return enum_txt def get_sai_key(lib, key): sai_key_dict = lib.sai_keys[key] if 'sai_object_type' in sai_key_dict: sai_object_type = sai_key_dict['sai_object_type'] else: sai_object_type = '' return sai_key_dict['sai_name'], sai_key_dict['sai_type'], sai_object_type def sai_write_table_action_enum(lib): enum_txt='' for table in lib.tables: enum_txt+=sai_create_action_type_enum(table) return enum_txt def get_action_def(lib, action_id): for action_def in lib.p4_action_def: if action_def['id'] == action_id: return action_def return None def sai_write_table_attr(lib): enum_txt='' for table in lib.tables: attr_prefix = 'SAI_%s_ENTRY_ATTR' % table.cname.upper() enum_txt += create_header(brief=('Attribute ID for %s' % table.cname)) enum_txt += 'typedef enum _sai_%s_entry_attr_t\n{\n' % (table.cname) enum_txt += create_header(brief='Start of attributes', tabs=1) enum_txt += ' %s_START,\n\n' % attr_prefix enum_txt += create_header(brief = 'Action', type='sai_%s_entry_action_t' % table.cname, flags='MANDATORY_ON_CREATE | CREATE_ONLY', tabs=1) enum_txt += ' %s_ACTION = %s_START,\n\n' % (attr_prefix, attr_prefix) if ('ternary' in table.key_types): enum_txt += create_header(brief = 'Rule priority in table', type='sai_uint32_t', flags='MANDATORY_ON_CREATE | CREATE_ONLY', tabs=1) enum_txt += ' %s_PRIORITY,\n\n' % attr_prefix for key_type, key in 
zip(table.key_types,table.key_fields): sai_key_name, sai_key_type, sai_object_type = get_sai_key(lib, key) isvlan = 0 if sai_key_type == 'sai_uint16_t': isvlan = 1 #TODO check for 2 if key_type == 'exact': enum_txt += create_header(brief = 'Matched key %s' % sai_key_name, type=sai_key_type, objects=sai_object_type, flags='MANDATORY_ON_CREATE | CREATE_ONLY', tabs=1, isvlan=isvlan) enum_txt += ' %s_%s,\n\n' % (attr_prefix, sai_key_name.upper()) if key_type == 'ternary': enum_txt += create_header(brief = 'Matched key %s (key)' % sai_key_name, type=sai_key_type, objects=sai_object_type, flags='MANDATORY_ON_CREATE | CREATE_ONLY', tabs=1, isvlan=isvlan) enum_txt += ' %s_%s_KEY,\n\n' % (attr_prefix, sai_key_name.upper()) enum_txt += create_header(brief = 'Matched key %s (mask)' % sai_key_name, type=sai_key_type, objects=sai_object_type, flags='MANDATORY_ON_CREATE | CREATE_ONLY', tabs=1, isvlan=isvlan) enum_txt += ' %s_%s_MASK,\n\n' % (attr_prefix, sai_key_name.upper()) if ('exact' in table.key_types): enum_txt += create_header(brief = 'Is default entry', type='bool', default='false', flags='CREATE_ONLY', tabs=1) enum_txt += ' %s_IS_DEFAULT,\n\n' % attr_prefix for action_name, action_id in zip(table.cactions, table.action_ids): action_def = get_action_def(lib, action_id) if action_def['primitives']: for primitive in action_def['primitives']: op = primitive['op'] # if op == 'hit_counter': # TODO Counter # enum_txt += create_header(brief='Action %s hit counter' % action_name, flags='CREATE_AND_SET', type='sai_object_id_t', objects='SAI_OBJECT_TYPE_COUNTER', tabs=1) # enum_txt += ' %s_%s_COUNTER,\n\n' % (attr_prefix, action_name.upper()) sai_action = lib.sai_actions[op] if 'sai_params' in sai_action: for sai_param in sai_action['sai_params']: isvlan = 0 # 0 - no tag, 1 - tag false, 2 - tag true if sai_param['type'] == 'sai_uint16_t': isvlan = 1 # TODO: add check if needs vlan true enum_txt += create_header(brief='Action %s parameter %s' % (action_name, sai_param['name']), 
type=sai_param['type'], objects=sai_param['object_type'], condition='%s_ACTION == SAI_%s_ENTRY_ACTION_%s' % (attr_prefix, table.cname.upper(), action_name.upper()), flags='MANDATORY_ON_CREATE | CREATE_ONLY', tabs=1, isvlan=isvlan) enum_txt += ' %s_%s,\n\n' % (attr_prefix, sai_param['name'].upper()) enum_txt += create_header(brief='End of attributes', tabs=1) enum_txt += ' %s_END,\n\n' % attr_prefix enum_txt += ' /** Custom range base value */\n' enum_txt += ' %s_CUSTOM_RANGE_START = 0x10000000,\n\n' % attr_prefix enum_txt += ' /** End of custom range base */\n' enum_txt += ' %s_CUSTOM_RANGE_END,\n\n' % attr_prefix enum_txt += '} sai_%s_entry_attr_t;\n\n' % (table.cname) return enum_txt def sai_write_table_api_fn(lib): enum_txt = '' for table in lib.tables: enum_txt += ' sai_create_%s_entry_fn create_%s_entry;\n' % (table.cname, table.cname) enum_txt += ' sai_remove_%s_entry_fn remove_%s_entry;\n' % (table.cname, table.cname) enum_txt += ' sai_set_%s_entry_attribute_fn set_%s_entry_attribute;\n' % (table.cname, table.cname) enum_txt += ' sai_get_%s_entry_attribute_fn get_%s_entry_attribute;\n' % (table.cname, table.cname) enum_txt += ' sai_get_%s_stats_fn get_%s_stats;\n' % (lib.name, lib.name) enum_txt += ' sai_clear_%s_stats_fn clear_%s_stats;\n' % (lib.name, lib.name) return enum_txt def sai_write_table_fn_def(lib): enum_txt = '' for table in lib.tables: enum_txt += create_header(brief='Create %s_entry' % table.cname, params_out=['entry_id Entry id'], params_in=['switch_id Switch id', 'attr_count Number of attributes', 'attr_list Array of attributes'], return_='#SAI_STATUS_SUCCESS on success Failure status code on error') enum_txt += 'typedef sai_status_t(*sai_create_%s_entry_fn)(\n _Out_ sai_object_id_t *entry_id,\n _In_ sai_object_id_t switch_id,\n _In_ uint32_t attr_count,\n _In_ const sai_attribute_t *attr_list);\n\n' % table.cname enum_txt += create_header(brief='Remove %s_entry' % table.cname, params_in=['entry_id Entry id'], return_='#SAI_STATUS_SUCCESS on 
success Failure status code on error') enum_txt += 'typedef sai_status_t(*sai_remove_%s_entry_fn)(\n _In_ sai_object_id_t entry_id);\n\n' % table.cname enum_txt += create_header(brief='Set attribute for %s_entry' % table.cname, params_in=['entry_id Entry id', 'attr Attribute'], return_='#SAI_STATUS_SUCCESS on success Failure status code on error') enum_txt += 'typedef sai_status_t(*sai_set_%s_entry_attribute_fn)(\n _In_ sai_object_id_t entry_id,\n _In_ const sai_attribute_t *attr);\n\n' % table.cname enum_txt += create_header(brief='Get attribute for %s_entry' % table.cname, params_inout=['attr_list Array of attributes'], params_in=['entry_id Entry id', 'attr_count Number of attributes'], return_='#SAI_STATUS_SUCCESS on success Failure status code on error') enum_txt += 'typedef sai_status_t(*sai_get_%s_entry_attribute_fn)(\n _In_ sai_object_id_t entry_id,\n _In_ uint32_t attr_count,\n _Inout_ sai_attribute_t *attr_list);\n\n' % table.cname # Stats enum_txt += create_header(brief='Get statistics counters.', params_out=['counters Array of resulting counter values.'], params_in=['entry_id Entry id', 'number_of_counters Number of counters in the array', 'counter_ids Specifies the array of counter ids'], return_='#SAI_STATUS_SUCCESS on success Failure status code on error') enum_txt += 'typedef sai_status_t(*sai_get_%s_stats_fn)(\n _In_ sai_object_id_t entry_id,\n _In_ uint32_t number_of_counters,\n _In_ const sai_%s_stat_t *counter_ids,\n _Out_ uint64_t *counters);\n\n' % (lib.name, lib.name) enum_txt += create_header(brief='Clear statistics counters.', params_in=['entry_id Entry id', 'number_of_counters Number of counters in the array', 'counter_ids Specifies the array of counter ids'], return_='#SAI_STATUS_SUCCESS on success Failure status code on error') enum_txt += 'typedef sai_status_t(*sai_clear_%s_stats_fn)(\n _In_ sai_object_id_t entry_id,\n _In_ uint32_t number_of_counters,\n _In_ const sai_%s_stat_t *counter_ids);\n\n' % (lib.name, lib.name) return enum_txt 
def sai_write_object_type(lib, obj_num): c_code = '' for table in lib.tables: c_code += ' SAI_OBJECT_TYPE_%s_ENTRY = %d,\n' % (table.cname.upper(), obj_num) obj_num += 1 c_code += ' SAI_OBJECT_TYPE_MAX = %d,\n' % obj_num return c_code def sai_write_api_initialize(lib): if_list = '' for pipe in lib.flexPipes: if any(table.flexPipe == pipe for table in lib.tables): # check if pipe is empty if_list += 'sai_object_list_t %s_if_list, ' % pipe if_list = if_list[:-2] c_code = 'sai_status_t sai_ext_api_initialize(%s);\n' % if_list c_code += 'sai_status_t sai_ext_api_uninitialize(%s);\n' % if_list return c_code def sai_write_stats_def(lib): c_code = create_header(brief = 'Counter IDs in sai_get_%s_stats() call' % (lib.name)) c_code += 'typedef enum _sai_%s_stat_t\n{\n' % lib.name for table in lib.tables: c_code += ' SAI_%s_STAT_%s_HIT_PACKETS,\n' % (lib.name.upper(), table.cname.upper()) c_code += ' SAI_%s_STAT_%s_HIT_OCTETS,\n' % (lib.name.upper(), table.cname.upper()) c_code+='} sai_%s_stat_t;\n' % lib.name return c_code def sai_create_lib_headers(lib, template_dir): # sai|filename|.h with open(lib.h_template_path,'r') as t, open (lib.h_path,'w') as o: lines = t.readlines() for line in lines: line = line.replace('|FILENAME|',lib.name.upper()) line = line.replace('|filename|',lib.name.lower()) if '__PER_TABLE_ACTION_ENUM__' in line: line = sai_write_table_action_enum(lib) elif '__PER_TABLE_ATTR_ENUM__' in line: line = sai_write_table_attr(lib) elif '__STATS_DEF__' in line: line = sai_write_stats_def(lib) elif '__PER_TABLE_FN_DEF__' in line: line = sai_write_table_fn_def(lib) elif '__PER_TABLE_API_FN__' in line: line = sai_write_table_api_fn(lib) elif '__EXT_API_INITIALIZE__' in line: line = sai_write_api_initialize(lib) o.write(line) # sai.h with open(os.path.join(template_dir,'sai.h'),'r') as t, open (os.path.join(lib.inc_path, 'sai.h'),'w') as o: lines = t.readlines() for line in lines: line = line.replace('|FILENAME|',lib.name.upper()) line = 
line.replace('|filename|',lib.name.lower()) o.write(line) #saitypes.h with open(os.path.join(template_dir,'saitypes.h'),'r') as t, open (os.path.join(lib.inc_path, 'saitypes.h'),'w') as o: lines = t.readlines() for line in lines: if '___SAI_OBJECT_TYPE___' in line: obj_num = int(line.split('___')[-1]) line = sai_write_object_type(lib, obj_num) o.write(line) print('created lib header file') def sai_create_header(template_path, output_path, lib_name): with open(template_path,'r') as t, open (output_path,'w') as o: lines = t.readlines() for line in lines: line = line.replace('|FILENAME|',lib_name.upper()) line = line.replace('|filename|',lib_name.lower()) o.write(line) print('created ext header file') def sai_write_object_type_enum(lib): enum_txt = '' for table in lib.tables: enum_txt += ' SAI_OJECT_TYPE_%s_ENTRY,\n' % table.cname.upper() return enum_txt def sai_write_lib_api(lib): enum_txt = '' for table in lib.tables: enum_txt += ' mlnx_create_%s_entry,\n' % table.cname.lower() enum_txt += ' mlnx_remove_%s_entry,\n' % table.cname.lower() enum_txt += ' mlnx_set_%s_entry_attribute,\n' % table.cname.lower() enum_txt += ' mlnx_get_%s_entry_attribute,\n' % table.cname.lower() return enum_txt def sai_write_create_destroy_pipes(lib, cmd): enum_txt = '' for pipe in lib.flexPipes: if any(table.flexPipe == pipe for table in lib.tables): # check if pipe is empty enum_txt += ' rc = fx_pipe_%s(fx_handle, FX_%s, (void *)port_list, num_of_ports);\n if (rc) {\n printf("Error - rc:%%d\\n", rc);\n return rc;\n }\n' % (cmd, pipe.upper()) return enum_txt def add_attribute(table_name, attribute_name, attribute_type, attr_key, attr_mask = ''): c_code = ' %s %s_%s;\n' % (attribute_type, table_name, attribute_name) if attr_mask != '': c_code += ' %s %s_%s_mask;\n' % (attribute_type, table_name, attribute_name) c_code += ' if (SAI_STATUS_SUCCESS ==\n' c_code += ' (sai_status =\n' c_code += ' find_attrib_in_list(attr_count, attr_list, SAI_TABLE_%s_ENTRY_ATTR_%s, &attr, &attr_idx)))\n' % 
(table_name.upper(), attribute_name.upper()) c_code += ' {\n' if attr_key == 'attr->oid': c_code += ("abvd" "asdf" ) else: c_code += ' %s_%s = %s;\n' % (table_name, attribute_name, attr_key) if attr_mask != '': c_code += ' %s_%s_mask = %s;\n' % (attribute_type, table_name, attribute_name, attr_mask) c_code += ' }\n' c_code += ' else\n' c_code += ' {\n' c_code += ' MLNX_SAI_LOG_ERR(\"Did not recieve mandatory %s attribute\\n\");\n' % attribute_name c_code += ' return SAI_STATUS_INVALID_PARAMETER;\n' c_code += ' }\n' return c_code def get_attr_exact(sai_key_type, sai_key_sdk_type): if sai_key_type == 'sai_object_id_t': attr_type = sai_key_sdk_type attr_key = 'attr->oid' if sai_key_type == 'sai_ip_address_t': attr_type = 'uint32_t' attr_key = 'ntohl((uint32_t) attr->ipaddr.addr.ip4);' if sai_key_type == 'sai_uint16_t': attr_type = 'uint16_t' attr_key = 'attr->u16' if sai_key_type == 'sai_uint32_t': attr_type = 'uint32_t' attr_key = 'attr->u32' return attr_type, attr_key def get_attr_ternary(sai_key_type, sai_key_sdk_type): if sai_key_type == 'sai_object_id_t': attr_type = sai_key_sdk_type attr_key = 'attr->oid' if sai_key_type == 'sai_ip_address_t': attr_type = 'uint32_t' attr_key = 'ntohl((uint32_t) attr->ipaddr.addr.ip4);' if sai_key_type == 'sai_uint16_t': attr_type = 'uint16_t' attr_key = 'attr->u16' if sai_key_type == 'sai_uint32_t': attr_type = 'uint32_t' attr_key = 'attr->u32' return attr_type, attr_key, attr_mask def sai_get_attribute_values(lib, table): c_code = '' name = table.cname.split('table_')[-1] c_code += add_attribute(name, 'action', 'flextrum_action_id_t', 'attr->s32') if ('ternary' in table.key_types): # need offset attribute c_code += add_attribute(name, 'priority', 'uint32_t', 'attr->u32') for key_type, key in zip(table.key_types,table.key_fields): sai_key_name, sai_key_type, sai_key_sdk_type, sai_object_type = get_sai_key(lib, key) if key_type == 'exact': attr_type, attr_key = get_attr_exact(sai_key_type, sai_key_sdk_type) c_code += 
add_attribute(name, sai_key_name, attr_type, attr_key, '') # if key_type == 'ternary': # c_code += add_attribute(name, sai_key_name, get_attr_value_str(sai_key_type), key_type) return c_code def create_outputs(lib): # TODO - take paths in \ out of lib for all paths print('\n==================================\nCreating SAI extention header file\n==================================') template_dir = os.path.join(lib.backend_path,'output_stage','SAI_templates') template_path = os.path.join(template_dir, 'sai_template.h') api_set_lib_paths(lib, template_dir) sai_create_lib_headers(lib, template_dir) sys.exit(0)
en
0.231617
#---------------------------------------------------------------------------- # header files gen #SAI_%s_ENTRY_ATTR_ACTION' % table.cname.upper()) # enum_txt+=' SAI_%s_ENTRY_ACTION_NO_ACTION,\n\n' % table.cname.upper() # enum_txt+=' /** upon table entry hit, invoke action %s */\n'%action_name # if action_name != 'NoAction': #TODO check for 2 # if op == 'hit_counter': # TODO Counter # enum_txt += create_header(brief='Action %s hit counter' % action_name, flags='CREATE_AND_SET', type='sai_object_id_t', objects='SAI_OBJECT_TYPE_COUNTER', tabs=1) # enum_txt += ' %s_%s_COUNTER,\n\n' % (attr_prefix, action_name.upper()) # 0 - no tag, 1 - tag false, 2 - tag true # TODO: add check if needs vlan true # Stats # check if pipe is empty # sai|filename|.h # sai.h #saitypes.h # check if pipe is empty # need offset attribute # if key_type == 'ternary': # c_code += add_attribute(name, sai_key_name, get_attr_value_str(sai_key_type), key_type) # TODO - take paths in \ out of lib for all paths
2.098838
2
bikeshed/widlparser/widlparser/parser.py
dirkschulze/bikeshed
0
6624588
<filename>bikeshed/widlparser/widlparser/parser.py # coding=utf-8 # # Copyright © 2013 Hewlett-Packard Development Company, L.P. # # This work is distributed under the W3C® Software License [1] # in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # # [1] http://www.w3.org/Consortium/Legal/2002/copyright-software-20021231 # import re import tokenizer from constructs import * class Parser(object): def __init__(self, text = None, ui = None): self.ui = ui self.reset() if (text): self.parse(text) def reset(self): self.constructs = [] @property def complexityFactor(self): complexity = 0 for construct in self.constructs: complexity += construct.complexityFactor return complexity def parse(self, text): tokens = tokenizer.Tokenizer(text, self.ui) while (tokens.hasTokens()): if (Callback.peek(tokens)): self.constructs.append(Callback(tokens)) elif (Interface.peek(tokens)): self.constructs.append(Interface(tokens)) elif (Dictionary.peek(tokens)): self.constructs.append(Dictionary(tokens)) elif (Enum.peek(tokens)): self.constructs.append(Enum(tokens)) elif (Typedef.peek(tokens)): self.constructs.append(Typedef(tokens)) elif (Const.peek(tokens)): # Legacy support (SVG spec) self.constructs.append(Const(tokens)) elif (ImplementsStatement.peek(tokens)): self.constructs.append(ImplementsStatement(tokens)) else: self.constructs.append(SyntaxError(tokens, None)) def __str__(self): return self.__unicode__() def __unicode__(self): return u''.join([unicode(construct) for construct in self.constructs]) def __repr__(self): return '[Parser: ' + ''.join([(repr(construct) + '\n') for construct in self.constructs]) + ']' def __len__(self): return len(self.constructs) def keys(self): return [construct.name for construct in self.constructs] def __getitem__(self, key): if (isinstance(key, basestring)): for construct in self.constructs: if (key == construct.name): return construct return None 
return self.constructs[key] def __nonzero__(self): return True def __iter__(self): return iter(self.constructs) def __contains__(self, key): if (isinstance(key, basestring)): for construct in self.constructs: if (key == construct.name): return True return False return (key in self.constructs) def find(self, name): match = re.match('(.*)\(.*\)(.*)', name) # strip ()'s while (match): name = match.group(1) + match.group(2) match = re.match('(.*)\(.*\)(.*)', name) path = None if ('/' in name): path = name.split('/') elif ('.' in name): path = name.split('.') if (path): constructName = path[0] memberName = path[1] argumentName = path[2] if (2 < len(path)) else memberName for construct in reversed(self.constructs): if (constructName == construct.name): if (1 == len(path)): return construct for member in construct: if (memberName == member.name): if (2 < len(path)): argument = member.findArgument(argumentName) if (argument): return argument else: return member else: if (2 == len(path)): argument = construct.findArgument(argumentName, False) if (argument): return argument return None for construct in reversed(self.constructs): if (name == construct.name): return construct # check inside top level constructs for construct in reversed(self.constructs): member = construct.findMember(name) if (member): return member # check argument names last for construct in reversed(self.constructs): argument = construct.findArgument(name) if (argument): return argument def normalizedMethodName(self, methodText, interfaceName = None): match = re.match(r'(.*)\((.*)\)(.*)', methodText) if (match): tokens = tokenizer.Tokenizer(match.group(2)) if (ArgumentList.peek(tokens)): arguments = ArgumentList(tokens, None) return match.group(1) + '(' + arguments.argumentNames[0] + ')' name = match.group(1) + match.group(3) arguments = match.group(2) else: name = methodText arguments = '' if (interfaceName): interface = self.find(interfaceName) if (interface): method = interface.findMethod(name) if 
(method): return method.methodName return name + '(' + arguments + ')' for construct in self.constructs: method = construct.findMethod(name) if (method): return method.methodName construct = self.find(name) if (construct and ('method' == construct.idlType)): return construct.methodName return name + '(' + arguments + ')' def normalizedMethodNames(self, methodText, interfaceName = None): match = re.match(r'(.*)\((.*)\)(.*)', methodText) if (match): tokens = tokenizer.Tokenizer(match.group(2)) if (ArgumentList.peek(tokens)): arguments = ArgumentList(tokens, None) return [match.group(1) + '(' + argumentName + ')' for argumentName in arguments.argumentNames] name = match.group(1) + match.group(3) arguments = match.group(2) else: name = methodText arguments = '' if (interfaceName): interface = self.find(interfaceName) if (interface): method = interface.findMethod(name) if (method): return method.methodNames return [name + '(' + arguments + ')'] for construct in self.constructs: method = construct.findMethod(name) if (method): return method.methodNames construct = self.find(name) if (construct and ('method' == construct.idlType)): return construct.methodNames return [name + '(' + arguments + ')'] def markup(self, marker): if (marker): generator = MarkupGenerator(None) for construct in self.constructs: construct.markup(generator) return generator.markup(marker) return unicode(self)
<filename>bikeshed/widlparser/widlparser/parser.py # coding=utf-8 # # Copyright © 2013 Hewlett-Packard Development Company, L.P. # # This work is distributed under the W3C® Software License [1] # in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # # [1] http://www.w3.org/Consortium/Legal/2002/copyright-software-20021231 # import re import tokenizer from constructs import * class Parser(object): def __init__(self, text = None, ui = None): self.ui = ui self.reset() if (text): self.parse(text) def reset(self): self.constructs = [] @property def complexityFactor(self): complexity = 0 for construct in self.constructs: complexity += construct.complexityFactor return complexity def parse(self, text): tokens = tokenizer.Tokenizer(text, self.ui) while (tokens.hasTokens()): if (Callback.peek(tokens)): self.constructs.append(Callback(tokens)) elif (Interface.peek(tokens)): self.constructs.append(Interface(tokens)) elif (Dictionary.peek(tokens)): self.constructs.append(Dictionary(tokens)) elif (Enum.peek(tokens)): self.constructs.append(Enum(tokens)) elif (Typedef.peek(tokens)): self.constructs.append(Typedef(tokens)) elif (Const.peek(tokens)): # Legacy support (SVG spec) self.constructs.append(Const(tokens)) elif (ImplementsStatement.peek(tokens)): self.constructs.append(ImplementsStatement(tokens)) else: self.constructs.append(SyntaxError(tokens, None)) def __str__(self): return self.__unicode__() def __unicode__(self): return u''.join([unicode(construct) for construct in self.constructs]) def __repr__(self): return '[Parser: ' + ''.join([(repr(construct) + '\n') for construct in self.constructs]) + ']' def __len__(self): return len(self.constructs) def keys(self): return [construct.name for construct in self.constructs] def __getitem__(self, key): if (isinstance(key, basestring)): for construct in self.constructs: if (key == construct.name): return construct return None 
return self.constructs[key] def __nonzero__(self): return True def __iter__(self): return iter(self.constructs) def __contains__(self, key): if (isinstance(key, basestring)): for construct in self.constructs: if (key == construct.name): return True return False return (key in self.constructs) def find(self, name): match = re.match('(.*)\(.*\)(.*)', name) # strip ()'s while (match): name = match.group(1) + match.group(2) match = re.match('(.*)\(.*\)(.*)', name) path = None if ('/' in name): path = name.split('/') elif ('.' in name): path = name.split('.') if (path): constructName = path[0] memberName = path[1] argumentName = path[2] if (2 < len(path)) else memberName for construct in reversed(self.constructs): if (constructName == construct.name): if (1 == len(path)): return construct for member in construct: if (memberName == member.name): if (2 < len(path)): argument = member.findArgument(argumentName) if (argument): return argument else: return member else: if (2 == len(path)): argument = construct.findArgument(argumentName, False) if (argument): return argument return None for construct in reversed(self.constructs): if (name == construct.name): return construct # check inside top level constructs for construct in reversed(self.constructs): member = construct.findMember(name) if (member): return member # check argument names last for construct in reversed(self.constructs): argument = construct.findArgument(name) if (argument): return argument def normalizedMethodName(self, methodText, interfaceName = None): match = re.match(r'(.*)\((.*)\)(.*)', methodText) if (match): tokens = tokenizer.Tokenizer(match.group(2)) if (ArgumentList.peek(tokens)): arguments = ArgumentList(tokens, None) return match.group(1) + '(' + arguments.argumentNames[0] + ')' name = match.group(1) + match.group(3) arguments = match.group(2) else: name = methodText arguments = '' if (interfaceName): interface = self.find(interfaceName) if (interface): method = interface.findMethod(name) if 
(method): return method.methodName return name + '(' + arguments + ')' for construct in self.constructs: method = construct.findMethod(name) if (method): return method.methodName construct = self.find(name) if (construct and ('method' == construct.idlType)): return construct.methodName return name + '(' + arguments + ')' def normalizedMethodNames(self, methodText, interfaceName = None): match = re.match(r'(.*)\((.*)\)(.*)', methodText) if (match): tokens = tokenizer.Tokenizer(match.group(2)) if (ArgumentList.peek(tokens)): arguments = ArgumentList(tokens, None) return [match.group(1) + '(' + argumentName + ')' for argumentName in arguments.argumentNames] name = match.group(1) + match.group(3) arguments = match.group(2) else: name = methodText arguments = '' if (interfaceName): interface = self.find(interfaceName) if (interface): method = interface.findMethod(name) if (method): return method.methodNames return [name + '(' + arguments + ')'] for construct in self.constructs: method = construct.findMethod(name) if (method): return method.methodNames construct = self.find(name) if (construct and ('method' == construct.idlType)): return construct.methodNames return [name + '(' + arguments + ')'] def markup(self, marker): if (marker): generator = MarkupGenerator(None) for construct in self.constructs: construct.markup(generator) return generator.markup(marker) return unicode(self)
en
0.717169
# coding=utf-8 # # Copyright © 2013 Hewlett-Packard Development Company, L.P. # # This work is distributed under the W3C® Software License [1] # in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # # [1] http://www.w3.org/Consortium/Legal/2002/copyright-software-20021231 # # Legacy support (SVG spec) # strip ()'s # check inside top level constructs # check argument names last
2.408528
2
lab3/main.py
iamNCJ/CV-2020
0
6624589
<gh_stars>0 import cv2 import matplotlib.pyplot as plt import numpy as np from tqdm import tqdm cam = cv2.VideoCapture(0) while True: # Capture frame-by-frame _, img = cam.read() # img = cv2.imread('assets/sample.jpg') # Color to grayscale gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) gray = np.float32(gray) # Spatial derivative calculation Ix = cv2.Sobel(gray, cv2.CV_64F, 1, 0, ksize=5) Iy = cv2.Sobel(gray, cv2.CV_64F, 0, 1, ksize=5) # Structure tensor setup Ixx = Ix ** 2 Ixy = Ix * Iy Iyy = Iy ** 2 kernel = np.ones((3, 3), np.float32) Sum_xx = cv2.filter2D(Ixx, -1, kernel) Sum_xy = cv2.filter2D(Ixy, -1, kernel) Sum_yy = cv2.filter2D(Iyy, -1, kernel) # Harris response calculation k = 0.04 R = Sum_xx * Sum_yy - Sum_xy ** 2 - k * (Sum_xx + Sum_yy) ** 2 dst = R # dst = cv2.cornerHarris(gray, 2, 3, 0.04) # result is dilated for marking the corners, not important dst = cv2.dilate(dst, None) # Threshold for an optimal value, it may vary depending on the image. tmp = img.copy() tmp[dst > 0.01 * dst.max()] = [0, 0, 255] # Display the resulting frame cv2.imshow('frame', tmp) if cv2.waitKey(1) == 32: h, w = gray.shape temp = np.zeros((h, w, 2, 2)) temp[:, :, 0, 0] = Sum_xx temp[:, :, 0, 1] = Sum_xy temp[:, :, 1, 0] = Sum_xy temp[:, :, 1, 1] = Sum_yy eigen, _ = np.linalg.eig(temp) lambda1 = eigen[:, :, 0] lambda2 = eigen[:, :, 1] lambda_max = np.maximum(lambda1, lambda2) lambda_min = np.minimum(lambda1, lambda2) # Non-maximum suppression pos = np.argwhere(R > 0.01 * R.max()) for a, b in tqdm(pos): x0 = max(0, a - 1) x1 = min(h, a + 1) y0 = max(0, b - 1) y1 = min(w, b + 1) if R[a, b] == np.max(R[x0:x1, y0:y1]): cv2.drawMarker(img, (b, a), (0, 0, 255)) fig, subplots = plt.subplots(2, 2) subplots[0, 0].imshow(lambda_max, cmap='hot', interpolation='nearest') subplots[0, 0].set_title(r'$\lambda_{max}$') subplots[0, 0].axis('off') subplots[1, 0].set_title(r'$\lambda_{min}$') subplots[1, 0].imshow(lambda_min, cmap='hot', interpolation='nearest') subplots[1, 0].axis('off') 
subplots[0, 1].imshow(R, cmap='hot', interpolation='nearest') subplots[0, 1].set_title(r"$R$") subplots[0, 1].axis('off') subplots[1, 1].imshow(cv2.cvtColor(img, cv2.COLOR_RGB2BGR)) subplots[1, 1].set_title("Result") subplots[1, 1].axis('off') # redraw the canvas fig.canvas.draw() # convert canvas to image img = np.fromstring(fig.canvas.tostring_rgb(), dtype=np.uint8, sep='') img = img.reshape(fig.canvas.get_width_height()[::-1] + (3,)) # img is rgb, convert to opencv's default bgr img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR) cv2.imshow('frame', img) flag = False while True: char = cv2.waitKey(-1) if char == 32: break elif char == ord('s'): fig.savefig('harris.png') elif char == ord('q'): flag = True break if flag: break # When everything done, release the capture cam.release() cv2.destroyAllWindows()
import cv2 import matplotlib.pyplot as plt import numpy as np from tqdm import tqdm cam = cv2.VideoCapture(0) while True: # Capture frame-by-frame _, img = cam.read() # img = cv2.imread('assets/sample.jpg') # Color to grayscale gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) gray = np.float32(gray) # Spatial derivative calculation Ix = cv2.Sobel(gray, cv2.CV_64F, 1, 0, ksize=5) Iy = cv2.Sobel(gray, cv2.CV_64F, 0, 1, ksize=5) # Structure tensor setup Ixx = Ix ** 2 Ixy = Ix * Iy Iyy = Iy ** 2 kernel = np.ones((3, 3), np.float32) Sum_xx = cv2.filter2D(Ixx, -1, kernel) Sum_xy = cv2.filter2D(Ixy, -1, kernel) Sum_yy = cv2.filter2D(Iyy, -1, kernel) # Harris response calculation k = 0.04 R = Sum_xx * Sum_yy - Sum_xy ** 2 - k * (Sum_xx + Sum_yy) ** 2 dst = R # dst = cv2.cornerHarris(gray, 2, 3, 0.04) # result is dilated for marking the corners, not important dst = cv2.dilate(dst, None) # Threshold for an optimal value, it may vary depending on the image. tmp = img.copy() tmp[dst > 0.01 * dst.max()] = [0, 0, 255] # Display the resulting frame cv2.imshow('frame', tmp) if cv2.waitKey(1) == 32: h, w = gray.shape temp = np.zeros((h, w, 2, 2)) temp[:, :, 0, 0] = Sum_xx temp[:, :, 0, 1] = Sum_xy temp[:, :, 1, 0] = Sum_xy temp[:, :, 1, 1] = Sum_yy eigen, _ = np.linalg.eig(temp) lambda1 = eigen[:, :, 0] lambda2 = eigen[:, :, 1] lambda_max = np.maximum(lambda1, lambda2) lambda_min = np.minimum(lambda1, lambda2) # Non-maximum suppression pos = np.argwhere(R > 0.01 * R.max()) for a, b in tqdm(pos): x0 = max(0, a - 1) x1 = min(h, a + 1) y0 = max(0, b - 1) y1 = min(w, b + 1) if R[a, b] == np.max(R[x0:x1, y0:y1]): cv2.drawMarker(img, (b, a), (0, 0, 255)) fig, subplots = plt.subplots(2, 2) subplots[0, 0].imshow(lambda_max, cmap='hot', interpolation='nearest') subplots[0, 0].set_title(r'$\lambda_{max}$') subplots[0, 0].axis('off') subplots[1, 0].set_title(r'$\lambda_{min}$') subplots[1, 0].imshow(lambda_min, cmap='hot', interpolation='nearest') subplots[1, 0].axis('off') subplots[0, 
1].imshow(R, cmap='hot', interpolation='nearest') subplots[0, 1].set_title(r"$R$") subplots[0, 1].axis('off') subplots[1, 1].imshow(cv2.cvtColor(img, cv2.COLOR_RGB2BGR)) subplots[1, 1].set_title("Result") subplots[1, 1].axis('off') # redraw the canvas fig.canvas.draw() # convert canvas to image img = np.fromstring(fig.canvas.tostring_rgb(), dtype=np.uint8, sep='') img = img.reshape(fig.canvas.get_width_height()[::-1] + (3,)) # img is rgb, convert to opencv's default bgr img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR) cv2.imshow('frame', img) flag = False while True: char = cv2.waitKey(-1) if char == 32: break elif char == ord('s'): fig.savefig('harris.png') elif char == ord('q'): flag = True break if flag: break # When everything done, release the capture cam.release() cv2.destroyAllWindows()
en
0.710342
# Capture frame-by-frame # img = cv2.imread('assets/sample.jpg') # Color to grayscale # Spatial derivative calculation # Structure tensor setup # Harris response calculation # dst = cv2.cornerHarris(gray, 2, 3, 0.04) # result is dilated for marking the corners, not important # Threshold for an optimal value, it may vary depending on the image. # Display the resulting frame # Non-maximum suppression # redraw the canvas # convert canvas to image # img is rgb, convert to opencv's default bgr # When everything done, release the capture
2.760818
3
.travis/manage_daily_builds.py
loonwerks/jkind-plugin
0
6624590
<filename>.travis/manage_daily_builds.py #!/usr/bin/env python3 ''' Created on May 16, 2019 ''' import os import re from github3 import GitHub from pprint import pformat GITHUB_API = 'https://api.github.com/repos' GITHUB_RELEASES = 'releases' AUTH_TOKEN = os.environ['GH_TOKEN'] if 'GH_TOKEN' in os.environ.keys() else None REPOSITORY_OWNER = 'loonwerks' REPOSITORY_REPO = 'jkind-plugin' PRODUCT_ASSET_PATTERN = re.compile(r'com.collins.fmw.ide-\d+\.\d+\.\d+-(\d{12})-.*') def manage_daily_builds(): # obtain git handle gh = GitHub(GITHUB_API, token=AUTH_TOKEN) repository = gh.repository(REPOSITORY_OWNER, REPOSITORY_REPO) # get list of releases releases = repository.releases() # extract keys and sort by build date release_keys = {x.id : x.created_at for x in releases if "Nightly development build" in x.name} sorted_keys = sorted(release_keys.items(), reverse=True, key=lambda x: x[1]) print('%s' % (pformat(sorted_keys))) # filter to obtain the keys to delete delete_keys = [v[0] for v in sorted_keys[2:]] print('Deleting releases: %s' % (pformat(delete_keys))) # iterate, deleting the releases and corresponding tags for rel in releases: print('examining rel %d from %s...' % (rel.id, str(rel.created_at))) if rel.id in delete_keys: print(' deleting release id %d and tag %s.' 
% (rel.id, rel.tag_name)) rel_tag_ref = repository.ref('tags/%s' % (rel.tag_name)) rel.delete() if rel_tag_ref is not None: print(' deleting tag %s' % (rel_tag_ref.ref)) rel_tag_ref.delete() else: # Look for stale files in the release assets = rel.assets() print('In release %s found assets:' % (rel.name)) for asset in assets: match = PRODUCT_ASSET_PATTERN.search(asset.name) print(' asset named %s matches %s' % (asset.name, match.group(1) if match is not None else 'None')) build_times = sorted([PRODUCT_ASSET_PATTERN.search(x.name).group(1) for x in assets if PRODUCT_ASSET_PATTERN.search(x.name)]) latest_build_time = build_times[-1] if build_times else None print('Lastest build time is %s' % (latest_build_time)) for asset in assets: match = PRODUCT_ASSET_PATTERN.search(asset.name) # print(' asset named %s matches %s' % (asset.name, match.group(1) if match is not None else 'None')) if match is not None: asset_build_time = match.group(1) if asset_build_time != latest_build_time: print('deleting stale asset %s' % (asset.name)) asset.delete() if __name__ == '__main__': manage_daily_builds()
<filename>.travis/manage_daily_builds.py #!/usr/bin/env python3 ''' Created on May 16, 2019 ''' import os import re from github3 import GitHub from pprint import pformat GITHUB_API = 'https://api.github.com/repos' GITHUB_RELEASES = 'releases' AUTH_TOKEN = os.environ['GH_TOKEN'] if 'GH_TOKEN' in os.environ.keys() else None REPOSITORY_OWNER = 'loonwerks' REPOSITORY_REPO = 'jkind-plugin' PRODUCT_ASSET_PATTERN = re.compile(r'com.collins.fmw.ide-\d+\.\d+\.\d+-(\d{12})-.*') def manage_daily_builds(): # obtain git handle gh = GitHub(GITHUB_API, token=AUTH_TOKEN) repository = gh.repository(REPOSITORY_OWNER, REPOSITORY_REPO) # get list of releases releases = repository.releases() # extract keys and sort by build date release_keys = {x.id : x.created_at for x in releases if "Nightly development build" in x.name} sorted_keys = sorted(release_keys.items(), reverse=True, key=lambda x: x[1]) print('%s' % (pformat(sorted_keys))) # filter to obtain the keys to delete delete_keys = [v[0] for v in sorted_keys[2:]] print('Deleting releases: %s' % (pformat(delete_keys))) # iterate, deleting the releases and corresponding tags for rel in releases: print('examining rel %d from %s...' % (rel.id, str(rel.created_at))) if rel.id in delete_keys: print(' deleting release id %d and tag %s.' 
% (rel.id, rel.tag_name)) rel_tag_ref = repository.ref('tags/%s' % (rel.tag_name)) rel.delete() if rel_tag_ref is not None: print(' deleting tag %s' % (rel_tag_ref.ref)) rel_tag_ref.delete() else: # Look for stale files in the release assets = rel.assets() print('In release %s found assets:' % (rel.name)) for asset in assets: match = PRODUCT_ASSET_PATTERN.search(asset.name) print(' asset named %s matches %s' % (asset.name, match.group(1) if match is not None else 'None')) build_times = sorted([PRODUCT_ASSET_PATTERN.search(x.name).group(1) for x in assets if PRODUCT_ASSET_PATTERN.search(x.name)]) latest_build_time = build_times[-1] if build_times else None print('Lastest build time is %s' % (latest_build_time)) for asset in assets: match = PRODUCT_ASSET_PATTERN.search(asset.name) # print(' asset named %s matches %s' % (asset.name, match.group(1) if match is not None else 'None')) if match is not None: asset_build_time = match.group(1) if asset_build_time != latest_build_time: print('deleting stale asset %s' % (asset.name)) asset.delete() if __name__ == '__main__': manage_daily_builds()
en
0.85349
#!/usr/bin/env python3 Created on May 16, 2019 # obtain git handle # get list of releases # extract keys and sort by build date # filter to obtain the keys to delete # iterate, deleting the releases and corresponding tags # Look for stale files in the release # print(' asset named %s matches %s' % (asset.name, match.group(1) if match is not None else 'None'))
2.466351
2
src/apply_manual_codes.py
AfricasVoices/Project-ADSS
0
6624591
<reponame>AfricasVoices/Project-ADSS<gh_stars>0 import time from os import path from core_data_modules.cleaners import Codes from core_data_modules.cleaners.cleaning_utils import CleaningUtils from core_data_modules.cleaners.location_tools import SomaliaLocations from core_data_modules.traced_data import Metadata from core_data_modules.traced_data.io import TracedDataCodaV2IO from core_data_modules.util import TimeUtils from src.lib import PipelineConfiguration from src.lib.pipeline_configuration import CodeSchemes class ApplyManualCodes(object): @staticmethod def make_location_code(scheme, clean_value): if clean_value == Codes.NOT_CODED: return scheme.get_code_with_control_code(Codes.NOT_CODED) else: return scheme.get_code_with_match_value(clean_value) @classmethod def _impute_location_codes(cls, user, data): for td in data: # Up to 1 location code should have been assigned in Coda. Search for that code, # ensuring that only 1 has been assigned or, if multiple have been assigned, that they are non-conflicting # control codes location_code = None for plan in PipelineConfiguration.LOCATION_CODING_PLANS: coda_code = plan.code_scheme.get_code_with_id(td[plan.coded_field]["CodeID"]) if location_code is not None: if not (coda_code.code_id == location_code.code_id or coda_code.control_code == Codes.NOT_REVIEWED): location_code = CodeSchemes.MOGADISHU_SUB_DISTRICT.get_code_with_control_code( Codes.CODING_ERROR) elif coda_code.control_code != Codes.NOT_REVIEWED: location_code = coda_code # If no code was found, then this location is still not reviewed. # Synthesise a NOT_REVIEWED code accordingly. if location_code is None: location_code = CodeSchemes.MOGADISHU_SUB_DISTRICT.get_code_with_control_code(Codes.NOT_REVIEWED) # If a control code was found, set all other location keys to that control code, # otherwise convert the provided location to the other locations in the hierarchy. 
if location_code.code_type == "Control": for plan in PipelineConfiguration.LOCATION_CODING_PLANS: td.append_data({ plan.coded_field: CleaningUtils.make_label_from_cleaner_code( plan.code_scheme, plan.code_scheme.get_code_with_control_code(location_code.control_code), Metadata.get_call_location() ).to_dict() }, Metadata(user, Metadata.get_call_location(), time.time())) else: location = location_code.match_values[0] td.append_data({ "mogadishu_sub_district_coded": CleaningUtils.make_label_from_cleaner_code( CodeSchemes.MOGADISHU_SUB_DISTRICT, cls.make_location_code(CodeSchemes.MOGADISHU_SUB_DISTRICT, SomaliaLocations.mogadishu_sub_district_for_location_code(location)), Metadata.get_call_location()).to_dict(), "district_coded": CleaningUtils.make_label_from_cleaner_code( CodeSchemes.SOMALIA_DISTRICT, cls.make_location_code(CodeSchemes.SOMALIA_DISTRICT, SomaliaLocations.district_for_location_code(location)), Metadata.get_call_location()).to_dict(), "region_coded": CleaningUtils.make_label_from_cleaner_code( CodeSchemes.SOMALIA_REGION, cls.make_location_code(CodeSchemes.SOMALIA_REGION, SomaliaLocations.region_for_location_code(location)), Metadata.get_call_location()).to_dict(), "state_coded": CleaningUtils.make_label_from_cleaner_code( CodeSchemes.SOMALIA_STATE, cls.make_location_code(CodeSchemes.SOMALIA_STATE, SomaliaLocations.state_for_location_code(location)), Metadata.get_call_location()).to_dict(), "zone_coded": CleaningUtils.make_label_from_cleaner_code( CodeSchemes.SOMALIA_ZONE, cls.make_location_code(CodeSchemes.SOMALIA_ZONE, SomaliaLocations.zone_for_location_code(location)), Metadata.get_call_location()).to_dict() }, Metadata(user, Metadata.get_call_location(), time.time())) # If the location is not coded, set the zone from the operator if location_code.control_code == Codes.NOT_CODED: operator = CodeSchemes.SOMALIA_OPERATOR.get_code_with_id(td["operator_coded"]["CodeID"]).match_values[0] td.append_data({ "zone_coded": 
CleaningUtils.make_label_from_cleaner_code( CodeSchemes.SOMALIA_ZONE, cls.make_location_code(CodeSchemes.SOMALIA_ZONE, SomaliaLocations.zone_for_operator_code(operator)), Metadata.get_call_location()).to_dict() }, Metadata(user, Metadata.get_call_location(), time.time())) @staticmethod def _impute_coding_error_codes(user, data): for td in data: coding_error_dict = dict() for plan in PipelineConfiguration.RQA_CODING_PLANS: if f"{plan.coded_field}_WS_correct_dataset" in td: if td[f"{plan.coded_field}_WS_correct_dataset"]["CodeID"] == \ CodeSchemes.WS_CORRECT_DATASET.get_code_with_control_code(Codes.CODING_ERROR).code_id: coding_error_dict[plan.coded_field] = [ CleaningUtils.make_label_from_cleaner_code( plan.code_scheme, plan.code_scheme.get_code_with_control_code(Codes.CODING_ERROR), Metadata.get_call_location() ).to_dict() ] if plan.binary_code_scheme is not None: coding_error_dict[plan.binary_coded_field] = \ CleaningUtils.make_label_from_cleaner_code( plan.binary_code_scheme, plan.binary_code_scheme.get_code_with_control_code(Codes.CODING_ERROR), Metadata.get_call_location() ).to_dict() for plan in PipelineConfiguration.SURVEY_CODING_PLANS: if f"{plan.coded_field}_WS_correct_dataset" in td: if td[f"{plan.coded_field}_WS_correct_dataset"]["CodeID"] == \ CodeSchemes.WS_CORRECT_DATASET.get_code_with_control_code(Codes.CODING_ERROR).code_id: coding_error_dict[plan.coded_field] = \ CleaningUtils.make_label_from_cleaner_code( plan.code_scheme, plan.code_scheme.get_code_with_control_code(Codes.CODING_ERROR), Metadata.get_call_location() ).to_dict() td.append_data(coding_error_dict, Metadata(user, Metadata.get_call_location(), TimeUtils.utc_now_as_iso_string())) @classmethod def apply_manual_codes(cls, user, data, coda_input_dir): # Merge manually coded radio show files into the cleaned dataset for plan in PipelineConfiguration.RQA_CODING_PLANS: rqa_messages = [td for td in data if plan.raw_field in td] coda_input_path = path.join(coda_input_dir, plan.coda_filename) f = 
None try: if path.exists(coda_input_path): f = open(coda_input_path, "r") TracedDataCodaV2IO.import_coda_2_to_traced_data_iterable_multi_coded( user, rqa_messages, plan.id_field, {plan.coded_field: plan.code_scheme}, f) if plan.binary_code_scheme is not None: if f is not None: f.seek(0) TracedDataCodaV2IO.import_coda_2_to_traced_data_iterable( user, rqa_messages, plan.id_field, {plan.binary_coded_field: plan.binary_code_scheme}, f) finally: if f is not None: f.close() # At this point, the TracedData objects still contain messages for at most one week each. # Label the weeks for which there is no response as TRUE_MISSING. for td in data: missing_dict = dict() for plan in PipelineConfiguration.RQA_CODING_PLANS: if plan.raw_field not in td: na_label = CleaningUtils.make_label_from_cleaner_code( plan.code_scheme, plan.code_scheme.get_code_with_control_code(Codes.TRUE_MISSING), Metadata.get_call_location() ) missing_dict[plan.coded_field] = [na_label.to_dict()] if plan.binary_code_scheme is not None: na_label = CleaningUtils.make_label_from_cleaner_code( plan.binary_code_scheme, plan.binary_code_scheme.get_code_with_control_code(Codes.TRUE_MISSING), Metadata.get_call_location() ) missing_dict[plan.binary_coded_field] = na_label.to_dict() td.append_data(missing_dict, Metadata(user, Metadata.get_call_location(), time.time())) # Mark data that is noise as Codes.NOT_CODED for td in data: if td["noise"]: nc_dict = dict() for plan in PipelineConfiguration.RQA_CODING_PLANS: if plan.coded_field not in td: nc_label = CleaningUtils.make_label_from_cleaner_code( plan.code_scheme, plan.code_scheme.get_code_with_control_code(Codes.NOT_CODED), Metadata.get_call_location() ) nc_dict[plan.coded_field] = [nc_label.to_dict()] if plan.binary_code_scheme is not None: nc_label = CleaningUtils.make_label_from_cleaner_code( plan.binary_code_scheme, plan.binary_code_scheme.get_code_with_control_code(Codes.NOT_CODED), Metadata.get_call_location() ) nc_dict[plan.binary_coded_field] = 
nc_label.to_dict() td.append_data(nc_dict, Metadata(user, Metadata.get_call_location(), time.time())) # Synchronise the control codes between the binary and reasons schemes: # Some RQA datasets have a binary scheme, which is always labelled, and a reasons scheme, which is only labelled # if there is an additional reason given. Importing those two schemes separately above caused the labels in # each scheme to go out of sync with each other, e.g. reasons can be NR when the binary *was* reviewed. # This block updates the reasons scheme in cases where only a binary label was set, by assigning the # label 'NC' if the binary label was set to a normal code, otherwise to be the same control code as the binary. for plan in PipelineConfiguration.RQA_CODING_PLANS: rqa_messages = [td for td in data if plan.raw_field in td] if plan.binary_code_scheme is not None: for td in rqa_messages: binary_label = td[plan.binary_coded_field] binary_code = plan.binary_code_scheme.get_code_with_id(binary_label["CodeID"]) binary_label_present = binary_label["CodeID"] != \ plan.binary_code_scheme.get_code_with_control_code( Codes.NOT_REVIEWED).code_id reasons_label_present = len(td[plan.coded_field]) > 1 or td[plan.coded_field][0][ "CodeID"] != \ plan.code_scheme.get_code_with_control_code( Codes.NOT_REVIEWED).code_id if binary_label_present and not reasons_label_present: if binary_code.code_type == "Control": control_code = binary_code.control_code reasons_code = plan.code_scheme.get_code_with_control_code(control_code) reasons_label = CleaningUtils.make_label_from_cleaner_code( plan.code_scheme, reasons_code, Metadata.get_call_location(), origin_name="Pipeline Code Synchronisation") td.append_data( {plan.coded_field: [reasons_label.to_dict()]}, Metadata(user, Metadata.get_call_location(), TimeUtils.utc_now_as_iso_string()) ) else: assert binary_code.code_type == "Normal" nc_label = CleaningUtils.make_label_from_cleaner_code( plan.code_scheme, 
plan.code_scheme.get_code_with_control_code(Codes.NOT_CODED), Metadata.get_call_location(), origin_name="Pipeline Code Synchronisation" ) td.append_data( {plan.coded_field: [nc_label.to_dict()]}, Metadata(user, Metadata.get_call_location(), TimeUtils.utc_now_as_iso_string()) ) # Merge manually coded survey files into the cleaned dataset for plan in PipelineConfiguration.SURVEY_CODING_PLANS: f = None try: coda_input_path = path.join(coda_input_dir, plan.coda_filename) if path.exists(coda_input_path): f = open(coda_input_path, "r") TracedDataCodaV2IO.import_coda_2_to_traced_data_iterable( user, data, plan.id_field, {plan.coded_field: plan.code_scheme}, f) finally: if f is not None: f.close() # Not everyone will have answered all of the demographic flows. # Label demographic questions which had no responses as TRUE_MISSING. # Label data which is just the empty string as NOT_CODED. for td in data: missing_dict = dict() for plan in PipelineConfiguration.SURVEY_CODING_PLANS: if plan.raw_field not in td: na_label = CleaningUtils.make_label_from_cleaner_code( plan.code_scheme, plan.code_scheme.get_code_with_control_code(Codes.TRUE_MISSING), Metadata.get_call_location() ) missing_dict[plan.coded_field] = na_label.to_dict() elif td[plan.raw_field] == "": nc_label = CleaningUtils.make_label_from_cleaner_code( plan.code_scheme, plan.code_scheme.get_code_with_control_code(Codes.NOT_CODED), Metadata.get_call_location() ) missing_dict[plan.coded_field] = nc_label.to_dict() td.append_data(missing_dict, Metadata(user, Metadata.get_call_location(), time.time())) # Set district/region/state/zone codes from the coded district field. cls._impute_location_codes(user, data) # Set coding error codes using the coding error field cls._impute_coding_error_codes(user, data) return data
import time from os import path from core_data_modules.cleaners import Codes from core_data_modules.cleaners.cleaning_utils import CleaningUtils from core_data_modules.cleaners.location_tools import SomaliaLocations from core_data_modules.traced_data import Metadata from core_data_modules.traced_data.io import TracedDataCodaV2IO from core_data_modules.util import TimeUtils from src.lib import PipelineConfiguration from src.lib.pipeline_configuration import CodeSchemes class ApplyManualCodes(object): @staticmethod def make_location_code(scheme, clean_value): if clean_value == Codes.NOT_CODED: return scheme.get_code_with_control_code(Codes.NOT_CODED) else: return scheme.get_code_with_match_value(clean_value) @classmethod def _impute_location_codes(cls, user, data): for td in data: # Up to 1 location code should have been assigned in Coda. Search for that code, # ensuring that only 1 has been assigned or, if multiple have been assigned, that they are non-conflicting # control codes location_code = None for plan in PipelineConfiguration.LOCATION_CODING_PLANS: coda_code = plan.code_scheme.get_code_with_id(td[plan.coded_field]["CodeID"]) if location_code is not None: if not (coda_code.code_id == location_code.code_id or coda_code.control_code == Codes.NOT_REVIEWED): location_code = CodeSchemes.MOGADISHU_SUB_DISTRICT.get_code_with_control_code( Codes.CODING_ERROR) elif coda_code.control_code != Codes.NOT_REVIEWED: location_code = coda_code # If no code was found, then this location is still not reviewed. # Synthesise a NOT_REVIEWED code accordingly. if location_code is None: location_code = CodeSchemes.MOGADISHU_SUB_DISTRICT.get_code_with_control_code(Codes.NOT_REVIEWED) # If a control code was found, set all other location keys to that control code, # otherwise convert the provided location to the other locations in the hierarchy. 
if location_code.code_type == "Control": for plan in PipelineConfiguration.LOCATION_CODING_PLANS: td.append_data({ plan.coded_field: CleaningUtils.make_label_from_cleaner_code( plan.code_scheme, plan.code_scheme.get_code_with_control_code(location_code.control_code), Metadata.get_call_location() ).to_dict() }, Metadata(user, Metadata.get_call_location(), time.time())) else: location = location_code.match_values[0] td.append_data({ "mogadishu_sub_district_coded": CleaningUtils.make_label_from_cleaner_code( CodeSchemes.MOGADISHU_SUB_DISTRICT, cls.make_location_code(CodeSchemes.MOGADISHU_SUB_DISTRICT, SomaliaLocations.mogadishu_sub_district_for_location_code(location)), Metadata.get_call_location()).to_dict(), "district_coded": CleaningUtils.make_label_from_cleaner_code( CodeSchemes.SOMALIA_DISTRICT, cls.make_location_code(CodeSchemes.SOMALIA_DISTRICT, SomaliaLocations.district_for_location_code(location)), Metadata.get_call_location()).to_dict(), "region_coded": CleaningUtils.make_label_from_cleaner_code( CodeSchemes.SOMALIA_REGION, cls.make_location_code(CodeSchemes.SOMALIA_REGION, SomaliaLocations.region_for_location_code(location)), Metadata.get_call_location()).to_dict(), "state_coded": CleaningUtils.make_label_from_cleaner_code( CodeSchemes.SOMALIA_STATE, cls.make_location_code(CodeSchemes.SOMALIA_STATE, SomaliaLocations.state_for_location_code(location)), Metadata.get_call_location()).to_dict(), "zone_coded": CleaningUtils.make_label_from_cleaner_code( CodeSchemes.SOMALIA_ZONE, cls.make_location_code(CodeSchemes.SOMALIA_ZONE, SomaliaLocations.zone_for_location_code(location)), Metadata.get_call_location()).to_dict() }, Metadata(user, Metadata.get_call_location(), time.time())) # If the location is not coded, set the zone from the operator if location_code.control_code == Codes.NOT_CODED: operator = CodeSchemes.SOMALIA_OPERATOR.get_code_with_id(td["operator_coded"]["CodeID"]).match_values[0] td.append_data({ "zone_coded": 
CleaningUtils.make_label_from_cleaner_code( CodeSchemes.SOMALIA_ZONE, cls.make_location_code(CodeSchemes.SOMALIA_ZONE, SomaliaLocations.zone_for_operator_code(operator)), Metadata.get_call_location()).to_dict() }, Metadata(user, Metadata.get_call_location(), time.time())) @staticmethod def _impute_coding_error_codes(user, data): for td in data: coding_error_dict = dict() for plan in PipelineConfiguration.RQA_CODING_PLANS: if f"{plan.coded_field}_WS_correct_dataset" in td: if td[f"{plan.coded_field}_WS_correct_dataset"]["CodeID"] == \ CodeSchemes.WS_CORRECT_DATASET.get_code_with_control_code(Codes.CODING_ERROR).code_id: coding_error_dict[plan.coded_field] = [ CleaningUtils.make_label_from_cleaner_code( plan.code_scheme, plan.code_scheme.get_code_with_control_code(Codes.CODING_ERROR), Metadata.get_call_location() ).to_dict() ] if plan.binary_code_scheme is not None: coding_error_dict[plan.binary_coded_field] = \ CleaningUtils.make_label_from_cleaner_code( plan.binary_code_scheme, plan.binary_code_scheme.get_code_with_control_code(Codes.CODING_ERROR), Metadata.get_call_location() ).to_dict() for plan in PipelineConfiguration.SURVEY_CODING_PLANS: if f"{plan.coded_field}_WS_correct_dataset" in td: if td[f"{plan.coded_field}_WS_correct_dataset"]["CodeID"] == \ CodeSchemes.WS_CORRECT_DATASET.get_code_with_control_code(Codes.CODING_ERROR).code_id: coding_error_dict[plan.coded_field] = \ CleaningUtils.make_label_from_cleaner_code( plan.code_scheme, plan.code_scheme.get_code_with_control_code(Codes.CODING_ERROR), Metadata.get_call_location() ).to_dict() td.append_data(coding_error_dict, Metadata(user, Metadata.get_call_location(), TimeUtils.utc_now_as_iso_string())) @classmethod def apply_manual_codes(cls, user, data, coda_input_dir): # Merge manually coded radio show files into the cleaned dataset for plan in PipelineConfiguration.RQA_CODING_PLANS: rqa_messages = [td for td in data if plan.raw_field in td] coda_input_path = path.join(coda_input_dir, plan.coda_filename) f = 
None try: if path.exists(coda_input_path): f = open(coda_input_path, "r") TracedDataCodaV2IO.import_coda_2_to_traced_data_iterable_multi_coded( user, rqa_messages, plan.id_field, {plan.coded_field: plan.code_scheme}, f) if plan.binary_code_scheme is not None: if f is not None: f.seek(0) TracedDataCodaV2IO.import_coda_2_to_traced_data_iterable( user, rqa_messages, plan.id_field, {plan.binary_coded_field: plan.binary_code_scheme}, f) finally: if f is not None: f.close() # At this point, the TracedData objects still contain messages for at most one week each. # Label the weeks for which there is no response as TRUE_MISSING. for td in data: missing_dict = dict() for plan in PipelineConfiguration.RQA_CODING_PLANS: if plan.raw_field not in td: na_label = CleaningUtils.make_label_from_cleaner_code( plan.code_scheme, plan.code_scheme.get_code_with_control_code(Codes.TRUE_MISSING), Metadata.get_call_location() ) missing_dict[plan.coded_field] = [na_label.to_dict()] if plan.binary_code_scheme is not None: na_label = CleaningUtils.make_label_from_cleaner_code( plan.binary_code_scheme, plan.binary_code_scheme.get_code_with_control_code(Codes.TRUE_MISSING), Metadata.get_call_location() ) missing_dict[plan.binary_coded_field] = na_label.to_dict() td.append_data(missing_dict, Metadata(user, Metadata.get_call_location(), time.time())) # Mark data that is noise as Codes.NOT_CODED for td in data: if td["noise"]: nc_dict = dict() for plan in PipelineConfiguration.RQA_CODING_PLANS: if plan.coded_field not in td: nc_label = CleaningUtils.make_label_from_cleaner_code( plan.code_scheme, plan.code_scheme.get_code_with_control_code(Codes.NOT_CODED), Metadata.get_call_location() ) nc_dict[plan.coded_field] = [nc_label.to_dict()] if plan.binary_code_scheme is not None: nc_label = CleaningUtils.make_label_from_cleaner_code( plan.binary_code_scheme, plan.binary_code_scheme.get_code_with_control_code(Codes.NOT_CODED), Metadata.get_call_location() ) nc_dict[plan.binary_coded_field] = 
nc_label.to_dict() td.append_data(nc_dict, Metadata(user, Metadata.get_call_location(), time.time())) # Synchronise the control codes between the binary and reasons schemes: # Some RQA datasets have a binary scheme, which is always labelled, and a reasons scheme, which is only labelled # if there is an additional reason given. Importing those two schemes separately above caused the labels in # each scheme to go out of sync with each other, e.g. reasons can be NR when the binary *was* reviewed. # This block updates the reasons scheme in cases where only a binary label was set, by assigning the # label 'NC' if the binary label was set to a normal code, otherwise to be the same control code as the binary. for plan in PipelineConfiguration.RQA_CODING_PLANS: rqa_messages = [td for td in data if plan.raw_field in td] if plan.binary_code_scheme is not None: for td in rqa_messages: binary_label = td[plan.binary_coded_field] binary_code = plan.binary_code_scheme.get_code_with_id(binary_label["CodeID"]) binary_label_present = binary_label["CodeID"] != \ plan.binary_code_scheme.get_code_with_control_code( Codes.NOT_REVIEWED).code_id reasons_label_present = len(td[plan.coded_field]) > 1 or td[plan.coded_field][0][ "CodeID"] != \ plan.code_scheme.get_code_with_control_code( Codes.NOT_REVIEWED).code_id if binary_label_present and not reasons_label_present: if binary_code.code_type == "Control": control_code = binary_code.control_code reasons_code = plan.code_scheme.get_code_with_control_code(control_code) reasons_label = CleaningUtils.make_label_from_cleaner_code( plan.code_scheme, reasons_code, Metadata.get_call_location(), origin_name="Pipeline Code Synchronisation") td.append_data( {plan.coded_field: [reasons_label.to_dict()]}, Metadata(user, Metadata.get_call_location(), TimeUtils.utc_now_as_iso_string()) ) else: assert binary_code.code_type == "Normal" nc_label = CleaningUtils.make_label_from_cleaner_code( plan.code_scheme, 
plan.code_scheme.get_code_with_control_code(Codes.NOT_CODED), Metadata.get_call_location(), origin_name="Pipeline Code Synchronisation" ) td.append_data( {plan.coded_field: [nc_label.to_dict()]}, Metadata(user, Metadata.get_call_location(), TimeUtils.utc_now_as_iso_string()) ) # Merge manually coded survey files into the cleaned dataset for plan in PipelineConfiguration.SURVEY_CODING_PLANS: f = None try: coda_input_path = path.join(coda_input_dir, plan.coda_filename) if path.exists(coda_input_path): f = open(coda_input_path, "r") TracedDataCodaV2IO.import_coda_2_to_traced_data_iterable( user, data, plan.id_field, {plan.coded_field: plan.code_scheme}, f) finally: if f is not None: f.close() # Not everyone will have answered all of the demographic flows. # Label demographic questions which had no responses as TRUE_MISSING. # Label data which is just the empty string as NOT_CODED. for td in data: missing_dict = dict() for plan in PipelineConfiguration.SURVEY_CODING_PLANS: if plan.raw_field not in td: na_label = CleaningUtils.make_label_from_cleaner_code( plan.code_scheme, plan.code_scheme.get_code_with_control_code(Codes.TRUE_MISSING), Metadata.get_call_location() ) missing_dict[plan.coded_field] = na_label.to_dict() elif td[plan.raw_field] == "": nc_label = CleaningUtils.make_label_from_cleaner_code( plan.code_scheme, plan.code_scheme.get_code_with_control_code(Codes.NOT_CODED), Metadata.get_call_location() ) missing_dict[plan.coded_field] = nc_label.to_dict() td.append_data(missing_dict, Metadata(user, Metadata.get_call_location(), time.time())) # Set district/region/state/zone codes from the coded district field. cls._impute_location_codes(user, data) # Set coding error codes using the coding error field cls._impute_coding_error_codes(user, data) return data
en
0.940262
# Up to 1 location code should have been assigned in Coda. Search for that code, # ensuring that only 1 has been assigned or, if multiple have been assigned, that they are non-conflicting # control codes # If no code was found, then this location is still not reviewed. # Synthesise a NOT_REVIEWED code accordingly. # If a control code was found, set all other location keys to that control code, # otherwise convert the provided location to the other locations in the hierarchy. # If the location is not coded, set the zone from the operator # Merge manually coded radio show files into the cleaned dataset # At this point, the TracedData objects still contain messages for at most one week each. # Label the weeks for which there is no response as TRUE_MISSING. # Mark data that is noise as Codes.NOT_CODED # Synchronise the control codes between the binary and reasons schemes: # Some RQA datasets have a binary scheme, which is always labelled, and a reasons scheme, which is only labelled # if there is an additional reason given. Importing those two schemes separately above caused the labels in # each scheme to go out of sync with each other, e.g. reasons can be NR when the binary *was* reviewed. # This block updates the reasons scheme in cases where only a binary label was set, by assigning the # label 'NC' if the binary label was set to a normal code, otherwise to be the same control code as the binary. # Merge manually coded survey files into the cleaned dataset # Not everyone will have answered all of the demographic flows. # Label demographic questions which had no responses as TRUE_MISSING. # Label data which is just the empty string as NOT_CODED. # Set district/region/state/zone codes from the coded district field. # Set coding error codes using the coding error field
1.989488
2
package/spack-bison/package.py
ctuning/ck-spack
1
6624592
<reponame>ctuning/ck-spack<filename>package/spack-bison/package.py ############################################################################## # Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. # Created by <NAME>, <EMAIL>, All rights reserved. # LLNL-CODE-647188 # # For details, see https://github.com/spack/spack # Please also see the NOTICE and LICENSE files for our notice and the LGPL. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License (as # published by the Free Software Foundation) version 2.1, February 1999. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and # conditions of the GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * from spack.operating_systems.mac_os import macos_version import sys class Bison(AutotoolsPackage): """Bison is a general-purpose parser generator that converts an annotated context-free grammar into a deterministic LR or generalized LR (GLR) parser employing LALR(1) parser tables.""" homepage = "http://www.gnu.org/software/bison/" url = "http://ftp.gnu.org/gnu/bison/bison-3.0.4.tar.gz" version('3.0.4', 'a586e11cd4aff49c3ff6d3b6a4c9ccf8') version('2.7', 'ded660799e76fb1667d594de1f7a0da9') depends_on('m4', type=('build', 'run')) patch('pgi.patch', when='@3.0.4') if sys.platform == 'darwin' and macos_version() >= Version('10.13'): patch('secure_snprintf.patch', level=0, when='@3.0.4') 
build_directory = 'spack-build'
############################################################################## # Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. # Created by <NAME>, <EMAIL>, All rights reserved. # LLNL-CODE-647188 # # For details, see https://github.com/spack/spack # Please also see the NOTICE and LICENSE files for our notice and the LGPL. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License (as # published by the Free Software Foundation) version 2.1, February 1999. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and # conditions of the GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * from spack.operating_systems.mac_os import macos_version import sys class Bison(AutotoolsPackage): """Bison is a general-purpose parser generator that converts an annotated context-free grammar into a deterministic LR or generalized LR (GLR) parser employing LALR(1) parser tables.""" homepage = "http://www.gnu.org/software/bison/" url = "http://ftp.gnu.org/gnu/bison/bison-3.0.4.tar.gz" version('3.0.4', 'a586e11cd4aff49c3ff6d3b6a4c9ccf8') version('2.7', 'ded660799e76fb1667d594de1f7a0da9') depends_on('m4', type=('build', 'run')) patch('pgi.patch', when='@3.0.4') if sys.platform == 'darwin' and macos_version() >= Version('10.13'): patch('secure_snprintf.patch', level=0, when='@3.0.4') build_directory = 'spack-build'
en
0.759017
############################################################################## # Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. # Created by <NAME>, <EMAIL>, All rights reserved. # LLNL-CODE-647188 # # For details, see https://github.com/spack/spack # Please also see the NOTICE and LICENSE files for our notice and the LGPL. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License (as # published by the Free Software Foundation) version 2.1, February 1999. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and # conditions of the GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## Bison is a general-purpose parser generator that converts an annotated context-free grammar into a deterministic LR or generalized LR (GLR) parser employing LALR(1) parser tables.
1.819244
2
tests/examples/minlplib/pooling_foulds3tp.py
ouyang-w-19/decogo
2
6624593
<reponame>ouyang-w-19/decogo # NLP written by GAMS Convert at 04/21/18 13:53:10 # # Equation counts # Total E G L N X C B # 572 521 0 51 0 0 0 0 # # Variable counts # x b i s1s s2s sc si # Total cont binary integer sos1 sos2 scont sint # 673 673 0 0 0 0 0 0 # FX 0 0 0 0 0 0 0 0 # # Nonzero counts # Total const NL DLL # 4171 3147 1024 0 # # Reformulation has removed 1 variable and 1 equation from pyomo.environ import * model = m = ConcreteModel() m.x2 = Var(within=Reals,bounds=(0,1),initialize=0) m.x3 = Var(within=Reals,bounds=(0,1),initialize=0) m.x4 = Var(within=Reals,bounds=(0,1),initialize=0) m.x5 = Var(within=Reals,bounds=(0,1),initialize=0) m.x6 = Var(within=Reals,bounds=(0,1),initialize=0) m.x7 = Var(within=Reals,bounds=(0,1),initialize=0) m.x8 = Var(within=Reals,bounds=(0,1),initialize=0) m.x9 = Var(within=Reals,bounds=(0,1),initialize=0) m.x10 = Var(within=Reals,bounds=(0,1),initialize=0) m.x11 = Var(within=Reals,bounds=(0,1),initialize=0) m.x12 = Var(within=Reals,bounds=(0,1),initialize=0) m.x13 = Var(within=Reals,bounds=(0,1),initialize=0) m.x14 = Var(within=Reals,bounds=(0,1),initialize=0) m.x15 = Var(within=Reals,bounds=(0,1),initialize=0) m.x16 = Var(within=Reals,bounds=(0,1),initialize=0) m.x17 = Var(within=Reals,bounds=(0,1),initialize=0) m.x18 = Var(within=Reals,bounds=(0,1),initialize=0) m.x19 = Var(within=Reals,bounds=(0,1),initialize=0) m.x20 = Var(within=Reals,bounds=(0,1),initialize=0) m.x21 = Var(within=Reals,bounds=(0,1),initialize=0) m.x22 = Var(within=Reals,bounds=(0,1),initialize=0) m.x23 = Var(within=Reals,bounds=(0,1),initialize=0) m.x24 = Var(within=Reals,bounds=(0,1),initialize=0) m.x25 = Var(within=Reals,bounds=(0,1),initialize=0) m.x26 = Var(within=Reals,bounds=(0,1),initialize=0) m.x27 = Var(within=Reals,bounds=(0,1),initialize=0) m.x28 = Var(within=Reals,bounds=(0,1),initialize=0) m.x29 = Var(within=Reals,bounds=(0,1),initialize=0) m.x30 = Var(within=Reals,bounds=(0,1),initialize=0) m.x31 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x32 = Var(within=Reals,bounds=(0,1),initialize=0) m.x33 = Var(within=Reals,bounds=(0,1),initialize=0) m.x34 = Var(within=Reals,bounds=(0,1),initialize=0) m.x35 = Var(within=Reals,bounds=(0,1),initialize=0) m.x36 = Var(within=Reals,bounds=(0,1),initialize=0) m.x37 = Var(within=Reals,bounds=(0,1),initialize=0) m.x38 = Var(within=Reals,bounds=(0,1),initialize=0) m.x39 = Var(within=Reals,bounds=(0,1),initialize=0) m.x40 = Var(within=Reals,bounds=(0,1),initialize=0) m.x41 = Var(within=Reals,bounds=(0,1),initialize=0) m.x42 = Var(within=Reals,bounds=(0,1),initialize=0) m.x43 = Var(within=Reals,bounds=(0,1),initialize=0) m.x44 = Var(within=Reals,bounds=(0,1),initialize=0) m.x45 = Var(within=Reals,bounds=(0,1),initialize=0) m.x46 = Var(within=Reals,bounds=(0,1),initialize=0) m.x47 = Var(within=Reals,bounds=(0,1),initialize=0) m.x48 = Var(within=Reals,bounds=(0,1),initialize=0) m.x49 = Var(within=Reals,bounds=(0,1),initialize=0) m.x50 = Var(within=Reals,bounds=(0,1),initialize=0) m.x51 = Var(within=Reals,bounds=(0,1),initialize=0) m.x52 = Var(within=Reals,bounds=(0,1),initialize=0) m.x53 = Var(within=Reals,bounds=(0,1),initialize=0) m.x54 = Var(within=Reals,bounds=(0,1),initialize=0) m.x55 = Var(within=Reals,bounds=(0,1),initialize=0) m.x56 = Var(within=Reals,bounds=(0,1),initialize=0) m.x57 = Var(within=Reals,bounds=(0,1),initialize=0) m.x58 = Var(within=Reals,bounds=(0,1),initialize=0) m.x59 = Var(within=Reals,bounds=(0,1),initialize=0) m.x60 = Var(within=Reals,bounds=(0,1),initialize=0) m.x61 = Var(within=Reals,bounds=(0,1),initialize=0) m.x62 = Var(within=Reals,bounds=(0,1),initialize=0) m.x63 = Var(within=Reals,bounds=(0,1),initialize=0) m.x64 = Var(within=Reals,bounds=(0,1),initialize=0) m.x65 = Var(within=Reals,bounds=(0,1),initialize=0) m.x66 = Var(within=Reals,bounds=(0,1),initialize=0) m.x67 = Var(within=Reals,bounds=(0,1),initialize=0) m.x68 = Var(within=Reals,bounds=(0,1),initialize=0) m.x69 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x70 = Var(within=Reals,bounds=(0,1),initialize=0) m.x71 = Var(within=Reals,bounds=(0,1),initialize=0) m.x72 = Var(within=Reals,bounds=(0,1),initialize=0) m.x73 = Var(within=Reals,bounds=(0,1),initialize=0) m.x74 = Var(within=Reals,bounds=(0,1),initialize=0) m.x75 = Var(within=Reals,bounds=(0,1),initialize=0) m.x76 = Var(within=Reals,bounds=(0,1),initialize=0) m.x77 = Var(within=Reals,bounds=(0,1),initialize=0) m.x78 = Var(within=Reals,bounds=(0,1),initialize=0) m.x79 = Var(within=Reals,bounds=(0,1),initialize=0) m.x80 = Var(within=Reals,bounds=(0,1),initialize=0) m.x81 = Var(within=Reals,bounds=(0,1),initialize=0) m.x82 = Var(within=Reals,bounds=(0,1),initialize=0) m.x83 = Var(within=Reals,bounds=(0,1),initialize=0) m.x84 = Var(within=Reals,bounds=(0,1),initialize=0) m.x85 = Var(within=Reals,bounds=(0,1),initialize=0) m.x86 = Var(within=Reals,bounds=(0,1),initialize=0) m.x87 = Var(within=Reals,bounds=(0,1),initialize=0) m.x88 = Var(within=Reals,bounds=(0,1),initialize=0) m.x89 = Var(within=Reals,bounds=(0,1),initialize=0) m.x90 = Var(within=Reals,bounds=(0,1),initialize=0) m.x91 = Var(within=Reals,bounds=(0,1),initialize=0) m.x92 = Var(within=Reals,bounds=(0,1),initialize=0) m.x93 = Var(within=Reals,bounds=(0,1),initialize=0) m.x94 = Var(within=Reals,bounds=(0,1),initialize=0) m.x95 = Var(within=Reals,bounds=(0,1),initialize=0) m.x96 = Var(within=Reals,bounds=(0,1),initialize=0) m.x97 = Var(within=Reals,bounds=(0,1),initialize=0) m.x98 = Var(within=Reals,bounds=(0,1),initialize=0) m.x99 = Var(within=Reals,bounds=(0,1),initialize=0) m.x100 = Var(within=Reals,bounds=(0,1),initialize=0) m.x101 = Var(within=Reals,bounds=(0,1),initialize=0) m.x102 = Var(within=Reals,bounds=(0,1),initialize=0) m.x103 = Var(within=Reals,bounds=(0,1),initialize=0) m.x104 = Var(within=Reals,bounds=(0,1),initialize=0) m.x105 = Var(within=Reals,bounds=(0,1),initialize=0) m.x106 = Var(within=Reals,bounds=(0,1),initialize=0) m.x107 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x108 = Var(within=Reals,bounds=(0,1),initialize=0) m.x109 = Var(within=Reals,bounds=(0,1),initialize=0) m.x110 = Var(within=Reals,bounds=(0,1),initialize=0) m.x111 = Var(within=Reals,bounds=(0,1),initialize=0) m.x112 = Var(within=Reals,bounds=(0,1),initialize=0) m.x113 = Var(within=Reals,bounds=(0,1),initialize=0) m.x114 = Var(within=Reals,bounds=(0,1),initialize=0) m.x115 = Var(within=Reals,bounds=(0,1),initialize=0) m.x116 = Var(within=Reals,bounds=(0,1),initialize=0) m.x117 = Var(within=Reals,bounds=(0,1),initialize=0) m.x118 = Var(within=Reals,bounds=(0,1),initialize=0) m.x119 = Var(within=Reals,bounds=(0,1),initialize=0) m.x120 = Var(within=Reals,bounds=(0,1),initialize=0) m.x121 = Var(within=Reals,bounds=(0,1),initialize=0) m.x122 = Var(within=Reals,bounds=(0,1),initialize=0) m.x123 = Var(within=Reals,bounds=(0,1),initialize=0) m.x124 = Var(within=Reals,bounds=(0,1),initialize=0) m.x125 = Var(within=Reals,bounds=(0,1),initialize=0) m.x126 = Var(within=Reals,bounds=(0,1),initialize=0) m.x127 = Var(within=Reals,bounds=(0,1),initialize=0) m.x128 = Var(within=Reals,bounds=(0,1),initialize=0) m.x129 = Var(within=Reals,bounds=(0,1),initialize=0) m.x130 = Var(within=Reals,bounds=(0,1),initialize=0) m.x131 = Var(within=Reals,bounds=(0,1),initialize=0) m.x132 = Var(within=Reals,bounds=(0,1),initialize=0) m.x133 = Var(within=Reals,bounds=(0,1),initialize=0) m.x134 = Var(within=Reals,bounds=(0,1),initialize=0) m.x135 = Var(within=Reals,bounds=(0,1),initialize=0) m.x136 = Var(within=Reals,bounds=(0,1),initialize=0) m.x137 = Var(within=Reals,bounds=(0,1),initialize=0) m.x138 = Var(within=Reals,bounds=(0,1),initialize=0) m.x139 = Var(within=Reals,bounds=(0,1),initialize=0) m.x140 = Var(within=Reals,bounds=(0,1),initialize=0) m.x141 = Var(within=Reals,bounds=(0,1),initialize=0) m.x142 = Var(within=Reals,bounds=(0,1),initialize=0) m.x143 = Var(within=Reals,bounds=(0,1),initialize=0) m.x144 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x145 = Var(within=Reals,bounds=(0,1),initialize=0) m.x146 = Var(within=Reals,bounds=(0,1),initialize=0) m.x147 = Var(within=Reals,bounds=(0,1),initialize=0) m.x148 = Var(within=Reals,bounds=(0,1),initialize=0) m.x149 = Var(within=Reals,bounds=(0,1),initialize=0) m.x150 = Var(within=Reals,bounds=(0,1),initialize=0) m.x151 = Var(within=Reals,bounds=(0,1),initialize=0) m.x152 = Var(within=Reals,bounds=(0,1),initialize=0) m.x153 = Var(within=Reals,bounds=(0,1),initialize=0) m.x154 = Var(within=Reals,bounds=(0,1),initialize=0) m.x155 = Var(within=Reals,bounds=(0,1),initialize=0) m.x156 = Var(within=Reals,bounds=(0,1),initialize=0) m.x157 = Var(within=Reals,bounds=(0,1),initialize=0) m.x158 = Var(within=Reals,bounds=(0,1),initialize=0) m.x159 = Var(within=Reals,bounds=(0,1),initialize=0) m.x160 = Var(within=Reals,bounds=(0,1),initialize=0) m.x161 = Var(within=Reals,bounds=(0,1),initialize=0) m.x162 = Var(within=Reals,bounds=(0,1),initialize=0) m.x163 = Var(within=Reals,bounds=(0,1),initialize=0) m.x164 = Var(within=Reals,bounds=(0,1),initialize=0) m.x165 = Var(within=Reals,bounds=(0,1),initialize=0) m.x166 = Var(within=Reals,bounds=(0,1),initialize=0) m.x167 = Var(within=Reals,bounds=(0,1),initialize=0) m.x168 = Var(within=Reals,bounds=(0,1),initialize=0) m.x169 = Var(within=Reals,bounds=(0,1),initialize=0) m.x170 = Var(within=Reals,bounds=(0,1),initialize=0) m.x171 = Var(within=Reals,bounds=(0,1),initialize=0) m.x172 = Var(within=Reals,bounds=(0,1),initialize=0) m.x173 = Var(within=Reals,bounds=(0,1),initialize=0) m.x174 = Var(within=Reals,bounds=(0,1),initialize=0) m.x175 = Var(within=Reals,bounds=(0,1),initialize=0) m.x176 = Var(within=Reals,bounds=(0,1),initialize=0) m.x177 = Var(within=Reals,bounds=(0,1),initialize=0) m.x178 = Var(within=Reals,bounds=(0,1),initialize=0) m.x179 = Var(within=Reals,bounds=(0,1),initialize=0) m.x180 = Var(within=Reals,bounds=(0,1),initialize=0) m.x181 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x182 = Var(within=Reals,bounds=(0,1),initialize=0) m.x183 = Var(within=Reals,bounds=(0,1),initialize=0) m.x184 = Var(within=Reals,bounds=(0,1),initialize=0) m.x185 = Var(within=Reals,bounds=(0,1),initialize=0) m.x186 = Var(within=Reals,bounds=(0,1),initialize=0) m.x187 = Var(within=Reals,bounds=(0,1),initialize=0) m.x188 = Var(within=Reals,bounds=(0,1),initialize=0) m.x189 = Var(within=Reals,bounds=(0,1),initialize=0) m.x190 = Var(within=Reals,bounds=(0,1),initialize=0) m.x191 = Var(within=Reals,bounds=(0,1),initialize=0) m.x192 = Var(within=Reals,bounds=(0,1),initialize=0) m.x193 = Var(within=Reals,bounds=(0,1),initialize=0) m.x194 = Var(within=Reals,bounds=(0,1),initialize=0) m.x195 = Var(within=Reals,bounds=(0,1),initialize=0) m.x196 = Var(within=Reals,bounds=(0,1),initialize=0) m.x197 = Var(within=Reals,bounds=(0,1),initialize=0) m.x198 = Var(within=Reals,bounds=(0,1),initialize=0) m.x199 = Var(within=Reals,bounds=(0,1),initialize=0) m.x200 = Var(within=Reals,bounds=(0,1),initialize=0) m.x201 = Var(within=Reals,bounds=(0,1),initialize=0) m.x202 = Var(within=Reals,bounds=(0,1),initialize=0) m.x203 = Var(within=Reals,bounds=(0,1),initialize=0) m.x204 = Var(within=Reals,bounds=(0,1),initialize=0) m.x205 = Var(within=Reals,bounds=(0,1),initialize=0) m.x206 = Var(within=Reals,bounds=(0,1),initialize=0) m.x207 = Var(within=Reals,bounds=(0,1),initialize=0) m.x208 = Var(within=Reals,bounds=(0,1),initialize=0) m.x209 = Var(within=Reals,bounds=(0,1),initialize=0) m.x210 = Var(within=Reals,bounds=(0,1),initialize=0) m.x211 = Var(within=Reals,bounds=(0,1),initialize=0) m.x212 = Var(within=Reals,bounds=(0,1),initialize=0) m.x213 = Var(within=Reals,bounds=(0,1),initialize=0) m.x214 = Var(within=Reals,bounds=(0,1),initialize=0) m.x215 = Var(within=Reals,bounds=(0,1),initialize=0) m.x216 = Var(within=Reals,bounds=(0,1),initialize=0) m.x217 = Var(within=Reals,bounds=(0,1),initialize=0) m.x218 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x219 = Var(within=Reals,bounds=(0,1),initialize=0) m.x220 = Var(within=Reals,bounds=(0,1),initialize=0) m.x221 = Var(within=Reals,bounds=(0,1),initialize=0) m.x222 = Var(within=Reals,bounds=(0,1),initialize=0) m.x223 = Var(within=Reals,bounds=(0,1),initialize=0) m.x224 = Var(within=Reals,bounds=(0,1),initialize=0) m.x225 = Var(within=Reals,bounds=(0,1),initialize=0) m.x226 = Var(within=Reals,bounds=(0,1),initialize=0) m.x227 = Var(within=Reals,bounds=(0,1),initialize=0) m.x228 = Var(within=Reals,bounds=(0,1),initialize=0) m.x229 = Var(within=Reals,bounds=(0,1),initialize=0) m.x230 = Var(within=Reals,bounds=(0,1),initialize=0) m.x231 = Var(within=Reals,bounds=(0,1),initialize=0) m.x232 = Var(within=Reals,bounds=(0,1),initialize=0) m.x233 = Var(within=Reals,bounds=(0,1),initialize=0) m.x234 = Var(within=Reals,bounds=(0,1),initialize=0) m.x235 = Var(within=Reals,bounds=(0,1),initialize=0) m.x236 = Var(within=Reals,bounds=(0,1),initialize=0) m.x237 = Var(within=Reals,bounds=(0,1),initialize=0) m.x238 = Var(within=Reals,bounds=(0,1),initialize=0) m.x239 = Var(within=Reals,bounds=(0,1),initialize=0) m.x240 = Var(within=Reals,bounds=(0,1),initialize=0) m.x241 = Var(within=Reals,bounds=(0,1),initialize=0) m.x242 = Var(within=Reals,bounds=(0,1),initialize=0) m.x243 = Var(within=Reals,bounds=(0,1),initialize=0) m.x244 = Var(within=Reals,bounds=(0,1),initialize=0) m.x245 = Var(within=Reals,bounds=(0,1),initialize=0) m.x246 = Var(within=Reals,bounds=(0,1),initialize=0) m.x247 = Var(within=Reals,bounds=(0,1),initialize=0) m.x248 = Var(within=Reals,bounds=(0,1),initialize=0) m.x249 = Var(within=Reals,bounds=(0,1),initialize=0) m.x250 = Var(within=Reals,bounds=(0,1),initialize=0) m.x251 = Var(within=Reals,bounds=(0,1),initialize=0) m.x252 = Var(within=Reals,bounds=(0,1),initialize=0) m.x253 = Var(within=Reals,bounds=(0,1),initialize=0) m.x254 = Var(within=Reals,bounds=(0,1),initialize=0) m.x255 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x256 = Var(within=Reals,bounds=(0,1),initialize=0) m.x257 = Var(within=Reals,bounds=(0,1),initialize=0) m.x258 = Var(within=Reals,bounds=(0,1),initialize=0) m.x259 = Var(within=Reals,bounds=(0,1),initialize=0) m.x260 = Var(within=Reals,bounds=(0,1),initialize=0) m.x261 = Var(within=Reals,bounds=(0,1),initialize=0) m.x262 = Var(within=Reals,bounds=(0,1),initialize=0) m.x263 = Var(within=Reals,bounds=(0,1),initialize=0) m.x264 = Var(within=Reals,bounds=(0,1),initialize=0) m.x265 = Var(within=Reals,bounds=(0,1),initialize=0) m.x266 = Var(within=Reals,bounds=(0,1),initialize=0) m.x267 = Var(within=Reals,bounds=(0,1),initialize=0) m.x268 = Var(within=Reals,bounds=(0,1),initialize=0) m.x269 = Var(within=Reals,bounds=(0,1),initialize=0) m.x270 = Var(within=Reals,bounds=(0,1),initialize=0) m.x271 = Var(within=Reals,bounds=(0,1),initialize=0) m.x272 = Var(within=Reals,bounds=(0,1),initialize=0) m.x273 = Var(within=Reals,bounds=(0,1),initialize=0) m.x274 = Var(within=Reals,bounds=(0,1),initialize=0) m.x275 = Var(within=Reals,bounds=(0,1),initialize=0) m.x276 = Var(within=Reals,bounds=(0,1),initialize=0) m.x277 = Var(within=Reals,bounds=(0,1),initialize=0) m.x278 = Var(within=Reals,bounds=(0,1),initialize=0) m.x279 = Var(within=Reals,bounds=(0,1),initialize=0) m.x280 = Var(within=Reals,bounds=(0,1),initialize=0) m.x281 = Var(within=Reals,bounds=(0,1),initialize=0) m.x282 = Var(within=Reals,bounds=(0,1),initialize=0) m.x283 = Var(within=Reals,bounds=(0,1),initialize=0) m.x284 = Var(within=Reals,bounds=(0,1),initialize=0) m.x285 = Var(within=Reals,bounds=(0,1),initialize=0) m.x286 = Var(within=Reals,bounds=(0,1),initialize=0) m.x287 = Var(within=Reals,bounds=(0,1),initialize=0) m.x288 = Var(within=Reals,bounds=(0,1),initialize=0) m.x289 = Var(within=Reals,bounds=(0,1),initialize=0) m.x290 = Var(within=Reals,bounds=(0,1),initialize=0) m.x291 = Var(within=Reals,bounds=(0,1),initialize=0) m.x292 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x293 = Var(within=Reals,bounds=(0,1),initialize=0) m.x294 = Var(within=Reals,bounds=(0,1),initialize=0) m.x295 = Var(within=Reals,bounds=(0,1),initialize=0) m.x296 = Var(within=Reals,bounds=(0,1),initialize=0) m.x297 = Var(within=Reals,bounds=(0,1),initialize=0) m.x298 = Var(within=Reals,bounds=(0,1),initialize=0) m.x299 = Var(within=Reals,bounds=(0,1),initialize=0) m.x300 = Var(within=Reals,bounds=(0,1),initialize=0) m.x301 = Var(within=Reals,bounds=(0,1),initialize=0) m.x302 = Var(within=Reals,bounds=(0,1),initialize=0) m.x303 = Var(within=Reals,bounds=(0,1),initialize=0) m.x304 = Var(within=Reals,bounds=(0,1),initialize=0) m.x305 = Var(within=Reals,bounds=(0,1),initialize=0) m.x306 = Var(within=Reals,bounds=(0,1),initialize=0) m.x307 = Var(within=Reals,bounds=(0,1),initialize=0) m.x308 = Var(within=Reals,bounds=(0,1),initialize=0) m.x309 = Var(within=Reals,bounds=(0,1),initialize=0) m.x310 = Var(within=Reals,bounds=(0,1),initialize=0) m.x311 = Var(within=Reals,bounds=(0,1),initialize=0) m.x312 = Var(within=Reals,bounds=(0,1),initialize=0) m.x313 = Var(within=Reals,bounds=(0,1),initialize=0) m.x314 = Var(within=Reals,bounds=(0,1),initialize=0) m.x315 = Var(within=Reals,bounds=(0,1),initialize=0) m.x316 = Var(within=Reals,bounds=(0,1),initialize=0) m.x317 = Var(within=Reals,bounds=(0,1),initialize=0) m.x318 = Var(within=Reals,bounds=(0,1),initialize=0) m.x319 = Var(within=Reals,bounds=(0,1),initialize=0) m.x320 = Var(within=Reals,bounds=(0,1),initialize=0) m.x321 = Var(within=Reals,bounds=(0,1),initialize=0) m.x322 = Var(within=Reals,bounds=(0,1),initialize=0) m.x323 = Var(within=Reals,bounds=(0,1),initialize=0) m.x324 = Var(within=Reals,bounds=(0,1),initialize=0) m.x325 = Var(within=Reals,bounds=(0,1),initialize=0) m.x326 = Var(within=Reals,bounds=(0,1),initialize=0) m.x327 = Var(within=Reals,bounds=(0,1),initialize=0) m.x328 = Var(within=Reals,bounds=(0,1),initialize=0) m.x329 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x330 = Var(within=Reals,bounds=(0,1),initialize=0) m.x331 = Var(within=Reals,bounds=(0,1),initialize=0) m.x332 = Var(within=Reals,bounds=(0,1),initialize=0) m.x333 = Var(within=Reals,bounds=(0,1),initialize=0) m.x334 = Var(within=Reals,bounds=(0,1),initialize=0) m.x335 = Var(within=Reals,bounds=(0,1),initialize=0) m.x336 = Var(within=Reals,bounds=(0,1),initialize=0) m.x337 = Var(within=Reals,bounds=(0,1),initialize=0) m.x338 = Var(within=Reals,bounds=(0,1),initialize=0) m.x339 = Var(within=Reals,bounds=(0,1),initialize=0) m.x340 = Var(within=Reals,bounds=(0,1),initialize=0) m.x341 = Var(within=Reals,bounds=(0,1),initialize=0) m.x342 = Var(within=Reals,bounds=(0,1),initialize=0) m.x343 = Var(within=Reals,bounds=(0,1),initialize=0) m.x344 = Var(within=Reals,bounds=(0,1),initialize=0) m.x345 = Var(within=Reals,bounds=(0,1),initialize=0) m.x346 = Var(within=Reals,bounds=(0,1),initialize=0) m.x347 = Var(within=Reals,bounds=(0,1),initialize=0) m.x348 = Var(within=Reals,bounds=(0,1),initialize=0) m.x349 = Var(within=Reals,bounds=(0,1),initialize=0) m.x350 = Var(within=Reals,bounds=(0,1),initialize=0) m.x351 = Var(within=Reals,bounds=(0,1),initialize=0) m.x352 = Var(within=Reals,bounds=(0,1),initialize=0) m.x353 = Var(within=Reals,bounds=(0,1),initialize=0) m.x354 = Var(within=Reals,bounds=(0,1),initialize=0) m.x355 = Var(within=Reals,bounds=(0,1),initialize=0) m.x356 = Var(within=Reals,bounds=(0,1),initialize=0) m.x357 = Var(within=Reals,bounds=(0,1),initialize=0) m.x358 = Var(within=Reals,bounds=(0,1),initialize=0) m.x359 = Var(within=Reals,bounds=(0,1),initialize=0) m.x360 = Var(within=Reals,bounds=(0,1),initialize=0) m.x361 = Var(within=Reals,bounds=(0,1),initialize=0) m.x362 = Var(within=Reals,bounds=(0,1),initialize=0) m.x363 = Var(within=Reals,bounds=(0,1),initialize=0) m.x364 = Var(within=Reals,bounds=(0,1),initialize=0) m.x365 = Var(within=Reals,bounds=(0,1),initialize=0) m.x366 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x367 = Var(within=Reals,bounds=(0,1),initialize=0) m.x368 = Var(within=Reals,bounds=(0,1),initialize=0) m.x369 = Var(within=Reals,bounds=(0,1),initialize=0) m.x370 = Var(within=Reals,bounds=(0,1),initialize=0) m.x371 = Var(within=Reals,bounds=(0,1),initialize=0) m.x372 = Var(within=Reals,bounds=(0,1),initialize=0) m.x373 = Var(within=Reals,bounds=(0,1),initialize=0) m.x374 = Var(within=Reals,bounds=(0,1),initialize=0) m.x375 = Var(within=Reals,bounds=(0,1),initialize=0) m.x376 = Var(within=Reals,bounds=(0,1),initialize=0) m.x377 = Var(within=Reals,bounds=(0,1),initialize=0) m.x378 = Var(within=Reals,bounds=(0,1),initialize=0) m.x379 = Var(within=Reals,bounds=(0,1),initialize=0) m.x380 = Var(within=Reals,bounds=(0,1),initialize=0) m.x381 = Var(within=Reals,bounds=(0,1),initialize=0) m.x382 = Var(within=Reals,bounds=(0,1),initialize=0) m.x383 = Var(within=Reals,bounds=(0,1),initialize=0) m.x384 = Var(within=Reals,bounds=(0,1),initialize=0) m.x385 = Var(within=Reals,bounds=(0,1),initialize=0) m.x386 = Var(within=Reals,bounds=(0,1),initialize=0) m.x387 = Var(within=Reals,bounds=(0,1),initialize=0) m.x388 = Var(within=Reals,bounds=(0,1),initialize=0) m.x389 = Var(within=Reals,bounds=(0,1),initialize=0) m.x390 = Var(within=Reals,bounds=(0,1),initialize=0) m.x391 = Var(within=Reals,bounds=(0,1),initialize=0) m.x392 = Var(within=Reals,bounds=(0,1),initialize=0) m.x393 = Var(within=Reals,bounds=(0,1),initialize=0) m.x394 = Var(within=Reals,bounds=(0,1),initialize=0) m.x395 = Var(within=Reals,bounds=(0,1),initialize=0) m.x396 = Var(within=Reals,bounds=(0,1),initialize=0) m.x397 = Var(within=Reals,bounds=(0,1),initialize=0) m.x398 = Var(within=Reals,bounds=(0,1),initialize=0) m.x399 = Var(within=Reals,bounds=(0,1),initialize=0) m.x400 = Var(within=Reals,bounds=(0,1),initialize=0) m.x401 = Var(within=Reals,bounds=(0,1),initialize=0) m.x402 = Var(within=Reals,bounds=(0,1),initialize=0) m.x403 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x404 = Var(within=Reals,bounds=(0,1),initialize=0) m.x405 = Var(within=Reals,bounds=(0,1),initialize=0) m.x406 = Var(within=Reals,bounds=(0,1),initialize=0) m.x407 = Var(within=Reals,bounds=(0,1),initialize=0) m.x408 = Var(within=Reals,bounds=(0,1),initialize=0) m.x409 = Var(within=Reals,bounds=(0,1),initialize=0) m.x410 = Var(within=Reals,bounds=(0,1),initialize=0) m.x411 = Var(within=Reals,bounds=(0,1),initialize=0) m.x412 = Var(within=Reals,bounds=(0,1),initialize=0) m.x413 = Var(within=Reals,bounds=(0,1),initialize=0) m.x414 = Var(within=Reals,bounds=(0,1),initialize=0) m.x415 = Var(within=Reals,bounds=(0,1),initialize=0) m.x416 = Var(within=Reals,bounds=(0,1),initialize=0) m.x417 = Var(within=Reals,bounds=(0,1),initialize=0) m.x418 = Var(within=Reals,bounds=(0,1),initialize=0) m.x419 = Var(within=Reals,bounds=(0,1),initialize=0) m.x420 = Var(within=Reals,bounds=(0,1),initialize=0) m.x421 = Var(within=Reals,bounds=(0,1),initialize=0) m.x422 = Var(within=Reals,bounds=(0,1),initialize=0) m.x423 = Var(within=Reals,bounds=(0,1),initialize=0) m.x424 = Var(within=Reals,bounds=(0,1),initialize=0) m.x425 = Var(within=Reals,bounds=(0,1),initialize=0) m.x426 = Var(within=Reals,bounds=(0,1),initialize=0) m.x427 = Var(within=Reals,bounds=(0,1),initialize=0) m.x428 = Var(within=Reals,bounds=(0,1),initialize=0) m.x429 = Var(within=Reals,bounds=(0,1),initialize=0) m.x430 = Var(within=Reals,bounds=(0,1),initialize=0) m.x431 = Var(within=Reals,bounds=(0,1),initialize=0) m.x432 = Var(within=Reals,bounds=(0,1),initialize=0) m.x433 = Var(within=Reals,bounds=(0,1),initialize=0) m.x434 = Var(within=Reals,bounds=(0,1),initialize=0) m.x435 = Var(within=Reals,bounds=(0,1),initialize=0) m.x436 = Var(within=Reals,bounds=(0,1),initialize=0) m.x437 = Var(within=Reals,bounds=(0,1),initialize=0) m.x438 = Var(within=Reals,bounds=(0,1),initialize=0) m.x439 = Var(within=Reals,bounds=(0,1),initialize=0) m.x440 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x441 = Var(within=Reals,bounds=(0,1),initialize=0) m.x442 = Var(within=Reals,bounds=(0,1),initialize=0) m.x443 = Var(within=Reals,bounds=(0,1),initialize=0) m.x444 = Var(within=Reals,bounds=(0,1),initialize=0) m.x445 = Var(within=Reals,bounds=(0,1),initialize=0) m.x446 = Var(within=Reals,bounds=(0,1),initialize=0) m.x447 = Var(within=Reals,bounds=(0,1),initialize=0) m.x448 = Var(within=Reals,bounds=(0,1),initialize=0) m.x449 = Var(within=Reals,bounds=(0,1),initialize=0) m.x450 = Var(within=Reals,bounds=(0,1),initialize=0) m.x451 = Var(within=Reals,bounds=(0,1),initialize=0) m.x452 = Var(within=Reals,bounds=(0,1),initialize=0) m.x453 = Var(within=Reals,bounds=(0,1),initialize=0) m.x454 = Var(within=Reals,bounds=(0,1),initialize=0) m.x455 = Var(within=Reals,bounds=(0,1),initialize=0) m.x456 = Var(within=Reals,bounds=(0,1),initialize=0) m.x457 = Var(within=Reals,bounds=(0,1),initialize=0) m.x458 = Var(within=Reals,bounds=(0,1),initialize=0) m.x459 = Var(within=Reals,bounds=(0,1),initialize=0) m.x460 = Var(within=Reals,bounds=(0,1),initialize=0) m.x461 = Var(within=Reals,bounds=(0,1),initialize=0) m.x462 = Var(within=Reals,bounds=(0,1),initialize=0) m.x463 = Var(within=Reals,bounds=(0,1),initialize=0) m.x464 = Var(within=Reals,bounds=(0,1),initialize=0) m.x465 = Var(within=Reals,bounds=(0,1),initialize=0) m.x466 = Var(within=Reals,bounds=(0,1),initialize=0) m.x467 = Var(within=Reals,bounds=(0,1),initialize=0) m.x468 = Var(within=Reals,bounds=(0,1),initialize=0) m.x469 = Var(within=Reals,bounds=(0,1),initialize=0) m.x470 = Var(within=Reals,bounds=(0,1),initialize=0) m.x471 = Var(within=Reals,bounds=(0,1),initialize=0) m.x472 = Var(within=Reals,bounds=(0,1),initialize=0) m.x473 = Var(within=Reals,bounds=(0,1),initialize=0) m.x474 = Var(within=Reals,bounds=(0,1),initialize=0) m.x475 = Var(within=Reals,bounds=(0,1),initialize=0) m.x476 = Var(within=Reals,bounds=(0,1),initialize=0) m.x477 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x478 = Var(within=Reals,bounds=(0,1),initialize=0) m.x479 = Var(within=Reals,bounds=(0,1),initialize=0) m.x480 = Var(within=Reals,bounds=(0,1),initialize=0) m.x481 = Var(within=Reals,bounds=(0,1),initialize=0) m.x482 = Var(within=Reals,bounds=(0,1),initialize=0) m.x483 = Var(within=Reals,bounds=(0,1),initialize=0) m.x484 = Var(within=Reals,bounds=(0,1),initialize=0) m.x485 = Var(within=Reals,bounds=(0,1),initialize=0) m.x486 = Var(within=Reals,bounds=(0,1),initialize=0) m.x487 = Var(within=Reals,bounds=(0,1),initialize=0) m.x488 = Var(within=Reals,bounds=(0,1),initialize=0) m.x489 = Var(within=Reals,bounds=(0,1),initialize=0) m.x490 = Var(within=Reals,bounds=(0,1),initialize=0) m.x491 = Var(within=Reals,bounds=(0,1),initialize=0) m.x492 = Var(within=Reals,bounds=(0,1),initialize=0) m.x493 = Var(within=Reals,bounds=(0,1),initialize=0) m.x494 = Var(within=Reals,bounds=(0,1),initialize=0) m.x495 = Var(within=Reals,bounds=(0,1),initialize=0) m.x496 = Var(within=Reals,bounds=(0,1),initialize=0) m.x497 = Var(within=Reals,bounds=(0,1),initialize=0) m.x498 = Var(within=Reals,bounds=(0,1),initialize=0) m.x499 = Var(within=Reals,bounds=(0,1),initialize=0) m.x500 = Var(within=Reals,bounds=(0,1),initialize=0) m.x501 = Var(within=Reals,bounds=(0,1),initialize=0) m.x502 = Var(within=Reals,bounds=(0,1),initialize=0) m.x503 = Var(within=Reals,bounds=(0,1),initialize=0) m.x504 = Var(within=Reals,bounds=(0,1),initialize=0) m.x505 = Var(within=Reals,bounds=(0,1),initialize=0) m.x506 = Var(within=Reals,bounds=(0,1),initialize=0) m.x507 = Var(within=Reals,bounds=(0,1),initialize=0) m.x508 = Var(within=Reals,bounds=(0,1),initialize=0) m.x509 = Var(within=Reals,bounds=(0,1),initialize=0) m.x510 = Var(within=Reals,bounds=(0,1),initialize=0) m.x511 = Var(within=Reals,bounds=(0,1),initialize=0) m.x512 = Var(within=Reals,bounds=(0,1),initialize=0) m.x513 = Var(within=Reals,bounds=(0,1),initialize=0) m.x514 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x515 = Var(within=Reals,bounds=(0,1),initialize=0) m.x516 = Var(within=Reals,bounds=(0,1),initialize=0) m.x517 = Var(within=Reals,bounds=(0,1),initialize=0) m.x518 = Var(within=Reals,bounds=(0,1),initialize=0) m.x519 = Var(within=Reals,bounds=(0,1),initialize=0) m.x520 = Var(within=Reals,bounds=(0,1),initialize=0) m.x521 = Var(within=Reals,bounds=(0,1),initialize=0) m.x522 = Var(within=Reals,bounds=(0,1),initialize=0) m.x523 = Var(within=Reals,bounds=(0,1),initialize=0) m.x524 = Var(within=Reals,bounds=(0,1),initialize=0) m.x525 = Var(within=Reals,bounds=(0,1),initialize=0) m.x526 = Var(within=Reals,bounds=(0,1),initialize=0) m.x527 = Var(within=Reals,bounds=(0,1),initialize=0) m.x528 = Var(within=Reals,bounds=(0,1),initialize=0) m.x529 = Var(within=Reals,bounds=(0,1),initialize=0) m.x530 = Var(within=Reals,bounds=(0,1),initialize=0) m.x531 = Var(within=Reals,bounds=(0,1),initialize=0) m.x532 = Var(within=Reals,bounds=(0,1),initialize=0) m.x533 = Var(within=Reals,bounds=(0,1),initialize=0) m.x534 = Var(within=Reals,bounds=(0,1),initialize=0) m.x535 = Var(within=Reals,bounds=(0,1),initialize=0) m.x536 = Var(within=Reals,bounds=(0,1),initialize=0) m.x537 = Var(within=Reals,bounds=(0,1),initialize=0) m.x538 = Var(within=Reals,bounds=(0,1),initialize=0) m.x539 = Var(within=Reals,bounds=(0,1),initialize=0) m.x540 = Var(within=Reals,bounds=(0,1),initialize=0) m.x541 = Var(within=Reals,bounds=(0,1),initialize=0) m.x542 = Var(within=Reals,bounds=(0,1),initialize=0) m.x543 = Var(within=Reals,bounds=(0,1),initialize=0) m.x544 = Var(within=Reals,bounds=(0,1),initialize=0) m.x545 = Var(within=Reals,bounds=(0,1),initialize=0) m.x546 = Var(within=Reals,bounds=(0,1),initialize=0) m.x547 = Var(within=Reals,bounds=(0,1),initialize=0) m.x548 = Var(within=Reals,bounds=(0,1),initialize=0) m.x549 = Var(within=Reals,bounds=(0,1),initialize=0) m.x550 = Var(within=Reals,bounds=(0,1),initialize=0) m.x551 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x552 = Var(within=Reals,bounds=(0,1),initialize=0) m.x553 = Var(within=Reals,bounds=(0,1),initialize=0) m.x554 = Var(within=Reals,bounds=(0,1),initialize=0) m.x555 = Var(within=Reals,bounds=(0,1),initialize=0) m.x556 = Var(within=Reals,bounds=(0,1),initialize=0) m.x557 = Var(within=Reals,bounds=(0,1),initialize=0) m.x558 = Var(within=Reals,bounds=(0,1),initialize=0) m.x559 = Var(within=Reals,bounds=(0,1),initialize=0) m.x560 = Var(within=Reals,bounds=(0,1),initialize=0) m.x561 = Var(within=Reals,bounds=(0,1),initialize=0) m.x562 = Var(within=Reals,bounds=(0,1),initialize=0) m.x563 = Var(within=Reals,bounds=(0,1),initialize=0) m.x564 = Var(within=Reals,bounds=(0,1),initialize=0) m.x565 = Var(within=Reals,bounds=(0,1),initialize=0) m.x566 = Var(within=Reals,bounds=(0,1),initialize=0) m.x567 = Var(within=Reals,bounds=(0,1),initialize=0) m.x568 = Var(within=Reals,bounds=(0,1),initialize=0) m.x569 = Var(within=Reals,bounds=(0,1),initialize=0) m.x570 = Var(within=Reals,bounds=(0,1),initialize=0) m.x571 = Var(within=Reals,bounds=(0,1),initialize=0) m.x572 = Var(within=Reals,bounds=(0,1),initialize=0) m.x573 = Var(within=Reals,bounds=(0,1),initialize=0) m.x574 = Var(within=Reals,bounds=(0,1),initialize=0) m.x575 = Var(within=Reals,bounds=(0,1),initialize=0) m.x576 = Var(within=Reals,bounds=(0,1),initialize=0) m.x577 = Var(within=Reals,bounds=(0,1),initialize=0) m.x578 = Var(within=Reals,bounds=(0,1),initialize=0) m.x579 = Var(within=Reals,bounds=(0,1),initialize=0) m.x580 = Var(within=Reals,bounds=(0,1),initialize=0) m.x581 = Var(within=Reals,bounds=(0,1),initialize=0) m.x582 = Var(within=Reals,bounds=(0,1),initialize=0) m.x583 = Var(within=Reals,bounds=(0,1),initialize=0) m.x584 = Var(within=Reals,bounds=(0,1),initialize=0) m.x585 = Var(within=Reals,bounds=(0,1),initialize=0) m.x586 = Var(within=Reals,bounds=(0,1),initialize=0) m.x587 = Var(within=Reals,bounds=(0,1),initialize=0) m.x588 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x589 = Var(within=Reals,bounds=(0,1),initialize=0) m.x590 = Var(within=Reals,bounds=(0,1),initialize=0) m.x591 = Var(within=Reals,bounds=(0,1),initialize=0) m.x592 = Var(within=Reals,bounds=(0,1),initialize=0) m.x593 = Var(within=Reals,bounds=(0,1),initialize=0) m.x594 = Var(within=Reals,bounds=(0,1),initialize=0) m.x595 = Var(within=Reals,bounds=(0,1),initialize=0) m.x596 = Var(within=Reals,bounds=(0,1),initialize=0) m.x597 = Var(within=Reals,bounds=(0,1),initialize=0) m.x598 = Var(within=Reals,bounds=(0,1),initialize=0) m.x599 = Var(within=Reals,bounds=(0,1),initialize=0) m.x600 = Var(within=Reals,bounds=(0,1),initialize=0) m.x601 = Var(within=Reals,bounds=(0,1),initialize=0) m.x602 = Var(within=Reals,bounds=(0,1),initialize=0) m.x603 = Var(within=Reals,bounds=(0,1),initialize=0) m.x604 = Var(within=Reals,bounds=(0,1),initialize=0) m.x605 = Var(within=Reals,bounds=(0,1),initialize=0) m.x606 = Var(within=Reals,bounds=(0,1),initialize=0) m.x607 = Var(within=Reals,bounds=(0,1),initialize=0) m.x608 = Var(within=Reals,bounds=(0,1),initialize=0) m.x609 = Var(within=Reals,bounds=(0,1),initialize=0) m.x610 = Var(within=Reals,bounds=(0,1),initialize=0) m.x611 = Var(within=Reals,bounds=(0,1),initialize=0) m.x612 = Var(within=Reals,bounds=(0,1),initialize=0) m.x613 = Var(within=Reals,bounds=(0,1),initialize=0) m.x614 = Var(within=Reals,bounds=(0,1),initialize=0) m.x615 = Var(within=Reals,bounds=(0,1),initialize=0) m.x616 = Var(within=Reals,bounds=(0,1),initialize=0) m.x617 = Var(within=Reals,bounds=(0,1),initialize=0) m.x618 = Var(within=Reals,bounds=(0,1),initialize=0) m.x619 = Var(within=Reals,bounds=(0,1),initialize=0) m.x620 = Var(within=Reals,bounds=(0,1),initialize=0) m.x621 = Var(within=Reals,bounds=(0,1),initialize=0) m.x622 = Var(within=Reals,bounds=(0,1),initialize=0) m.x623 = Var(within=Reals,bounds=(0,1),initialize=0) m.x624 = Var(within=Reals,bounds=(0,1),initialize=0) m.x625 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x626 = Var(within=Reals,bounds=(0,1),initialize=0) m.x627 = Var(within=Reals,bounds=(0,1),initialize=0) m.x628 = Var(within=Reals,bounds=(0,1),initialize=0) m.x629 = Var(within=Reals,bounds=(0,1),initialize=0) m.x630 = Var(within=Reals,bounds=(0,1),initialize=0) m.x631 = Var(within=Reals,bounds=(0,1),initialize=0) m.x632 = Var(within=Reals,bounds=(0,1),initialize=0) m.x633 = Var(within=Reals,bounds=(0,1),initialize=0) m.x634 = Var(within=Reals,bounds=(0,1),initialize=0) m.x635 = Var(within=Reals,bounds=(0,1),initialize=0) m.x636 = Var(within=Reals,bounds=(0,1),initialize=0) m.x637 = Var(within=Reals,bounds=(0,1),initialize=0) m.x638 = Var(within=Reals,bounds=(0,1),initialize=0) m.x639 = Var(within=Reals,bounds=(0,1),initialize=0) m.x640 = Var(within=Reals,bounds=(0,1),initialize=0) m.x641 = Var(within=Reals,bounds=(0,1),initialize=0) m.x642 = Var(within=Reals,bounds=(0,16),initialize=0) m.x643 = Var(within=Reals,bounds=(0,16),initialize=0) m.x644 = Var(within=Reals,bounds=(0,16),initialize=0) m.x645 = Var(within=Reals,bounds=(0,16),initialize=0) m.x646 = Var(within=Reals,bounds=(0,16),initialize=0) m.x647 = Var(within=Reals,bounds=(0,16),initialize=0) m.x648 = Var(within=Reals,bounds=(0,16),initialize=0) m.x649 = Var(within=Reals,bounds=(0,16),initialize=0) m.x650 = Var(within=Reals,bounds=(0,16),initialize=0) m.x651 = Var(within=Reals,bounds=(0,16),initialize=0) m.x652 = Var(within=Reals,bounds=(0,16),initialize=0) m.x653 = Var(within=Reals,bounds=(0,16),initialize=0) m.x654 = Var(within=Reals,bounds=(0,16),initialize=0) m.x655 = Var(within=Reals,bounds=(0,16),initialize=0) m.x656 = Var(within=Reals,bounds=(0,16),initialize=0) m.x657 = Var(within=Reals,bounds=(0,16),initialize=0) m.x658 = Var(within=Reals,bounds=(0,16),initialize=0) m.x659 = Var(within=Reals,bounds=(0,16),initialize=0) m.x660 = Var(within=Reals,bounds=(0,16),initialize=0) m.x661 = Var(within=Reals,bounds=(0,16),initialize=0) m.x662 = 
Var(within=Reals,bounds=(0,16),initialize=0) m.x663 = Var(within=Reals,bounds=(0,16),initialize=0) m.x664 = Var(within=Reals,bounds=(0,16),initialize=0) m.x665 = Var(within=Reals,bounds=(0,16),initialize=0) m.x666 = Var(within=Reals,bounds=(0,16),initialize=0) m.x667 = Var(within=Reals,bounds=(0,16),initialize=0) m.x668 = Var(within=Reals,bounds=(0,16),initialize=0) m.x669 = Var(within=Reals,bounds=(0,16),initialize=0) m.x670 = Var(within=Reals,bounds=(0,16),initialize=0) m.x671 = Var(within=Reals,bounds=(0,16),initialize=0) m.x672 = Var(within=Reals,bounds=(0,16),initialize=0) m.x673 = Var(within=Reals,bounds=(0,16),initialize=0) m.obj = Objective(expr= - 10*m.x130 - 9.5*m.x131 - 9*m.x132 - 8.5*m.x133 - 8*m.x134 - 7.5*m.x135 - 7*m.x136 - 6.5*m.x137 - 6*m.x138 - 5.5*m.x139 - 5*m.x140 - 4.5*m.x141 - 4*m.x142 - 3.5*m.x143 - 3*m.x144 - 2.5*m.x145 - 9*m.x146 - 8.5*m.x147 - 8*m.x148 - 7.5*m.x149 - 7*m.x150 - 6.5*m.x151 - 6*m.x152 - 5.5*m.x153 - 5*m.x154 - 4.5*m.x155 - 4*m.x156 - 3.5*m.x157 - 3*m.x158 - 2.5*m.x159 - 2*m.x160 - 1.5*m.x161 - 9*m.x162 - 8.5*m.x163 - 8*m.x164 - 7.5*m.x165 - 7*m.x166 - 6.5*m.x167 - 6*m.x168 - 5.5*m.x169 - 5*m.x170 - 4.5*m.x171 - 4*m.x172 - 3.5*m.x173 - 3*m.x174 - 2.5*m.x175 - 2*m.x176 - 1.5*m.x177 - 8*m.x178 - 7.5*m.x179 - 7*m.x180 - 6.5*m.x181 - 6*m.x182 - 5.5*m.x183 - 5*m.x184 - 4.5*m.x185 - 4*m.x186 - 3.5*m.x187 - 3*m.x188 - 2.5*m.x189 - 2*m.x190 - 1.5*m.x191 - m.x192 - 0.5*m.x193 - 8*m.x194 - 7.5*m.x195 - 7*m.x196 - 6.5*m.x197 - 6*m.x198 - 5.5*m.x199 - 5*m.x200 - 4.5*m.x201 - 4*m.x202 - 3.5*m.x203 - 3*m.x204 - 2.5*m.x205 - 2*m.x206 - 1.5*m.x207 - m.x208 - 0.5*m.x209 - 8*m.x210 - 7.5*m.x211 - 7*m.x212 - 6.5*m.x213 - 6*m.x214 - 5.5*m.x215 - 5*m.x216 - 4.5*m.x217 - 4*m.x218 - 3.5*m.x219 - 3*m.x220 - 2.5*m.x221 - 2*m.x222 - 1.5*m.x223 - m.x224 - 0.5*m.x225 - 7*m.x226 - 6.5*m.x227 - 6*m.x228 - 5.5*m.x229 - 5*m.x230 - 4.5*m.x231 - 4*m.x232 - 3.5*m.x233 - 3*m.x234 - 2.5*m.x235 - 2*m.x236 - 1.5*m.x237 - m.x238 - 0.5*m.x239 + 0.5*m.x241 - 7*m.x242 
- 6.5*m.x243 - 6*m.x244 - 5.5*m.x245 - 5*m.x246 - 4.5*m.x247 - 4*m.x248 - 3.5*m.x249 - 3*m.x250 - 2.5*m.x251 - 2*m.x252 - 1.5*m.x253 - m.x254 - 0.5*m.x255 + 0.5*m.x257 - 7*m.x258 - 6.5*m.x259 - 6*m.x260 - 5.5*m.x261 - 5*m.x262 - 4.5*m.x263 - 4*m.x264 - 3.5*m.x265 - 3*m.x266 - 2.5*m.x267 - 2*m.x268 - 1.5*m.x269 - m.x270 - 0.5*m.x271 + 0.5*m.x273 - 7*m.x274 - 6.5*m.x275 - 6*m.x276 - 5.5*m.x277 - 5*m.x278 - 4.5*m.x279 - 4*m.x280 - 3.5*m.x281 - 3*m.x282 - 2.5*m.x283 - 2*m.x284 - 1.5*m.x285 - m.x286 - 0.5*m.x287 + 0.5*m.x289 - 6*m.x290 - 5.5*m.x291 - 5*m.x292 - 4.5*m.x293 - 4*m.x294 - 3.5*m.x295 - 3*m.x296 - 2.5*m.x297 - 2*m.x298 - 1.5*m.x299 - m.x300 - 0.5*m.x301 + 0.5*m.x303 + m.x304 + 1.5*m.x305 - 6*m.x306 - 5.5*m.x307 - 5*m.x308 - 4.5*m.x309 - 4*m.x310 - 3.5*m.x311 - 3*m.x312 - 2.5*m.x313 - 2*m.x314 - 1.5*m.x315 - m.x316 - 0.5*m.x317 + 0.5*m.x319 + m.x320 + 1.5*m.x321 - 6*m.x322 - 5.5*m.x323 - 5*m.x324 - 4.5*m.x325 - 4*m.x326 - 3.5*m.x327 - 3*m.x328 - 2.5*m.x329 - 2*m.x330 - 1.5*m.x331 - m.x332 - 0.5*m.x333 + 0.5*m.x335 + m.x336 + 1.5*m.x337 - 6*m.x338 - 5.5*m.x339 - 5*m.x340 - 4.5*m.x341 - 4*m.x342 - 3.5*m.x343 - 3*m.x344 - 2.5*m.x345 - 2*m.x346 - 1.5*m.x347 - m.x348 - 0.5*m.x349 + 0.5*m.x351 + m.x352 + 1.5*m.x353 - 5*m.x354 - 4.5*m.x355 - 4*m.x356 - 3.5*m.x357 - 3*m.x358 - 2.5*m.x359 - 2*m.x360 - 1.5*m.x361 - m.x362 - 0.5*m.x363 + 0.5*m.x365 + m.x366 + 1.5*m.x367 + 2*m.x368 + 2.5*m.x369 - 5*m.x370 - 4.5*m.x371 - 4*m.x372 - 3.5*m.x373 - 3*m.x374 - 2.5*m.x375 - 2*m.x376 - 1.5*m.x377 - m.x378 - 0.5*m.x379 + 0.5*m.x381 + m.x382 + 1.5*m.x383 + 2*m.x384 + 2.5*m.x385 - 5*m.x386 - 4.5*m.x387 - 4*m.x388 - 3.5*m.x389 - 3*m.x390 - 2.5*m.x391 - 2*m.x392 - 1.5*m.x393 - m.x394 - 0.5*m.x395 + 0.5*m.x397 + m.x398 + 1.5*m.x399 + 2*m.x400 + 2.5*m.x401 - 5*m.x402 - 4.5*m.x403 - 4*m.x404 - 3.5*m.x405 - 3*m.x406 - 2.5*m.x407 - 2*m.x408 - 1.5*m.x409 - m.x410 - 0.5*m.x411 + 0.5*m.x413 + m.x414 + 1.5*m.x415 + 2*m.x416 + 2.5*m.x417 - 4*m.x418 - 3.5*m.x419 - 3*m.x420 - 2.5*m.x421 - 
2*m.x422 - 1.5*m.x423 - m.x424 - 0.5*m.x425 + 0.5*m.x427 + m.x428 + 1.5*m.x429 + 2*m.x430 + 2.5*m.x431 + 3*m.x432 + 3.5*m.x433 - 4*m.x434 - 3.5*m.x435 - 3*m.x436 - 2.5*m.x437 - 2*m.x438 - 1.5*m.x439 - m.x440 - 0.5*m.x441 + 0.5*m.x443 + m.x444 + 1.5*m.x445 + 2*m.x446 + 2.5*m.x447 + 3*m.x448 + 3.5*m.x449 - 4*m.x450 - 3.5*m.x451 - 3*m.x452 - 2.5*m.x453 - 2*m.x454 - 1.5*m.x455 - m.x456 - 0.5*m.x457 + 0.5*m.x459 + m.x460 + 1.5*m.x461 + 2*m.x462 + 2.5*m.x463 + 3*m.x464 + 3.5*m.x465 - 4*m.x466 - 3.5*m.x467 - 3*m.x468 - 2.5*m.x469 - 2*m.x470 - 1.5*m.x471 - m.x472 - 0.5*m.x473 + 0.5*m.x475 + m.x476 + 1.5*m.x477 + 2*m.x478 + 2.5*m.x479 + 3*m.x480 + 3.5*m.x481 - 3*m.x482 - 2.5*m.x483 - 2*m.x484 - 1.5*m.x485 - m.x486 - 0.5*m.x487 + 0.5*m.x489 + m.x490 + 1.5*m.x491 + 2*m.x492 + 2.5*m.x493 + 3*m.x494 + 3.5*m.x495 + 4*m.x496 + 4.5*m.x497 - 3*m.x498 - 2.5*m.x499 - 2*m.x500 - 1.5*m.x501 - m.x502 - 0.5*m.x503 + 0.5*m.x505 + m.x506 + 1.5*m.x507 + 2*m.x508 + 2.5*m.x509 + 3*m.x510 + 3.5*m.x511 + 4*m.x512 + 4.5*m.x513 - 3*m.x514 - 2.5*m.x515 - 2*m.x516 - 1.5*m.x517 - m.x518 - 0.5*m.x519 + 0.5*m.x521 + m.x522 + 1.5*m.x523 + 2*m.x524 + 2.5*m.x525 + 3*m.x526 + 3.5*m.x527 + 4*m.x528 + 4.5*m.x529 - 3*m.x530 - 2.5*m.x531 - 2*m.x532 - 1.5*m.x533 - m.x534 - 0.5*m.x535 + 0.5*m.x537 + m.x538 + 1.5*m.x539 + 2*m.x540 + 2.5*m.x541 + 3*m.x542 + 3.5*m.x543 + 4*m.x544 + 4.5*m.x545 - 2*m.x546 - 1.5*m.x547 - m.x548 - 0.5*m.x549 + 0.5*m.x551 + m.x552 + 1.5*m.x553 + 2*m.x554 + 2.5*m.x555 + 3*m.x556 + 3.5*m.x557 + 4*m.x558 + 4.5*m.x559 + 5*m.x560 + 5.5*m.x561 - 2*m.x562 - 1.5*m.x563 - m.x564 - 0.5*m.x565 + 0.5*m.x567 + m.x568 + 1.5*m.x569 + 2*m.x570 + 2.5*m.x571 + 3*m.x572 + 3.5*m.x573 + 4*m.x574 + 4.5*m.x575 + 5*m.x576 + 5.5*m.x577 - 2*m.x578 - 1.5*m.x579 - m.x580 - 0.5*m.x581 + 0.5*m.x583 + m.x584 + 1.5*m.x585 + 2*m.x586 + 2.5*m.x587 + 3*m.x588 + 3.5*m.x589 + 4*m.x590 + 4.5*m.x591 + 5*m.x592 + 5.5*m.x593 - m.x594 - 0.5*m.x595 + 0.5*m.x597 + m.x598 + 1.5*m.x599 + 2*m.x600 + 2.5*m.x601 + 3*m.x602 + 
3.5*m.x603 + 4*m.x604 + 4.5*m.x605 + 5*m.x606 + 5.5*m.x607 + 6*m.x608 + 6.5*m.x609 - m.x610 - 0.5*m.x611 + 0.5*m.x613 + m.x614 + 1.5*m.x615 + 2*m.x616 + 2.5*m.x617 + 3*m.x618 + 3.5*m.x619 + 4*m.x620 + 4.5*m.x621 + 5*m.x622 + 5.5*m.x623 + 6*m.x624 + 6.5*m.x625 + 0.5*m.x627 + m.x628 + 1.5*m.x629 + 2*m.x630 + 2.5*m.x631 + 3*m.x632 + 3.5*m.x633 + 4*m.x634 + 4.5*m.x635 + 5*m.x636 + 5.5*m.x637 + 6*m.x638 + 6.5*m.x639 + 7*m.x640 + 7.5*m.x641 , sense=minimize) m.c2 = Constraint(expr= m.x130 + m.x131 + m.x132 + m.x133 + m.x134 + m.x135 + m.x136 + m.x137 + m.x138 + m.x139 + m.x140 + m.x141 + m.x142 + m.x143 + m.x144 + m.x145 <= 16) m.c3 = Constraint(expr= m.x146 + m.x147 + m.x148 + m.x149 + m.x150 + m.x151 + m.x152 + m.x153 + m.x154 + m.x155 + m.x156 + m.x157 + m.x158 + m.x159 + m.x160 + m.x161 + m.x162 + m.x163 + m.x164 + m.x165 + m.x166 + m.x167 + m.x168 + m.x169 + m.x170 + m.x171 + m.x172 + m.x173 + m.x174 + m.x175 + m.x176 + m.x177 <= 16) m.c4 = Constraint(expr= m.x178 + m.x179 + m.x180 + m.x181 + m.x182 + m.x183 + m.x184 + m.x185 + m.x186 + m.x187 + m.x188 + m.x189 + m.x190 + m.x191 + m.x192 + m.x193 + m.x194 + m.x195 + m.x196 + m.x197 + m.x198 + m.x199 + m.x200 + m.x201 + m.x202 + m.x203 + m.x204 + m.x205 + m.x206 + m.x207 + m.x208 + m.x209 + m.x210 + m.x211 + m.x212 + m.x213 + m.x214 + m.x215 + m.x216 + m.x217 + m.x218 + m.x219 + m.x220 + m.x221 + m.x222 + m.x223 + m.x224 + m.x225 <= 16) m.c5 = Constraint(expr= m.x226 + m.x227 + m.x228 + m.x229 + m.x230 + m.x231 + m.x232 + m.x233 + m.x234 + m.x235 + m.x236 + m.x237 + m.x238 + m.x239 + m.x240 + m.x241 + m.x242 + m.x243 + m.x244 + m.x245 + m.x246 + m.x247 + m.x248 + m.x249 + m.x250 + m.x251 + m.x252 + m.x253 + m.x254 + m.x255 + m.x256 + m.x257 + m.x258 + m.x259 + m.x260 + m.x261 + m.x262 + m.x263 + m.x264 + m.x265 + m.x266 + m.x267 + m.x268 + m.x269 + m.x270 + m.x271 + m.x272 + m.x273 + m.x274 + m.x275 + m.x276 + m.x277 + m.x278 + m.x279 + m.x280 + m.x281 + m.x282 + m.x283 + m.x284 + m.x285 + m.x286 + m.x287 + m.x288 + 
m.x289 <= 16) m.c6 = Constraint(expr= m.x290 + m.x291 + m.x292 + m.x293 + m.x294 + m.x295 + m.x296 + m.x297 + m.x298 + m.x299 + m.x300 + m.x301 + m.x302 + m.x303 + m.x304 + m.x305 + m.x306 + m.x307 + m.x308 + m.x309 + m.x310 + m.x311 + m.x312 + m.x313 + m.x314 + m.x315 + m.x316 + m.x317 + m.x318 + m.x319 + m.x320 + m.x321 + m.x322 + m.x323 + m.x324 + m.x325 + m.x326 + m.x327 + m.x328 + m.x329 + m.x330 + m.x331 + m.x332 + m.x333 + m.x334 + m.x335 + m.x336 + m.x337 + m.x338 + m.x339 + m.x340 + m.x341 + m.x342 + m.x343 + m.x344 + m.x345 + m.x346 + m.x347 + m.x348 + m.x349 + m.x350 + m.x351 + m.x352 + m.x353 <= 16) m.c7 = Constraint(expr= m.x354 + m.x355 + m.x356 + m.x357 + m.x358 + m.x359 + m.x360 + m.x361 + m.x362 + m.x363 + m.x364 + m.x365 + m.x366 + m.x367 + m.x368 + m.x369 + m.x370 + m.x371 + m.x372 + m.x373 + m.x374 + m.x375 + m.x376 + m.x377 + m.x378 + m.x379 + m.x380 + m.x381 + m.x382 + m.x383 + m.x384 + m.x385 + m.x386 + m.x387 + m.x388 + m.x389 + m.x390 + m.x391 + m.x392 + m.x393 + m.x394 + m.x395 + m.x396 + m.x397 + m.x398 + m.x399 + m.x400 + m.x401 + m.x402 + m.x403 + m.x404 + m.x405 + m.x406 + m.x407 + m.x408 + m.x409 + m.x410 + m.x411 + m.x412 + m.x413 + m.x414 + m.x415 + m.x416 + m.x417 <= 16) m.c8 = Constraint(expr= m.x418 + m.x419 + m.x420 + m.x421 + m.x422 + m.x423 + m.x424 + m.x425 + m.x426 + m.x427 + m.x428 + m.x429 + m.x430 + m.x431 + m.x432 + m.x433 + m.x434 + m.x435 + m.x436 + m.x437 + m.x438 + m.x439 + m.x440 + m.x441 + m.x442 + m.x443 + m.x444 + m.x445 + m.x446 + m.x447 + m.x448 + m.x449 + m.x450 + m.x451 + m.x452 + m.x453 + m.x454 + m.x455 + m.x456 + m.x457 + m.x458 + m.x459 + m.x460 + m.x461 + m.x462 + m.x463 + m.x464 + m.x465 + m.x466 + m.x467 + m.x468 + m.x469 + m.x470 + m.x471 + m.x472 + m.x473 + m.x474 + m.x475 + m.x476 + m.x477 + m.x478 + m.x479 + m.x480 + m.x481 <= 16) m.c9 = Constraint(expr= m.x482 + m.x483 + m.x484 + m.x485 + m.x486 + m.x487 + m.x488 + m.x489 + m.x490 + m.x491 + m.x492 + m.x493 + m.x494 + m.x495 + m.x496 + m.x497 + 
m.x498 + m.x499 + m.x500 + m.x501 + m.x502 + m.x503 + m.x504 + m.x505 + m.x506 + m.x507 + m.x508 + m.x509 + m.x510 + m.x511 + m.x512 + m.x513 + m.x514 + m.x515 + m.x516 + m.x517 + m.x518 + m.x519 + m.x520 + m.x521 + m.x522 + m.x523 + m.x524 + m.x525 + m.x526 + m.x527 + m.x528 + m.x529 + m.x530 + m.x531 + m.x532 + m.x533 + m.x534 + m.x535 + m.x536 + m.x537 + m.x538 + m.x539 + m.x540 + m.x541 + m.x542 + m.x543 + m.x544 + m.x545 <= 16) m.c10 = Constraint(expr= m.x546 + m.x547 + m.x548 + m.x549 + m.x550 + m.x551 + m.x552 + m.x553 + m.x554 + m.x555 + m.x556 + m.x557 + m.x558 + m.x559 + m.x560 + m.x561 + m.x562 + m.x563 + m.x564 + m.x565 + m.x566 + m.x567 + m.x568 + m.x569 + m.x570 + m.x571 + m.x572 + m.x573 + m.x574 + m.x575 + m.x576 + m.x577 + m.x578 + m.x579 + m.x580 + m.x581 + m.x582 + m.x583 + m.x584 + m.x585 + m.x586 + m.x587 + m.x588 + m.x589 + m.x590 + m.x591 + m.x592 + m.x593 <= 16) m.c11 = Constraint(expr= m.x594 + m.x595 + m.x596 + m.x597 + m.x598 + m.x599 + m.x600 + m.x601 + m.x602 + m.x603 + m.x604 + m.x605 + m.x606 + m.x607 + m.x608 + m.x609 + m.x610 + m.x611 + m.x612 + m.x613 + m.x614 + m.x615 + m.x616 + m.x617 + m.x618 + m.x619 + m.x620 + m.x621 + m.x622 + m.x623 + m.x624 + m.x625 <= 16) m.c12 = Constraint(expr= m.x626 + m.x627 + m.x628 + m.x629 + m.x630 + m.x631 + m.x632 + m.x633 + m.x634 + m.x635 + m.x636 + m.x637 + m.x638 + m.x639 + m.x640 + m.x641 <= 16) m.c13 = Constraint(expr= m.x482 + m.x483 + m.x484 + m.x485 + m.x486 + m.x487 + m.x488 + m.x489 + m.x490 + m.x491 + m.x492 + m.x493 + m.x494 + m.x495 + m.x496 + m.x497 + m.x546 + m.x547 + m.x548 + m.x549 + m.x550 + m.x551 + m.x552 + m.x553 + m.x554 + m.x555 + m.x556 + m.x557 + m.x558 + m.x559 + m.x560 + m.x561 + m.x594 + m.x595 + m.x596 + m.x597 + m.x598 + m.x599 + m.x600 + m.x601 + m.x602 + m.x603 + m.x604 + m.x605 + m.x606 + m.x607 + m.x608 + m.x609 + m.x626 + m.x627 + m.x628 + m.x629 + m.x630 + m.x631 + m.x632 + m.x633 + m.x634 + m.x635 + m.x636 + m.x637 + m.x638 + m.x639 + m.x640 + m.x641 <= 16) 
m.c14 = Constraint(expr= m.x418 + m.x419 + m.x420 + m.x421 + m.x422 + m.x423 + m.x424 + m.x425 + m.x426 + m.x427 + m.x428 + m.x429 + m.x430 + m.x431 + m.x432 + m.x433 + m.x498 + m.x499 + m.x500 + m.x501 + m.x502 + m.x503 + m.x504 + m.x505 + m.x506 + m.x507 + m.x508 + m.x509 + m.x510 + m.x511 + m.x512 + m.x513 + m.x562 + m.x563 + m.x564 + m.x565 + m.x566 + m.x567 + m.x568 + m.x569 + m.x570 + m.x571 + m.x572 + m.x573 + m.x574 + m.x575 + m.x576 + m.x577 + m.x610 + m.x611 + m.x612 + m.x613 + m.x614 + m.x615 + m.x616 + m.x617 + m.x618 + m.x619 + m.x620 + m.x621 + m.x622 + m.x623 + m.x624 + m.x625 <= 16) m.c15 = Constraint(expr= m.x354 + m.x355 + m.x356 + m.x357 + m.x358 + m.x359 + m.x360 + m.x361 + m.x362 + m.x363 + m.x364 + m.x365 + m.x366 + m.x367 + m.x368 + m.x369 + m.x434 + m.x435 + m.x436 + m.x437 + m.x438 + m.x439 + m.x440 + m.x441 + m.x442 + m.x443 + m.x444 + m.x445 + m.x446 + m.x447 + m.x448 + m.x449 + m.x514 + m.x515 + m.x516 + m.x517 + m.x518 + m.x519 + m.x520 + m.x521 + m.x522 + m.x523 + m.x524 + m.x525 + m.x526 + m.x527 + m.x528 + m.x529 + m.x578 + m.x579 + m.x580 + m.x581 + m.x582 + m.x583 + m.x584 + m.x585 + m.x586 + m.x587 + m.x588 + m.x589 + m.x590 + m.x591 + m.x592 + m.x593 <= 16) m.c16 = Constraint(expr= m.x290 + m.x291 + m.x292 + m.x293 + m.x294 + m.x295 + m.x296 + m.x297 + m.x298 + m.x299 + m.x300 + m.x301 + m.x302 + m.x303 + m.x304 + m.x305 + m.x370 + m.x371 + m.x372 + m.x373 + m.x374 + m.x375 + m.x376 + m.x377 + m.x378 + m.x379 + m.x380 + m.x381 + m.x382 + m.x383 + m.x384 + m.x385 + m.x450 + m.x451 + m.x452 + m.x453 + m.x454 + m.x455 + m.x456 + m.x457 + m.x458 + m.x459 + m.x460 + m.x461 + m.x462 + m.x463 + m.x464 + m.x465 + m.x530 + m.x531 + m.x532 + m.x533 + m.x534 + m.x535 + m.x536 + m.x537 + m.x538 + m.x539 + m.x540 + m.x541 + m.x542 + m.x543 + m.x544 + m.x545 <= 16) m.c17 = Constraint(expr= m.x226 + m.x227 + m.x228 + m.x229 + m.x230 + m.x231 + m.x232 + m.x233 + m.x234 + m.x235 + m.x236 + m.x237 + m.x238 + m.x239 + m.x240 + m.x241 + m.x306 + 
m.x307 + m.x308 + m.x309 + m.x310 + m.x311 + m.x312 + m.x313 + m.x314 + m.x315 + m.x316 + m.x317 + m.x318 + m.x319 + m.x320 + m.x321 + m.x386 + m.x387 + m.x388 + m.x389 + m.x390 + m.x391 + m.x392 + m.x393 + m.x394 + m.x395 + m.x396 + m.x397 + m.x398 + m.x399 + m.x400 + m.x401 + m.x466 + m.x467 + m.x468 + m.x469 + m.x470 + m.x471 + m.x472 + m.x473 + m.x474 + m.x475 + m.x476 + m.x477 + m.x478 + m.x479 + m.x480 + m.x481 <= 16) m.c18 = Constraint(expr= m.x178 + m.x179 + m.x180 + m.x181 + m.x182 + m.x183 + m.x184 + m.x185 + m.x186 + m.x187 + m.x188 + m.x189 + m.x190 + m.x191 + m.x192 + m.x193 + m.x242 + m.x243 + m.x244 + m.x245 + m.x246 + m.x247 + m.x248 + m.x249 + m.x250 + m.x251 + m.x252 + m.x253 + m.x254 + m.x255 + m.x256 + m.x257 + m.x322 + m.x323 + m.x324 + m.x325 + m.x326 + m.x327 + m.x328 + m.x329 + m.x330 + m.x331 + m.x332 + m.x333 + m.x334 + m.x335 + m.x336 + m.x337 + m.x402 + m.x403 + m.x404 + m.x405 + m.x406 + m.x407 + m.x408 + m.x409 + m.x410 + m.x411 + m.x412 + m.x413 + m.x414 + m.x415 + m.x416 + m.x417 <= 16) m.c19 = Constraint(expr= m.x146 + m.x147 + m.x148 + m.x149 + m.x150 + m.x151 + m.x152 + m.x153 + m.x154 + m.x155 + m.x156 + m.x157 + m.x158 + m.x159 + m.x160 + m.x161 + m.x194 + m.x195 + m.x196 + m.x197 + m.x198 + m.x199 + m.x200 + m.x201 + m.x202 + m.x203 + m.x204 + m.x205 + m.x206 + m.x207 + m.x208 + m.x209 + m.x258 + m.x259 + m.x260 + m.x261 + m.x262 + m.x263 + m.x264 + m.x265 + m.x266 + m.x267 + m.x268 + m.x269 + m.x270 + m.x271 + m.x272 + m.x273 + m.x338 + m.x339 + m.x340 + m.x341 + m.x342 + m.x343 + m.x344 + m.x345 + m.x346 + m.x347 + m.x348 + m.x349 + m.x350 + m.x351 + m.x352 + m.x353 <= 16) m.c20 = Constraint(expr= m.x130 + m.x131 + m.x132 + m.x133 + m.x134 + m.x135 + m.x136 + m.x137 + m.x138 + m.x139 + m.x140 + m.x141 + m.x142 + m.x143 + m.x144 + m.x145 + m.x162 + m.x163 + m.x164 + m.x165 + m.x166 + m.x167 + m.x168 + m.x169 + m.x170 + m.x171 + m.x172 + m.x173 + m.x174 + m.x175 + m.x176 + m.x177 + m.x210 + m.x211 + m.x212 + m.x213 + m.x214 + 
m.x215 + m.x216 + m.x217 + m.x218 + m.x219 + m.x220 + m.x221 + m.x222 + m.x223 + m.x224 + m.x225 + m.x274 + m.x275 + m.x276 + m.x277 + m.x278 + m.x279 + m.x280 + m.x281 + m.x282 + m.x283 + m.x284 + m.x285 + m.x286 + m.x287 + m.x288 + m.x289 <= 16) m.c21 = Constraint(expr= m.x130 + m.x146 + m.x162 + m.x178 + m.x194 + m.x210 + m.x226 + m.x242 + m.x258 + m.x274 + m.x290 + m.x306 + m.x322 + m.x338 + m.x354 + m.x370 + m.x386 + m.x402 + m.x418 + m.x434 + m.x450 + m.x466 + m.x482 + m.x498 + m.x514 + m.x530 + m.x546 + m.x562 + m.x578 + m.x594 + m.x610 + m.x626 <= 1) m.c22 = Constraint(expr= m.x131 + m.x147 + m.x163 + m.x179 + m.x195 + m.x211 + m.x227 + m.x243 + m.x259 + m.x275 + m.x291 + m.x307 + m.x323 + m.x339 + m.x355 + m.x371 + m.x387 + m.x403 + m.x419 + m.x435 + m.x451 + m.x467 + m.x483 + m.x499 + m.x515 + m.x531 + m.x547 + m.x563 + m.x579 + m.x595 + m.x611 + m.x627 <= 1) m.c23 = Constraint(expr= m.x132 + m.x148 + m.x164 + m.x180 + m.x196 + m.x212 + m.x228 + m.x244 + m.x260 + m.x276 + m.x292 + m.x308 + m.x324 + m.x340 + m.x356 + m.x372 + m.x388 + m.x404 + m.x420 + m.x436 + m.x452 + m.x468 + m.x484 + m.x500 + m.x516 + m.x532 + m.x548 + m.x564 + m.x580 + m.x596 + m.x612 + m.x628 <= 1) m.c24 = Constraint(expr= m.x133 + m.x149 + m.x165 + m.x181 + m.x197 + m.x213 + m.x229 + m.x245 + m.x261 + m.x277 + m.x293 + m.x309 + m.x325 + m.x341 + m.x357 + m.x373 + m.x389 + m.x405 + m.x421 + m.x437 + m.x453 + m.x469 + m.x485 + m.x501 + m.x517 + m.x533 + m.x549 + m.x565 + m.x581 + m.x597 + m.x613 + m.x629 <= 1) m.c25 = Constraint(expr= m.x134 + m.x150 + m.x166 + m.x182 + m.x198 + m.x214 + m.x230 + m.x246 + m.x262 + m.x278 + m.x294 + m.x310 + m.x326 + m.x342 + m.x358 + m.x374 + m.x390 + m.x406 + m.x422 + m.x438 + m.x454 + m.x470 + m.x486 + m.x502 + m.x518 + m.x534 + m.x550 + m.x566 + m.x582 + m.x598 + m.x614 + m.x630 <= 1) m.c26 = Constraint(expr= m.x135 + m.x151 + m.x167 + m.x183 + m.x199 + m.x215 + m.x231 + m.x247 + m.x263 + m.x279 + m.x295 + m.x311 + m.x327 + m.x343 + m.x359 + m.x375 
+ m.x391 + m.x407 + m.x423 + m.x439 + m.x455 + m.x471 + m.x487 + m.x503 + m.x519 + m.x535 + m.x551 + m.x567 + m.x583 + m.x599 + m.x615 + m.x631 <= 1) m.c27 = Constraint(expr= m.x136 + m.x152 + m.x168 + m.x184 + m.x200 + m.x216 + m.x232 + m.x248 + m.x264 + m.x280 + m.x296 + m.x312 + m.x328 + m.x344 + m.x360 + m.x376 + m.x392 + m.x408 + m.x424 + m.x440 + m.x456 + m.x472 + m.x488 + m.x504 + m.x520 + m.x536 + m.x552 + m.x568 + m.x584 + m.x600 + m.x616 + m.x632 <= 1) m.c28 = Constraint(expr= m.x137 + m.x153 + m.x169 + m.x185 + m.x201 + m.x217 + m.x233 + m.x249 + m.x265 + m.x281 + m.x297 + m.x313 + m.x329 + m.x345 + m.x361 + m.x377 + m.x393 + m.x409 + m.x425 + m.x441 + m.x457 + m.x473 + m.x489 + m.x505 + m.x521 + m.x537 + m.x553 + m.x569 + m.x585 + m.x601 + m.x617 + m.x633 <= 1) m.c29 = Constraint(expr= m.x138 + m.x154 + m.x170 + m.x186 + m.x202 + m.x218 + m.x234 + m.x250 + m.x266 + m.x282 + m.x298 + m.x314 + m.x330 + m.x346 + m.x362 + m.x378 + m.x394 + m.x410 + m.x426 + m.x442 + m.x458 + m.x474 + m.x490 + m.x506 + m.x522 + m.x538 + m.x554 + m.x570 + m.x586 + m.x602 + m.x618 + m.x634 <= 1) m.c30 = Constraint(expr= m.x139 + m.x155 + m.x171 + m.x187 + m.x203 + m.x219 + m.x235 + m.x251 + m.x267 + m.x283 + m.x299 + m.x315 + m.x331 + m.x347 + m.x363 + m.x379 + m.x395 + m.x411 + m.x427 + m.x443 + m.x459 + m.x475 + m.x491 + m.x507 + m.x523 + m.x539 + m.x555 + m.x571 + m.x587 + m.x603 + m.x619 + m.x635 <= 1) m.c31 = Constraint(expr= m.x140 + m.x156 + m.x172 + m.x188 + m.x204 + m.x220 + m.x236 + m.x252 + m.x268 + m.x284 + m.x300 + m.x316 + m.x332 + m.x348 + m.x364 + m.x380 + m.x396 + m.x412 + m.x428 + m.x444 + m.x460 + m.x476 + m.x492 + m.x508 + m.x524 + m.x540 + m.x556 + m.x572 + m.x588 + m.x604 + m.x620 + m.x636 <= 1) m.c32 = Constraint(expr= m.x141 + m.x157 + m.x173 + m.x189 + m.x205 + m.x221 + m.x237 + m.x253 + m.x269 + m.x285 + m.x301 + m.x317 + m.x333 + m.x349 + m.x365 + m.x381 + m.x397 + m.x413 + m.x429 + m.x445 + m.x461 + m.x477 + m.x493 + m.x509 + m.x525 + m.x541 + 
m.x557 + m.x573 + m.x589 + m.x605 + m.x621 + m.x637 <= 1) m.c33 = Constraint(expr= m.x142 + m.x158 + m.x174 + m.x190 + m.x206 + m.x222 + m.x238 + m.x254 + m.x270 + m.x286 + m.x302 + m.x318 + m.x334 + m.x350 + m.x366 + m.x382 + m.x398 + m.x414 + m.x430 + m.x446 + m.x462 + m.x478 + m.x494 + m.x510 + m.x526 + m.x542 + m.x558 + m.x574 + m.x590 + m.x606 + m.x622 + m.x638 <= 1) m.c34 = Constraint(expr= m.x143 + m.x159 + m.x175 + m.x191 + m.x207 + m.x223 + m.x239 + m.x255 + m.x271 + m.x287 + m.x303 + m.x319 + m.x335 + m.x351 + m.x367 + m.x383 + m.x399 + m.x415 + m.x431 + m.x447 + m.x463 + m.x479 + m.x495 + m.x511 + m.x527 + m.x543 + m.x559 + m.x575 + m.x591 + m.x607 + m.x623 + m.x639 <= 1) m.c35 = Constraint(expr= m.x144 + m.x160 + m.x176 + m.x192 + m.x208 + m.x224 + m.x240 + m.x256 + m.x272 + m.x288 + m.x304 + m.x320 + m.x336 + m.x352 + m.x368 + m.x384 + m.x400 + m.x416 + m.x432 + m.x448 + m.x464 + m.x480 + m.x496 + m.x512 + m.x528 + m.x544 + m.x560 + m.x576 + m.x592 + m.x608 + m.x624 + m.x640 <= 1) m.c36 = Constraint(expr= m.x145 + m.x161 + m.x177 + m.x193 + m.x209 + m.x225 + m.x241 + m.x257 + m.x273 + m.x289 + m.x305 + m.x321 + m.x337 + m.x353 + m.x369 + m.x385 + m.x401 + m.x417 + m.x433 + m.x449 + m.x465 + m.x481 + m.x497 + m.x513 + m.x529 + m.x545 + m.x561 + m.x577 + m.x593 + m.x609 + m.x625 + m.x641 <= 1) m.c37 = Constraint(expr= 0.95*m.x130 + 0.85*m.x146 + 0.85*m.x162 + 0.75*m.x178 + 0.75*m.x194 + 0.75*m.x210 + 0.65*m.x226 + 0.65*m.x242 + 0.65*m.x258 + 0.65*m.x274 + 0.55*m.x290 + 0.55*m.x306 + 0.55*m.x322 + 0.55*m.x338 + 0.45*m.x354 + 0.45*m.x370 + 0.45*m.x386 + 0.45*m.x402 + 0.35*m.x418 + 0.35*m.x434 + 0.35*m.x450 + 0.35*m.x466 + 0.25*m.x482 + 0.25*m.x498 + 0.25*m.x514 + 0.25*m.x530 + 0.15*m.x546 + 0.15*m.x562 + 0.15*m.x578 + 0.05*m.x594 + 0.05*m.x610 - 0.05*m.x626 <= 0) m.c38 = Constraint(expr= 0.9*m.x131 + 0.8*m.x147 + 0.8*m.x163 + 0.7*m.x179 + 0.7*m.x195 + 0.7*m.x211 + 0.6*m.x227 + 0.6*m.x243 + 0.6*m.x259 + 0.6*m.x275 + 0.5*m.x291 + 0.5*m.x307 + 0.5*m.x323 + 
0.5*m.x339 + 0.4*m.x355 + 0.4*m.x371 + 0.4*m.x387 + 0.4*m.x403 + 0.3*m.x419 + 0.3*m.x435 + 0.3*m.x451 + 0.3*m.x467 + 0.2*m.x483 + 0.2*m.x499 + 0.2*m.x515 + 0.2*m.x531 + 0.0999999999999999*m.x547 + 0.0999999999999999*m.x563 + 0.0999999999999999*m.x579 - 0.1*m.x627 <= 0) m.c39 = Constraint(expr= 0.85*m.x132 + 0.75*m.x148 + 0.75*m.x164 + 0.65*m.x180 + 0.65*m.x196 + 0.65*m.x212 + 0.55*m.x228 + 0.55*m.x244 + 0.55*m.x260 + 0.55*m.x276 + 0.45*m.x292 + 0.45*m.x308 + 0.45*m.x324 + 0.45*m.x340 + 0.35*m.x356 + 0.35*m.x372 + 0.35*m.x388 + 0.35*m.x404 + 0.25*m.x420 + 0.25*m.x436 + 0.25*m.x452 + 0.25*m.x468 + 0.15*m.x484 + 0.15*m.x500 + 0.15*m.x516 + 0.15*m.x532 + 0.05*m.x548 + 0.05*m.x564 + 0.05*m.x580 - 0.0499999999999998*m.x596 - 0.0499999999999998*m.x612 - 0.15*m.x628 <= 0) m.c40 = Constraint(expr= 0.8*m.x133 + 0.7*m.x149 + 0.7*m.x165 + 0.6*m.x181 + 0.6*m.x197 + 0.6*m.x213 + 0.5*m.x229 + 0.5*m.x245 + 0.5*m.x261 + 0.5*m.x277 + 0.4*m.x293 + 0.4*m.x309 + 0.4*m.x325 + 0.4*m.x341 + 0.3*m.x357 + 0.3*m.x373 + 0.3*m.x389 + 0.3*m.x405 + 0.2*m.x421 + 0.2*m.x437 + 0.2*m.x453 + 0.2*m.x469 + 0.1*m.x485 + 0.1*m.x501 + 0.1*m.x517 + 0.1*m.x533 - 0.0999999999999999*m.x597 - 0.0999999999999999*m.x613 - 0.2*m.x629 <= 0) m.c41 = Constraint(expr= 0.75*m.x134 + 0.65*m.x150 + 0.65*m.x166 + 0.55*m.x182 + 0.55*m.x198 + 0.55*m.x214 + 0.45*m.x230 + 0.45*m.x246 + 0.45*m.x262 + 0.45*m.x278 + 0.35*m.x294 + 0.35*m.x310 + 0.35*m.x326 + 0.35*m.x342 + 0.25*m.x358 + 0.25*m.x374 + 0.25*m.x390 + 0.25*m.x406 + 0.15*m.x422 + 0.15*m.x438 + 0.15*m.x454 + 0.15*m.x470 + 0.05*m.x486 + 0.05*m.x502 + 0.05*m.x518 + 0.05*m.x534 - 0.05*m.x550 - 0.05*m.x566 - 0.05*m.x582 - 0.15*m.x598 - 0.15*m.x614 - 0.25*m.x630 <= 0) m.c42 = Constraint(expr= 0.7*m.x135 + 0.6*m.x151 + 0.6*m.x167 + 0.5*m.x183 + 0.5*m.x199 + 0.5*m.x215 + 0.4*m.x231 + 0.4*m.x247 + 0.4*m.x263 + 0.4*m.x279 + 0.3*m.x295 + 0.3*m.x311 + 0.3*m.x327 + 0.3*m.x343 + 0.2*m.x359 + 0.2*m.x375 + 0.2*m.x391 + 0.2*m.x407 + 0.0999999999999999*m.x423 + 
0.0999999999999999*m.x439 + 0.0999999999999999*m.x455 + 0.0999999999999999*m.x471 - 0.1*m.x551 - 0.1*m.x567 - 0.1*m.x583 - 0.2*m.x599 - 0.2*m.x615 - 0.3*m.x631 <= 0) m.c43 = Constraint(expr= 0.65*m.x136 + 0.55*m.x152 + 0.55*m.x168 + 0.45*m.x184 + 0.45*m.x200 + 0.45*m.x216 + 0.35*m.x232 + 0.35*m.x248 + 0.35*m.x264 + 0.35*m.x280 + 0.25*m.x296 + 0.25*m.x312 + 0.25*m.x328 + 0.25*m.x344 + 0.15*m.x360 + 0.15*m.x376 + 0.15*m.x392 + 0.15*m.x408 + 0.0499999999999998*m.x424 + 0.0499999999999998*m.x440 + 0.0499999999999998*m.x456 + 0.0499999999999998*m.x472 - 0.05*m.x488 - 0.05*m.x504 - 0.05*m.x520 - 0.05*m.x536 - 0.15*m.x552 - 0.15*m.x568 - 0.15*m.x584 - 0.25*m.x600 - 0.25*m.x616 - 0.35*m.x632 <= 0) m.c44 = Constraint(expr= 0.6*m.x137 + 0.5*m.x153 + 0.5*m.x169 + 0.4*m.x185 + 0.4*m.x201 + 0.4*m.x217 + 0.3*m.x233 + 0.3*m.x249 + 0.3*m.x265 + 0.3*m.x281 + 0.2*m.x297 + 0.2*m.x313 + 0.2*m.x329 + 0.2*m.x345 + 0.1*m.x361 + 0.1*m.x377 + 0.1*m.x393 + 0.1*m.x409 - 0.0999999999999999*m.x489 - 0.0999999999999999*m.x505 - 0.0999999999999999*m.x521 - 0.0999999999999999*m.x537 - 0.2*m.x553 - 0.2*m.x569 - 0.2*m.x585 - 0.3*m.x601 - 0.3*m.x617 - 0.4*m.x633 <= 0) m.c45 = Constraint(expr= 0.55*m.x138 + 0.45*m.x154 + 0.45*m.x170 + 0.35*m.x186 + 0.35*m.x202 + 0.35*m.x218 + 0.25*m.x234 + 0.25*m.x250 + 0.25*m.x266 + 0.25*m.x282 + 0.15*m.x298 + 0.15*m.x314 + 0.15*m.x330 + 0.15*m.x346 + 0.05*m.x362 + 0.05*m.x378 + 0.05*m.x394 + 0.05*m.x410 - 0.05*m.x426 - 0.05*m.x442 - 0.05*m.x458 - 0.05*m.x474 - 0.15*m.x490 - 0.15*m.x506 - 0.15*m.x522 - 0.15*m.x538 - 0.25*m.x554 - 0.25*m.x570 - 0.25*m.x586 - 0.35*m.x602 - 0.35*m.x618 - 0.45*m.x634 <= 0) m.c46 = Constraint(expr= 0.5*m.x139 + 0.4*m.x155 + 0.4*m.x171 + 0.3*m.x187 + 0.3*m.x203 + 0.3*m.x219 + 0.2*m.x235 + 0.2*m.x251 + 0.2*m.x267 + 0.2*m.x283 + 0.1*m.x299 + 0.1*m.x315 + 0.1*m.x331 + 0.1*m.x347 - 0.1*m.x427 - 0.1*m.x443 - 0.1*m.x459 - 0.1*m.x475 - 0.2*m.x491 - 0.2*m.x507 - 0.2*m.x523 - 0.2*m.x539 - 0.3*m.x555 - 0.3*m.x571 - 0.3*m.x587 - 0.4*m.x603 - 
0.4*m.x619 - 0.5*m.x635 <= 0) m.c47 = Constraint(expr= 0.45*m.x140 + 0.35*m.x156 + 0.35*m.x172 + 0.25*m.x188 + 0.25*m.x204 + 0.25*m.x220 + 0.15*m.x236 + 0.15*m.x252 + 0.15*m.x268 + 0.15*m.x284 + 0.05*m.x300 + 0.05*m.x316 + 0.05*m.x332 + 0.05*m.x348 - 0.05*m.x364 - 0.05*m.x380 - 0.05*m.x396 - 0.05*m.x412 - 0.15*m.x428 - 0.15*m.x444 - 0.15*m.x460 - 0.15*m.x476 - 0.25*m.x492 - 0.25*m.x508 - 0.25*m.x524 - 0.25*m.x540 - 0.35*m.x556 - 0.35*m.x572 - 0.35*m.x588 - 0.45*m.x604 - 0.45*m.x620 - 0.55*m.x636 <= 0) m.c48 = Constraint(expr= 0.4*m.x141 + 0.3*m.x157 + 0.3*m.x173 + 0.2*m.x189 + 0.2*m.x205 + 0.2*m.x221 + 0.0999999999999999*m.x237 + 0.0999999999999999*m.x253 + 0.0999999999999999*m.x269 + 0.0999999999999999*m.x285 - 0.1*m.x365 - 0.1*m.x381 - 0.1*m.x397 - 0.1*m.x413 - 0.2*m.x429 - 0.2*m.x445 - 0.2*m.x461 - 0.2*m.x477 - 0.3*m.x493 - 0.3*m.x509 - 0.3*m.x525 - 0.3*m.x541 - 0.4*m.x557 - 0.4*m.x573 - 0.4*m.x589 - 0.5*m.x605 - 0.5*m.x621 - 0.6*m.x637 <= 0) m.c49 = Constraint(expr= 0.35*m.x142 + 0.25*m.x158 + 0.25*m.x174 + 0.15*m.x190 + 0.15*m.x206 + 0.15*m.x222 + 0.05*m.x238 + 0.05*m.x254 + 0.05*m.x270 + 0.05*m.x286 - 0.0499999999999998*m.x302 - 0.0499999999999998*m.x318 - 0.0499999999999998*m.x334 - 0.0499999999999998*m.x350 - 0.15*m.x366 - 0.15*m.x382 - 0.15*m.x398 - 0.15*m.x414 - 0.25*m.x430 - 0.25*m.x446 - 0.25*m.x462 - 0.25*m.x478 - 0.35*m.x494 - 0.35*m.x510 - 0.35*m.x526 - 0.35*m.x542 - 0.45*m.x558 - 0.45*m.x574 - 0.45*m.x590 - 0.55*m.x606 - 0.55*m.x622 - 0.65*m.x638 <= 0) m.c50 = Constraint(expr= 0.3*m.x143 + 0.2*m.x159 + 0.2*m.x175 + 0.1*m.x191 + 0.1*m.x207 + 0.1*m.x223 - 0.0999999999999999*m.x303 - 0.0999999999999999*m.x319 - 0.0999999999999999*m.x335 - 0.0999999999999999*m.x351 - 0.2*m.x367 - 0.2*m.x383 - 0.2*m.x399 - 0.2*m.x415 - 0.3*m.x431 - 0.3*m.x447 - 0.3*m.x463 - 0.3*m.x479 - 0.4*m.x495 - 0.4*m.x511 - 0.4*m.x527 - 0.4*m.x543 - 0.5*m.x559 - 0.5*m.x575 - 0.5*m.x591 - 0.6*m.x607 - 0.6*m.x623 - 0.7*m.x639 <= 0) m.c51 = Constraint(expr= 0.25*m.x144 + 0.15*m.x160 + 
0.15*m.x176 + 0.05*m.x192 + 0.05*m.x208 + 0.05*m.x224 - 0.05*m.x240 - 0.05*m.x256 - 0.05*m.x272 - 0.05*m.x288 - 0.15*m.x304 - 0.15*m.x320 - 0.15*m.x336 - 0.15*m.x352 - 0.25*m.x368 - 0.25*m.x384 - 0.25*m.x400 - 0.25*m.x416 - 0.35*m.x432 - 0.35*m.x448 - 0.35*m.x464 - 0.35*m.x480 - 0.45*m.x496 - 0.45*m.x512 - 0.45*m.x528 - 0.45*m.x544 - 0.55*m.x560 - 0.55*m.x576 - 0.55*m.x592 - 0.65*m.x608 - 0.65*m.x624 - 0.75*m.x640 <= 0) m.c52 = Constraint(expr= 0.2*m.x145 + 0.0999999999999999*m.x161 + 0.0999999999999999*m.x177 - 0.1*m.x241 - 0.1*m.x257 - 0.1*m.x273 - 0.1*m.x289 - 0.2*m.x305 - 0.2*m.x321 - 0.2*m.x337 - 0.2*m.x353 - 0.3*m.x369 - 0.3*m.x385 - 0.3*m.x401 - 0.3*m.x417 - 0.4*m.x433 - 0.4*m.x449 - 0.4*m.x465 - 0.4*m.x481 - 0.5*m.x497 - 0.5*m.x513 - 0.5*m.x529 - 0.5*m.x545 - 0.6*m.x561 - 0.6*m.x577 - 0.6*m.x593 - 0.7*m.x609 - 0.7*m.x625 - 0.8*m.x641 <= 0) m.c53 = Constraint(expr= m.x2 + m.x3 + m.x4 + m.x5 + m.x6 + m.x7 + m.x8 + m.x9 + m.x10 + m.x11 + m.x12 + m.x13 + m.x14 + m.x15 + m.x16 + m.x17 == 1) m.c54 = Constraint(expr= m.x18 + m.x19 + m.x20 + m.x21 + m.x22 + m.x23 + m.x24 + m.x25 + m.x26 + m.x27 + m.x28 + m.x29 + m.x30 + m.x31 + m.x32 + m.x33 == 1) m.c55 = Constraint(expr= m.x34 + m.x35 + m.x36 + m.x37 + m.x38 + m.x39 + m.x40 + m.x41 + m.x42 + m.x43 + m.x44 + m.x45 + m.x46 + m.x47 + m.x48 + m.x49 == 1) m.c56 = Constraint(expr= m.x50 + m.x51 + m.x52 + m.x53 + m.x54 + m.x55 + m.x56 + m.x57 + m.x58 + m.x59 + m.x60 + m.x61 + m.x62 + m.x63 + m.x64 + m.x65 == 1) m.c57 = Constraint(expr= m.x66 + m.x67 + m.x68 + m.x69 + m.x70 + m.x71 + m.x72 + m.x73 + m.x74 + m.x75 + m.x76 + m.x77 + m.x78 + m.x79 + m.x80 + m.x81 == 1) m.c58 = Constraint(expr= m.x82 + m.x83 + m.x84 + m.x85 + m.x86 + m.x87 + m.x88 + m.x89 + m.x90 + m.x91 + m.x92 + m.x93 + m.x94 + m.x95 + m.x96 + m.x97 == 1) m.c59 = Constraint(expr= m.x98 + m.x99 + m.x100 + m.x101 + m.x102 + m.x103 + m.x104 + m.x105 + m.x106 + m.x107 + m.x108 + m.x109 + m.x110 + m.x111 + m.x112 + m.x113 == 1) m.c60 = Constraint(expr= m.x114 + 
m.x115 + m.x116 + m.x117 + m.x118 + m.x119 + m.x120 + m.x121 + m.x122 + m.x123 + m.x124 + m.x125 + m.x126 + m.x127 + m.x128 + m.x129 == 1) m.c61 = Constraint(expr=-m.x114*m.x642 + m.x130 == 0) m.c62 = Constraint(expr=-m.x115*m.x642 + m.x131 == 0) m.c63 = Constraint(expr=-m.x116*m.x642 + m.x132 == 0) m.c64 = Constraint(expr=-m.x117*m.x642 + m.x133 == 0) m.c65 = Constraint(expr=-m.x118*m.x642 + m.x134 == 0) m.c66 = Constraint(expr=-m.x119*m.x642 + m.x135 == 0) m.c67 = Constraint(expr=-m.x120*m.x642 + m.x136 == 0) m.c68 = Constraint(expr=-m.x121*m.x642 + m.x137 == 0) m.c69 = Constraint(expr=-m.x122*m.x642 + m.x138 == 0) m.c70 = Constraint(expr=-m.x123*m.x642 + m.x139 == 0) m.c71 = Constraint(expr=-m.x124*m.x642 + m.x140 == 0) m.c72 = Constraint(expr=-m.x125*m.x642 + m.x141 == 0) m.c73 = Constraint(expr=-m.x126*m.x642 + m.x142 == 0) m.c74 = Constraint(expr=-m.x127*m.x642 + m.x143 == 0) m.c75 = Constraint(expr=-m.x128*m.x642 + m.x144 == 0) m.c76 = Constraint(expr=-m.x129*m.x642 + m.x145 == 0) m.c77 = Constraint(expr=-m.x98*m.x643 + m.x146 == 0) m.c78 = Constraint(expr=-m.x99*m.x643 + m.x147 == 0) m.c79 = Constraint(expr=-m.x100*m.x643 + m.x148 == 0) m.c80 = Constraint(expr=-m.x101*m.x643 + m.x149 == 0) m.c81 = Constraint(expr=-m.x102*m.x643 + m.x150 == 0) m.c82 = Constraint(expr=-m.x103*m.x643 + m.x151 == 0) m.c83 = Constraint(expr=-m.x104*m.x643 + m.x152 == 0) m.c84 = Constraint(expr=-m.x105*m.x643 + m.x153 == 0) m.c85 = Constraint(expr=-m.x106*m.x643 + m.x154 == 0) m.c86 = Constraint(expr=-m.x107*m.x643 + m.x155 == 0) m.c87 = Constraint(expr=-m.x108*m.x643 + m.x156 == 0) m.c88 = Constraint(expr=-m.x109*m.x643 + m.x157 == 0) m.c89 = Constraint(expr=-m.x110*m.x643 + m.x158 == 0) m.c90 = Constraint(expr=-m.x111*m.x643 + m.x159 == 0) m.c91 = Constraint(expr=-m.x112*m.x643 + m.x160 == 0) m.c92 = Constraint(expr=-m.x113*m.x643 + m.x161 == 0) m.c93 = Constraint(expr=-m.x114*m.x644 + m.x162 == 0) m.c94 = Constraint(expr=-m.x115*m.x644 + m.x163 == 0) m.c95 = 
m.c95 = Constraint(expr=-m.x116*m.x644 + m.x164 == 0)

# c96..c572 follow a completely regular generated pattern: for each
# "product" variable x_g (g = 644..673, group index G = g-644) there is a
# run of 16 bilinear constraints
#     -x_i * x_g + x_t == 0        (i.e. x_t = x_i * x_g)
# where i counts up from a per-group base index and t counts up from
# 162 + 16*G.  Constraint numbering is sequential starting at c93 for the
# x644 group (c93-c95 are defined above / earlier in the file, so they are
# skipped here).  The explicit per-group base indices below were read off
# the original generated statements.
_BASES = [114, 82, 98, 114, 66, 82, 98, 114, 50, 66, 82, 98,
          34, 50, 66, 82, 18, 34, 50, 66, 2, 18, 34, 50,
          2, 18, 34, 2, 18, 2]
for _G, _base in enumerate(_BASES):
    _g = 644 + _G
    for _k in range(16):
        _c = 93 + 16 * _G + _k
        if _c <= 95:
            # c93-c95 already exist; do not redefine them.
            continue
        _expr = (-getattr(m, 'x%d' % (_base + _k)) * getattr(m, 'x%d' % _g)
                 + getattr(m, 'x%d' % (162 + 16 * _G + _k)) == 0)
        setattr(m, 'c%d' % _c, Constraint(expr=_expr))
# NLP written by GAMS Convert at 04/21/18 13:53:10 # # Equation counts # Total E G L N X C B # 572 521 0 51 0 0 0 0 # # Variable counts # x b i s1s s2s sc si # Total cont binary integer sos1 sos2 scont sint # 673 673 0 0 0 0 0 0 # FX 0 0 0 0 0 0 0 0 # # Nonzero counts # Total const NL DLL # 4171 3147 1024 0 # # Reformulation has removed 1 variable and 1 equation from pyomo.environ import * model = m = ConcreteModel() m.x2 = Var(within=Reals,bounds=(0,1),initialize=0) m.x3 = Var(within=Reals,bounds=(0,1),initialize=0) m.x4 = Var(within=Reals,bounds=(0,1),initialize=0) m.x5 = Var(within=Reals,bounds=(0,1),initialize=0) m.x6 = Var(within=Reals,bounds=(0,1),initialize=0) m.x7 = Var(within=Reals,bounds=(0,1),initialize=0) m.x8 = Var(within=Reals,bounds=(0,1),initialize=0) m.x9 = Var(within=Reals,bounds=(0,1),initialize=0) m.x10 = Var(within=Reals,bounds=(0,1),initialize=0) m.x11 = Var(within=Reals,bounds=(0,1),initialize=0) m.x12 = Var(within=Reals,bounds=(0,1),initialize=0) m.x13 = Var(within=Reals,bounds=(0,1),initialize=0) m.x14 = Var(within=Reals,bounds=(0,1),initialize=0) m.x15 = Var(within=Reals,bounds=(0,1),initialize=0) m.x16 = Var(within=Reals,bounds=(0,1),initialize=0) m.x17 = Var(within=Reals,bounds=(0,1),initialize=0) m.x18 = Var(within=Reals,bounds=(0,1),initialize=0) m.x19 = Var(within=Reals,bounds=(0,1),initialize=0) m.x20 = Var(within=Reals,bounds=(0,1),initialize=0) m.x21 = Var(within=Reals,bounds=(0,1),initialize=0) m.x22 = Var(within=Reals,bounds=(0,1),initialize=0) m.x23 = Var(within=Reals,bounds=(0,1),initialize=0) m.x24 = Var(within=Reals,bounds=(0,1),initialize=0) m.x25 = Var(within=Reals,bounds=(0,1),initialize=0) m.x26 = Var(within=Reals,bounds=(0,1),initialize=0) m.x27 = Var(within=Reals,bounds=(0,1),initialize=0) m.x28 = Var(within=Reals,bounds=(0,1),initialize=0) m.x29 = Var(within=Reals,bounds=(0,1),initialize=0) m.x30 = Var(within=Reals,bounds=(0,1),initialize=0) m.x31 = Var(within=Reals,bounds=(0,1),initialize=0) m.x32 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x33 = Var(within=Reals,bounds=(0,1),initialize=0) m.x34 = Var(within=Reals,bounds=(0,1),initialize=0) m.x35 = Var(within=Reals,bounds=(0,1),initialize=0) m.x36 = Var(within=Reals,bounds=(0,1),initialize=0) m.x37 = Var(within=Reals,bounds=(0,1),initialize=0) m.x38 = Var(within=Reals,bounds=(0,1),initialize=0) m.x39 = Var(within=Reals,bounds=(0,1),initialize=0) m.x40 = Var(within=Reals,bounds=(0,1),initialize=0) m.x41 = Var(within=Reals,bounds=(0,1),initialize=0) m.x42 = Var(within=Reals,bounds=(0,1),initialize=0) m.x43 = Var(within=Reals,bounds=(0,1),initialize=0) m.x44 = Var(within=Reals,bounds=(0,1),initialize=0) m.x45 = Var(within=Reals,bounds=(0,1),initialize=0) m.x46 = Var(within=Reals,bounds=(0,1),initialize=0) m.x47 = Var(within=Reals,bounds=(0,1),initialize=0) m.x48 = Var(within=Reals,bounds=(0,1),initialize=0) m.x49 = Var(within=Reals,bounds=(0,1),initialize=0) m.x50 = Var(within=Reals,bounds=(0,1),initialize=0) m.x51 = Var(within=Reals,bounds=(0,1),initialize=0) m.x52 = Var(within=Reals,bounds=(0,1),initialize=0) m.x53 = Var(within=Reals,bounds=(0,1),initialize=0) m.x54 = Var(within=Reals,bounds=(0,1),initialize=0) m.x55 = Var(within=Reals,bounds=(0,1),initialize=0) m.x56 = Var(within=Reals,bounds=(0,1),initialize=0) m.x57 = Var(within=Reals,bounds=(0,1),initialize=0) m.x58 = Var(within=Reals,bounds=(0,1),initialize=0) m.x59 = Var(within=Reals,bounds=(0,1),initialize=0) m.x60 = Var(within=Reals,bounds=(0,1),initialize=0) m.x61 = Var(within=Reals,bounds=(0,1),initialize=0) m.x62 = Var(within=Reals,bounds=(0,1),initialize=0) m.x63 = Var(within=Reals,bounds=(0,1),initialize=0) m.x64 = Var(within=Reals,bounds=(0,1),initialize=0) m.x65 = Var(within=Reals,bounds=(0,1),initialize=0) m.x66 = Var(within=Reals,bounds=(0,1),initialize=0) m.x67 = Var(within=Reals,bounds=(0,1),initialize=0) m.x68 = Var(within=Reals,bounds=(0,1),initialize=0) m.x69 = Var(within=Reals,bounds=(0,1),initialize=0) m.x70 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x71 = Var(within=Reals,bounds=(0,1),initialize=0) m.x72 = Var(within=Reals,bounds=(0,1),initialize=0) m.x73 = Var(within=Reals,bounds=(0,1),initialize=0) m.x74 = Var(within=Reals,bounds=(0,1),initialize=0) m.x75 = Var(within=Reals,bounds=(0,1),initialize=0) m.x76 = Var(within=Reals,bounds=(0,1),initialize=0) m.x77 = Var(within=Reals,bounds=(0,1),initialize=0) m.x78 = Var(within=Reals,bounds=(0,1),initialize=0) m.x79 = Var(within=Reals,bounds=(0,1),initialize=0) m.x80 = Var(within=Reals,bounds=(0,1),initialize=0) m.x81 = Var(within=Reals,bounds=(0,1),initialize=0) m.x82 = Var(within=Reals,bounds=(0,1),initialize=0) m.x83 = Var(within=Reals,bounds=(0,1),initialize=0) m.x84 = Var(within=Reals,bounds=(0,1),initialize=0) m.x85 = Var(within=Reals,bounds=(0,1),initialize=0) m.x86 = Var(within=Reals,bounds=(0,1),initialize=0) m.x87 = Var(within=Reals,bounds=(0,1),initialize=0) m.x88 = Var(within=Reals,bounds=(0,1),initialize=0) m.x89 = Var(within=Reals,bounds=(0,1),initialize=0) m.x90 = Var(within=Reals,bounds=(0,1),initialize=0) m.x91 = Var(within=Reals,bounds=(0,1),initialize=0) m.x92 = Var(within=Reals,bounds=(0,1),initialize=0) m.x93 = Var(within=Reals,bounds=(0,1),initialize=0) m.x94 = Var(within=Reals,bounds=(0,1),initialize=0) m.x95 = Var(within=Reals,bounds=(0,1),initialize=0) m.x96 = Var(within=Reals,bounds=(0,1),initialize=0) m.x97 = Var(within=Reals,bounds=(0,1),initialize=0) m.x98 = Var(within=Reals,bounds=(0,1),initialize=0) m.x99 = Var(within=Reals,bounds=(0,1),initialize=0) m.x100 = Var(within=Reals,bounds=(0,1),initialize=0) m.x101 = Var(within=Reals,bounds=(0,1),initialize=0) m.x102 = Var(within=Reals,bounds=(0,1),initialize=0) m.x103 = Var(within=Reals,bounds=(0,1),initialize=0) m.x104 = Var(within=Reals,bounds=(0,1),initialize=0) m.x105 = Var(within=Reals,bounds=(0,1),initialize=0) m.x106 = Var(within=Reals,bounds=(0,1),initialize=0) m.x107 = Var(within=Reals,bounds=(0,1),initialize=0) m.x108 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x109 = Var(within=Reals,bounds=(0,1),initialize=0) m.x110 = Var(within=Reals,bounds=(0,1),initialize=0) m.x111 = Var(within=Reals,bounds=(0,1),initialize=0) m.x112 = Var(within=Reals,bounds=(0,1),initialize=0) m.x113 = Var(within=Reals,bounds=(0,1),initialize=0) m.x114 = Var(within=Reals,bounds=(0,1),initialize=0) m.x115 = Var(within=Reals,bounds=(0,1),initialize=0) m.x116 = Var(within=Reals,bounds=(0,1),initialize=0) m.x117 = Var(within=Reals,bounds=(0,1),initialize=0) m.x118 = Var(within=Reals,bounds=(0,1),initialize=0) m.x119 = Var(within=Reals,bounds=(0,1),initialize=0) m.x120 = Var(within=Reals,bounds=(0,1),initialize=0) m.x121 = Var(within=Reals,bounds=(0,1),initialize=0) m.x122 = Var(within=Reals,bounds=(0,1),initialize=0) m.x123 = Var(within=Reals,bounds=(0,1),initialize=0) m.x124 = Var(within=Reals,bounds=(0,1),initialize=0) m.x125 = Var(within=Reals,bounds=(0,1),initialize=0) m.x126 = Var(within=Reals,bounds=(0,1),initialize=0) m.x127 = Var(within=Reals,bounds=(0,1),initialize=0) m.x128 = Var(within=Reals,bounds=(0,1),initialize=0) m.x129 = Var(within=Reals,bounds=(0,1),initialize=0) m.x130 = Var(within=Reals,bounds=(0,1),initialize=0) m.x131 = Var(within=Reals,bounds=(0,1),initialize=0) m.x132 = Var(within=Reals,bounds=(0,1),initialize=0) m.x133 = Var(within=Reals,bounds=(0,1),initialize=0) m.x134 = Var(within=Reals,bounds=(0,1),initialize=0) m.x135 = Var(within=Reals,bounds=(0,1),initialize=0) m.x136 = Var(within=Reals,bounds=(0,1),initialize=0) m.x137 = Var(within=Reals,bounds=(0,1),initialize=0) m.x138 = Var(within=Reals,bounds=(0,1),initialize=0) m.x139 = Var(within=Reals,bounds=(0,1),initialize=0) m.x140 = Var(within=Reals,bounds=(0,1),initialize=0) m.x141 = Var(within=Reals,bounds=(0,1),initialize=0) m.x142 = Var(within=Reals,bounds=(0,1),initialize=0) m.x143 = Var(within=Reals,bounds=(0,1),initialize=0) m.x144 = Var(within=Reals,bounds=(0,1),initialize=0) m.x145 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x146 = Var(within=Reals,bounds=(0,1),initialize=0) m.x147 = Var(within=Reals,bounds=(0,1),initialize=0) m.x148 = Var(within=Reals,bounds=(0,1),initialize=0) m.x149 = Var(within=Reals,bounds=(0,1),initialize=0) m.x150 = Var(within=Reals,bounds=(0,1),initialize=0) m.x151 = Var(within=Reals,bounds=(0,1),initialize=0) m.x152 = Var(within=Reals,bounds=(0,1),initialize=0) m.x153 = Var(within=Reals,bounds=(0,1),initialize=0) m.x154 = Var(within=Reals,bounds=(0,1),initialize=0) m.x155 = Var(within=Reals,bounds=(0,1),initialize=0) m.x156 = Var(within=Reals,bounds=(0,1),initialize=0) m.x157 = Var(within=Reals,bounds=(0,1),initialize=0) m.x158 = Var(within=Reals,bounds=(0,1),initialize=0) m.x159 = Var(within=Reals,bounds=(0,1),initialize=0) m.x160 = Var(within=Reals,bounds=(0,1),initialize=0) m.x161 = Var(within=Reals,bounds=(0,1),initialize=0) m.x162 = Var(within=Reals,bounds=(0,1),initialize=0) m.x163 = Var(within=Reals,bounds=(0,1),initialize=0) m.x164 = Var(within=Reals,bounds=(0,1),initialize=0) m.x165 = Var(within=Reals,bounds=(0,1),initialize=0) m.x166 = Var(within=Reals,bounds=(0,1),initialize=0) m.x167 = Var(within=Reals,bounds=(0,1),initialize=0) m.x168 = Var(within=Reals,bounds=(0,1),initialize=0) m.x169 = Var(within=Reals,bounds=(0,1),initialize=0) m.x170 = Var(within=Reals,bounds=(0,1),initialize=0) m.x171 = Var(within=Reals,bounds=(0,1),initialize=0) m.x172 = Var(within=Reals,bounds=(0,1),initialize=0) m.x173 = Var(within=Reals,bounds=(0,1),initialize=0) m.x174 = Var(within=Reals,bounds=(0,1),initialize=0) m.x175 = Var(within=Reals,bounds=(0,1),initialize=0) m.x176 = Var(within=Reals,bounds=(0,1),initialize=0) m.x177 = Var(within=Reals,bounds=(0,1),initialize=0) m.x178 = Var(within=Reals,bounds=(0,1),initialize=0) m.x179 = Var(within=Reals,bounds=(0,1),initialize=0) m.x180 = Var(within=Reals,bounds=(0,1),initialize=0) m.x181 = Var(within=Reals,bounds=(0,1),initialize=0) m.x182 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x183 = Var(within=Reals,bounds=(0,1),initialize=0) m.x184 = Var(within=Reals,bounds=(0,1),initialize=0) m.x185 = Var(within=Reals,bounds=(0,1),initialize=0) m.x186 = Var(within=Reals,bounds=(0,1),initialize=0) m.x187 = Var(within=Reals,bounds=(0,1),initialize=0) m.x188 = Var(within=Reals,bounds=(0,1),initialize=0) m.x189 = Var(within=Reals,bounds=(0,1),initialize=0) m.x190 = Var(within=Reals,bounds=(0,1),initialize=0) m.x191 = Var(within=Reals,bounds=(0,1),initialize=0) m.x192 = Var(within=Reals,bounds=(0,1),initialize=0) m.x193 = Var(within=Reals,bounds=(0,1),initialize=0) m.x194 = Var(within=Reals,bounds=(0,1),initialize=0) m.x195 = Var(within=Reals,bounds=(0,1),initialize=0) m.x196 = Var(within=Reals,bounds=(0,1),initialize=0) m.x197 = Var(within=Reals,bounds=(0,1),initialize=0) m.x198 = Var(within=Reals,bounds=(0,1),initialize=0) m.x199 = Var(within=Reals,bounds=(0,1),initialize=0) m.x200 = Var(within=Reals,bounds=(0,1),initialize=0) m.x201 = Var(within=Reals,bounds=(0,1),initialize=0) m.x202 = Var(within=Reals,bounds=(0,1),initialize=0) m.x203 = Var(within=Reals,bounds=(0,1),initialize=0) m.x204 = Var(within=Reals,bounds=(0,1),initialize=0) m.x205 = Var(within=Reals,bounds=(0,1),initialize=0) m.x206 = Var(within=Reals,bounds=(0,1),initialize=0) m.x207 = Var(within=Reals,bounds=(0,1),initialize=0) m.x208 = Var(within=Reals,bounds=(0,1),initialize=0) m.x209 = Var(within=Reals,bounds=(0,1),initialize=0) m.x210 = Var(within=Reals,bounds=(0,1),initialize=0) m.x211 = Var(within=Reals,bounds=(0,1),initialize=0) m.x212 = Var(within=Reals,bounds=(0,1),initialize=0) m.x213 = Var(within=Reals,bounds=(0,1),initialize=0) m.x214 = Var(within=Reals,bounds=(0,1),initialize=0) m.x215 = Var(within=Reals,bounds=(0,1),initialize=0) m.x216 = Var(within=Reals,bounds=(0,1),initialize=0) m.x217 = Var(within=Reals,bounds=(0,1),initialize=0) m.x218 = Var(within=Reals,bounds=(0,1),initialize=0) m.x219 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x220 = Var(within=Reals,bounds=(0,1),initialize=0) m.x221 = Var(within=Reals,bounds=(0,1),initialize=0) m.x222 = Var(within=Reals,bounds=(0,1),initialize=0) m.x223 = Var(within=Reals,bounds=(0,1),initialize=0) m.x224 = Var(within=Reals,bounds=(0,1),initialize=0) m.x225 = Var(within=Reals,bounds=(0,1),initialize=0) m.x226 = Var(within=Reals,bounds=(0,1),initialize=0) m.x227 = Var(within=Reals,bounds=(0,1),initialize=0) m.x228 = Var(within=Reals,bounds=(0,1),initialize=0) m.x229 = Var(within=Reals,bounds=(0,1),initialize=0) m.x230 = Var(within=Reals,bounds=(0,1),initialize=0) m.x231 = Var(within=Reals,bounds=(0,1),initialize=0) m.x232 = Var(within=Reals,bounds=(0,1),initialize=0) m.x233 = Var(within=Reals,bounds=(0,1),initialize=0) m.x234 = Var(within=Reals,bounds=(0,1),initialize=0) m.x235 = Var(within=Reals,bounds=(0,1),initialize=0) m.x236 = Var(within=Reals,bounds=(0,1),initialize=0) m.x237 = Var(within=Reals,bounds=(0,1),initialize=0) m.x238 = Var(within=Reals,bounds=(0,1),initialize=0) m.x239 = Var(within=Reals,bounds=(0,1),initialize=0) m.x240 = Var(within=Reals,bounds=(0,1),initialize=0) m.x241 = Var(within=Reals,bounds=(0,1),initialize=0) m.x242 = Var(within=Reals,bounds=(0,1),initialize=0) m.x243 = Var(within=Reals,bounds=(0,1),initialize=0) m.x244 = Var(within=Reals,bounds=(0,1),initialize=0) m.x245 = Var(within=Reals,bounds=(0,1),initialize=0) m.x246 = Var(within=Reals,bounds=(0,1),initialize=0) m.x247 = Var(within=Reals,bounds=(0,1),initialize=0) m.x248 = Var(within=Reals,bounds=(0,1),initialize=0) m.x249 = Var(within=Reals,bounds=(0,1),initialize=0) m.x250 = Var(within=Reals,bounds=(0,1),initialize=0) m.x251 = Var(within=Reals,bounds=(0,1),initialize=0) m.x252 = Var(within=Reals,bounds=(0,1),initialize=0) m.x253 = Var(within=Reals,bounds=(0,1),initialize=0) m.x254 = Var(within=Reals,bounds=(0,1),initialize=0) m.x255 = Var(within=Reals,bounds=(0,1),initialize=0) m.x256 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x257 = Var(within=Reals,bounds=(0,1),initialize=0) m.x258 = Var(within=Reals,bounds=(0,1),initialize=0) m.x259 = Var(within=Reals,bounds=(0,1),initialize=0) m.x260 = Var(within=Reals,bounds=(0,1),initialize=0) m.x261 = Var(within=Reals,bounds=(0,1),initialize=0) m.x262 = Var(within=Reals,bounds=(0,1),initialize=0) m.x263 = Var(within=Reals,bounds=(0,1),initialize=0) m.x264 = Var(within=Reals,bounds=(0,1),initialize=0) m.x265 = Var(within=Reals,bounds=(0,1),initialize=0) m.x266 = Var(within=Reals,bounds=(0,1),initialize=0) m.x267 = Var(within=Reals,bounds=(0,1),initialize=0) m.x268 = Var(within=Reals,bounds=(0,1),initialize=0) m.x269 = Var(within=Reals,bounds=(0,1),initialize=0) m.x270 = Var(within=Reals,bounds=(0,1),initialize=0) m.x271 = Var(within=Reals,bounds=(0,1),initialize=0) m.x272 = Var(within=Reals,bounds=(0,1),initialize=0) m.x273 = Var(within=Reals,bounds=(0,1),initialize=0) m.x274 = Var(within=Reals,bounds=(0,1),initialize=0) m.x275 = Var(within=Reals,bounds=(0,1),initialize=0) m.x276 = Var(within=Reals,bounds=(0,1),initialize=0) m.x277 = Var(within=Reals,bounds=(0,1),initialize=0) m.x278 = Var(within=Reals,bounds=(0,1),initialize=0) m.x279 = Var(within=Reals,bounds=(0,1),initialize=0) m.x280 = Var(within=Reals,bounds=(0,1),initialize=0) m.x281 = Var(within=Reals,bounds=(0,1),initialize=0) m.x282 = Var(within=Reals,bounds=(0,1),initialize=0) m.x283 = Var(within=Reals,bounds=(0,1),initialize=0) m.x284 = Var(within=Reals,bounds=(0,1),initialize=0) m.x285 = Var(within=Reals,bounds=(0,1),initialize=0) m.x286 = Var(within=Reals,bounds=(0,1),initialize=0) m.x287 = Var(within=Reals,bounds=(0,1),initialize=0) m.x288 = Var(within=Reals,bounds=(0,1),initialize=0) m.x289 = Var(within=Reals,bounds=(0,1),initialize=0) m.x290 = Var(within=Reals,bounds=(0,1),initialize=0) m.x291 = Var(within=Reals,bounds=(0,1),initialize=0) m.x292 = Var(within=Reals,bounds=(0,1),initialize=0) m.x293 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x294 = Var(within=Reals,bounds=(0,1),initialize=0) m.x295 = Var(within=Reals,bounds=(0,1),initialize=0) m.x296 = Var(within=Reals,bounds=(0,1),initialize=0) m.x297 = Var(within=Reals,bounds=(0,1),initialize=0) m.x298 = Var(within=Reals,bounds=(0,1),initialize=0) m.x299 = Var(within=Reals,bounds=(0,1),initialize=0) m.x300 = Var(within=Reals,bounds=(0,1),initialize=0) m.x301 = Var(within=Reals,bounds=(0,1),initialize=0) m.x302 = Var(within=Reals,bounds=(0,1),initialize=0) m.x303 = Var(within=Reals,bounds=(0,1),initialize=0) m.x304 = Var(within=Reals,bounds=(0,1),initialize=0) m.x305 = Var(within=Reals,bounds=(0,1),initialize=0) m.x306 = Var(within=Reals,bounds=(0,1),initialize=0) m.x307 = Var(within=Reals,bounds=(0,1),initialize=0) m.x308 = Var(within=Reals,bounds=(0,1),initialize=0) m.x309 = Var(within=Reals,bounds=(0,1),initialize=0) m.x310 = Var(within=Reals,bounds=(0,1),initialize=0) m.x311 = Var(within=Reals,bounds=(0,1),initialize=0) m.x312 = Var(within=Reals,bounds=(0,1),initialize=0) m.x313 = Var(within=Reals,bounds=(0,1),initialize=0) m.x314 = Var(within=Reals,bounds=(0,1),initialize=0) m.x315 = Var(within=Reals,bounds=(0,1),initialize=0) m.x316 = Var(within=Reals,bounds=(0,1),initialize=0) m.x317 = Var(within=Reals,bounds=(0,1),initialize=0) m.x318 = Var(within=Reals,bounds=(0,1),initialize=0) m.x319 = Var(within=Reals,bounds=(0,1),initialize=0) m.x320 = Var(within=Reals,bounds=(0,1),initialize=0) m.x321 = Var(within=Reals,bounds=(0,1),initialize=0) m.x322 = Var(within=Reals,bounds=(0,1),initialize=0) m.x323 = Var(within=Reals,bounds=(0,1),initialize=0) m.x324 = Var(within=Reals,bounds=(0,1),initialize=0) m.x325 = Var(within=Reals,bounds=(0,1),initialize=0) m.x326 = Var(within=Reals,bounds=(0,1),initialize=0) m.x327 = Var(within=Reals,bounds=(0,1),initialize=0) m.x328 = Var(within=Reals,bounds=(0,1),initialize=0) m.x329 = Var(within=Reals,bounds=(0,1),initialize=0) m.x330 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x331 = Var(within=Reals,bounds=(0,1),initialize=0) m.x332 = Var(within=Reals,bounds=(0,1),initialize=0) m.x333 = Var(within=Reals,bounds=(0,1),initialize=0) m.x334 = Var(within=Reals,bounds=(0,1),initialize=0) m.x335 = Var(within=Reals,bounds=(0,1),initialize=0) m.x336 = Var(within=Reals,bounds=(0,1),initialize=0) m.x337 = Var(within=Reals,bounds=(0,1),initialize=0) m.x338 = Var(within=Reals,bounds=(0,1),initialize=0) m.x339 = Var(within=Reals,bounds=(0,1),initialize=0) m.x340 = Var(within=Reals,bounds=(0,1),initialize=0) m.x341 = Var(within=Reals,bounds=(0,1),initialize=0) m.x342 = Var(within=Reals,bounds=(0,1),initialize=0) m.x343 = Var(within=Reals,bounds=(0,1),initialize=0) m.x344 = Var(within=Reals,bounds=(0,1),initialize=0) m.x345 = Var(within=Reals,bounds=(0,1),initialize=0) m.x346 = Var(within=Reals,bounds=(0,1),initialize=0) m.x347 = Var(within=Reals,bounds=(0,1),initialize=0) m.x348 = Var(within=Reals,bounds=(0,1),initialize=0) m.x349 = Var(within=Reals,bounds=(0,1),initialize=0) m.x350 = Var(within=Reals,bounds=(0,1),initialize=0) m.x351 = Var(within=Reals,bounds=(0,1),initialize=0) m.x352 = Var(within=Reals,bounds=(0,1),initialize=0) m.x353 = Var(within=Reals,bounds=(0,1),initialize=0) m.x354 = Var(within=Reals,bounds=(0,1),initialize=0) m.x355 = Var(within=Reals,bounds=(0,1),initialize=0) m.x356 = Var(within=Reals,bounds=(0,1),initialize=0) m.x357 = Var(within=Reals,bounds=(0,1),initialize=0) m.x358 = Var(within=Reals,bounds=(0,1),initialize=0) m.x359 = Var(within=Reals,bounds=(0,1),initialize=0) m.x360 = Var(within=Reals,bounds=(0,1),initialize=0) m.x361 = Var(within=Reals,bounds=(0,1),initialize=0) m.x362 = Var(within=Reals,bounds=(0,1),initialize=0) m.x363 = Var(within=Reals,bounds=(0,1),initialize=0) m.x364 = Var(within=Reals,bounds=(0,1),initialize=0) m.x365 = Var(within=Reals,bounds=(0,1),initialize=0) m.x366 = Var(within=Reals,bounds=(0,1),initialize=0) m.x367 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x368 = Var(within=Reals,bounds=(0,1),initialize=0) m.x369 = Var(within=Reals,bounds=(0,1),initialize=0) m.x370 = Var(within=Reals,bounds=(0,1),initialize=0) m.x371 = Var(within=Reals,bounds=(0,1),initialize=0) m.x372 = Var(within=Reals,bounds=(0,1),initialize=0) m.x373 = Var(within=Reals,bounds=(0,1),initialize=0) m.x374 = Var(within=Reals,bounds=(0,1),initialize=0) m.x375 = Var(within=Reals,bounds=(0,1),initialize=0) m.x376 = Var(within=Reals,bounds=(0,1),initialize=0) m.x377 = Var(within=Reals,bounds=(0,1),initialize=0) m.x378 = Var(within=Reals,bounds=(0,1),initialize=0) m.x379 = Var(within=Reals,bounds=(0,1),initialize=0) m.x380 = Var(within=Reals,bounds=(0,1),initialize=0) m.x381 = Var(within=Reals,bounds=(0,1),initialize=0) m.x382 = Var(within=Reals,bounds=(0,1),initialize=0) m.x383 = Var(within=Reals,bounds=(0,1),initialize=0) m.x384 = Var(within=Reals,bounds=(0,1),initialize=0) m.x385 = Var(within=Reals,bounds=(0,1),initialize=0) m.x386 = Var(within=Reals,bounds=(0,1),initialize=0) m.x387 = Var(within=Reals,bounds=(0,1),initialize=0) m.x388 = Var(within=Reals,bounds=(0,1),initialize=0) m.x389 = Var(within=Reals,bounds=(0,1),initialize=0) m.x390 = Var(within=Reals,bounds=(0,1),initialize=0) m.x391 = Var(within=Reals,bounds=(0,1),initialize=0) m.x392 = Var(within=Reals,bounds=(0,1),initialize=0) m.x393 = Var(within=Reals,bounds=(0,1),initialize=0) m.x394 = Var(within=Reals,bounds=(0,1),initialize=0) m.x395 = Var(within=Reals,bounds=(0,1),initialize=0) m.x396 = Var(within=Reals,bounds=(0,1),initialize=0) m.x397 = Var(within=Reals,bounds=(0,1),initialize=0) m.x398 = Var(within=Reals,bounds=(0,1),initialize=0) m.x399 = Var(within=Reals,bounds=(0,1),initialize=0) m.x400 = Var(within=Reals,bounds=(0,1),initialize=0) m.x401 = Var(within=Reals,bounds=(0,1),initialize=0) m.x402 = Var(within=Reals,bounds=(0,1),initialize=0) m.x403 = Var(within=Reals,bounds=(0,1),initialize=0) m.x404 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x405 = Var(within=Reals,bounds=(0,1),initialize=0) m.x406 = Var(within=Reals,bounds=(0,1),initialize=0) m.x407 = Var(within=Reals,bounds=(0,1),initialize=0) m.x408 = Var(within=Reals,bounds=(0,1),initialize=0) m.x409 = Var(within=Reals,bounds=(0,1),initialize=0) m.x410 = Var(within=Reals,bounds=(0,1),initialize=0) m.x411 = Var(within=Reals,bounds=(0,1),initialize=0) m.x412 = Var(within=Reals,bounds=(0,1),initialize=0) m.x413 = Var(within=Reals,bounds=(0,1),initialize=0) m.x414 = Var(within=Reals,bounds=(0,1),initialize=0) m.x415 = Var(within=Reals,bounds=(0,1),initialize=0) m.x416 = Var(within=Reals,bounds=(0,1),initialize=0) m.x417 = Var(within=Reals,bounds=(0,1),initialize=0) m.x418 = Var(within=Reals,bounds=(0,1),initialize=0) m.x419 = Var(within=Reals,bounds=(0,1),initialize=0) m.x420 = Var(within=Reals,bounds=(0,1),initialize=0) m.x421 = Var(within=Reals,bounds=(0,1),initialize=0) m.x422 = Var(within=Reals,bounds=(0,1),initialize=0) m.x423 = Var(within=Reals,bounds=(0,1),initialize=0) m.x424 = Var(within=Reals,bounds=(0,1),initialize=0) m.x425 = Var(within=Reals,bounds=(0,1),initialize=0) m.x426 = Var(within=Reals,bounds=(0,1),initialize=0) m.x427 = Var(within=Reals,bounds=(0,1),initialize=0) m.x428 = Var(within=Reals,bounds=(0,1),initialize=0) m.x429 = Var(within=Reals,bounds=(0,1),initialize=0) m.x430 = Var(within=Reals,bounds=(0,1),initialize=0) m.x431 = Var(within=Reals,bounds=(0,1),initialize=0) m.x432 = Var(within=Reals,bounds=(0,1),initialize=0) m.x433 = Var(within=Reals,bounds=(0,1),initialize=0) m.x434 = Var(within=Reals,bounds=(0,1),initialize=0) m.x435 = Var(within=Reals,bounds=(0,1),initialize=0) m.x436 = Var(within=Reals,bounds=(0,1),initialize=0) m.x437 = Var(within=Reals,bounds=(0,1),initialize=0) m.x438 = Var(within=Reals,bounds=(0,1),initialize=0) m.x439 = Var(within=Reals,bounds=(0,1),initialize=0) m.x440 = Var(within=Reals,bounds=(0,1),initialize=0) m.x441 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x442 = Var(within=Reals,bounds=(0,1),initialize=0) m.x443 = Var(within=Reals,bounds=(0,1),initialize=0) m.x444 = Var(within=Reals,bounds=(0,1),initialize=0) m.x445 = Var(within=Reals,bounds=(0,1),initialize=0) m.x446 = Var(within=Reals,bounds=(0,1),initialize=0) m.x447 = Var(within=Reals,bounds=(0,1),initialize=0) m.x448 = Var(within=Reals,bounds=(0,1),initialize=0) m.x449 = Var(within=Reals,bounds=(0,1),initialize=0) m.x450 = Var(within=Reals,bounds=(0,1),initialize=0) m.x451 = Var(within=Reals,bounds=(0,1),initialize=0) m.x452 = Var(within=Reals,bounds=(0,1),initialize=0) m.x453 = Var(within=Reals,bounds=(0,1),initialize=0) m.x454 = Var(within=Reals,bounds=(0,1),initialize=0) m.x455 = Var(within=Reals,bounds=(0,1),initialize=0) m.x456 = Var(within=Reals,bounds=(0,1),initialize=0) m.x457 = Var(within=Reals,bounds=(0,1),initialize=0) m.x458 = Var(within=Reals,bounds=(0,1),initialize=0) m.x459 = Var(within=Reals,bounds=(0,1),initialize=0) m.x460 = Var(within=Reals,bounds=(0,1),initialize=0) m.x461 = Var(within=Reals,bounds=(0,1),initialize=0) m.x462 = Var(within=Reals,bounds=(0,1),initialize=0) m.x463 = Var(within=Reals,bounds=(0,1),initialize=0) m.x464 = Var(within=Reals,bounds=(0,1),initialize=0) m.x465 = Var(within=Reals,bounds=(0,1),initialize=0) m.x466 = Var(within=Reals,bounds=(0,1),initialize=0) m.x467 = Var(within=Reals,bounds=(0,1),initialize=0) m.x468 = Var(within=Reals,bounds=(0,1),initialize=0) m.x469 = Var(within=Reals,bounds=(0,1),initialize=0) m.x470 = Var(within=Reals,bounds=(0,1),initialize=0) m.x471 = Var(within=Reals,bounds=(0,1),initialize=0) m.x472 = Var(within=Reals,bounds=(0,1),initialize=0) m.x473 = Var(within=Reals,bounds=(0,1),initialize=0) m.x474 = Var(within=Reals,bounds=(0,1),initialize=0) m.x475 = Var(within=Reals,bounds=(0,1),initialize=0) m.x476 = Var(within=Reals,bounds=(0,1),initialize=0) m.x477 = Var(within=Reals,bounds=(0,1),initialize=0) m.x478 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x479 = Var(within=Reals,bounds=(0,1),initialize=0) m.x480 = Var(within=Reals,bounds=(0,1),initialize=0) m.x481 = Var(within=Reals,bounds=(0,1),initialize=0) m.x482 = Var(within=Reals,bounds=(0,1),initialize=0) m.x483 = Var(within=Reals,bounds=(0,1),initialize=0) m.x484 = Var(within=Reals,bounds=(0,1),initialize=0) m.x485 = Var(within=Reals,bounds=(0,1),initialize=0) m.x486 = Var(within=Reals,bounds=(0,1),initialize=0) m.x487 = Var(within=Reals,bounds=(0,1),initialize=0) m.x488 = Var(within=Reals,bounds=(0,1),initialize=0) m.x489 = Var(within=Reals,bounds=(0,1),initialize=0) m.x490 = Var(within=Reals,bounds=(0,1),initialize=0) m.x491 = Var(within=Reals,bounds=(0,1),initialize=0) m.x492 = Var(within=Reals,bounds=(0,1),initialize=0) m.x493 = Var(within=Reals,bounds=(0,1),initialize=0) m.x494 = Var(within=Reals,bounds=(0,1),initialize=0) m.x495 = Var(within=Reals,bounds=(0,1),initialize=0) m.x496 = Var(within=Reals,bounds=(0,1),initialize=0) m.x497 = Var(within=Reals,bounds=(0,1),initialize=0) m.x498 = Var(within=Reals,bounds=(0,1),initialize=0) m.x499 = Var(within=Reals,bounds=(0,1),initialize=0) m.x500 = Var(within=Reals,bounds=(0,1),initialize=0) m.x501 = Var(within=Reals,bounds=(0,1),initialize=0) m.x502 = Var(within=Reals,bounds=(0,1),initialize=0) m.x503 = Var(within=Reals,bounds=(0,1),initialize=0) m.x504 = Var(within=Reals,bounds=(0,1),initialize=0) m.x505 = Var(within=Reals,bounds=(0,1),initialize=0) m.x506 = Var(within=Reals,bounds=(0,1),initialize=0) m.x507 = Var(within=Reals,bounds=(0,1),initialize=0) m.x508 = Var(within=Reals,bounds=(0,1),initialize=0) m.x509 = Var(within=Reals,bounds=(0,1),initialize=0) m.x510 = Var(within=Reals,bounds=(0,1),initialize=0) m.x511 = Var(within=Reals,bounds=(0,1),initialize=0) m.x512 = Var(within=Reals,bounds=(0,1),initialize=0) m.x513 = Var(within=Reals,bounds=(0,1),initialize=0) m.x514 = Var(within=Reals,bounds=(0,1),initialize=0) m.x515 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x516 = Var(within=Reals,bounds=(0,1),initialize=0) m.x517 = Var(within=Reals,bounds=(0,1),initialize=0) m.x518 = Var(within=Reals,bounds=(0,1),initialize=0) m.x519 = Var(within=Reals,bounds=(0,1),initialize=0) m.x520 = Var(within=Reals,bounds=(0,1),initialize=0) m.x521 = Var(within=Reals,bounds=(0,1),initialize=0) m.x522 = Var(within=Reals,bounds=(0,1),initialize=0) m.x523 = Var(within=Reals,bounds=(0,1),initialize=0) m.x524 = Var(within=Reals,bounds=(0,1),initialize=0) m.x525 = Var(within=Reals,bounds=(0,1),initialize=0) m.x526 = Var(within=Reals,bounds=(0,1),initialize=0) m.x527 = Var(within=Reals,bounds=(0,1),initialize=0) m.x528 = Var(within=Reals,bounds=(0,1),initialize=0) m.x529 = Var(within=Reals,bounds=(0,1),initialize=0) m.x530 = Var(within=Reals,bounds=(0,1),initialize=0) m.x531 = Var(within=Reals,bounds=(0,1),initialize=0) m.x532 = Var(within=Reals,bounds=(0,1),initialize=0) m.x533 = Var(within=Reals,bounds=(0,1),initialize=0) m.x534 = Var(within=Reals,bounds=(0,1),initialize=0) m.x535 = Var(within=Reals,bounds=(0,1),initialize=0) m.x536 = Var(within=Reals,bounds=(0,1),initialize=0) m.x537 = Var(within=Reals,bounds=(0,1),initialize=0) m.x538 = Var(within=Reals,bounds=(0,1),initialize=0) m.x539 = Var(within=Reals,bounds=(0,1),initialize=0) m.x540 = Var(within=Reals,bounds=(0,1),initialize=0) m.x541 = Var(within=Reals,bounds=(0,1),initialize=0) m.x542 = Var(within=Reals,bounds=(0,1),initialize=0) m.x543 = Var(within=Reals,bounds=(0,1),initialize=0) m.x544 = Var(within=Reals,bounds=(0,1),initialize=0) m.x545 = Var(within=Reals,bounds=(0,1),initialize=0) m.x546 = Var(within=Reals,bounds=(0,1),initialize=0) m.x547 = Var(within=Reals,bounds=(0,1),initialize=0) m.x548 = Var(within=Reals,bounds=(0,1),initialize=0) m.x549 = Var(within=Reals,bounds=(0,1),initialize=0) m.x550 = Var(within=Reals,bounds=(0,1),initialize=0) m.x551 = Var(within=Reals,bounds=(0,1),initialize=0) m.x552 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x553 = Var(within=Reals,bounds=(0,1),initialize=0) m.x554 = Var(within=Reals,bounds=(0,1),initialize=0) m.x555 = Var(within=Reals,bounds=(0,1),initialize=0) m.x556 = Var(within=Reals,bounds=(0,1),initialize=0) m.x557 = Var(within=Reals,bounds=(0,1),initialize=0) m.x558 = Var(within=Reals,bounds=(0,1),initialize=0) m.x559 = Var(within=Reals,bounds=(0,1),initialize=0) m.x560 = Var(within=Reals,bounds=(0,1),initialize=0) m.x561 = Var(within=Reals,bounds=(0,1),initialize=0) m.x562 = Var(within=Reals,bounds=(0,1),initialize=0) m.x563 = Var(within=Reals,bounds=(0,1),initialize=0) m.x564 = Var(within=Reals,bounds=(0,1),initialize=0) m.x565 = Var(within=Reals,bounds=(0,1),initialize=0) m.x566 = Var(within=Reals,bounds=(0,1),initialize=0) m.x567 = Var(within=Reals,bounds=(0,1),initialize=0) m.x568 = Var(within=Reals,bounds=(0,1),initialize=0) m.x569 = Var(within=Reals,bounds=(0,1),initialize=0) m.x570 = Var(within=Reals,bounds=(0,1),initialize=0) m.x571 = Var(within=Reals,bounds=(0,1),initialize=0) m.x572 = Var(within=Reals,bounds=(0,1),initialize=0) m.x573 = Var(within=Reals,bounds=(0,1),initialize=0) m.x574 = Var(within=Reals,bounds=(0,1),initialize=0) m.x575 = Var(within=Reals,bounds=(0,1),initialize=0) m.x576 = Var(within=Reals,bounds=(0,1),initialize=0) m.x577 = Var(within=Reals,bounds=(0,1),initialize=0) m.x578 = Var(within=Reals,bounds=(0,1),initialize=0) m.x579 = Var(within=Reals,bounds=(0,1),initialize=0) m.x580 = Var(within=Reals,bounds=(0,1),initialize=0) m.x581 = Var(within=Reals,bounds=(0,1),initialize=0) m.x582 = Var(within=Reals,bounds=(0,1),initialize=0) m.x583 = Var(within=Reals,bounds=(0,1),initialize=0) m.x584 = Var(within=Reals,bounds=(0,1),initialize=0) m.x585 = Var(within=Reals,bounds=(0,1),initialize=0) m.x586 = Var(within=Reals,bounds=(0,1),initialize=0) m.x587 = Var(within=Reals,bounds=(0,1),initialize=0) m.x588 = Var(within=Reals,bounds=(0,1),initialize=0) m.x589 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x590 = Var(within=Reals,bounds=(0,1),initialize=0) m.x591 = Var(within=Reals,bounds=(0,1),initialize=0) m.x592 = Var(within=Reals,bounds=(0,1),initialize=0) m.x593 = Var(within=Reals,bounds=(0,1),initialize=0) m.x594 = Var(within=Reals,bounds=(0,1),initialize=0) m.x595 = Var(within=Reals,bounds=(0,1),initialize=0) m.x596 = Var(within=Reals,bounds=(0,1),initialize=0) m.x597 = Var(within=Reals,bounds=(0,1),initialize=0) m.x598 = Var(within=Reals,bounds=(0,1),initialize=0) m.x599 = Var(within=Reals,bounds=(0,1),initialize=0) m.x600 = Var(within=Reals,bounds=(0,1),initialize=0) m.x601 = Var(within=Reals,bounds=(0,1),initialize=0) m.x602 = Var(within=Reals,bounds=(0,1),initialize=0) m.x603 = Var(within=Reals,bounds=(0,1),initialize=0) m.x604 = Var(within=Reals,bounds=(0,1),initialize=0) m.x605 = Var(within=Reals,bounds=(0,1),initialize=0) m.x606 = Var(within=Reals,bounds=(0,1),initialize=0) m.x607 = Var(within=Reals,bounds=(0,1),initialize=0) m.x608 = Var(within=Reals,bounds=(0,1),initialize=0) m.x609 = Var(within=Reals,bounds=(0,1),initialize=0) m.x610 = Var(within=Reals,bounds=(0,1),initialize=0) m.x611 = Var(within=Reals,bounds=(0,1),initialize=0) m.x612 = Var(within=Reals,bounds=(0,1),initialize=0) m.x613 = Var(within=Reals,bounds=(0,1),initialize=0) m.x614 = Var(within=Reals,bounds=(0,1),initialize=0) m.x615 = Var(within=Reals,bounds=(0,1),initialize=0) m.x616 = Var(within=Reals,bounds=(0,1),initialize=0) m.x617 = Var(within=Reals,bounds=(0,1),initialize=0) m.x618 = Var(within=Reals,bounds=(0,1),initialize=0) m.x619 = Var(within=Reals,bounds=(0,1),initialize=0) m.x620 = Var(within=Reals,bounds=(0,1),initialize=0) m.x621 = Var(within=Reals,bounds=(0,1),initialize=0) m.x622 = Var(within=Reals,bounds=(0,1),initialize=0) m.x623 = Var(within=Reals,bounds=(0,1),initialize=0) m.x624 = Var(within=Reals,bounds=(0,1),initialize=0) m.x625 = Var(within=Reals,bounds=(0,1),initialize=0) m.x626 = 
Var(within=Reals,bounds=(0,1),initialize=0) m.x627 = Var(within=Reals,bounds=(0,1),initialize=0) m.x628 = Var(within=Reals,bounds=(0,1),initialize=0) m.x629 = Var(within=Reals,bounds=(0,1),initialize=0) m.x630 = Var(within=Reals,bounds=(0,1),initialize=0) m.x631 = Var(within=Reals,bounds=(0,1),initialize=0) m.x632 = Var(within=Reals,bounds=(0,1),initialize=0) m.x633 = Var(within=Reals,bounds=(0,1),initialize=0) m.x634 = Var(within=Reals,bounds=(0,1),initialize=0) m.x635 = Var(within=Reals,bounds=(0,1),initialize=0) m.x636 = Var(within=Reals,bounds=(0,1),initialize=0) m.x637 = Var(within=Reals,bounds=(0,1),initialize=0) m.x638 = Var(within=Reals,bounds=(0,1),initialize=0) m.x639 = Var(within=Reals,bounds=(0,1),initialize=0) m.x640 = Var(within=Reals,bounds=(0,1),initialize=0) m.x641 = Var(within=Reals,bounds=(0,1),initialize=0) m.x642 = Var(within=Reals,bounds=(0,16),initialize=0) m.x643 = Var(within=Reals,bounds=(0,16),initialize=0) m.x644 = Var(within=Reals,bounds=(0,16),initialize=0) m.x645 = Var(within=Reals,bounds=(0,16),initialize=0) m.x646 = Var(within=Reals,bounds=(0,16),initialize=0) m.x647 = Var(within=Reals,bounds=(0,16),initialize=0) m.x648 = Var(within=Reals,bounds=(0,16),initialize=0) m.x649 = Var(within=Reals,bounds=(0,16),initialize=0) m.x650 = Var(within=Reals,bounds=(0,16),initialize=0) m.x651 = Var(within=Reals,bounds=(0,16),initialize=0) m.x652 = Var(within=Reals,bounds=(0,16),initialize=0) m.x653 = Var(within=Reals,bounds=(0,16),initialize=0) m.x654 = Var(within=Reals,bounds=(0,16),initialize=0) m.x655 = Var(within=Reals,bounds=(0,16),initialize=0) m.x656 = Var(within=Reals,bounds=(0,16),initialize=0) m.x657 = Var(within=Reals,bounds=(0,16),initialize=0) m.x658 = Var(within=Reals,bounds=(0,16),initialize=0) m.x659 = Var(within=Reals,bounds=(0,16),initialize=0) m.x660 = Var(within=Reals,bounds=(0,16),initialize=0) m.x661 = Var(within=Reals,bounds=(0,16),initialize=0) m.x662 = Var(within=Reals,bounds=(0,16),initialize=0) m.x663 = 
Var(within=Reals,bounds=(0,16),initialize=0) m.x664 = Var(within=Reals,bounds=(0,16),initialize=0) m.x665 = Var(within=Reals,bounds=(0,16),initialize=0) m.x666 = Var(within=Reals,bounds=(0,16),initialize=0) m.x667 = Var(within=Reals,bounds=(0,16),initialize=0) m.x668 = Var(within=Reals,bounds=(0,16),initialize=0) m.x669 = Var(within=Reals,bounds=(0,16),initialize=0) m.x670 = Var(within=Reals,bounds=(0,16),initialize=0) m.x671 = Var(within=Reals,bounds=(0,16),initialize=0) m.x672 = Var(within=Reals,bounds=(0,16),initialize=0) m.x673 = Var(within=Reals,bounds=(0,16),initialize=0) m.obj = Objective(expr= - 10*m.x130 - 9.5*m.x131 - 9*m.x132 - 8.5*m.x133 - 8*m.x134 - 7.5*m.x135 - 7*m.x136 - 6.5*m.x137 - 6*m.x138 - 5.5*m.x139 - 5*m.x140 - 4.5*m.x141 - 4*m.x142 - 3.5*m.x143 - 3*m.x144 - 2.5*m.x145 - 9*m.x146 - 8.5*m.x147 - 8*m.x148 - 7.5*m.x149 - 7*m.x150 - 6.5*m.x151 - 6*m.x152 - 5.5*m.x153 - 5*m.x154 - 4.5*m.x155 - 4*m.x156 - 3.5*m.x157 - 3*m.x158 - 2.5*m.x159 - 2*m.x160 - 1.5*m.x161 - 9*m.x162 - 8.5*m.x163 - 8*m.x164 - 7.5*m.x165 - 7*m.x166 - 6.5*m.x167 - 6*m.x168 - 5.5*m.x169 - 5*m.x170 - 4.5*m.x171 - 4*m.x172 - 3.5*m.x173 - 3*m.x174 - 2.5*m.x175 - 2*m.x176 - 1.5*m.x177 - 8*m.x178 - 7.5*m.x179 - 7*m.x180 - 6.5*m.x181 - 6*m.x182 - 5.5*m.x183 - 5*m.x184 - 4.5*m.x185 - 4*m.x186 - 3.5*m.x187 - 3*m.x188 - 2.5*m.x189 - 2*m.x190 - 1.5*m.x191 - m.x192 - 0.5*m.x193 - 8*m.x194 - 7.5*m.x195 - 7*m.x196 - 6.5*m.x197 - 6*m.x198 - 5.5*m.x199 - 5*m.x200 - 4.5*m.x201 - 4*m.x202 - 3.5*m.x203 - 3*m.x204 - 2.5*m.x205 - 2*m.x206 - 1.5*m.x207 - m.x208 - 0.5*m.x209 - 8*m.x210 - 7.5*m.x211 - 7*m.x212 - 6.5*m.x213 - 6*m.x214 - 5.5*m.x215 - 5*m.x216 - 4.5*m.x217 - 4*m.x218 - 3.5*m.x219 - 3*m.x220 - 2.5*m.x221 - 2*m.x222 - 1.5*m.x223 - m.x224 - 0.5*m.x225 - 7*m.x226 - 6.5*m.x227 - 6*m.x228 - 5.5*m.x229 - 5*m.x230 - 4.5*m.x231 - 4*m.x232 - 3.5*m.x233 - 3*m.x234 - 2.5*m.x235 - 2*m.x236 - 1.5*m.x237 - m.x238 - 0.5*m.x239 + 0.5*m.x241 - 7*m.x242 - 6.5*m.x243 - 6*m.x244 - 5.5*m.x245 - 5*m.x246 - 
4.5*m.x247 - 4*m.x248 - 3.5*m.x249 - 3*m.x250 - 2.5*m.x251 - 2*m.x252 - 1.5*m.x253 - m.x254 - 0.5*m.x255 + 0.5*m.x257 - 7*m.x258 - 6.5*m.x259 - 6*m.x260 - 5.5*m.x261 - 5*m.x262 - 4.5*m.x263 - 4*m.x264 - 3.5*m.x265 - 3*m.x266 - 2.5*m.x267 - 2*m.x268 - 1.5*m.x269 - m.x270 - 0.5*m.x271 + 0.5*m.x273 - 7*m.x274 - 6.5*m.x275 - 6*m.x276 - 5.5*m.x277 - 5*m.x278 - 4.5*m.x279 - 4*m.x280 - 3.5*m.x281 - 3*m.x282 - 2.5*m.x283 - 2*m.x284 - 1.5*m.x285 - m.x286 - 0.5*m.x287 + 0.5*m.x289 - 6*m.x290 - 5.5*m.x291 - 5*m.x292 - 4.5*m.x293 - 4*m.x294 - 3.5*m.x295 - 3*m.x296 - 2.5*m.x297 - 2*m.x298 - 1.5*m.x299 - m.x300 - 0.5*m.x301 + 0.5*m.x303 + m.x304 + 1.5*m.x305 - 6*m.x306 - 5.5*m.x307 - 5*m.x308 - 4.5*m.x309 - 4*m.x310 - 3.5*m.x311 - 3*m.x312 - 2.5*m.x313 - 2*m.x314 - 1.5*m.x315 - m.x316 - 0.5*m.x317 + 0.5*m.x319 + m.x320 + 1.5*m.x321 - 6*m.x322 - 5.5*m.x323 - 5*m.x324 - 4.5*m.x325 - 4*m.x326 - 3.5*m.x327 - 3*m.x328 - 2.5*m.x329 - 2*m.x330 - 1.5*m.x331 - m.x332 - 0.5*m.x333 + 0.5*m.x335 + m.x336 + 1.5*m.x337 - 6*m.x338 - 5.5*m.x339 - 5*m.x340 - 4.5*m.x341 - 4*m.x342 - 3.5*m.x343 - 3*m.x344 - 2.5*m.x345 - 2*m.x346 - 1.5*m.x347 - m.x348 - 0.5*m.x349 + 0.5*m.x351 + m.x352 + 1.5*m.x353 - 5*m.x354 - 4.5*m.x355 - 4*m.x356 - 3.5*m.x357 - 3*m.x358 - 2.5*m.x359 - 2*m.x360 - 1.5*m.x361 - m.x362 - 0.5*m.x363 + 0.5*m.x365 + m.x366 + 1.5*m.x367 + 2*m.x368 + 2.5*m.x369 - 5*m.x370 - 4.5*m.x371 - 4*m.x372 - 3.5*m.x373 - 3*m.x374 - 2.5*m.x375 - 2*m.x376 - 1.5*m.x377 - m.x378 - 0.5*m.x379 + 0.5*m.x381 + m.x382 + 1.5*m.x383 + 2*m.x384 + 2.5*m.x385 - 5*m.x386 - 4.5*m.x387 - 4*m.x388 - 3.5*m.x389 - 3*m.x390 - 2.5*m.x391 - 2*m.x392 - 1.5*m.x393 - m.x394 - 0.5*m.x395 + 0.5*m.x397 + m.x398 + 1.5*m.x399 + 2*m.x400 + 2.5*m.x401 - 5*m.x402 - 4.5*m.x403 - 4*m.x404 - 3.5*m.x405 - 3*m.x406 - 2.5*m.x407 - 2*m.x408 - 1.5*m.x409 - m.x410 - 0.5*m.x411 + 0.5*m.x413 + m.x414 + 1.5*m.x415 + 2*m.x416 + 2.5*m.x417 - 4*m.x418 - 3.5*m.x419 - 3*m.x420 - 2.5*m.x421 - 2*m.x422 - 1.5*m.x423 - m.x424 - 0.5*m.x425 + 0.5*m.x427 
+ m.x428 + 1.5*m.x429 + 2*m.x430 + 2.5*m.x431 + 3*m.x432 + 3.5*m.x433 - 4*m.x434 - 3.5*m.x435 - 3*m.x436 - 2.5*m.x437 - 2*m.x438 - 1.5*m.x439 - m.x440 - 0.5*m.x441 + 0.5*m.x443 + m.x444 + 1.5*m.x445 + 2*m.x446 + 2.5*m.x447 + 3*m.x448 + 3.5*m.x449 - 4*m.x450 - 3.5*m.x451 - 3*m.x452 - 2.5*m.x453 - 2*m.x454 - 1.5*m.x455 - m.x456 - 0.5*m.x457 + 0.5*m.x459 + m.x460 + 1.5*m.x461 + 2*m.x462 + 2.5*m.x463 + 3*m.x464 + 3.5*m.x465 - 4*m.x466 - 3.5*m.x467 - 3*m.x468 - 2.5*m.x469 - 2*m.x470 - 1.5*m.x471 - m.x472 - 0.5*m.x473 + 0.5*m.x475 + m.x476 + 1.5*m.x477 + 2*m.x478 + 2.5*m.x479 + 3*m.x480 + 3.5*m.x481 - 3*m.x482 - 2.5*m.x483 - 2*m.x484 - 1.5*m.x485 - m.x486 - 0.5*m.x487 + 0.5*m.x489 + m.x490 + 1.5*m.x491 + 2*m.x492 + 2.5*m.x493 + 3*m.x494 + 3.5*m.x495 + 4*m.x496 + 4.5*m.x497 - 3*m.x498 - 2.5*m.x499 - 2*m.x500 - 1.5*m.x501 - m.x502 - 0.5*m.x503 + 0.5*m.x505 + m.x506 + 1.5*m.x507 + 2*m.x508 + 2.5*m.x509 + 3*m.x510 + 3.5*m.x511 + 4*m.x512 + 4.5*m.x513 - 3*m.x514 - 2.5*m.x515 - 2*m.x516 - 1.5*m.x517 - m.x518 - 0.5*m.x519 + 0.5*m.x521 + m.x522 + 1.5*m.x523 + 2*m.x524 + 2.5*m.x525 + 3*m.x526 + 3.5*m.x527 + 4*m.x528 + 4.5*m.x529 - 3*m.x530 - 2.5*m.x531 - 2*m.x532 - 1.5*m.x533 - m.x534 - 0.5*m.x535 + 0.5*m.x537 + m.x538 + 1.5*m.x539 + 2*m.x540 + 2.5*m.x541 + 3*m.x542 + 3.5*m.x543 + 4*m.x544 + 4.5*m.x545 - 2*m.x546 - 1.5*m.x547 - m.x548 - 0.5*m.x549 + 0.5*m.x551 + m.x552 + 1.5*m.x553 + 2*m.x554 + 2.5*m.x555 + 3*m.x556 + 3.5*m.x557 + 4*m.x558 + 4.5*m.x559 + 5*m.x560 + 5.5*m.x561 - 2*m.x562 - 1.5*m.x563 - m.x564 - 0.5*m.x565 + 0.5*m.x567 + m.x568 + 1.5*m.x569 + 2*m.x570 + 2.5*m.x571 + 3*m.x572 + 3.5*m.x573 + 4*m.x574 + 4.5*m.x575 + 5*m.x576 + 5.5*m.x577 - 2*m.x578 - 1.5*m.x579 - m.x580 - 0.5*m.x581 + 0.5*m.x583 + m.x584 + 1.5*m.x585 + 2*m.x586 + 2.5*m.x587 + 3*m.x588 + 3.5*m.x589 + 4*m.x590 + 4.5*m.x591 + 5*m.x592 + 5.5*m.x593 - m.x594 - 0.5*m.x595 + 0.5*m.x597 + m.x598 + 1.5*m.x599 + 2*m.x600 + 2.5*m.x601 + 3*m.x602 + 3.5*m.x603 + 4*m.x604 + 4.5*m.x605 + 5*m.x606 + 5.5*m.x607 + 
6*m.x608 + 6.5*m.x609 - m.x610 - 0.5*m.x611 + 0.5*m.x613 + m.x614 + 1.5*m.x615 + 2*m.x616 + 2.5*m.x617 + 3*m.x618 + 3.5*m.x619 + 4*m.x620 + 4.5*m.x621 + 5*m.x622 + 5.5*m.x623 + 6*m.x624 + 6.5*m.x625 + 0.5*m.x627 + m.x628 + 1.5*m.x629 + 2*m.x630 + 2.5*m.x631 + 3*m.x632 + 3.5*m.x633 + 4*m.x634 + 4.5*m.x635 + 5*m.x636 + 5.5*m.x637 + 6*m.x638 + 6.5*m.x639 + 7*m.x640 + 7.5*m.x641 , sense=minimize) m.c2 = Constraint(expr= m.x130 + m.x131 + m.x132 + m.x133 + m.x134 + m.x135 + m.x136 + m.x137 + m.x138 + m.x139 + m.x140 + m.x141 + m.x142 + m.x143 + m.x144 + m.x145 <= 16) m.c3 = Constraint(expr= m.x146 + m.x147 + m.x148 + m.x149 + m.x150 + m.x151 + m.x152 + m.x153 + m.x154 + m.x155 + m.x156 + m.x157 + m.x158 + m.x159 + m.x160 + m.x161 + m.x162 + m.x163 + m.x164 + m.x165 + m.x166 + m.x167 + m.x168 + m.x169 + m.x170 + m.x171 + m.x172 + m.x173 + m.x174 + m.x175 + m.x176 + m.x177 <= 16) m.c4 = Constraint(expr= m.x178 + m.x179 + m.x180 + m.x181 + m.x182 + m.x183 + m.x184 + m.x185 + m.x186 + m.x187 + m.x188 + m.x189 + m.x190 + m.x191 + m.x192 + m.x193 + m.x194 + m.x195 + m.x196 + m.x197 + m.x198 + m.x199 + m.x200 + m.x201 + m.x202 + m.x203 + m.x204 + m.x205 + m.x206 + m.x207 + m.x208 + m.x209 + m.x210 + m.x211 + m.x212 + m.x213 + m.x214 + m.x215 + m.x216 + m.x217 + m.x218 + m.x219 + m.x220 + m.x221 + m.x222 + m.x223 + m.x224 + m.x225 <= 16) m.c5 = Constraint(expr= m.x226 + m.x227 + m.x228 + m.x229 + m.x230 + m.x231 + m.x232 + m.x233 + m.x234 + m.x235 + m.x236 + m.x237 + m.x238 + m.x239 + m.x240 + m.x241 + m.x242 + m.x243 + m.x244 + m.x245 + m.x246 + m.x247 + m.x248 + m.x249 + m.x250 + m.x251 + m.x252 + m.x253 + m.x254 + m.x255 + m.x256 + m.x257 + m.x258 + m.x259 + m.x260 + m.x261 + m.x262 + m.x263 + m.x264 + m.x265 + m.x266 + m.x267 + m.x268 + m.x269 + m.x270 + m.x271 + m.x272 + m.x273 + m.x274 + m.x275 + m.x276 + m.x277 + m.x278 + m.x279 + m.x280 + m.x281 + m.x282 + m.x283 + m.x284 + m.x285 + m.x286 + m.x287 + m.x288 + m.x289 <= 16) m.c6 = Constraint(expr= m.x290 + m.x291 + 
m.x292 + m.x293 + m.x294 + m.x295 + m.x296 + m.x297 + m.x298 + m.x299 + m.x300 + m.x301 + m.x302 + m.x303 + m.x304 + m.x305 + m.x306 + m.x307 + m.x308 + m.x309 + m.x310 + m.x311 + m.x312 + m.x313 + m.x314 + m.x315 + m.x316 + m.x317 + m.x318 + m.x319 + m.x320 + m.x321 + m.x322 + m.x323 + m.x324 + m.x325 + m.x326 + m.x327 + m.x328 + m.x329 + m.x330 + m.x331 + m.x332 + m.x333 + m.x334 + m.x335 + m.x336 + m.x337 + m.x338 + m.x339 + m.x340 + m.x341 + m.x342 + m.x343 + m.x344 + m.x345 + m.x346 + m.x347 + m.x348 + m.x349 + m.x350 + m.x351 + m.x352 + m.x353 <= 16) m.c7 = Constraint(expr= m.x354 + m.x355 + m.x356 + m.x357 + m.x358 + m.x359 + m.x360 + m.x361 + m.x362 + m.x363 + m.x364 + m.x365 + m.x366 + m.x367 + m.x368 + m.x369 + m.x370 + m.x371 + m.x372 + m.x373 + m.x374 + m.x375 + m.x376 + m.x377 + m.x378 + m.x379 + m.x380 + m.x381 + m.x382 + m.x383 + m.x384 + m.x385 + m.x386 + m.x387 + m.x388 + m.x389 + m.x390 + m.x391 + m.x392 + m.x393 + m.x394 + m.x395 + m.x396 + m.x397 + m.x398 + m.x399 + m.x400 + m.x401 + m.x402 + m.x403 + m.x404 + m.x405 + m.x406 + m.x407 + m.x408 + m.x409 + m.x410 + m.x411 + m.x412 + m.x413 + m.x414 + m.x415 + m.x416 + m.x417 <= 16) m.c8 = Constraint(expr= m.x418 + m.x419 + m.x420 + m.x421 + m.x422 + m.x423 + m.x424 + m.x425 + m.x426 + m.x427 + m.x428 + m.x429 + m.x430 + m.x431 + m.x432 + m.x433 + m.x434 + m.x435 + m.x436 + m.x437 + m.x438 + m.x439 + m.x440 + m.x441 + m.x442 + m.x443 + m.x444 + m.x445 + m.x446 + m.x447 + m.x448 + m.x449 + m.x450 + m.x451 + m.x452 + m.x453 + m.x454 + m.x455 + m.x456 + m.x457 + m.x458 + m.x459 + m.x460 + m.x461 + m.x462 + m.x463 + m.x464 + m.x465 + m.x466 + m.x467 + m.x468 + m.x469 + m.x470 + m.x471 + m.x472 + m.x473 + m.x474 + m.x475 + m.x476 + m.x477 + m.x478 + m.x479 + m.x480 + m.x481 <= 16) m.c9 = Constraint(expr= m.x482 + m.x483 + m.x484 + m.x485 + m.x486 + m.x487 + m.x488 + m.x489 + m.x490 + m.x491 + m.x492 + m.x493 + m.x494 + m.x495 + m.x496 + m.x497 + m.x498 + m.x499 + m.x500 + m.x501 + m.x502 + m.x503 + 
m.x504 + m.x505 + m.x506 + m.x507 + m.x508 + m.x509 + m.x510 + m.x511 + m.x512 + m.x513 + m.x514 + m.x515 + m.x516 + m.x517 + m.x518 + m.x519 + m.x520 + m.x521 + m.x522 + m.x523 + m.x524 + m.x525 + m.x526 + m.x527 + m.x528 + m.x529 + m.x530 + m.x531 + m.x532 + m.x533 + m.x534 + m.x535 + m.x536 + m.x537 + m.x538 + m.x539 + m.x540 + m.x541 + m.x542 + m.x543 + m.x544 + m.x545 <= 16) m.c10 = Constraint(expr= m.x546 + m.x547 + m.x548 + m.x549 + m.x550 + m.x551 + m.x552 + m.x553 + m.x554 + m.x555 + m.x556 + m.x557 + m.x558 + m.x559 + m.x560 + m.x561 + m.x562 + m.x563 + m.x564 + m.x565 + m.x566 + m.x567 + m.x568 + m.x569 + m.x570 + m.x571 + m.x572 + m.x573 + m.x574 + m.x575 + m.x576 + m.x577 + m.x578 + m.x579 + m.x580 + m.x581 + m.x582 + m.x583 + m.x584 + m.x585 + m.x586 + m.x587 + m.x588 + m.x589 + m.x590 + m.x591 + m.x592 + m.x593 <= 16) m.c11 = Constraint(expr= m.x594 + m.x595 + m.x596 + m.x597 + m.x598 + m.x599 + m.x600 + m.x601 + m.x602 + m.x603 + m.x604 + m.x605 + m.x606 + m.x607 + m.x608 + m.x609 + m.x610 + m.x611 + m.x612 + m.x613 + m.x614 + m.x615 + m.x616 + m.x617 + m.x618 + m.x619 + m.x620 + m.x621 + m.x622 + m.x623 + m.x624 + m.x625 <= 16) m.c12 = Constraint(expr= m.x626 + m.x627 + m.x628 + m.x629 + m.x630 + m.x631 + m.x632 + m.x633 + m.x634 + m.x635 + m.x636 + m.x637 + m.x638 + m.x639 + m.x640 + m.x641 <= 16) m.c13 = Constraint(expr= m.x482 + m.x483 + m.x484 + m.x485 + m.x486 + m.x487 + m.x488 + m.x489 + m.x490 + m.x491 + m.x492 + m.x493 + m.x494 + m.x495 + m.x496 + m.x497 + m.x546 + m.x547 + m.x548 + m.x549 + m.x550 + m.x551 + m.x552 + m.x553 + m.x554 + m.x555 + m.x556 + m.x557 + m.x558 + m.x559 + m.x560 + m.x561 + m.x594 + m.x595 + m.x596 + m.x597 + m.x598 + m.x599 + m.x600 + m.x601 + m.x602 + m.x603 + m.x604 + m.x605 + m.x606 + m.x607 + m.x608 + m.x609 + m.x626 + m.x627 + m.x628 + m.x629 + m.x630 + m.x631 + m.x632 + m.x633 + m.x634 + m.x635 + m.x636 + m.x637 + m.x638 + m.x639 + m.x640 + m.x641 <= 16) m.c14 = Constraint(expr= m.x418 + m.x419 + m.x420 + 
m.x421 + m.x422 + m.x423 + m.x424 + m.x425 + m.x426 + m.x427 + m.x428 + m.x429 + m.x430 + m.x431 + m.x432 + m.x433 + m.x498 + m.x499 + m.x500 + m.x501 + m.x502 + m.x503 + m.x504 + m.x505 + m.x506 + m.x507 + m.x508 + m.x509 + m.x510 + m.x511 + m.x512 + m.x513 + m.x562 + m.x563 + m.x564 + m.x565 + m.x566 + m.x567 + m.x568 + m.x569 + m.x570 + m.x571 + m.x572 + m.x573 + m.x574 + m.x575 + m.x576 + m.x577 + m.x610 + m.x611 + m.x612 + m.x613 + m.x614 + m.x615 + m.x616 + m.x617 + m.x618 + m.x619 + m.x620 + m.x621 + m.x622 + m.x623 + m.x624 + m.x625 <= 16) m.c15 = Constraint(expr= m.x354 + m.x355 + m.x356 + m.x357 + m.x358 + m.x359 + m.x360 + m.x361 + m.x362 + m.x363 + m.x364 + m.x365 + m.x366 + m.x367 + m.x368 + m.x369 + m.x434 + m.x435 + m.x436 + m.x437 + m.x438 + m.x439 + m.x440 + m.x441 + m.x442 + m.x443 + m.x444 + m.x445 + m.x446 + m.x447 + m.x448 + m.x449 + m.x514 + m.x515 + m.x516 + m.x517 + m.x518 + m.x519 + m.x520 + m.x521 + m.x522 + m.x523 + m.x524 + m.x525 + m.x526 + m.x527 + m.x528 + m.x529 + m.x578 + m.x579 + m.x580 + m.x581 + m.x582 + m.x583 + m.x584 + m.x585 + m.x586 + m.x587 + m.x588 + m.x589 + m.x590 + m.x591 + m.x592 + m.x593 <= 16) m.c16 = Constraint(expr= m.x290 + m.x291 + m.x292 + m.x293 + m.x294 + m.x295 + m.x296 + m.x297 + m.x298 + m.x299 + m.x300 + m.x301 + m.x302 + m.x303 + m.x304 + m.x305 + m.x370 + m.x371 + m.x372 + m.x373 + m.x374 + m.x375 + m.x376 + m.x377 + m.x378 + m.x379 + m.x380 + m.x381 + m.x382 + m.x383 + m.x384 + m.x385 + m.x450 + m.x451 + m.x452 + m.x453 + m.x454 + m.x455 + m.x456 + m.x457 + m.x458 + m.x459 + m.x460 + m.x461 + m.x462 + m.x463 + m.x464 + m.x465 + m.x530 + m.x531 + m.x532 + m.x533 + m.x534 + m.x535 + m.x536 + m.x537 + m.x538 + m.x539 + m.x540 + m.x541 + m.x542 + m.x543 + m.x544 + m.x545 <= 16) m.c17 = Constraint(expr= m.x226 + m.x227 + m.x228 + m.x229 + m.x230 + m.x231 + m.x232 + m.x233 + m.x234 + m.x235 + m.x236 + m.x237 + m.x238 + m.x239 + m.x240 + m.x241 + m.x306 + m.x307 + m.x308 + m.x309 + m.x310 + m.x311 + m.x312 + 
m.x313 + m.x314 + m.x315 + m.x316 + m.x317 + m.x318 + m.x319 + m.x320 + m.x321 + m.x386 + m.x387 + m.x388 + m.x389 + m.x390 + m.x391 + m.x392 + m.x393 + m.x394 + m.x395 + m.x396 + m.x397 + m.x398 + m.x399 + m.x400 + m.x401 + m.x466 + m.x467 + m.x468 + m.x469 + m.x470 + m.x471 + m.x472 + m.x473 + m.x474 + m.x475 + m.x476 + m.x477 + m.x478 + m.x479 + m.x480 + m.x481 <= 16) m.c18 = Constraint(expr= m.x178 + m.x179 + m.x180 + m.x181 + m.x182 + m.x183 + m.x184 + m.x185 + m.x186 + m.x187 + m.x188 + m.x189 + m.x190 + m.x191 + m.x192 + m.x193 + m.x242 + m.x243 + m.x244 + m.x245 + m.x246 + m.x247 + m.x248 + m.x249 + m.x250 + m.x251 + m.x252 + m.x253 + m.x254 + m.x255 + m.x256 + m.x257 + m.x322 + m.x323 + m.x324 + m.x325 + m.x326 + m.x327 + m.x328 + m.x329 + m.x330 + m.x331 + m.x332 + m.x333 + m.x334 + m.x335 + m.x336 + m.x337 + m.x402 + m.x403 + m.x404 + m.x405 + m.x406 + m.x407 + m.x408 + m.x409 + m.x410 + m.x411 + m.x412 + m.x413 + m.x414 + m.x415 + m.x416 + m.x417 <= 16) m.c19 = Constraint(expr= m.x146 + m.x147 + m.x148 + m.x149 + m.x150 + m.x151 + m.x152 + m.x153 + m.x154 + m.x155 + m.x156 + m.x157 + m.x158 + m.x159 + m.x160 + m.x161 + m.x194 + m.x195 + m.x196 + m.x197 + m.x198 + m.x199 + m.x200 + m.x201 + m.x202 + m.x203 + m.x204 + m.x205 + m.x206 + m.x207 + m.x208 + m.x209 + m.x258 + m.x259 + m.x260 + m.x261 + m.x262 + m.x263 + m.x264 + m.x265 + m.x266 + m.x267 + m.x268 + m.x269 + m.x270 + m.x271 + m.x272 + m.x273 + m.x338 + m.x339 + m.x340 + m.x341 + m.x342 + m.x343 + m.x344 + m.x345 + m.x346 + m.x347 + m.x348 + m.x349 + m.x350 + m.x351 + m.x352 + m.x353 <= 16) m.c20 = Constraint(expr= m.x130 + m.x131 + m.x132 + m.x133 + m.x134 + m.x135 + m.x136 + m.x137 + m.x138 + m.x139 + m.x140 + m.x141 + m.x142 + m.x143 + m.x144 + m.x145 + m.x162 + m.x163 + m.x164 + m.x165 + m.x166 + m.x167 + m.x168 + m.x169 + m.x170 + m.x171 + m.x172 + m.x173 + m.x174 + m.x175 + m.x176 + m.x177 + m.x210 + m.x211 + m.x212 + m.x213 + m.x214 + m.x215 + m.x216 + m.x217 + m.x218 + m.x219 + m.x220 + 
m.x221 + m.x222 + m.x223 + m.x224 + m.x225 + m.x274 + m.x275 + m.x276 + m.x277 + m.x278 + m.x279 + m.x280 + m.x281 + m.x282 + m.x283 + m.x284 + m.x285 + m.x286 + m.x287 + m.x288 + m.x289 <= 16) m.c21 = Constraint(expr= m.x130 + m.x146 + m.x162 + m.x178 + m.x194 + m.x210 + m.x226 + m.x242 + m.x258 + m.x274 + m.x290 + m.x306 + m.x322 + m.x338 + m.x354 + m.x370 + m.x386 + m.x402 + m.x418 + m.x434 + m.x450 + m.x466 + m.x482 + m.x498 + m.x514 + m.x530 + m.x546 + m.x562 + m.x578 + m.x594 + m.x610 + m.x626 <= 1) m.c22 = Constraint(expr= m.x131 + m.x147 + m.x163 + m.x179 + m.x195 + m.x211 + m.x227 + m.x243 + m.x259 + m.x275 + m.x291 + m.x307 + m.x323 + m.x339 + m.x355 + m.x371 + m.x387 + m.x403 + m.x419 + m.x435 + m.x451 + m.x467 + m.x483 + m.x499 + m.x515 + m.x531 + m.x547 + m.x563 + m.x579 + m.x595 + m.x611 + m.x627 <= 1) m.c23 = Constraint(expr= m.x132 + m.x148 + m.x164 + m.x180 + m.x196 + m.x212 + m.x228 + m.x244 + m.x260 + m.x276 + m.x292 + m.x308 + m.x324 + m.x340 + m.x356 + m.x372 + m.x388 + m.x404 + m.x420 + m.x436 + m.x452 + m.x468 + m.x484 + m.x500 + m.x516 + m.x532 + m.x548 + m.x564 + m.x580 + m.x596 + m.x612 + m.x628 <= 1) m.c24 = Constraint(expr= m.x133 + m.x149 + m.x165 + m.x181 + m.x197 + m.x213 + m.x229 + m.x245 + m.x261 + m.x277 + m.x293 + m.x309 + m.x325 + m.x341 + m.x357 + m.x373 + m.x389 + m.x405 + m.x421 + m.x437 + m.x453 + m.x469 + m.x485 + m.x501 + m.x517 + m.x533 + m.x549 + m.x565 + m.x581 + m.x597 + m.x613 + m.x629 <= 1) m.c25 = Constraint(expr= m.x134 + m.x150 + m.x166 + m.x182 + m.x198 + m.x214 + m.x230 + m.x246 + m.x262 + m.x278 + m.x294 + m.x310 + m.x326 + m.x342 + m.x358 + m.x374 + m.x390 + m.x406 + m.x422 + m.x438 + m.x454 + m.x470 + m.x486 + m.x502 + m.x518 + m.x534 + m.x550 + m.x566 + m.x582 + m.x598 + m.x614 + m.x630 <= 1) m.c26 = Constraint(expr= m.x135 + m.x151 + m.x167 + m.x183 + m.x199 + m.x215 + m.x231 + m.x247 + m.x263 + m.x279 + m.x295 + m.x311 + m.x327 + m.x343 + m.x359 + m.x375 + m.x391 + m.x407 + m.x423 + m.x439 + m.x455 + m.x471 
+ m.x487 + m.x503 + m.x519 + m.x535 + m.x551 + m.x567 + m.x583 + m.x599 + m.x615 + m.x631 <= 1) m.c27 = Constraint(expr= m.x136 + m.x152 + m.x168 + m.x184 + m.x200 + m.x216 + m.x232 + m.x248 + m.x264 + m.x280 + m.x296 + m.x312 + m.x328 + m.x344 + m.x360 + m.x376 + m.x392 + m.x408 + m.x424 + m.x440 + m.x456 + m.x472 + m.x488 + m.x504 + m.x520 + m.x536 + m.x552 + m.x568 + m.x584 + m.x600 + m.x616 + m.x632 <= 1) m.c28 = Constraint(expr= m.x137 + m.x153 + m.x169 + m.x185 + m.x201 + m.x217 + m.x233 + m.x249 + m.x265 + m.x281 + m.x297 + m.x313 + m.x329 + m.x345 + m.x361 + m.x377 + m.x393 + m.x409 + m.x425 + m.x441 + m.x457 + m.x473 + m.x489 + m.x505 + m.x521 + m.x537 + m.x553 + m.x569 + m.x585 + m.x601 + m.x617 + m.x633 <= 1) m.c29 = Constraint(expr= m.x138 + m.x154 + m.x170 + m.x186 + m.x202 + m.x218 + m.x234 + m.x250 + m.x266 + m.x282 + m.x298 + m.x314 + m.x330 + m.x346 + m.x362 + m.x378 + m.x394 + m.x410 + m.x426 + m.x442 + m.x458 + m.x474 + m.x490 + m.x506 + m.x522 + m.x538 + m.x554 + m.x570 + m.x586 + m.x602 + m.x618 + m.x634 <= 1) m.c30 = Constraint(expr= m.x139 + m.x155 + m.x171 + m.x187 + m.x203 + m.x219 + m.x235 + m.x251 + m.x267 + m.x283 + m.x299 + m.x315 + m.x331 + m.x347 + m.x363 + m.x379 + m.x395 + m.x411 + m.x427 + m.x443 + m.x459 + m.x475 + m.x491 + m.x507 + m.x523 + m.x539 + m.x555 + m.x571 + m.x587 + m.x603 + m.x619 + m.x635 <= 1) m.c31 = Constraint(expr= m.x140 + m.x156 + m.x172 + m.x188 + m.x204 + m.x220 + m.x236 + m.x252 + m.x268 + m.x284 + m.x300 + m.x316 + m.x332 + m.x348 + m.x364 + m.x380 + m.x396 + m.x412 + m.x428 + m.x444 + m.x460 + m.x476 + m.x492 + m.x508 + m.x524 + m.x540 + m.x556 + m.x572 + m.x588 + m.x604 + m.x620 + m.x636 <= 1) m.c32 = Constraint(expr= m.x141 + m.x157 + m.x173 + m.x189 + m.x205 + m.x221 + m.x237 + m.x253 + m.x269 + m.x285 + m.x301 + m.x317 + m.x333 + m.x349 + m.x365 + m.x381 + m.x397 + m.x413 + m.x429 + m.x445 + m.x461 + m.x477 + m.x493 + m.x509 + m.x525 + m.x541 + m.x557 + m.x573 + m.x589 + m.x605 + m.x621 + m.x637 <= 1) 
# ------------------------------------------------------------------
# Machine-generated MINLP section: constraints c33-c278, rewritten in
# a data-driven form.  Every constraint created below is scalar and is
# attached to the model under the same attribute name ('c33'...'c278')
# and in the same declaration order as the original flat listing, so
# the resulting Pyomo model is identical.
# ------------------------------------------------------------------

# c33-c36: column-usage limits.  For offsets j = 12..15, the 32
# variables x(130 + j + 16*k), k = 0..31, may sum to at most 1.
# (The earlier constraints c21-c32 follow the same pattern for
# offsets 0..11.)
for _j in range(12, 16):
    setattr(m, 'c%d' % (21 + _j), Constraint(
        expr=sum(getattr(m, 'x%d' % (130 + _j + 16 * _k))
                 for _k in range(32)) <= 1))

# c37-c52: timing/precedence constraints.  Kept verbatim: the
# generated coefficients carry floating-point round-off artefacts
# (e.g. 0.0999999999999999, 0.0499999999999998) that recomputing them
# in a loop would not reproduce bit-for-bit.
m.c37 = Constraint(expr= 0.95*m.x130 + 0.85*m.x146 + 0.85*m.x162
    + 0.75*m.x178 + 0.75*m.x194 + 0.75*m.x210 + 0.65*m.x226
    + 0.65*m.x242 + 0.65*m.x258 + 0.65*m.x274 + 0.55*m.x290
    + 0.55*m.x306 + 0.55*m.x322 + 0.55*m.x338 + 0.45*m.x354
    + 0.45*m.x370 + 0.45*m.x386 + 0.45*m.x402 + 0.35*m.x418
    + 0.35*m.x434 + 0.35*m.x450 + 0.35*m.x466 + 0.25*m.x482
    + 0.25*m.x498 + 0.25*m.x514 + 0.25*m.x530 + 0.15*m.x546
    + 0.15*m.x562 + 0.15*m.x578 + 0.05*m.x594 + 0.05*m.x610
    - 0.05*m.x626 <= 0)

m.c38 = Constraint(expr= 0.9*m.x131 + 0.8*m.x147 + 0.8*m.x163
    + 0.7*m.x179 + 0.7*m.x195 + 0.7*m.x211 + 0.6*m.x227 + 0.6*m.x243
    + 0.6*m.x259 + 0.6*m.x275 + 0.5*m.x291 + 0.5*m.x307 + 0.5*m.x323
    + 0.5*m.x339 + 0.4*m.x355 + 0.4*m.x371 + 0.4*m.x387 + 0.4*m.x403
    + 0.3*m.x419 + 0.3*m.x435 + 0.3*m.x451 + 0.3*m.x467 + 0.2*m.x483
    + 0.2*m.x499 + 0.2*m.x515 + 0.2*m.x531
    + 0.0999999999999999*m.x547 + 0.0999999999999999*m.x563
    + 0.0999999999999999*m.x579 - 0.1*m.x627 <= 0)

m.c39 = Constraint(expr= 0.85*m.x132 + 0.75*m.x148 + 0.75*m.x164
    + 0.65*m.x180 + 0.65*m.x196 + 0.65*m.x212 + 0.55*m.x228
    + 0.55*m.x244 + 0.55*m.x260 + 0.55*m.x276 + 0.45*m.x292
    + 0.45*m.x308 + 0.45*m.x324 + 0.45*m.x340 + 0.35*m.x356
    + 0.35*m.x372 + 0.35*m.x388 + 0.35*m.x404 + 0.25*m.x420
    + 0.25*m.x436 + 0.25*m.x452 + 0.25*m.x468 + 0.15*m.x484
    + 0.15*m.x500 + 0.15*m.x516 + 0.15*m.x532 + 0.05*m.x548
    + 0.05*m.x564 + 0.05*m.x580 - 0.0499999999999998*m.x596
    - 0.0499999999999998*m.x612 - 0.15*m.x628 <= 0)

m.c40 = Constraint(expr= 0.8*m.x133 + 0.7*m.x149 + 0.7*m.x165
    + 0.6*m.x181 + 0.6*m.x197 + 0.6*m.x213 + 0.5*m.x229 + 0.5*m.x245
    + 0.5*m.x261 + 0.5*m.x277 + 0.4*m.x293 + 0.4*m.x309 + 0.4*m.x325
    + 0.4*m.x341 + 0.3*m.x357 + 0.3*m.x373 + 0.3*m.x389 + 0.3*m.x405
    + 0.2*m.x421 + 0.2*m.x437 + 0.2*m.x453 + 0.2*m.x469 + 0.1*m.x485
    + 0.1*m.x501 + 0.1*m.x517 + 0.1*m.x533
    - 0.0999999999999999*m.x597 - 0.0999999999999999*m.x613
    - 0.2*m.x629 <= 0)

m.c41 = Constraint(expr= 0.75*m.x134 + 0.65*m.x150 + 0.65*m.x166
    + 0.55*m.x182 + 0.55*m.x198 + 0.55*m.x214 + 0.45*m.x230
    + 0.45*m.x246 + 0.45*m.x262 + 0.45*m.x278 + 0.35*m.x294
    + 0.35*m.x310 + 0.35*m.x326 + 0.35*m.x342 + 0.25*m.x358
    + 0.25*m.x374 + 0.25*m.x390 + 0.25*m.x406 + 0.15*m.x422
    + 0.15*m.x438 + 0.15*m.x454 + 0.15*m.x470 + 0.05*m.x486
    + 0.05*m.x502 + 0.05*m.x518 + 0.05*m.x534 - 0.05*m.x550
    - 0.05*m.x566 - 0.05*m.x582 - 0.15*m.x598 - 0.15*m.x614
    - 0.25*m.x630 <= 0)

m.c42 = Constraint(expr= 0.7*m.x135 + 0.6*m.x151 + 0.6*m.x167
    + 0.5*m.x183 + 0.5*m.x199 + 0.5*m.x215 + 0.4*m.x231 + 0.4*m.x247
    + 0.4*m.x263 + 0.4*m.x279 + 0.3*m.x295 + 0.3*m.x311 + 0.3*m.x327
    + 0.3*m.x343 + 0.2*m.x359 + 0.2*m.x375 + 0.2*m.x391 + 0.2*m.x407
    + 0.0999999999999999*m.x423 + 0.0999999999999999*m.x439
    + 0.0999999999999999*m.x455 + 0.0999999999999999*m.x471
    - 0.1*m.x551 - 0.1*m.x567 - 0.1*m.x583 - 0.2*m.x599 - 0.2*m.x615
    - 0.3*m.x631 <= 0)

m.c43 = Constraint(expr= 0.65*m.x136 + 0.55*m.x152 + 0.55*m.x168
    + 0.45*m.x184 + 0.45*m.x200 + 0.45*m.x216 + 0.35*m.x232
    + 0.35*m.x248 + 0.35*m.x264 + 0.35*m.x280 + 0.25*m.x296
    + 0.25*m.x312 + 0.25*m.x328 + 0.25*m.x344 + 0.15*m.x360
    + 0.15*m.x376 + 0.15*m.x392 + 0.15*m.x408
    + 0.0499999999999998*m.x424 + 0.0499999999999998*m.x440
    + 0.0499999999999998*m.x456 + 0.0499999999999998*m.x472
    - 0.05*m.x488 - 0.05*m.x504 - 0.05*m.x520 - 0.05*m.x536
    - 0.15*m.x552 - 0.15*m.x568 - 0.15*m.x584 - 0.25*m.x600
    - 0.25*m.x616 - 0.35*m.x632 <= 0)

m.c44 = Constraint(expr= 0.6*m.x137 + 0.5*m.x153 + 0.5*m.x169
    + 0.4*m.x185 + 0.4*m.x201 + 0.4*m.x217 + 0.3*m.x233 + 0.3*m.x249
    + 0.3*m.x265 + 0.3*m.x281 + 0.2*m.x297 + 0.2*m.x313 + 0.2*m.x329
    + 0.2*m.x345 + 0.1*m.x361 + 0.1*m.x377 + 0.1*m.x393 + 0.1*m.x409
    - 0.0999999999999999*m.x489 - 0.0999999999999999*m.x505
    - 0.0999999999999999*m.x521 - 0.0999999999999999*m.x537
    - 0.2*m.x553 - 0.2*m.x569 - 0.2*m.x585 - 0.3*m.x601 - 0.3*m.x617
    - 0.4*m.x633 <= 0)

m.c45 = Constraint(expr= 0.55*m.x138 + 0.45*m.x154 + 0.45*m.x170
    + 0.35*m.x186 + 0.35*m.x202 + 0.35*m.x218 + 0.25*m.x234
    + 0.25*m.x250 + 0.25*m.x266 + 0.25*m.x282 + 0.15*m.x298
    + 0.15*m.x314 + 0.15*m.x330 + 0.15*m.x346 + 0.05*m.x362
    + 0.05*m.x378 + 0.05*m.x394 + 0.05*m.x410 - 0.05*m.x426
    - 0.05*m.x442 - 0.05*m.x458 - 0.05*m.x474 - 0.15*m.x490
    - 0.15*m.x506 - 0.15*m.x522 - 0.15*m.x538 - 0.25*m.x554
    - 0.25*m.x570 - 0.25*m.x586 - 0.35*m.x602 - 0.35*m.x618
    - 0.45*m.x634 <= 0)

m.c46 = Constraint(expr= 0.5*m.x139 + 0.4*m.x155 + 0.4*m.x171
    + 0.3*m.x187 + 0.3*m.x203 + 0.3*m.x219 + 0.2*m.x235 + 0.2*m.x251
    + 0.2*m.x267 + 0.2*m.x283 + 0.1*m.x299 + 0.1*m.x315 + 0.1*m.x331
    + 0.1*m.x347 - 0.1*m.x427 - 0.1*m.x443 - 0.1*m.x459 - 0.1*m.x475
    - 0.2*m.x491 - 0.2*m.x507 - 0.2*m.x523 - 0.2*m.x539 - 0.3*m.x555
    - 0.3*m.x571 - 0.3*m.x587 - 0.4*m.x603 - 0.4*m.x619
    - 0.5*m.x635 <= 0)

m.c47 = Constraint(expr= 0.45*m.x140 + 0.35*m.x156 + 0.35*m.x172
    + 0.25*m.x188 + 0.25*m.x204 + 0.25*m.x220 + 0.15*m.x236
    + 0.15*m.x252 + 0.15*m.x268 + 0.15*m.x284 + 0.05*m.x300
    + 0.05*m.x316 + 0.05*m.x332 + 0.05*m.x348 - 0.05*m.x364
    - 0.05*m.x380 - 0.05*m.x396 - 0.05*m.x412 - 0.15*m.x428
    - 0.15*m.x444 - 0.15*m.x460 - 0.15*m.x476 - 0.25*m.x492
    - 0.25*m.x508 - 0.25*m.x524 - 0.25*m.x540 - 0.35*m.x556
    - 0.35*m.x572 - 0.35*m.x588 - 0.45*m.x604 - 0.45*m.x620
    - 0.55*m.x636 <= 0)

m.c48 = Constraint(expr= 0.4*m.x141 + 0.3*m.x157 + 0.3*m.x173
    + 0.2*m.x189 + 0.2*m.x205 + 0.2*m.x221 + 0.0999999999999999*m.x237
    + 0.0999999999999999*m.x253 + 0.0999999999999999*m.x269
    + 0.0999999999999999*m.x285 - 0.1*m.x365 - 0.1*m.x381
    - 0.1*m.x397 - 0.1*m.x413 - 0.2*m.x429 - 0.2*m.x445 - 0.2*m.x461
    - 0.2*m.x477 - 0.3*m.x493 - 0.3*m.x509 - 0.3*m.x525 - 0.3*m.x541
    - 0.4*m.x557 - 0.4*m.x573 - 0.4*m.x589 - 0.5*m.x605 - 0.5*m.x621
    - 0.6*m.x637 <= 0)

m.c49 = Constraint(expr= 0.35*m.x142 + 0.25*m.x158 + 0.25*m.x174
    + 0.15*m.x190 + 0.15*m.x206 + 0.15*m.x222 + 0.05*m.x238
    + 0.05*m.x254 + 0.05*m.x270 + 0.05*m.x286
    - 0.0499999999999998*m.x302 - 0.0499999999999998*m.x318
    - 0.0499999999999998*m.x334 - 0.0499999999999998*m.x350
    - 0.15*m.x366 - 0.15*m.x382 - 0.15*m.x398 - 0.15*m.x414
    - 0.25*m.x430 - 0.25*m.x446 - 0.25*m.x462 - 0.25*m.x478
    - 0.35*m.x494 - 0.35*m.x510 - 0.35*m.x526 - 0.35*m.x542
    - 0.45*m.x558 - 0.45*m.x574 - 0.45*m.x590 - 0.55*m.x606
    - 0.55*m.x622 - 0.65*m.x638 <= 0)

m.c50 = Constraint(expr= 0.3*m.x143 + 0.2*m.x159 + 0.2*m.x175
    + 0.1*m.x191 + 0.1*m.x207 + 0.1*m.x223 - 0.0999999999999999*m.x303
    - 0.0999999999999999*m.x319 - 0.0999999999999999*m.x335
    - 0.0999999999999999*m.x351 - 0.2*m.x367 - 0.2*m.x383
    - 0.2*m.x399 - 0.2*m.x415 - 0.3*m.x431 - 0.3*m.x447 - 0.3*m.x463
    - 0.3*m.x479 - 0.4*m.x495 - 0.4*m.x511 - 0.4*m.x527 - 0.4*m.x543
    - 0.5*m.x559 - 0.5*m.x575 - 0.5*m.x591 - 0.6*m.x607 - 0.6*m.x623
    - 0.7*m.x639 <= 0)

m.c51 = Constraint(expr= 0.25*m.x144 + 0.15*m.x160 + 0.15*m.x176
    + 0.05*m.x192 + 0.05*m.x208 + 0.05*m.x224 - 0.05*m.x240
    - 0.05*m.x256 - 0.05*m.x272 - 0.05*m.x288 - 0.15*m.x304
    - 0.15*m.x320 - 0.15*m.x336 - 0.15*m.x352 - 0.25*m.x368
    - 0.25*m.x384 - 0.25*m.x400 - 0.25*m.x416 - 0.35*m.x432
    - 0.35*m.x448 - 0.35*m.x464 - 0.35*m.x480 - 0.45*m.x496
    - 0.45*m.x512 - 0.45*m.x528 - 0.45*m.x544 - 0.55*m.x560
    - 0.55*m.x576 - 0.55*m.x592 - 0.65*m.x608 - 0.65*m.x624
    - 0.75*m.x640 <= 0)

m.c52 = Constraint(expr= 0.2*m.x145 + 0.0999999999999999*m.x161
    + 0.0999999999999999*m.x177 - 0.1*m.x241 - 0.1*m.x257
    - 0.1*m.x273 - 0.1*m.x289 - 0.2*m.x305 - 0.2*m.x321 - 0.2*m.x337
    - 0.2*m.x353 - 0.3*m.x369 - 0.3*m.x385 - 0.3*m.x401 - 0.3*m.x417
    - 0.4*m.x433 - 0.4*m.x449 - 0.4*m.x465 - 0.4*m.x481 - 0.5*m.x497
    - 0.5*m.x513 - 0.5*m.x529 - 0.5*m.x545 - 0.6*m.x561 - 0.6*m.x577
    - 0.6*m.x593 - 0.7*m.x609 - 0.7*m.x625 - 0.8*m.x641 <= 0)

# c53-c60: select-exactly-one.  Each consecutive block of 16 variables
# (x2..x17, x18..x33, ..., x114..x129) must sum to exactly 1.
for _r in range(8):
    setattr(m, 'c%d' % (53 + _r), Constraint(
        expr=sum(getattr(m, 'x%d' % (2 + 16 * _r + _i))
                 for _i in range(16)) == 1))

# c61-c278: bilinear linking constraints of the uniform form
#     -x(factor + i) * x(mult) + x(target + i) == 0,   i = 0..count-1
# i.e. each "target" variable equals the product of a "factor"
# variable with a group-wide multiplier variable.  Each tuple below is
# (first constraint id, factor base, multiplier id, target base,
# count).  The final group is deliberately stopped at 10 members
# because its remaining constraints (c279-c284) are declared after
# this section in the original listing.
_BILINEAR_GROUPS = (
    (61, 114, 642, 130, 16),
    (77, 98, 643, 146, 16),
    (93, 114, 644, 162, 16),
    (109, 82, 645, 178, 16),
    (125, 98, 646, 194, 16),
    (141, 114, 647, 210, 16),
    (157, 66, 648, 226, 16),
    (173, 82, 649, 242, 16),
    (189, 98, 650, 258, 16),
    (205, 114, 651, 274, 16),
    (221, 50, 652, 290, 16),
    (237, 66, 653, 306, 16),
    (253, 82, 654, 322, 16),
    (269, 98, 655, 338, 10),
)
for _cid, _fac, _mul, _tgt, _cnt in _BILINEAR_GROUPS:
    _z = getattr(m, 'x%d' % _mul)
    for _i in range(_cnt):
        setattr(m, 'c%d' % (_cid + _i), Constraint(
            expr=-getattr(m, 'x%d' % (_fac + _i)) * _z
                 + getattr(m, 'x%d' % (_tgt + _i)) == 0))
m.c279 = Constraint(expr=-m.x108*m.x655 + m.x348 == 0) m.c280 = Constraint(expr=-m.x109*m.x655 + m.x349 == 0) m.c281 = Constraint(expr=-m.x110*m.x655 + m.x350 == 0) m.c282 = Constraint(expr=-m.x111*m.x655 + m.x351 == 0) m.c283 = Constraint(expr=-m.x112*m.x655 + m.x352 == 0) m.c284 = Constraint(expr=-m.x113*m.x655 + m.x353 == 0) m.c285 = Constraint(expr=-m.x34*m.x656 + m.x354 == 0) m.c286 = Constraint(expr=-m.x35*m.x656 + m.x355 == 0) m.c287 = Constraint(expr=-m.x36*m.x656 + m.x356 == 0) m.c288 = Constraint(expr=-m.x37*m.x656 + m.x357 == 0) m.c289 = Constraint(expr=-m.x38*m.x656 + m.x358 == 0) m.c290 = Constraint(expr=-m.x39*m.x656 + m.x359 == 0) m.c291 = Constraint(expr=-m.x40*m.x656 + m.x360 == 0) m.c292 = Constraint(expr=-m.x41*m.x656 + m.x361 == 0) m.c293 = Constraint(expr=-m.x42*m.x656 + m.x362 == 0) m.c294 = Constraint(expr=-m.x43*m.x656 + m.x363 == 0) m.c295 = Constraint(expr=-m.x44*m.x656 + m.x364 == 0) m.c296 = Constraint(expr=-m.x45*m.x656 + m.x365 == 0) m.c297 = Constraint(expr=-m.x46*m.x656 + m.x366 == 0) m.c298 = Constraint(expr=-m.x47*m.x656 + m.x367 == 0) m.c299 = Constraint(expr=-m.x48*m.x656 + m.x368 == 0) m.c300 = Constraint(expr=-m.x49*m.x656 + m.x369 == 0) m.c301 = Constraint(expr=-m.x50*m.x657 + m.x370 == 0) m.c302 = Constraint(expr=-m.x51*m.x657 + m.x371 == 0) m.c303 = Constraint(expr=-m.x52*m.x657 + m.x372 == 0) m.c304 = Constraint(expr=-m.x53*m.x657 + m.x373 == 0) m.c305 = Constraint(expr=-m.x54*m.x657 + m.x374 == 0) m.c306 = Constraint(expr=-m.x55*m.x657 + m.x375 == 0) m.c307 = Constraint(expr=-m.x56*m.x657 + m.x376 == 0) m.c308 = Constraint(expr=-m.x57*m.x657 + m.x377 == 0) m.c309 = Constraint(expr=-m.x58*m.x657 + m.x378 == 0) m.c310 = Constraint(expr=-m.x59*m.x657 + m.x379 == 0) m.c311 = Constraint(expr=-m.x60*m.x657 + m.x380 == 0) m.c312 = Constraint(expr=-m.x61*m.x657 + m.x381 == 0) m.c313 = Constraint(expr=-m.x62*m.x657 + m.x382 == 0) m.c314 = Constraint(expr=-m.x63*m.x657 + m.x383 == 0) m.c315 = Constraint(expr=-m.x64*m.x657 + m.x384 
== 0) m.c316 = Constraint(expr=-m.x65*m.x657 + m.x385 == 0) m.c317 = Constraint(expr=-m.x66*m.x658 + m.x386 == 0) m.c318 = Constraint(expr=-m.x67*m.x658 + m.x387 == 0) m.c319 = Constraint(expr=-m.x68*m.x658 + m.x388 == 0) m.c320 = Constraint(expr=-m.x69*m.x658 + m.x389 == 0) m.c321 = Constraint(expr=-m.x70*m.x658 + m.x390 == 0) m.c322 = Constraint(expr=-m.x71*m.x658 + m.x391 == 0) m.c323 = Constraint(expr=-m.x72*m.x658 + m.x392 == 0) m.c324 = Constraint(expr=-m.x73*m.x658 + m.x393 == 0) m.c325 = Constraint(expr=-m.x74*m.x658 + m.x394 == 0) m.c326 = Constraint(expr=-m.x75*m.x658 + m.x395 == 0) m.c327 = Constraint(expr=-m.x76*m.x658 + m.x396 == 0) m.c328 = Constraint(expr=-m.x77*m.x658 + m.x397 == 0) m.c329 = Constraint(expr=-m.x78*m.x658 + m.x398 == 0) m.c330 = Constraint(expr=-m.x79*m.x658 + m.x399 == 0) m.c331 = Constraint(expr=-m.x80*m.x658 + m.x400 == 0) m.c332 = Constraint(expr=-m.x81*m.x658 + m.x401 == 0) m.c333 = Constraint(expr=-m.x82*m.x659 + m.x402 == 0) m.c334 = Constraint(expr=-m.x83*m.x659 + m.x403 == 0) m.c335 = Constraint(expr=-m.x84*m.x659 + m.x404 == 0) m.c336 = Constraint(expr=-m.x85*m.x659 + m.x405 == 0) m.c337 = Constraint(expr=-m.x86*m.x659 + m.x406 == 0) m.c338 = Constraint(expr=-m.x87*m.x659 + m.x407 == 0) m.c339 = Constraint(expr=-m.x88*m.x659 + m.x408 == 0) m.c340 = Constraint(expr=-m.x89*m.x659 + m.x409 == 0) m.c341 = Constraint(expr=-m.x90*m.x659 + m.x410 == 0) m.c342 = Constraint(expr=-m.x91*m.x659 + m.x411 == 0) m.c343 = Constraint(expr=-m.x92*m.x659 + m.x412 == 0) m.c344 = Constraint(expr=-m.x93*m.x659 + m.x413 == 0) m.c345 = Constraint(expr=-m.x94*m.x659 + m.x414 == 0) m.c346 = Constraint(expr=-m.x95*m.x659 + m.x415 == 0) m.c347 = Constraint(expr=-m.x96*m.x659 + m.x416 == 0) m.c348 = Constraint(expr=-m.x97*m.x659 + m.x417 == 0) m.c349 = Constraint(expr=-m.x18*m.x660 + m.x418 == 0) m.c350 = Constraint(expr=-m.x19*m.x660 + m.x419 == 0) m.c351 = Constraint(expr=-m.x20*m.x660 + m.x420 == 0) m.c352 = Constraint(expr=-m.x21*m.x660 + m.x421 
== 0) m.c353 = Constraint(expr=-m.x22*m.x660 + m.x422 == 0) m.c354 = Constraint(expr=-m.x23*m.x660 + m.x423 == 0) m.c355 = Constraint(expr=-m.x24*m.x660 + m.x424 == 0) m.c356 = Constraint(expr=-m.x25*m.x660 + m.x425 == 0) m.c357 = Constraint(expr=-m.x26*m.x660 + m.x426 == 0) m.c358 = Constraint(expr=-m.x27*m.x660 + m.x427 == 0) m.c359 = Constraint(expr=-m.x28*m.x660 + m.x428 == 0) m.c360 = Constraint(expr=-m.x29*m.x660 + m.x429 == 0) m.c361 = Constraint(expr=-m.x30*m.x660 + m.x430 == 0) m.c362 = Constraint(expr=-m.x31*m.x660 + m.x431 == 0) m.c363 = Constraint(expr=-m.x32*m.x660 + m.x432 == 0) m.c364 = Constraint(expr=-m.x33*m.x660 + m.x433 == 0) m.c365 = Constraint(expr=-m.x34*m.x661 + m.x434 == 0) m.c366 = Constraint(expr=-m.x35*m.x661 + m.x435 == 0) m.c367 = Constraint(expr=-m.x36*m.x661 + m.x436 == 0) m.c368 = Constraint(expr=-m.x37*m.x661 + m.x437 == 0) m.c369 = Constraint(expr=-m.x38*m.x661 + m.x438 == 0) m.c370 = Constraint(expr=-m.x39*m.x661 + m.x439 == 0) m.c371 = Constraint(expr=-m.x40*m.x661 + m.x440 == 0) m.c372 = Constraint(expr=-m.x41*m.x661 + m.x441 == 0) m.c373 = Constraint(expr=-m.x42*m.x661 + m.x442 == 0) m.c374 = Constraint(expr=-m.x43*m.x661 + m.x443 == 0) m.c375 = Constraint(expr=-m.x44*m.x661 + m.x444 == 0) m.c376 = Constraint(expr=-m.x45*m.x661 + m.x445 == 0) m.c377 = Constraint(expr=-m.x46*m.x661 + m.x446 == 0) m.c378 = Constraint(expr=-m.x47*m.x661 + m.x447 == 0) m.c379 = Constraint(expr=-m.x48*m.x661 + m.x448 == 0) m.c380 = Constraint(expr=-m.x49*m.x661 + m.x449 == 0) m.c381 = Constraint(expr=-m.x50*m.x662 + m.x450 == 0) m.c382 = Constraint(expr=-m.x51*m.x662 + m.x451 == 0) m.c383 = Constraint(expr=-m.x52*m.x662 + m.x452 == 0) m.c384 = Constraint(expr=-m.x53*m.x662 + m.x453 == 0) m.c385 = Constraint(expr=-m.x54*m.x662 + m.x454 == 0) m.c386 = Constraint(expr=-m.x55*m.x662 + m.x455 == 0) m.c387 = Constraint(expr=-m.x56*m.x662 + m.x456 == 0) m.c388 = Constraint(expr=-m.x57*m.x662 + m.x457 == 0) m.c389 = Constraint(expr=-m.x58*m.x662 + m.x458 
== 0) m.c390 = Constraint(expr=-m.x59*m.x662 + m.x459 == 0) m.c391 = Constraint(expr=-m.x60*m.x662 + m.x460 == 0) m.c392 = Constraint(expr=-m.x61*m.x662 + m.x461 == 0) m.c393 = Constraint(expr=-m.x62*m.x662 + m.x462 == 0) m.c394 = Constraint(expr=-m.x63*m.x662 + m.x463 == 0) m.c395 = Constraint(expr=-m.x64*m.x662 + m.x464 == 0) m.c396 = Constraint(expr=-m.x65*m.x662 + m.x465 == 0) m.c397 = Constraint(expr=-m.x66*m.x663 + m.x466 == 0) m.c398 = Constraint(expr=-m.x67*m.x663 + m.x467 == 0) m.c399 = Constraint(expr=-m.x68*m.x663 + m.x468 == 0) m.c400 = Constraint(expr=-m.x69*m.x663 + m.x469 == 0) m.c401 = Constraint(expr=-m.x70*m.x663 + m.x470 == 0) m.c402 = Constraint(expr=-m.x71*m.x663 + m.x471 == 0) m.c403 = Constraint(expr=-m.x72*m.x663 + m.x472 == 0) m.c404 = Constraint(expr=-m.x73*m.x663 + m.x473 == 0) m.c405 = Constraint(expr=-m.x74*m.x663 + m.x474 == 0) m.c406 = Constraint(expr=-m.x75*m.x663 + m.x475 == 0) m.c407 = Constraint(expr=-m.x76*m.x663 + m.x476 == 0) m.c408 = Constraint(expr=-m.x77*m.x663 + m.x477 == 0) m.c409 = Constraint(expr=-m.x78*m.x663 + m.x478 == 0) m.c410 = Constraint(expr=-m.x79*m.x663 + m.x479 == 0) m.c411 = Constraint(expr=-m.x80*m.x663 + m.x480 == 0) m.c412 = Constraint(expr=-m.x81*m.x663 + m.x481 == 0) m.c413 = Constraint(expr=-m.x2*m.x664 + m.x482 == 0) m.c414 = Constraint(expr=-m.x3*m.x664 + m.x483 == 0) m.c415 = Constraint(expr=-m.x4*m.x664 + m.x484 == 0) m.c416 = Constraint(expr=-m.x5*m.x664 + m.x485 == 0) m.c417 = Constraint(expr=-m.x6*m.x664 + m.x486 == 0) m.c418 = Constraint(expr=-m.x7*m.x664 + m.x487 == 0) m.c419 = Constraint(expr=-m.x8*m.x664 + m.x488 == 0) m.c420 = Constraint(expr=-m.x9*m.x664 + m.x489 == 0) m.c421 = Constraint(expr=-m.x10*m.x664 + m.x490 == 0) m.c422 = Constraint(expr=-m.x11*m.x664 + m.x491 == 0) m.c423 = Constraint(expr=-m.x12*m.x664 + m.x492 == 0) m.c424 = Constraint(expr=-m.x13*m.x664 + m.x493 == 0) m.c425 = Constraint(expr=-m.x14*m.x664 + m.x494 == 0) m.c426 = Constraint(expr=-m.x15*m.x664 + m.x495 == 0) 
m.c427 = Constraint(expr=-m.x16*m.x664 + m.x496 == 0) m.c428 = Constraint(expr=-m.x17*m.x664 + m.x497 == 0) m.c429 = Constraint(expr=-m.x18*m.x665 + m.x498 == 0) m.c430 = Constraint(expr=-m.x19*m.x665 + m.x499 == 0) m.c431 = Constraint(expr=-m.x20*m.x665 + m.x500 == 0) m.c432 = Constraint(expr=-m.x21*m.x665 + m.x501 == 0) m.c433 = Constraint(expr=-m.x22*m.x665 + m.x502 == 0) m.c434 = Constraint(expr=-m.x23*m.x665 + m.x503 == 0) m.c435 = Constraint(expr=-m.x24*m.x665 + m.x504 == 0) m.c436 = Constraint(expr=-m.x25*m.x665 + m.x505 == 0) m.c437 = Constraint(expr=-m.x26*m.x665 + m.x506 == 0) m.c438 = Constraint(expr=-m.x27*m.x665 + m.x507 == 0) m.c439 = Constraint(expr=-m.x28*m.x665 + m.x508 == 0) m.c440 = Constraint(expr=-m.x29*m.x665 + m.x509 == 0) m.c441 = Constraint(expr=-m.x30*m.x665 + m.x510 == 0) m.c442 = Constraint(expr=-m.x31*m.x665 + m.x511 == 0) m.c443 = Constraint(expr=-m.x32*m.x665 + m.x512 == 0) m.c444 = Constraint(expr=-m.x33*m.x665 + m.x513 == 0) m.c445 = Constraint(expr=-m.x34*m.x666 + m.x514 == 0) m.c446 = Constraint(expr=-m.x35*m.x666 + m.x515 == 0) m.c447 = Constraint(expr=-m.x36*m.x666 + m.x516 == 0) m.c448 = Constraint(expr=-m.x37*m.x666 + m.x517 == 0) m.c449 = Constraint(expr=-m.x38*m.x666 + m.x518 == 0) m.c450 = Constraint(expr=-m.x39*m.x666 + m.x519 == 0) m.c451 = Constraint(expr=-m.x40*m.x666 + m.x520 == 0) m.c452 = Constraint(expr=-m.x41*m.x666 + m.x521 == 0) m.c453 = Constraint(expr=-m.x42*m.x666 + m.x522 == 0) m.c454 = Constraint(expr=-m.x43*m.x666 + m.x523 == 0) m.c455 = Constraint(expr=-m.x44*m.x666 + m.x524 == 0) m.c456 = Constraint(expr=-m.x45*m.x666 + m.x525 == 0) m.c457 = Constraint(expr=-m.x46*m.x666 + m.x526 == 0) m.c458 = Constraint(expr=-m.x47*m.x666 + m.x527 == 0) m.c459 = Constraint(expr=-m.x48*m.x666 + m.x528 == 0) m.c460 = Constraint(expr=-m.x49*m.x666 + m.x529 == 0) m.c461 = Constraint(expr=-m.x50*m.x667 + m.x530 == 0) m.c462 = Constraint(expr=-m.x51*m.x667 + m.x531 == 0) m.c463 = Constraint(expr=-m.x52*m.x667 + m.x532 == 0) 
m.c464 = Constraint(expr=-m.x53*m.x667 + m.x533 == 0) m.c465 = Constraint(expr=-m.x54*m.x667 + m.x534 == 0) m.c466 = Constraint(expr=-m.x55*m.x667 + m.x535 == 0) m.c467 = Constraint(expr=-m.x56*m.x667 + m.x536 == 0) m.c468 = Constraint(expr=-m.x57*m.x667 + m.x537 == 0) m.c469 = Constraint(expr=-m.x58*m.x667 + m.x538 == 0) m.c470 = Constraint(expr=-m.x59*m.x667 + m.x539 == 0) m.c471 = Constraint(expr=-m.x60*m.x667 + m.x540 == 0) m.c472 = Constraint(expr=-m.x61*m.x667 + m.x541 == 0) m.c473 = Constraint(expr=-m.x62*m.x667 + m.x542 == 0) m.c474 = Constraint(expr=-m.x63*m.x667 + m.x543 == 0) m.c475 = Constraint(expr=-m.x64*m.x667 + m.x544 == 0) m.c476 = Constraint(expr=-m.x65*m.x667 + m.x545 == 0) m.c477 = Constraint(expr=-m.x2*m.x668 + m.x546 == 0) m.c478 = Constraint(expr=-m.x3*m.x668 + m.x547 == 0) m.c479 = Constraint(expr=-m.x4*m.x668 + m.x548 == 0) m.c480 = Constraint(expr=-m.x5*m.x668 + m.x549 == 0) m.c481 = Constraint(expr=-m.x6*m.x668 + m.x550 == 0) m.c482 = Constraint(expr=-m.x7*m.x668 + m.x551 == 0) m.c483 = Constraint(expr=-m.x8*m.x668 + m.x552 == 0) m.c484 = Constraint(expr=-m.x9*m.x668 + m.x553 == 0) m.c485 = Constraint(expr=-m.x10*m.x668 + m.x554 == 0) m.c486 = Constraint(expr=-m.x11*m.x668 + m.x555 == 0) m.c487 = Constraint(expr=-m.x12*m.x668 + m.x556 == 0) m.c488 = Constraint(expr=-m.x13*m.x668 + m.x557 == 0) m.c489 = Constraint(expr=-m.x14*m.x668 + m.x558 == 0) m.c490 = Constraint(expr=-m.x15*m.x668 + m.x559 == 0) m.c491 = Constraint(expr=-m.x16*m.x668 + m.x560 == 0) m.c492 = Constraint(expr=-m.x17*m.x668 + m.x561 == 0) m.c493 = Constraint(expr=-m.x18*m.x669 + m.x562 == 0) m.c494 = Constraint(expr=-m.x19*m.x669 + m.x563 == 0) m.c495 = Constraint(expr=-m.x20*m.x669 + m.x564 == 0) m.c496 = Constraint(expr=-m.x21*m.x669 + m.x565 == 0) m.c497 = Constraint(expr=-m.x22*m.x669 + m.x566 == 0) m.c498 = Constraint(expr=-m.x23*m.x669 + m.x567 == 0) m.c499 = Constraint(expr=-m.x24*m.x669 + m.x568 == 0) m.c500 = Constraint(expr=-m.x25*m.x669 + m.x569 == 0) m.c501 = 
Constraint(expr=-m.x26*m.x669 + m.x570 == 0) m.c502 = Constraint(expr=-m.x27*m.x669 + m.x571 == 0) m.c503 = Constraint(expr=-m.x28*m.x669 + m.x572 == 0) m.c504 = Constraint(expr=-m.x29*m.x669 + m.x573 == 0) m.c505 = Constraint(expr=-m.x30*m.x669 + m.x574 == 0) m.c506 = Constraint(expr=-m.x31*m.x669 + m.x575 == 0) m.c507 = Constraint(expr=-m.x32*m.x669 + m.x576 == 0) m.c508 = Constraint(expr=-m.x33*m.x669 + m.x577 == 0) m.c509 = Constraint(expr=-m.x34*m.x670 + m.x578 == 0) m.c510 = Constraint(expr=-m.x35*m.x670 + m.x579 == 0) m.c511 = Constraint(expr=-m.x36*m.x670 + m.x580 == 0) m.c512 = Constraint(expr=-m.x37*m.x670 + m.x581 == 0) m.c513 = Constraint(expr=-m.x38*m.x670 + m.x582 == 0) m.c514 = Constraint(expr=-m.x39*m.x670 + m.x583 == 0) m.c515 = Constraint(expr=-m.x40*m.x670 + m.x584 == 0) m.c516 = Constraint(expr=-m.x41*m.x670 + m.x585 == 0) m.c517 = Constraint(expr=-m.x42*m.x670 + m.x586 == 0) m.c518 = Constraint(expr=-m.x43*m.x670 + m.x587 == 0) m.c519 = Constraint(expr=-m.x44*m.x670 + m.x588 == 0) m.c520 = Constraint(expr=-m.x45*m.x670 + m.x589 == 0) m.c521 = Constraint(expr=-m.x46*m.x670 + m.x590 == 0) m.c522 = Constraint(expr=-m.x47*m.x670 + m.x591 == 0) m.c523 = Constraint(expr=-m.x48*m.x670 + m.x592 == 0) m.c524 = Constraint(expr=-m.x49*m.x670 + m.x593 == 0) m.c525 = Constraint(expr=-m.x2*m.x671 + m.x594 == 0) m.c526 = Constraint(expr=-m.x3*m.x671 + m.x595 == 0) m.c527 = Constraint(expr=-m.x4*m.x671 + m.x596 == 0) m.c528 = Constraint(expr=-m.x5*m.x671 + m.x597 == 0) m.c529 = Constraint(expr=-m.x6*m.x671 + m.x598 == 0) m.c530 = Constraint(expr=-m.x7*m.x671 + m.x599 == 0) m.c531 = Constraint(expr=-m.x8*m.x671 + m.x600 == 0) m.c532 = Constraint(expr=-m.x9*m.x671 + m.x601 == 0) m.c533 = Constraint(expr=-m.x10*m.x671 + m.x602 == 0) m.c534 = Constraint(expr=-m.x11*m.x671 + m.x603 == 0) m.c535 = Constraint(expr=-m.x12*m.x671 + m.x604 == 0) m.c536 = Constraint(expr=-m.x13*m.x671 + m.x605 == 0) m.c537 = Constraint(expr=-m.x14*m.x671 + m.x606 == 0) m.c538 = 
Constraint(expr=-m.x15*m.x671 + m.x607 == 0) m.c539 = Constraint(expr=-m.x16*m.x671 + m.x608 == 0) m.c540 = Constraint(expr=-m.x17*m.x671 + m.x609 == 0) m.c541 = Constraint(expr=-m.x18*m.x672 + m.x610 == 0) m.c542 = Constraint(expr=-m.x19*m.x672 + m.x611 == 0) m.c543 = Constraint(expr=-m.x20*m.x672 + m.x612 == 0) m.c544 = Constraint(expr=-m.x21*m.x672 + m.x613 == 0) m.c545 = Constraint(expr=-m.x22*m.x672 + m.x614 == 0) m.c546 = Constraint(expr=-m.x23*m.x672 + m.x615 == 0) m.c547 = Constraint(expr=-m.x24*m.x672 + m.x616 == 0) m.c548 = Constraint(expr=-m.x25*m.x672 + m.x617 == 0) m.c549 = Constraint(expr=-m.x26*m.x672 + m.x618 == 0) m.c550 = Constraint(expr=-m.x27*m.x672 + m.x619 == 0) m.c551 = Constraint(expr=-m.x28*m.x672 + m.x620 == 0) m.c552 = Constraint(expr=-m.x29*m.x672 + m.x621 == 0) m.c553 = Constraint(expr=-m.x30*m.x672 + m.x622 == 0) m.c554 = Constraint(expr=-m.x31*m.x672 + m.x623 == 0) m.c555 = Constraint(expr=-m.x32*m.x672 + m.x624 == 0) m.c556 = Constraint(expr=-m.x33*m.x672 + m.x625 == 0) m.c557 = Constraint(expr=-m.x2*m.x673 + m.x626 == 0) m.c558 = Constraint(expr=-m.x3*m.x673 + m.x627 == 0) m.c559 = Constraint(expr=-m.x4*m.x673 + m.x628 == 0) m.c560 = Constraint(expr=-m.x5*m.x673 + m.x629 == 0) m.c561 = Constraint(expr=-m.x6*m.x673 + m.x630 == 0) m.c562 = Constraint(expr=-m.x7*m.x673 + m.x631 == 0) m.c563 = Constraint(expr=-m.x8*m.x673 + m.x632 == 0) m.c564 = Constraint(expr=-m.x9*m.x673 + m.x633 == 0) m.c565 = Constraint(expr=-m.x10*m.x673 + m.x634 == 0) m.c566 = Constraint(expr=-m.x11*m.x673 + m.x635 == 0) m.c567 = Constraint(expr=-m.x12*m.x673 + m.x636 == 0) m.c568 = Constraint(expr=-m.x13*m.x673 + m.x637 == 0) m.c569 = Constraint(expr=-m.x14*m.x673 + m.x638 == 0) m.c570 = Constraint(expr=-m.x15*m.x673 + m.x639 == 0) m.c571 = Constraint(expr=-m.x16*m.x673 + m.x640 == 0) m.c572 = Constraint(expr=-m.x17*m.x673 + m.x641 == 0)
en
0.736804
# NLP written by GAMS Convert at 04/21/18 13:53:10 # # Equation counts # Total E G L N X C B # 572 521 0 51 0 0 0 0 # # Variable counts # x b i s1s s2s sc si # Total cont binary integer sos1 sos2 scont sint # 673 673 0 0 0 0 0 0 # FX 0 0 0 0 0 0 0 0 # # Nonzero counts # Total const NL DLL # 4171 3147 1024 0 # # Reformulation has removed 1 variable and 1 equation
1.73176
2
agents/fitness/mode_nil.py
gvrooyen/SocialLearning
1
6624594
<filename>agents/fitness/mode_nil.py # Automatically rendered agent code from moves import * import math import random last_state = None last_state_matrix = None def move(roundsAlive, repertoire, historyRounds, historyMoves, historyActs, historyPayoffs, historyDemes, currentDeme, canChooseModel, canPlayRefine, multipleDemes): def PioneeringBi_done(entryRound): assert entryRound == 0 # Exit condition 1: The agent is a pioneer, and N_rounds rounds have elapsed if (roundsAlive >= 12) and (historyActs[0] == -1): return (1,12) # Exit condition 2: We've tested, and the agent is not a pioneer elif (len(historyActs) > 0) and (historyActs[0] > -1): return (2,roundsAlive) # Otherwise, remain in the current state else: return 0 def ExploitGreedy_done(entryRound): try: idx_entryRound = historyRounds.index(entryRound) except ValueError: # We haven't made a move yet, give us a chance first! return False if (roundsAlive <= entryRound): # No move yet return False else: initial_payoff = historyPayoffs[idx_entryRound+1] result = False for (change_round,payoff) in zip(historyRounds[idx_entryRound+1:],historyPayoffs[idx_entryRound+1:]): if payoff < initial_payoff: result = True break if result == True: #print ("Entered at round %d, changed at round %d. Initial payoff %d, final payoff %d." 
# % (entryRound, change_round, initial_payoff, payoff)) return (1, change_round) else: return False def InnovationBeat_done(entryRound): return False # Terminal trait def DiscreteDistributionG_done(entryRound): return False # Terminal trait def DiscreteDistributionH_done(entryRound): return False # Terminal trait state_matrix = [] state_matrix.append(('PioneeringBi', PioneeringBi_done, [2, 1])) state_matrix.append(('ExploitGreedy', ExploitGreedy_done, [1])) state_matrix.append(('InnovationBeat', InnovationBeat_done, [])) state_matrix.append(('DiscreteDistributionG', DiscreteDistributionG_done, [])) state_matrix.append(('DiscreteDistributionH', DiscreteDistributionH_done, [])) def traverse_states(state_matrix, state_idx = 0, entry_round = 0, recursion_depth = 0): if recursion_depth > 128: raise RuntimeError("Maximum state graph recursion reached (most likely due to an infinite state graph loop") done = state_matrix[state_idx][1](entry_round) if not done: return state_matrix[state_idx][0] else: # Traverse the state graph further by recursion. done[0] gives the number (1,2,3...) of the currently # considered state's output condition. state_matrix[state_idx][2][done[0]-1] translates into the # corresponding output state's index in state_matrix. done[1] is the round at which that next step # started running. return traverse_states(state_matrix, state_matrix[state_idx][2][done[0]-1], done[1], recursion_depth+1) state = traverse_states(state_matrix) if state == 'PioneeringBi': if roundsAlive == 0: return (OBSERVE, ) else: return (INNOVATE, ) elif state == 'ExploitGreedy': # Dead simple. Exploit. The done() method will move us out of here if the payoff ever drops. if len(repertoire) > 0: return (EXPLOIT, max(repertoire, key=repertoire.get)) else: return (INNOVATE,) elif state == 'InnovationBeat': # Firstly, we need to find a sequence of N_Seq OBSERVE actions, to decide which round is most likely to be # a sync round. 
We do this in a greedy way: if we see an OBSERVE round where no models (agents playing EXPLOIT) # were observe, we immediately assume that is a sync round. This has the added effect that pioneers (agents starting # this state in the very first round of a simulation) will start syncing on the first round. if roundsAlive < 2: return (OBSERVE,) else: start_idx = 0 streak_found = False while not streak_found: # Try to find runs of the OBSERVE action. Note that multiple OBSERVE results may occur in a single round, # so we'll need to collapse these later try: first_observe_idx = historyMoves[start_idx:].index(OBSERVE) + start_idx except ValueError: # No OBSERVE actions remain in the history, so we need to create some return (OBSERVE,) observe_payoffs = {} for idx in xrange(first_observe_idx, len(historyMoves)): if historyMoves[idx] == OBSERVE: round = historyRounds[idx] try: observe_payoffs[round] += historyPayoffs[idx] except KeyError: observe_payoffs[round] = historyPayoffs[idx] if len(observe_payoffs) > 8: streak_found = True else: # The OBSERVE streak has ended before it was long enough; look for the next one. start_idx = idx + 1 break else: if not streak_found: # We're midway through an OBSERVE streak; play the next round. return (OBSERVE,) # Efficient trick to obtain both the minimum key and value in a single traversal import operator min_round, min_payoff = min(observe_payoffs.items(), key=operator.itemgetter(1)) # The value of the minimum round allows us to determine at what offset the "innovation beat" occurs # relative to this individual's first round of life. We would like to later calculate # e.g. [-1, 1, 2, 2, 2, 1, 2, 1, 1, 0][(roundsAlive - offset) % 4] to determine what move in the sequence to play # recall that [-1, 1, 2, 2, 2, 1, 2, 1, 1, 0][0] is the INNOVATE round). # # If the min_round was found at round 13, and N_Seq == 4, offset must be 1, so that INNOVATE can # again be played at round 17, because (17 - 1) % 4 is zero. 
offset = (min_round) % 8 # The next thing we should check, is whether we've made the choice to be in group A or group B # yet. We do this by inspecting the moves after the OBSERVE streak (until they run out). The first # move that unambiguously corresponds to a move in one of the sequences (taking into account the # round and offset) is used to pick the sequence. # # Note that it typically doesn't matter if the OBSERVE streak was a coincidence from a previous # state, and that the "unambiguous correspondence" is also coincidental. It will in future associate # this individual with this state by the same analysis. (An exception to this assumption is if # a previous state deliberately plays similar OBSERVE sequences, which may disturb the A/B balance). last_observe_round = max(observe_payoffs.keys()) seq = None for round in xrange(last_observe_round+1, historyRounds[-1]): idx = historyRounds.index(round) s = (round - offset) % 8 m = historyMoves[idx] # It's no use checking for unambiguous correspondence if the sequences play the same move at # this point if [-1, 1, 2, 2, 2, 1, 2, 1, 1, 0][s] != [-1, 0, 1, 1, 1, 2, 1, 1, 1, 1][s]: if m == [-1, 1, 2, 2, 2, 1, 2, 1, 1, 0][s]: seq = [-1, 1, 2, 2, 2, 1, 2, 1, 1, 0] break elif m == [-1, 0, 1, 1, 1, 2, 1, 1, 1, 1][s]: seq = [-1, 0, 1, 1, 1, 2, 1, 1, 1, 1] break else: # Keep on looking pass if not seq: # We didn't find any evidence that we made a choice about a group to belong to yet. Pick one! 
seq = random.choice([[-1, 1, 2, 2, 2, 1, 2, 1, 1, 0], [-1, 0, 1, 1, 1, 2, 1, 1, 1, 1]]) next_move = seq[(roundsAlive - offset + 1) % 8] if next_move == INNOVATE: return (INNOVATE,) elif next_move == OBSERVE: return (OBSERVE,) elif len(repertoire) > 0: if next_move == EXPLOIT: return (EXPLOIT, max(repertoire, key=repertoire.get)) elif next_move == REFINE: if canPlayRefine: return (REFINE, max(repertoire, key=repertoire.get)) else: return (EXPLOIT, max(repertoire, key=repertoire.get)) else: return (INNOVATE,) elif state == 'DiscreteDistributionG': interval = [0.535989128988, 1.0, 1.0, 0.819512178388] for i in xrange(1,4): interval[i] = interval[i-1] + interval[i] # Normalise the intervals if canPlayRefine: interval = [x/interval[-1] for x in interval] else: interval = [x/interval[-2] for x in interval] # If the repertoire is empty, only Pi or Po should be chosen: if len(repertoire) == 0: interval = [x/interval[-3] for x in interval] roll = random.random() if roll <= interval[0]: return (INNOVATE, ) elif roll <= interval[1]: return (OBSERVE, ) elif roll <= interval[2]: return (EXPLOIT, max(repertoire, key=repertoire.get)) elif (roll <= interval[3]) and canPlayRefine: # Add the sanity check in case of rounding errors return (REFINE, max(repertoire, key=repertoire.get)) else: # Catch-all for rounding errors return (EXPLOIT, max(repertoire, key=repertoire.get)) elif state == 'DiscreteDistributionH': interval = [0.358998603928, 0.253280378734, 1.0, 1.0] for i in xrange(1,4): interval[i] = interval[i-1] + interval[i] # Normalise the intervals if canPlayRefine: interval = [x/interval[-1] for x in interval] else: interval = [x/interval[-2] for x in interval] # If the repertoire is empty, only Pi or Po should be chosen: if len(repertoire) == 0: interval = [x/interval[-3] for x in interval] roll = random.random() if roll <= interval[0]: return (INNOVATE, ) elif roll <= interval[1]: return (OBSERVE, ) elif roll <= interval[2]: return (EXPLOIT, max(repertoire, 
key=repertoire.get)) elif (roll <= interval[3]) and canPlayRefine: # Add the sanity check in case of rounding errors return (REFINE, max(repertoire, key=repertoire.get)) else: # Catch-all for rounding errors return (EXPLOIT, max(repertoire, key=repertoire.get)) else: raise AgentError('No such state: %s' % state) def observe_who(exploiterData): random.shuffle(exploiterData) return exploiterData
<filename>agents/fitness/mode_nil.py # Automatically rendered agent code from moves import * import math import random last_state = None last_state_matrix = None def move(roundsAlive, repertoire, historyRounds, historyMoves, historyActs, historyPayoffs, historyDemes, currentDeme, canChooseModel, canPlayRefine, multipleDemes): def PioneeringBi_done(entryRound): assert entryRound == 0 # Exit condition 1: The agent is a pioneer, and N_rounds rounds have elapsed if (roundsAlive >= 12) and (historyActs[0] == -1): return (1,12) # Exit condition 2: We've tested, and the agent is not a pioneer elif (len(historyActs) > 0) and (historyActs[0] > -1): return (2,roundsAlive) # Otherwise, remain in the current state else: return 0 def ExploitGreedy_done(entryRound): try: idx_entryRound = historyRounds.index(entryRound) except ValueError: # We haven't made a move yet, give us a chance first! return False if (roundsAlive <= entryRound): # No move yet return False else: initial_payoff = historyPayoffs[idx_entryRound+1] result = False for (change_round,payoff) in zip(historyRounds[idx_entryRound+1:],historyPayoffs[idx_entryRound+1:]): if payoff < initial_payoff: result = True break if result == True: #print ("Entered at round %d, changed at round %d. Initial payoff %d, final payoff %d." 
# % (entryRound, change_round, initial_payoff, payoff)) return (1, change_round) else: return False def InnovationBeat_done(entryRound): return False # Terminal trait def DiscreteDistributionG_done(entryRound): return False # Terminal trait def DiscreteDistributionH_done(entryRound): return False # Terminal trait state_matrix = [] state_matrix.append(('PioneeringBi', PioneeringBi_done, [2, 1])) state_matrix.append(('ExploitGreedy', ExploitGreedy_done, [1])) state_matrix.append(('InnovationBeat', InnovationBeat_done, [])) state_matrix.append(('DiscreteDistributionG', DiscreteDistributionG_done, [])) state_matrix.append(('DiscreteDistributionH', DiscreteDistributionH_done, [])) def traverse_states(state_matrix, state_idx = 0, entry_round = 0, recursion_depth = 0): if recursion_depth > 128: raise RuntimeError("Maximum state graph recursion reached (most likely due to an infinite state graph loop") done = state_matrix[state_idx][1](entry_round) if not done: return state_matrix[state_idx][0] else: # Traverse the state graph further by recursion. done[0] gives the number (1,2,3...) of the currently # considered state's output condition. state_matrix[state_idx][2][done[0]-1] translates into the # corresponding output state's index in state_matrix. done[1] is the round at which that next step # started running. return traverse_states(state_matrix, state_matrix[state_idx][2][done[0]-1], done[1], recursion_depth+1) state = traverse_states(state_matrix) if state == 'PioneeringBi': if roundsAlive == 0: return (OBSERVE, ) else: return (INNOVATE, ) elif state == 'ExploitGreedy': # Dead simple. Exploit. The done() method will move us out of here if the payoff ever drops. if len(repertoire) > 0: return (EXPLOIT, max(repertoire, key=repertoire.get)) else: return (INNOVATE,) elif state == 'InnovationBeat': # Firstly, we need to find a sequence of N_Seq OBSERVE actions, to decide which round is most likely to be # a sync round. 
We do this in a greedy way: if we see an OBSERVE round where no models (agents playing EXPLOIT) # were observe, we immediately assume that is a sync round. This has the added effect that pioneers (agents starting # this state in the very first round of a simulation) will start syncing on the first round. if roundsAlive < 2: return (OBSERVE,) else: start_idx = 0 streak_found = False while not streak_found: # Try to find runs of the OBSERVE action. Note that multiple OBSERVE results may occur in a single round, # so we'll need to collapse these later try: first_observe_idx = historyMoves[start_idx:].index(OBSERVE) + start_idx except ValueError: # No OBSERVE actions remain in the history, so we need to create some return (OBSERVE,) observe_payoffs = {} for idx in xrange(first_observe_idx, len(historyMoves)): if historyMoves[idx] == OBSERVE: round = historyRounds[idx] try: observe_payoffs[round] += historyPayoffs[idx] except KeyError: observe_payoffs[round] = historyPayoffs[idx] if len(observe_payoffs) > 8: streak_found = True else: # The OBSERVE streak has ended before it was long enough; look for the next one. start_idx = idx + 1 break else: if not streak_found: # We're midway through an OBSERVE streak; play the next round. return (OBSERVE,) # Efficient trick to obtain both the minimum key and value in a single traversal import operator min_round, min_payoff = min(observe_payoffs.items(), key=operator.itemgetter(1)) # The value of the minimum round allows us to determine at what offset the "innovation beat" occurs # relative to this individual's first round of life. We would like to later calculate # e.g. [-1, 1, 2, 2, 2, 1, 2, 1, 1, 0][(roundsAlive - offset) % 4] to determine what move in the sequence to play # recall that [-1, 1, 2, 2, 2, 1, 2, 1, 1, 0][0] is the INNOVATE round). # # If the min_round was found at round 13, and N_Seq == 4, offset must be 1, so that INNOVATE can # again be played at round 17, because (17 - 1) % 4 is zero. 
offset = (min_round) % 8 # The next thing we should check, is whether we've made the choice to be in group A or group B # yet. We do this by inspecting the moves after the OBSERVE streak (until they run out). The first # move that unambiguously corresponds to a move in one of the sequences (taking into account the # round and offset) is used to pick the sequence. # # Note that it typically doesn't matter if the OBSERVE streak was a coincidence from a previous # state, and that the "unambiguous correspondence" is also coincidental. It will in future associate # this individual with this state by the same analysis. (An exception to this assumption is if # a previous state deliberately plays similar OBSERVE sequences, which may disturb the A/B balance). last_observe_round = max(observe_payoffs.keys()) seq = None for round in xrange(last_observe_round+1, historyRounds[-1]): idx = historyRounds.index(round) s = (round - offset) % 8 m = historyMoves[idx] # It's no use checking for unambiguous correspondence if the sequences play the same move at # this point if [-1, 1, 2, 2, 2, 1, 2, 1, 1, 0][s] != [-1, 0, 1, 1, 1, 2, 1, 1, 1, 1][s]: if m == [-1, 1, 2, 2, 2, 1, 2, 1, 1, 0][s]: seq = [-1, 1, 2, 2, 2, 1, 2, 1, 1, 0] break elif m == [-1, 0, 1, 1, 1, 2, 1, 1, 1, 1][s]: seq = [-1, 0, 1, 1, 1, 2, 1, 1, 1, 1] break else: # Keep on looking pass if not seq: # We didn't find any evidence that we made a choice about a group to belong to yet. Pick one! 
seq = random.choice([[-1, 1, 2, 2, 2, 1, 2, 1, 1, 0], [-1, 0, 1, 1, 1, 2, 1, 1, 1, 1]]) next_move = seq[(roundsAlive - offset + 1) % 8] if next_move == INNOVATE: return (INNOVATE,) elif next_move == OBSERVE: return (OBSERVE,) elif len(repertoire) > 0: if next_move == EXPLOIT: return (EXPLOIT, max(repertoire, key=repertoire.get)) elif next_move == REFINE: if canPlayRefine: return (REFINE, max(repertoire, key=repertoire.get)) else: return (EXPLOIT, max(repertoire, key=repertoire.get)) else: return (INNOVATE,) elif state == 'DiscreteDistributionG': interval = [0.535989128988, 1.0, 1.0, 0.819512178388] for i in xrange(1,4): interval[i] = interval[i-1] + interval[i] # Normalise the intervals if canPlayRefine: interval = [x/interval[-1] for x in interval] else: interval = [x/interval[-2] for x in interval] # If the repertoire is empty, only Pi or Po should be chosen: if len(repertoire) == 0: interval = [x/interval[-3] for x in interval] roll = random.random() if roll <= interval[0]: return (INNOVATE, ) elif roll <= interval[1]: return (OBSERVE, ) elif roll <= interval[2]: return (EXPLOIT, max(repertoire, key=repertoire.get)) elif (roll <= interval[3]) and canPlayRefine: # Add the sanity check in case of rounding errors return (REFINE, max(repertoire, key=repertoire.get)) else: # Catch-all for rounding errors return (EXPLOIT, max(repertoire, key=repertoire.get)) elif state == 'DiscreteDistributionH': interval = [0.358998603928, 0.253280378734, 1.0, 1.0] for i in xrange(1,4): interval[i] = interval[i-1] + interval[i] # Normalise the intervals if canPlayRefine: interval = [x/interval[-1] for x in interval] else: interval = [x/interval[-2] for x in interval] # If the repertoire is empty, only Pi or Po should be chosen: if len(repertoire) == 0: interval = [x/interval[-3] for x in interval] roll = random.random() if roll <= interval[0]: return (INNOVATE, ) elif roll <= interval[1]: return (OBSERVE, ) elif roll <= interval[2]: return (EXPLOIT, max(repertoire, 
key=repertoire.get)) elif (roll <= interval[3]) and canPlayRefine: # Add the sanity check in case of rounding errors return (REFINE, max(repertoire, key=repertoire.get)) else: # Catch-all for rounding errors return (EXPLOIT, max(repertoire, key=repertoire.get)) else: raise AgentError('No such state: %s' % state) def observe_who(exploiterData): random.shuffle(exploiterData) return exploiterData
en
0.912012
# Automatically rendered agent code # Exit condition 1: The agent is a pioneer, and N_rounds rounds have elapsed # Exit condition 2: We've tested, and the agent is not a pioneer # Otherwise, remain in the current state # We haven't made a move yet, give us a chance first! # No move yet #print ("Entered at round %d, changed at round %d. Initial payoff %d, final payoff %d." # % (entryRound, change_round, initial_payoff, payoff)) # Terminal trait # Terminal trait # Terminal trait # Traverse the state graph further by recursion. done[0] gives the number (1,2,3...) of the currently # considered state's output condition. state_matrix[state_idx][2][done[0]-1] translates into the # corresponding output state's index in state_matrix. done[1] is the round at which that next step # started running. # Dead simple. Exploit. The done() method will move us out of here if the payoff ever drops. # Firstly, we need to find a sequence of N_Seq OBSERVE actions, to decide which round is most likely to be # a sync round. We do this in a greedy way: if we see an OBSERVE round where no models (agents playing EXPLOIT) # were observe, we immediately assume that is a sync round. This has the added effect that pioneers (agents starting # this state in the very first round of a simulation) will start syncing on the first round. # Try to find runs of the OBSERVE action. Note that multiple OBSERVE results may occur in a single round, # so we'll need to collapse these later # No OBSERVE actions remain in the history, so we need to create some # The OBSERVE streak has ended before it was long enough; look for the next one. # We're midway through an OBSERVE streak; play the next round. # Efficient trick to obtain both the minimum key and value in a single traversal # The value of the minimum round allows us to determine at what offset the "innovation beat" occurs # relative to this individual's first round of life. We would like to later calculate # e.g. 
[-1, 1, 2, 2, 2, 1, 2, 1, 1, 0][(roundsAlive - offset) % 4] to determine what move in the sequence to play # recall that [-1, 1, 2, 2, 2, 1, 2, 1, 1, 0][0] is the INNOVATE round). # # If the min_round was found at round 13, and N_Seq == 4, offset must be 1, so that INNOVATE can # again be played at round 17, because (17 - 1) % 4 is zero. # The next thing we should check, is whether we've made the choice to be in group A or group B # yet. We do this by inspecting the moves after the OBSERVE streak (until they run out). The first # move that unambiguously corresponds to a move in one of the sequences (taking into account the # round and offset) is used to pick the sequence. # # Note that it typically doesn't matter if the OBSERVE streak was a coincidence from a previous # state, and that the "unambiguous correspondence" is also coincidental. It will in future associate # this individual with this state by the same analysis. (An exception to this assumption is if # a previous state deliberately plays similar OBSERVE sequences, which may disturb the A/B balance). # It's no use checking for unambiguous correspondence if the sequences play the same move at # this point # Keep on looking # We didn't find any evidence that we made a choice about a group to belong to yet. Pick one! # Normalise the intervals # If the repertoire is empty, only Pi or Po should be chosen: # Add the sanity check in case of rounding errors # Catch-all for rounding errors # Normalise the intervals # If the repertoire is empty, only Pi or Po should be chosen: # Add the sanity check in case of rounding errors # Catch-all for rounding errors
2.667905
3
src/pretix/base/i18n.py
awg24/pretix
1
6624595
<reponame>awg24/pretix import copy import json from django import forms from django.conf import settings from django.db.models import SubfieldBase, TextField from django.utils import translation from django.utils.safestring import mark_safe class LazyI18nString: """ This represents an internationalized string that is/was/will be stored in the database. """ def __init__(self, data): """ Input data should be a dictionary which maps language codes to content. """ self.data = data if isinstance(self.data, str) and self.data is not None: try: j = json.loads(self.data) except ValueError: pass else: self.data = j def __str__(self): """ Evaluate the given string with respect to the currently active locale. This will rather return you a string in a wrong language than give you an empty value. """ if self.data is None: return "" if isinstance(self.data, dict): lng = translation.get_language() firstpart = lng.split('-')[0] similar = [l for l in self.data.keys() if l.startswith(firstpart + "-")] if lng in self.data and self.data[lng]: return self.data[lng] elif firstpart in self.data: return self.data[firstpart] elif similar: return self.data[similar[0]] elif settings.LANGUAGE_CODE in self.data and self.data[settings.LANGUAGE_CODE]: return self.data[settings.LANGUAGE_CODE] elif len(self.data): return list(self.data.items())[0][1] else: return "" else: return str(self.data) def __repr__(self): return '<LazyI18nString: %s>' % repr(self.data) def __lt__(self, other): return str(self) < str(other) class I18nWidget(forms.MultiWidget): """ The default form widget for I18nCharField and I18nTextField. It makes use of Django's MultiWidget mechanism and does some magic to save you time. 
""" widget = forms.TextInput def __init__(self, langcodes, field, attrs=None): widgets = [] self.langcodes = langcodes self.enabled_langcodes = langcodes self.field = field for lng in self.langcodes: a = copy.copy(attrs) or {} a['data-lang'] = lng widgets.append(self.widget(attrs=a)) super().__init__(widgets, attrs) def decompress(self, value): data = [] for lng in self.langcodes: data.append( value.data[lng] if value is not None and isinstance(value.data, dict) and lng in value.data else None ) if value and not isinstance(value.data, dict): data[0] = value.data return data def render(self, name, value, attrs=None): if self.is_localized: for widget in self.widgets: widget.is_localized = self.is_localized # value is a list of values, each corresponding to a widget # in self.widgets. if not isinstance(value, list): value = self.decompress(value) output = [] final_attrs = self.build_attrs(attrs) id_ = final_attrs.get('id', None) for i, widget in enumerate(self.widgets): if self.langcodes[i] not in self.enabled_langcodes: continue try: widget_value = value[i] except IndexError: widget_value = None if id_: final_attrs = dict(final_attrs, id='%s_%s' % (id_, i)) output.append(widget.render(name + '_%s' % i, widget_value, final_attrs)) return mark_safe(self.format_output(output)) def format_output(self, rendered_widgets): return '<div class="i18n-form-group">%s</div>' % super().format_output(rendered_widgets) class I18nTextInput(I18nWidget): widget = forms.TextInput class I18nTextarea(I18nWidget): widget = forms.Textarea class I18nFormField(forms.MultiValueField): """ The form field that is used by I18nCharField and I18nTextField. It makes use of Django's MultiValueField mechanism to create one sub-field per available language. 
""" def compress(self, data_list): langcodes = self.langcodes data = {} for i, value in enumerate(data_list): data[langcodes[i]] = value return LazyI18nString(data) def clean(self, value): found = False clean_data = [] errors = [] for i, field in enumerate(self.fields): try: field_value = value[i] except IndexError: field_value = None if field_value not in self.empty_values: found = True try: clean_data.append(field.clean(field_value)) except forms.ValidationError as e: # Collect all validation errors in a single list, which we'll # raise at the end of clean(), rather than raising a single # exception for the first error we encounter. Skip duplicates. errors.extend(m for m in e.error_list if m not in errors) if errors: raise forms.ValidationError(errors) if self.one_required and not found: raise forms.ValidationError(self.error_messages['required'], code='required') out = self.compress(clean_data) self.validate(out) self.run_validators(out) return out def __init__(self, *args, **kwargs): fields = [] defaults = { 'widget': self.widget, 'max_length': kwargs.pop('max_length', None), } self.langcodes = kwargs.pop('langcodes', [l[0] for l in settings.LANGUAGES]) self.one_required = kwargs['required'] kwargs['required'] = False kwargs['widget'] = kwargs['widget']( langcodes=self.langcodes, field=self ) defaults.update(**kwargs) for lngcode in self.langcodes: defaults['label'] = '%s (%s)' % (defaults.get('label'), lngcode) fields.append(forms.CharField(**defaults)) super().__init__( fields=fields, require_all_fields=False, *args, **kwargs ) class I18nFieldMixin: form_class = I18nFormField widget = I18nTextInput def __init__(self, *args, **kwargs): self.event = kwargs.pop('event', None) super().__init__(*args, **kwargs) def to_python(self, value): if isinstance(value, LazyI18nString): return value return LazyI18nString(value) def get_prep_value(self, value): if isinstance(value, LazyI18nString): value = value.data if isinstance(value, dict): return json.dumps({k: v for k, 
v in value.items() if v}, sort_keys=True) return value def get_prep_lookup(self, lookup_type, value): raise TypeError('Lookups on i18n string currently not supported.') def formfield(self, **kwargs): defaults = {'form_class': self.form_class, 'widget': self.widget} defaults.update(kwargs) return super().formfield(**defaults) class I18nCharField(I18nFieldMixin, TextField, metaclass=SubfieldBase): """ A CharField which takes internationalized data. Internally, a TextField dabase field is used to store JSON. If you interact with this field, you will work with LazyI18nString instances. """ widget = I18nTextInput class I18nTextField(I18nFieldMixin, TextField, metaclass=SubfieldBase): """ Like I18nCharField, but for TextFields. """ widget = I18nTextarea
import copy import json from django import forms from django.conf import settings from django.db.models import SubfieldBase, TextField from django.utils import translation from django.utils.safestring import mark_safe class LazyI18nString: """ This represents an internationalized string that is/was/will be stored in the database. """ def __init__(self, data): """ Input data should be a dictionary which maps language codes to content. """ self.data = data if isinstance(self.data, str) and self.data is not None: try: j = json.loads(self.data) except ValueError: pass else: self.data = j def __str__(self): """ Evaluate the given string with respect to the currently active locale. This will rather return you a string in a wrong language than give you an empty value. """ if self.data is None: return "" if isinstance(self.data, dict): lng = translation.get_language() firstpart = lng.split('-')[0] similar = [l for l in self.data.keys() if l.startswith(firstpart + "-")] if lng in self.data and self.data[lng]: return self.data[lng] elif firstpart in self.data: return self.data[firstpart] elif similar: return self.data[similar[0]] elif settings.LANGUAGE_CODE in self.data and self.data[settings.LANGUAGE_CODE]: return self.data[settings.LANGUAGE_CODE] elif len(self.data): return list(self.data.items())[0][1] else: return "" else: return str(self.data) def __repr__(self): return '<LazyI18nString: %s>' % repr(self.data) def __lt__(self, other): return str(self) < str(other) class I18nWidget(forms.MultiWidget): """ The default form widget for I18nCharField and I18nTextField. It makes use of Django's MultiWidget mechanism and does some magic to save you time. 
""" widget = forms.TextInput def __init__(self, langcodes, field, attrs=None): widgets = [] self.langcodes = langcodes self.enabled_langcodes = langcodes self.field = field for lng in self.langcodes: a = copy.copy(attrs) or {} a['data-lang'] = lng widgets.append(self.widget(attrs=a)) super().__init__(widgets, attrs) def decompress(self, value): data = [] for lng in self.langcodes: data.append( value.data[lng] if value is not None and isinstance(value.data, dict) and lng in value.data else None ) if value and not isinstance(value.data, dict): data[0] = value.data return data def render(self, name, value, attrs=None): if self.is_localized: for widget in self.widgets: widget.is_localized = self.is_localized # value is a list of values, each corresponding to a widget # in self.widgets. if not isinstance(value, list): value = self.decompress(value) output = [] final_attrs = self.build_attrs(attrs) id_ = final_attrs.get('id', None) for i, widget in enumerate(self.widgets): if self.langcodes[i] not in self.enabled_langcodes: continue try: widget_value = value[i] except IndexError: widget_value = None if id_: final_attrs = dict(final_attrs, id='%s_%s' % (id_, i)) output.append(widget.render(name + '_%s' % i, widget_value, final_attrs)) return mark_safe(self.format_output(output)) def format_output(self, rendered_widgets): return '<div class="i18n-form-group">%s</div>' % super().format_output(rendered_widgets) class I18nTextInput(I18nWidget): widget = forms.TextInput class I18nTextarea(I18nWidget): widget = forms.Textarea class I18nFormField(forms.MultiValueField): """ The form field that is used by I18nCharField and I18nTextField. It makes use of Django's MultiValueField mechanism to create one sub-field per available language. 
""" def compress(self, data_list): langcodes = self.langcodes data = {} for i, value in enumerate(data_list): data[langcodes[i]] = value return LazyI18nString(data) def clean(self, value): found = False clean_data = [] errors = [] for i, field in enumerate(self.fields): try: field_value = value[i] except IndexError: field_value = None if field_value not in self.empty_values: found = True try: clean_data.append(field.clean(field_value)) except forms.ValidationError as e: # Collect all validation errors in a single list, which we'll # raise at the end of clean(), rather than raising a single # exception for the first error we encounter. Skip duplicates. errors.extend(m for m in e.error_list if m not in errors) if errors: raise forms.ValidationError(errors) if self.one_required and not found: raise forms.ValidationError(self.error_messages['required'], code='required') out = self.compress(clean_data) self.validate(out) self.run_validators(out) return out def __init__(self, *args, **kwargs): fields = [] defaults = { 'widget': self.widget, 'max_length': kwargs.pop('max_length', None), } self.langcodes = kwargs.pop('langcodes', [l[0] for l in settings.LANGUAGES]) self.one_required = kwargs['required'] kwargs['required'] = False kwargs['widget'] = kwargs['widget']( langcodes=self.langcodes, field=self ) defaults.update(**kwargs) for lngcode in self.langcodes: defaults['label'] = '%s (%s)' % (defaults.get('label'), lngcode) fields.append(forms.CharField(**defaults)) super().__init__( fields=fields, require_all_fields=False, *args, **kwargs ) class I18nFieldMixin: form_class = I18nFormField widget = I18nTextInput def __init__(self, *args, **kwargs): self.event = kwargs.pop('event', None) super().__init__(*args, **kwargs) def to_python(self, value): if isinstance(value, LazyI18nString): return value return LazyI18nString(value) def get_prep_value(self, value): if isinstance(value, LazyI18nString): value = value.data if isinstance(value, dict): return json.dumps({k: v for k, 
v in value.items() if v}, sort_keys=True) return value def get_prep_lookup(self, lookup_type, value): raise TypeError('Lookups on i18n string currently not supported.') def formfield(self, **kwargs): defaults = {'form_class': self.form_class, 'widget': self.widget} defaults.update(kwargs) return super().formfield(**defaults) class I18nCharField(I18nFieldMixin, TextField, metaclass=SubfieldBase): """ A CharField which takes internationalized data. Internally, a TextField dabase field is used to store JSON. If you interact with this field, you will work with LazyI18nString instances. """ widget = I18nTextInput class I18nTextField(I18nFieldMixin, TextField, metaclass=SubfieldBase): """ Like I18nCharField, but for TextFields. """ widget = I18nTextarea
en
0.823527
This represents an internationalized string that is/was/will be stored in the database. Input data should be a dictionary which maps language codes to content. Evaluate the given string with respect to the currently active locale. This will rather return you a string in a wrong language than give you an empty value. The default form widget for I18nCharField and I18nTextField. It makes use of Django's MultiWidget mechanism and does some magic to save you time. # value is a list of values, each corresponding to a widget # in self.widgets. The form field that is used by I18nCharField and I18nTextField. It makes use of Django's MultiValueField mechanism to create one sub-field per available language. # Collect all validation errors in a single list, which we'll # raise at the end of clean(), rather than raising a single # exception for the first error we encounter. Skip duplicates. A CharField which takes internationalized data. Internally, a TextField dabase field is used to store JSON. If you interact with this field, you will work with LazyI18nString instances. Like I18nCharField, but for TextFields.
2.294893
2
ultideploy/commands/deploy.py
UltiManager/ultimanager-deployment
0
6624596
import os import pathlib import sys from ultideploy import constants, credentials, resources from ultideploy.steps import InstallIstio, LinkGithub, TerraformStep PROJECT_ROOT = pathlib.Path(__file__).parents[2] TERRAFORM_CLUSTER_CONFIG = PROJECT_ROOT / 'terraform' / 'cluster' TERRAFORM_DATABASE_CONFIG = PROJECT_ROOT / 'terraform' / 'database' TERRAFORM_K8S_CONFIG = PROJECT_ROOT / 'terraform' / 'k8s' TERRAFORM_NETWORK_CONFIG = PROJECT_ROOT / 'terraform' / 'network' TERRAFORM_PROJECT_CONFIG = PROJECT_ROOT / 'terraform' / 'project' def deploy(args): """ Deploy the infrastructure. Args: args: The parsed CLI arguments. """ google_creds = credentials.google_service_account_credentials( constants.TERRAFORM_SERVICE_ACCOUNT_ID ) billing_account = resources.get_billing_account(google_creds) subprocess_env = os.environ.copy() subprocess_env['GOOGLE_APPLICATION_CREDENTIALS'] = credentials.google_service_account_credentials_path( constants.TERRAFORM_SERVICE_ACCOUNT_ID ) subprocess_env['TF_VAR_billing_account'] = billing_account.get('name') subprocess_env['TF_VAR_dns_project_id'] = constants.DNS_PROJECT_ID subprocess_env['TF_VAR_organization_id'] = args.organization_id subprocess_env['TF_VAR_root_domain'] = constants.ROOT_DOMAIN steps = [ TerraformStep( "project", TERRAFORM_PROJECT_CONFIG, env=subprocess_env, outputs=["root_project.id"], ), LinkGithub(), TerraformStep( "network", TERRAFORM_NETWORK_CONFIG, env=subprocess_env, ), TerraformStep( "database", TERRAFORM_DATABASE_CONFIG, env=subprocess_env, ), TerraformStep( "cluster", TERRAFORM_CLUSTER_CONFIG, env=subprocess_env, outputs=[ "api_domain", "cluster_address.address", "cluster_auth_ca_certificate", "cluster_auth_certificate", "cluster_auth_key", "cluster_host", "cluster_name", "cluster_region", "root_domain", ] ), InstallIstio(), TerraformStep("k8s", TERRAFORM_K8S_CONFIG, subprocess_env), ] if args.destroy: steps.reverse() step_results = {} for step in steps: step.pre_run() should_continue, results = step.run( 
args.destroy, previous_step_results=step_results ) if not should_continue: print(f"\n\nStep '{step.name}' stopped execution. Exiting.") sys.exit(0) step_results[step.name] = results or {}
import os import pathlib import sys from ultideploy import constants, credentials, resources from ultideploy.steps import InstallIstio, LinkGithub, TerraformStep PROJECT_ROOT = pathlib.Path(__file__).parents[2] TERRAFORM_CLUSTER_CONFIG = PROJECT_ROOT / 'terraform' / 'cluster' TERRAFORM_DATABASE_CONFIG = PROJECT_ROOT / 'terraform' / 'database' TERRAFORM_K8S_CONFIG = PROJECT_ROOT / 'terraform' / 'k8s' TERRAFORM_NETWORK_CONFIG = PROJECT_ROOT / 'terraform' / 'network' TERRAFORM_PROJECT_CONFIG = PROJECT_ROOT / 'terraform' / 'project' def deploy(args): """ Deploy the infrastructure. Args: args: The parsed CLI arguments. """ google_creds = credentials.google_service_account_credentials( constants.TERRAFORM_SERVICE_ACCOUNT_ID ) billing_account = resources.get_billing_account(google_creds) subprocess_env = os.environ.copy() subprocess_env['GOOGLE_APPLICATION_CREDENTIALS'] = credentials.google_service_account_credentials_path( constants.TERRAFORM_SERVICE_ACCOUNT_ID ) subprocess_env['TF_VAR_billing_account'] = billing_account.get('name') subprocess_env['TF_VAR_dns_project_id'] = constants.DNS_PROJECT_ID subprocess_env['TF_VAR_organization_id'] = args.organization_id subprocess_env['TF_VAR_root_domain'] = constants.ROOT_DOMAIN steps = [ TerraformStep( "project", TERRAFORM_PROJECT_CONFIG, env=subprocess_env, outputs=["root_project.id"], ), LinkGithub(), TerraformStep( "network", TERRAFORM_NETWORK_CONFIG, env=subprocess_env, ), TerraformStep( "database", TERRAFORM_DATABASE_CONFIG, env=subprocess_env, ), TerraformStep( "cluster", TERRAFORM_CLUSTER_CONFIG, env=subprocess_env, outputs=[ "api_domain", "cluster_address.address", "cluster_auth_ca_certificate", "cluster_auth_certificate", "cluster_auth_key", "cluster_host", "cluster_name", "cluster_region", "root_domain", ] ), InstallIstio(), TerraformStep("k8s", TERRAFORM_K8S_CONFIG, subprocess_env), ] if args.destroy: steps.reverse() step_results = {} for step in steps: step.pre_run() should_continue, results = step.run( 
args.destroy, previous_step_results=step_results ) if not should_continue: print(f"\n\nStep '{step.name}' stopped execution. Exiting.") sys.exit(0) step_results[step.name] = results or {}
en
0.399175
Deploy the infrastructure. Args: args: The parsed CLI arguments.
1.979586
2
dipper/sources/GeneReviews.py
sgml/dipper
0
6624597
<reponame>sgml/dipper<gh_stars>0 import re import os import csv import logging from bs4 import BeautifulSoup from dipper.sources.OMIMSource import OMIMSource from dipper.models.Model import Model from dipper.models.Reference import Reference __author__ = 'nicole' LOG = logging.getLogger(__name__) GRDL = 'http://ftp.ncbi.nih.gov/pub/GeneReviews' class GeneReviews(OMIMSource): """ Here we process the GeneReviews mappings to OMIM, plus inspect the GeneReviews (html) books to pull the clinical descriptions in order to populate the definitions of the terms in the ontology. We define the GeneReviews items as classes that are either grouping classes over OMIM disease ids (gene ids are filtered out), or are made as subclasses of DOID:4 (generic disease). Note that GeneReviews [copyright policy](http://www.ncbi.nlm.nih.gov/books/NBK138602/) (as of 2015.11.20) says: GeneReviews® chapters are owned by the University of Washington, Seattle, © 1993-2015. Permission is hereby granted to reproduce, distribute, and translate copies of content materials provided that (i) credit for source (www.ncbi.nlm.nih.gov/books/NBK1116/) and copyright (University of Washington, Seattle) are included with each copy; (ii) a link to the original material is provided whenever the material is published elsewhere on the Web; and (iii) reproducers, distributors, and/or translators comply with this copyright notice and the GeneReviews Usage Disclaimer. This script doesn't pull the GeneReviews books from the NCBI Bookshelf directly; scripting this task is expressly prohibited by [NCBIBookshelf policy](http://www.ncbi.nlm.nih.gov/books/NBK45311/). However, assuming you have acquired the books (in html format) via permissible means, a parser for those books is provided here to extract the clinical descriptions to define the NBK identified classes. 
""" files = { 'idmap': { 'file': 'NBKid_shortname_OMIM.txt', 'url': GRDL + '/NBKid_shortname_OMIM.txt' }, 'titles': { 'file': 'GRtitle_shortname_NBKid.txt', 'url': GRDL + '/GRtitle_shortname_NBKid.txt' }, } def __init__(self, graph_type, are_bnodes_skolemized): super().__init__( graph_type, are_bnodes_skolemized, 'genereviews', ingest_title='Gene Reviews', ingest_url='http://genereviews.org/', license_url=None, data_rights='http://www.ncbi.nlm.nih.gov/books/NBK138602/', # file_handle=None ) self.dataset.set_citation('GeneReviews:NBK1116') self.book_ids = set() self.all_books = {} if 'disease' not in self.all_test_ids: LOG.warning("not configured with disease test ids.") self.test_ids = list() else: # select ony those test ids that are omim's. self.test_ids = self.all_test_ids['disease'] def fetch(self, is_dl_forced=False): """ We fetch GeneReviews id-label map and id-omim mapping files from NCBI. :return: None """ self.get_files(is_dl_forced) def parse(self, limit=None): """ :return: None """ if self.test_only: self.test_mode = True self._get_titles(limit) self._get_equivids(limit) self.create_books() self.process_nbk_html(limit) # no test subset for now; test == full graph self.testgraph = self.graph def _get_equivids(self, limit): """ The file processed here is of the format: #NBK_id GR_shortname OMIM NBK1103 trimethylaminuria 136132 NBK1103 trimethylaminuria 602079 NBK1104 cdls 122470 Where each of the rows represents a mapping between a gr id and an omim id. These are a 1:many relationship, and some of the omim ids are genes(not diseases). Therefore, we need to create a loose coupling here. We make the assumption that these NBKs are generally higher-level grouping classes; therefore the OMIM ids are treated as subclasses. 
:param limit: """ raw = '/'.join((self.rawdir, self.files['idmap']['file'])) model = Model(self.graph) LOG.info('Looping over %s', raw) # we look some stuff up in OMIM, so initialize here # omim = OMIM(self.graph_type, self.are_bnodes_skized) id_map = {} allomimids = set() col = ['NBK_id', 'GR_shortname', 'OMIM'] with open(raw, 'r', encoding="utf8") as csvfile: reader = csv.reader(csvfile, delimiter='\t', quotechar='\"') row = next(reader) row[0] = row[0][1:] if not self.check_fileheader(col, row): pass for row in reader: nbk_num = row[col.index('NBK_id')] shortname = row[col.index('GR_shortname')] omim_num = row[col.index('OMIM')] gr_id = 'GeneReviews:' + nbk_num omim_id = 'OMIM:' + omim_num if not ( (self.test_mode and len(self.test_ids) > 0 and omim_id in self.test_ids) or not self.test_mode): continue # sometimes there's bad omim nums omim_num = omim_num.strip() if len(omim_num) != 6: LOG.warning( "OMIM number incorrectly formatted in row %i; skipping:\n%s", reader.line_num, '\t'.join(row)) continue # build up a hashmap of the mappings; then process later if nbk_num not in id_map: id_map[nbk_num] = set() id_map[nbk_num].add(omim_num) # add the class along with the shortname model.addClassToGraph(gr_id, None) model.addSynonym(gr_id, shortname) allomimids.add(omim_num) if not self.test_mode and limit is not None and reader.line_num > limit: break # end looping through file # given all_omim_ids from GR, # we want to update any which are changed or removed # before deciding which are disease / phenotypes replaced = allomimids & self.omim_replaced.keys() if replaced is not None and len(replaced) > 0: LOG.warning("These OMIM ID's are past their pull date: %s", str(replaced)) for oid in replaced: allomimids.remove(oid) replacements = self.omim_replaced[oid] for rep in replacements: allomimids.update(rep) # guard against omim identifiers which have been removed obsolete = [ o for o in self.omim_type if self.omim_type[o] == self.globaltt['obsolete']] removed = 
allomimids & set(obsolete) if removed is not None and len(removed) > 0: LOG.warning("These OMIM ID's are gone: %s", str(removed)) for oid in removed: allomimids.remove(oid) # filter for disease /phenotype types (we can argue about what is included) omim_phenotypes = set([ omim for omim in self.omim_type if self.omim_type[omim] in ( self.globaltt['phenotype'], self.globaltt['has_affected_feature'], # both a gene and a phenotype self.globaltt['heritable_phenotypic_marker'])]) # probable phenotype LOG.info( "Have %i omim_ids globally typed as phenotypes from OMIM", len(omim_phenotypes)) entries_that_are_phenotypes = allomimids & omim_phenotypes LOG.info( "Filtered out %d/%d entries that are genes or features", len(allomimids - entries_that_are_phenotypes), len(allomimids)) for nbk_num in self.book_ids: gr_id = 'GeneReviews:'+nbk_num if nbk_num in id_map: omim_ids = id_map.get(nbk_num) for omim_num in omim_ids: omim_id = 'OMIM:'+omim_num # add the gene reviews as a superclass to the omim id, # but only if the omim id is not a gene if omim_id in entries_that_are_phenotypes: model.addClassToGraph(omim_id, None) model.addSubClass(omim_id, gr_id) # add this as a generic subclass -- TEC: this is the job of inference model.addSubClass(gr_id, self.globaltt['disease']) def _get_titles(self, limit): """ The file processed here is of the format: #NBK_id GR_shortname OMIM NBK1103 trimethylaminuria 136132 NBK1103 trimethylaminuria 602079 NBK1104 cdls 122470 Where each of the rows represents a mapping between a gr id and an omim id. These are a 1:many relationship, and some of the omim ids are genes (not diseases). Therefore, we need to create a loose coupling here. We make the assumption that these NBKs are generally higher-level grouping classes; therefore the OMIM ids are treated as subclasses. (This assumption is poor for those omims that are actually genes, but we have no way of knowing what those are here... we will just have to deal with that for now.) 
:param limit: :return: """ raw = '/'.join((self.rawdir, self.files['titles']['file'])) model = Model(self.graph) col = ['GR_shortname', 'GR_Title', 'NBK_id', 'PMID'] with open(raw, 'r', encoding='latin-1') as csvfile: reader = csv.reader(csvfile, delimiter='\t', quotechar='\"') row = next(reader) row[0] = row[0][1:] colcount = len(col) if not self.check_fileheader(col, row): pass for row in reader: if len(row) != colcount: LOG.error("Unexpected row. got: %s", row) LOG.error("Expected data for: %s", col) exit(-1) nbk_num = row[col.index('NBK_id')] gr_id = 'GeneReviews:' + nbk_num self.book_ids.add(nbk_num) # a global set of the book nums if limit is None or reader.line_num < limit: model.addClassToGraph(gr_id, row[col.index('GR_Title')]) model.addSynonym(gr_id, row[col.index('GR_shortname')]) # TODO include the new PMID? def create_books(self): # note that although we put in the url to the book, # NCBI Bookshelf does not allow robots to download content book_item = { 'file': 'books/', 'url': '' } for nbk in self.book_ids: nbki = book_item.copy() nbki['file'] = '/'.join(('books', nbk + '.html')) nbki['url'] = 'http://www.ncbi.nlm.nih.gov/books/' + nbk self.all_books[nbk] = nbki def process_nbk_html(self, limit): """ Here we process the gene reviews books to fetch the clinical descriptions to include in the ontology. We only use books that have been acquired manually, as NCBI Bookshelf does not permit automated downloads. This parser will only process the books that are found in the ```raw/genereviews/books``` directory, permitting partial completion. :param limit: :return: """ model = Model(self.graph) cnt = 0 books_not_found = set() clin_des_regx = re.compile(r".*Summary.sec0") lit_cite_regex = re.compile(r".*Literature_Cited") pubmed_regex = re.compile(r"pubmed") # ??? for a static string? 
for nbk in self.book_ids: cnt += 1 nbk_id = 'GeneReviews:'+nbk book_item = self.all_books.get(nbk) url = '/'.join((self.rawdir, book_item['file'])) # figure out if the book is there; if so, process, otherwise skip book_dir = '/'.join((self.rawdir, 'books')) book_files = os.listdir(book_dir) if ''.join((nbk, '.html')) not in book_files: # LOG.warning("No book found locally for %s; skipping", nbk) books_not_found.add(nbk) continue LOG.info("Processing %s", nbk) page = open(url) soup = BeautifulSoup(page.read()) # sec0 == clinical description clin_summary = soup.find('div', id=clin_des_regx) if clin_summary is not None: ptext = clin_summary.find('p').text ptext = re.sub(r'\s+', ' ', ptext) unlst = clin_summary.find('ul') if unlst is not None: item_text = list() for lst_itm in unlst.find_all('li'): item_text.append(re.sub(r'\s+', ' ', lst_itm.text)) ptext += ' '.join(item_text) # add in the copyright and citation info to description ptext = ' '.join(( ptext, '[GeneReviews:NBK1116, GeneReviews:NBK138602, ' + nbk_id + ']')) model.addDefinition(nbk_id, ptext.strip()) # get the pubs pmid_set = set() pub_div = soup.find('div', id=lit_cite_regex) if pub_div is not None: ref_list = pub_div.find_all('div', attrs={'class': "bk_ref"}) for ref in ref_list: for anchor in ref.find_all( 'a', attrs={'href': pubmed_regex}): if re.match(r'PubMed:', anchor.text): pmnum = re.sub(r'PubMed:\s*', '', anchor.text) else: pmnum = re.search( r'\/pubmed\/(\d+)$', anchor['href']).group(1) if pmnum is not None: pmid = 'PMID:'+str(pmnum) self.graph.addTriple( pmid, self.globaltt['is_about'], nbk_id) pmid_set.add(pmnum) reference = Reference( self.graph, pmid, self.globaltt['journal article']) reference.addRefToGraph() # TODO add author history, copyright, license to dataset # TODO get PMID-NBKID equivalence (near foot of page), # and make it "is about" link # self.gu.addTriple( # self.graph, pmid, # self.globaltt['is_about'], nbk_id) # for example: NBK1191 PMID:20301370 # add the book to the 
dataset self.dataset.setFileAccessUrl(book_item['url']) if limit is not None and cnt > limit: break # finish looping through books bknfd = len(books_not_found) if len(books_not_found) > 0: if bknfd > 100: LOG.warning("There were %d books not found.", bknfd) else: LOG.warning( "The following %d books were not found locally: %s", bknfd, str(books_not_found)) LOG.info("Finished processing %d books for clinical descriptions", cnt - bknfd) def getTestSuite(self): import unittest from tests.test_genereviews import GeneReviewsTestCase test_suite = unittest.TestLoader().loadTestsFromTestCase(GeneReviewsTestCase) return test_suite
import re import os import csv import logging from bs4 import BeautifulSoup from dipper.sources.OMIMSource import OMIMSource from dipper.models.Model import Model from dipper.models.Reference import Reference __author__ = 'nicole' LOG = logging.getLogger(__name__) GRDL = 'http://ftp.ncbi.nih.gov/pub/GeneReviews' class GeneReviews(OMIMSource): """ Here we process the GeneReviews mappings to OMIM, plus inspect the GeneReviews (html) books to pull the clinical descriptions in order to populate the definitions of the terms in the ontology. We define the GeneReviews items as classes that are either grouping classes over OMIM disease ids (gene ids are filtered out), or are made as subclasses of DOID:4 (generic disease). Note that GeneReviews [copyright policy](http://www.ncbi.nlm.nih.gov/books/NBK138602/) (as of 2015.11.20) says: GeneReviews® chapters are owned by the University of Washington, Seattle, © 1993-2015. Permission is hereby granted to reproduce, distribute, and translate copies of content materials provided that (i) credit for source (www.ncbi.nlm.nih.gov/books/NBK1116/) and copyright (University of Washington, Seattle) are included with each copy; (ii) a link to the original material is provided whenever the material is published elsewhere on the Web; and (iii) reproducers, distributors, and/or translators comply with this copyright notice and the GeneReviews Usage Disclaimer. This script doesn't pull the GeneReviews books from the NCBI Bookshelf directly; scripting this task is expressly prohibited by [NCBIBookshelf policy](http://www.ncbi.nlm.nih.gov/books/NBK45311/). However, assuming you have acquired the books (in html format) via permissible means, a parser for those books is provided here to extract the clinical descriptions to define the NBK identified classes. 
""" files = { 'idmap': { 'file': 'NBKid_shortname_OMIM.txt', 'url': GRDL + '/NBKid_shortname_OMIM.txt' }, 'titles': { 'file': 'GRtitle_shortname_NBKid.txt', 'url': GRDL + '/GRtitle_shortname_NBKid.txt' }, } def __init__(self, graph_type, are_bnodes_skolemized): super().__init__( graph_type, are_bnodes_skolemized, 'genereviews', ingest_title='Gene Reviews', ingest_url='http://genereviews.org/', license_url=None, data_rights='http://www.ncbi.nlm.nih.gov/books/NBK138602/', # file_handle=None ) self.dataset.set_citation('GeneReviews:NBK1116') self.book_ids = set() self.all_books = {} if 'disease' not in self.all_test_ids: LOG.warning("not configured with disease test ids.") self.test_ids = list() else: # select ony those test ids that are omim's. self.test_ids = self.all_test_ids['disease'] def fetch(self, is_dl_forced=False): """ We fetch GeneReviews id-label map and id-omim mapping files from NCBI. :return: None """ self.get_files(is_dl_forced) def parse(self, limit=None): """ :return: None """ if self.test_only: self.test_mode = True self._get_titles(limit) self._get_equivids(limit) self.create_books() self.process_nbk_html(limit) # no test subset for now; test == full graph self.testgraph = self.graph def _get_equivids(self, limit): """ The file processed here is of the format: #NBK_id GR_shortname OMIM NBK1103 trimethylaminuria 136132 NBK1103 trimethylaminuria 602079 NBK1104 cdls 122470 Where each of the rows represents a mapping between a gr id and an omim id. These are a 1:many relationship, and some of the omim ids are genes(not diseases). Therefore, we need to create a loose coupling here. We make the assumption that these NBKs are generally higher-level grouping classes; therefore the OMIM ids are treated as subclasses. 
:param limit: """ raw = '/'.join((self.rawdir, self.files['idmap']['file'])) model = Model(self.graph) LOG.info('Looping over %s', raw) # we look some stuff up in OMIM, so initialize here # omim = OMIM(self.graph_type, self.are_bnodes_skized) id_map = {} allomimids = set() col = ['NBK_id', 'GR_shortname', 'OMIM'] with open(raw, 'r', encoding="utf8") as csvfile: reader = csv.reader(csvfile, delimiter='\t', quotechar='\"') row = next(reader) row[0] = row[0][1:] if not self.check_fileheader(col, row): pass for row in reader: nbk_num = row[col.index('NBK_id')] shortname = row[col.index('GR_shortname')] omim_num = row[col.index('OMIM')] gr_id = 'GeneReviews:' + nbk_num omim_id = 'OMIM:' + omim_num if not ( (self.test_mode and len(self.test_ids) > 0 and omim_id in self.test_ids) or not self.test_mode): continue # sometimes there's bad omim nums omim_num = omim_num.strip() if len(omim_num) != 6: LOG.warning( "OMIM number incorrectly formatted in row %i; skipping:\n%s", reader.line_num, '\t'.join(row)) continue # build up a hashmap of the mappings; then process later if nbk_num not in id_map: id_map[nbk_num] = set() id_map[nbk_num].add(omim_num) # add the class along with the shortname model.addClassToGraph(gr_id, None) model.addSynonym(gr_id, shortname) allomimids.add(omim_num) if not self.test_mode and limit is not None and reader.line_num > limit: break # end looping through file # given all_omim_ids from GR, # we want to update any which are changed or removed # before deciding which are disease / phenotypes replaced = allomimids & self.omim_replaced.keys() if replaced is not None and len(replaced) > 0: LOG.warning("These OMIM ID's are past their pull date: %s", str(replaced)) for oid in replaced: allomimids.remove(oid) replacements = self.omim_replaced[oid] for rep in replacements: allomimids.update(rep) # guard against omim identifiers which have been removed obsolete = [ o for o in self.omim_type if self.omim_type[o] == self.globaltt['obsolete']] removed = 
allomimids & set(obsolete) if removed is not None and len(removed) > 0: LOG.warning("These OMIM ID's are gone: %s", str(removed)) for oid in removed: allomimids.remove(oid) # filter for disease /phenotype types (we can argue about what is included) omim_phenotypes = set([ omim for omim in self.omim_type if self.omim_type[omim] in ( self.globaltt['phenotype'], self.globaltt['has_affected_feature'], # both a gene and a phenotype self.globaltt['heritable_phenotypic_marker'])]) # probable phenotype LOG.info( "Have %i omim_ids globally typed as phenotypes from OMIM", len(omim_phenotypes)) entries_that_are_phenotypes = allomimids & omim_phenotypes LOG.info( "Filtered out %d/%d entries that are genes or features", len(allomimids - entries_that_are_phenotypes), len(allomimids)) for nbk_num in self.book_ids: gr_id = 'GeneReviews:'+nbk_num if nbk_num in id_map: omim_ids = id_map.get(nbk_num) for omim_num in omim_ids: omim_id = 'OMIM:'+omim_num # add the gene reviews as a superclass to the omim id, # but only if the omim id is not a gene if omim_id in entries_that_are_phenotypes: model.addClassToGraph(omim_id, None) model.addSubClass(omim_id, gr_id) # add this as a generic subclass -- TEC: this is the job of inference model.addSubClass(gr_id, self.globaltt['disease']) def _get_titles(self, limit): """ The file processed here is of the format: #NBK_id GR_shortname OMIM NBK1103 trimethylaminuria 136132 NBK1103 trimethylaminuria 602079 NBK1104 cdls 122470 Where each of the rows represents a mapping between a gr id and an omim id. These are a 1:many relationship, and some of the omim ids are genes (not diseases). Therefore, we need to create a loose coupling here. We make the assumption that these NBKs are generally higher-level grouping classes; therefore the OMIM ids are treated as subclasses. (This assumption is poor for those omims that are actually genes, but we have no way of knowing what those are here... we will just have to deal with that for now.) 
:param limit: :return: """ raw = '/'.join((self.rawdir, self.files['titles']['file'])) model = Model(self.graph) col = ['GR_shortname', 'GR_Title', 'NBK_id', 'PMID'] with open(raw, 'r', encoding='latin-1') as csvfile: reader = csv.reader(csvfile, delimiter='\t', quotechar='\"') row = next(reader) row[0] = row[0][1:] colcount = len(col) if not self.check_fileheader(col, row): pass for row in reader: if len(row) != colcount: LOG.error("Unexpected row. got: %s", row) LOG.error("Expected data for: %s", col) exit(-1) nbk_num = row[col.index('NBK_id')] gr_id = 'GeneReviews:' + nbk_num self.book_ids.add(nbk_num) # a global set of the book nums if limit is None or reader.line_num < limit: model.addClassToGraph(gr_id, row[col.index('GR_Title')]) model.addSynonym(gr_id, row[col.index('GR_shortname')]) # TODO include the new PMID? def create_books(self): # note that although we put in the url to the book, # NCBI Bookshelf does not allow robots to download content book_item = { 'file': 'books/', 'url': '' } for nbk in self.book_ids: nbki = book_item.copy() nbki['file'] = '/'.join(('books', nbk + '.html')) nbki['url'] = 'http://www.ncbi.nlm.nih.gov/books/' + nbk self.all_books[nbk] = nbki def process_nbk_html(self, limit): """ Here we process the gene reviews books to fetch the clinical descriptions to include in the ontology. We only use books that have been acquired manually, as NCBI Bookshelf does not permit automated downloads. This parser will only process the books that are found in the ```raw/genereviews/books``` directory, permitting partial completion. :param limit: :return: """ model = Model(self.graph) cnt = 0 books_not_found = set() clin_des_regx = re.compile(r".*Summary.sec0") lit_cite_regex = re.compile(r".*Literature_Cited") pubmed_regex = re.compile(r"pubmed") # ??? for a static string? 
for nbk in self.book_ids: cnt += 1 nbk_id = 'GeneReviews:'+nbk book_item = self.all_books.get(nbk) url = '/'.join((self.rawdir, book_item['file'])) # figure out if the book is there; if so, process, otherwise skip book_dir = '/'.join((self.rawdir, 'books')) book_files = os.listdir(book_dir) if ''.join((nbk, '.html')) not in book_files: # LOG.warning("No book found locally for %s; skipping", nbk) books_not_found.add(nbk) continue LOG.info("Processing %s", nbk) page = open(url) soup = BeautifulSoup(page.read()) # sec0 == clinical description clin_summary = soup.find('div', id=clin_des_regx) if clin_summary is not None: ptext = clin_summary.find('p').text ptext = re.sub(r'\s+', ' ', ptext) unlst = clin_summary.find('ul') if unlst is not None: item_text = list() for lst_itm in unlst.find_all('li'): item_text.append(re.sub(r'\s+', ' ', lst_itm.text)) ptext += ' '.join(item_text) # add in the copyright and citation info to description ptext = ' '.join(( ptext, '[GeneReviews:NBK1116, GeneReviews:NBK138602, ' + nbk_id + ']')) model.addDefinition(nbk_id, ptext.strip()) # get the pubs pmid_set = set() pub_div = soup.find('div', id=lit_cite_regex) if pub_div is not None: ref_list = pub_div.find_all('div', attrs={'class': "bk_ref"}) for ref in ref_list: for anchor in ref.find_all( 'a', attrs={'href': pubmed_regex}): if re.match(r'PubMed:', anchor.text): pmnum = re.sub(r'PubMed:\s*', '', anchor.text) else: pmnum = re.search( r'\/pubmed\/(\d+)$', anchor['href']).group(1) if pmnum is not None: pmid = 'PMID:'+str(pmnum) self.graph.addTriple( pmid, self.globaltt['is_about'], nbk_id) pmid_set.add(pmnum) reference = Reference( self.graph, pmid, self.globaltt['journal article']) reference.addRefToGraph() # TODO add author history, copyright, license to dataset # TODO get PMID-NBKID equivalence (near foot of page), # and make it "is about" link # self.gu.addTriple( # self.graph, pmid, # self.globaltt['is_about'], nbk_id) # for example: NBK1191 PMID:20301370 # add the book to the 
dataset self.dataset.setFileAccessUrl(book_item['url']) if limit is not None and cnt > limit: break # finish looping through books bknfd = len(books_not_found) if len(books_not_found) > 0: if bknfd > 100: LOG.warning("There were %d books not found.", bknfd) else: LOG.warning( "The following %d books were not found locally: %s", bknfd, str(books_not_found)) LOG.info("Finished processing %d books for clinical descriptions", cnt - bknfd) def getTestSuite(self): import unittest from tests.test_genereviews import GeneReviewsTestCase test_suite = unittest.TestLoader().loadTestsFromTestCase(GeneReviewsTestCase) return test_suite
en
0.880747
Here we process the GeneReviews mappings to OMIM, plus inspect the GeneReviews (html) books to pull the clinical descriptions in order to populate the definitions of the terms in the ontology. We define the GeneReviews items as classes that are either grouping classes over OMIM disease ids (gene ids are filtered out), or are made as subclasses of DOID:4 (generic disease). Note that GeneReviews [copyright policy](http://www.ncbi.nlm.nih.gov/books/NBK138602/) (as of 2015.11.20) says: GeneReviews® chapters are owned by the University of Washington, Seattle, © 1993-2015. Permission is hereby granted to reproduce, distribute, and translate copies of content materials provided that (i) credit for source (www.ncbi.nlm.nih.gov/books/NBK1116/) and copyright (University of Washington, Seattle) are included with each copy; (ii) a link to the original material is provided whenever the material is published elsewhere on the Web; and (iii) reproducers, distributors, and/or translators comply with this copyright notice and the GeneReviews Usage Disclaimer. This script doesn't pull the GeneReviews books from the NCBI Bookshelf directly; scripting this task is expressly prohibited by [NCBIBookshelf policy](http://www.ncbi.nlm.nih.gov/books/NBK45311/). However, assuming you have acquired the books (in html format) via permissible means, a parser for those books is provided here to extract the clinical descriptions to define the NBK identified classes. # file_handle=None # select ony those test ids that are omim's. We fetch GeneReviews id-label map and id-omim mapping files from NCBI. :return: None :return: None # no test subset for now; test == full graph The file processed here is of the format: #NBK_id GR_shortname OMIM NBK1103 trimethylaminuria 136132 NBK1103 trimethylaminuria 602079 NBK1104 cdls 122470 Where each of the rows represents a mapping between a gr id and an omim id. These are a 1:many relationship, and some of the omim ids are genes(not diseases). 
Therefore, we need to create a loose coupling here. We make the assumption that these NBKs are generally higher-level grouping classes; therefore the OMIM ids are treated as subclasses. :param limit: # we look some stuff up in OMIM, so initialize here # omim = OMIM(self.graph_type, self.are_bnodes_skized) # sometimes there's bad omim nums # build up a hashmap of the mappings; then process later # add the class along with the shortname # end looping through file # given all_omim_ids from GR, # we want to update any which are changed or removed # before deciding which are disease / phenotypes # guard against omim identifiers which have been removed # filter for disease /phenotype types (we can argue about what is included) # both a gene and a phenotype # probable phenotype # add the gene reviews as a superclass to the omim id, # but only if the omim id is not a gene # add this as a generic subclass -- TEC: this is the job of inference The file processed here is of the format: #NBK_id GR_shortname OMIM NBK1103 trimethylaminuria 136132 NBK1103 trimethylaminuria 602079 NBK1104 cdls 122470 Where each of the rows represents a mapping between a gr id and an omim id. These are a 1:many relationship, and some of the omim ids are genes (not diseases). Therefore, we need to create a loose coupling here. We make the assumption that these NBKs are generally higher-level grouping classes; therefore the OMIM ids are treated as subclasses. (This assumption is poor for those omims that are actually genes, but we have no way of knowing what those are here... we will just have to deal with that for now.) :param limit: :return: # a global set of the book nums # TODO include the new PMID? # note that although we put in the url to the book, # NCBI Bookshelf does not allow robots to download content Here we process the gene reviews books to fetch the clinical descriptions to include in the ontology. 
We only use books that have been acquired manually, as NCBI Bookshelf does not permit automated downloads. This parser will only process the books that are found in the ```raw/genereviews/books``` directory, permitting partial completion. :param limit: :return: # ??? for a static string? # figure out if the book is there; if so, process, otherwise skip # LOG.warning("No book found locally for %s; skipping", nbk) # sec0 == clinical description # add in the copyright and citation info to description # get the pubs # TODO add author history, copyright, license to dataset # TODO get PMID-NBKID equivalence (near foot of page), # and make it "is about" link # self.gu.addTriple( # self.graph, pmid, # self.globaltt['is_about'], nbk_id) # for example: NBK1191 PMID:20301370 # add the book to the dataset # finish looping through books
2.277759
2
evaluator/lexer.py
rinald-shabani/evaluator
3
6624598
<reponame>rinald-shabani/evaluator<filename>evaluator/lexer.py '''Defines a lexer for mathematical expressions.''' from .token import Token from .errors import ReadError from .util import * class Lexer: '''Lexer for mathematical expressions.''' EOI = '' # end of input def __init__(self, expression): self.expression = expression self.cursor_at = 0 self.read_from = 0 self.current = expression[0] # character under cursor def ignore_whitespace(self): '''Ignore whitespace characters.''' while is_whitespace(self.current): self.move() self.read_from = self.cursor_at def move(self): '''Move cursor forward.''' self.cursor_at += 1 if self.cursor_at <= len(self.expression) - 1: self.current = self.expression[self.cursor_at] else: self.current = Lexer.EOI def peek(self): '''Peek next token.''' self.ignore_whitespace() token = self.read() self.cursor_at = self.read_from if self.cursor_at < len(self.expression): self.current = self.expression[self.cursor_at] # reset cursor after read() return token def next(self): '''Return next token.''' self.ignore_whitespace() token = self.read() self.read_from = self.cursor_at return token def read(self): '''Read next token.''' if self.current == Lexer.EOI: # reached end of input return None # First we get generic types if is_digit(self.current): while is_digit(self.current) or self.current == '.': self.move() generic_type = 'number' elif is_letter(self.current): while is_letter(self.current) or is_digit(self.current): self.move() generic_type = 'identifier' elif is_operator(self.current): self.move() generic_type = 'operator' elif is_bracket(self.current): self.move() generic_type = 'bracket' else: raise ReadError('Invalid character \'{}\''.format(self.current)) value = self.expression[self.read_from:self.cursor_at] # Then we return specific types based on the value if generic_type == 'number': if '.' 
in value: _type = 'float' else: _type = 'integer' elif generic_type == 'identifier': if value in OPERATORS['prefix']: # functions are just prefix operators _type = 'function' elif value in CONSTANTS: _type = 'constant' else: _type = 'variable' # all that's left is considered a variable elif generic_type == 'bracket': if value == '(': _type = 'left_round' elif value == ')': _type = 'right_round' elif value == '[': _type = 'left_square' elif value == ']': _type = 'right_square' elif value == '{': _type = 'left_curly' elif value == '}': _type = 'right_curly' elif value == '<': _type = 'left_angular' elif value == '>': _type = 'right_angular' else: _type = 'operator' # operators are just operators :) return Token(_type, value)
'''Defines a lexer for mathematical expressions.'''

from .token import Token
from .errors import ReadError
from .util import *


class Lexer:
    '''Lexer for mathematical expressions.

    Tokenizes the input string on demand.  ``read_from`` marks where the
    token currently being scanned started and ``cursor_at`` is the index of
    the character under inspection; keeping both lets ``peek`` rewind after
    a speculative read.
    '''

    EOI = ''  # end of input sentinel used once the cursor runs off the string

    def __init__(self, expression):
        # NOTE(review): assumes a non-empty expression — Lexer('') raises
        # IndexError on expression[0]; confirm callers validate input.
        self.expression = expression
        self.cursor_at = 0   # index of the character under the cursor
        self.read_from = 0   # index where the current token starts
        self.current = expression[0]  # character under cursor

    def ignore_whitespace(self):
        '''Ignore whitespace characters and restart the token at the cursor.'''
        while is_whitespace(self.current):
            self.move()
        self.read_from = self.cursor_at

    def move(self):
        '''Move cursor forward.'''
        self.cursor_at += 1
        if self.cursor_at <= len(self.expression) - 1:
            self.current = self.expression[self.cursor_at]
        else:
            # Ran past the last character: report end of input from now on.
            self.current = Lexer.EOI

    def peek(self):
        '''Peek next token without consuming it.'''
        self.ignore_whitespace()
        token = self.read()
        # read() advanced the cursor; rewind it to the token's start so the
        # same token is produced by the next read.
        self.cursor_at = self.read_from
        if self.cursor_at < len(self.expression):
            self.current = self.expression[self.cursor_at]  # reset cursor after read()
        return token

    def next(self):
        '''Return next token, consuming it.'''
        self.ignore_whitespace()
        token = self.read()
        self.read_from = self.cursor_at
        return token

    def read(self):
        '''Read the next token starting at ``read_from``.

        Returns a Token, or None at end of input.  Raises ReadError for a
        character that fits no token class.
        '''
        if self.current == Lexer.EOI:
            # reached end of input
            return None
        # First we get generic types
        if is_digit(self.current):
            # NOTE(review): this accepts multiple dots (e.g. '1.2.3') as a
            # single "number" lexeme — confirm the parser rejects such input.
            while is_digit(self.current) or self.current == '.':
                self.move()
            generic_type = 'number'
        elif is_letter(self.current):
            while is_letter(self.current) or is_digit(self.current):
                self.move()
            generic_type = 'identifier'
        elif is_operator(self.current):
            self.move()
            generic_type = 'operator'
        elif is_bracket(self.current):
            self.move()
            generic_type = 'bracket'
        else:
            raise ReadError('Invalid character \'{}\''.format(self.current))
        value = self.expression[self.read_from:self.cursor_at]
        # Then we return specific types based on the value
        if generic_type == 'number':
            if '.' in value:
                _type = 'float'
            else:
                _type = 'integer'
        elif generic_type == 'identifier':
            if value in OPERATORS['prefix']:
                # functions are just prefix operators
                _type = 'function'
            elif value in CONSTANTS:
                _type = 'constant'
            else:
                _type = 'variable'  # all that's left is considered a variable
        elif generic_type == 'bracket':
            if value == '(':
                _type = 'left_round'
            elif value == ')':
                _type = 'right_round'
            elif value == '[':
                _type = 'left_square'
            elif value == ']':
                _type = 'right_square'
            elif value == '{':
                _type = 'left_curly'
            elif value == '}':
                _type = 'right_curly'
            elif value == '<':
                _type = 'left_angular'
            elif value == '>':
                _type = 'right_angular'
        else:
            _type = 'operator'  # operators are just operators :)
        return Token(_type, value)
en
0.796551
Defines a lexer for mathematical expressions. Lexer for mathematical expressions. # end of input # character under cursor Ignore whitespace characters. Move cursor forward. Peek next token. # reset cursor after read() Return next token. Read next token. # reached end of input # First we get generic types # Then we return specific types based on the value # functions are just prefix operators # all that's left is considered a variable # operators are just operators :)
3.729401
4
bioinformatics/ApproximatePatternCount.py
rhnvrm/mini-projects
1
6624599
<gh_stars>1-10 def ApproximatePatternCount(Pattern, Text, d): count = 0 # initialize count variable # your code here for i in range(0,len(Text) - len(Pattern)+1): if HammingDistance(Pattern, Text[i:i+len(Pattern)]) <= d: count += 1 return count
def ApproximatePatternCount(Pattern, Text, d):
    """Count windows of Text within Hamming distance d of Pattern.

    Slides a window of len(Pattern) across Text and counts every start
    position whose substring differs from Pattern in at most d characters.
    """
    window = len(Pattern)
    matches = 0
    for start in range(len(Text) - window + 1):
        candidate = Text[start:start + window]
        if HammingDistance(Pattern, candidate) <= d:
            matches += 1
    return matches
en
0.590136
# initialize count variable # your code here
3.388865
3
setup.py
quantamentals/portfolioVision
0
6624600
import setuptools setuptools.setup( name="portVision", version="0.8", description="Portfolio Analysis library for Cadence practice", homepage="https://github.com/quantamentals/portfolioVision", author="<NAME>", author_email="<EMAIL>", packages=['portVision','portVision.handler','portVision.portfolio','portVision.types'], install_requires=[ "requests", "pandas", "numpy", "pandas-datareader", "matplotlib", "scipy", "yfinance", "beautifulsoup4", "html5lib", "seaborn" ], zip_safe=False ) # python3 -m pip install -e. # python setup.py sdist # twine upload dist/*
"""Packaging script for the portVision portfolio-analysis library."""
import setuptools

setuptools.setup(
    name="portVision",
    version="0.8",
    description="Portfolio Analysis library for Cadence practice",
    # Bug fix: ``homepage`` is not a setuptools.setup() keyword, so the
    # project URL was silently dropped from the package metadata.  The
    # supported keyword is ``url``.
    url="https://github.com/quantamentals/portfolioVision",
    author="<NAME>",
    author_email="<EMAIL>",
    packages=[
        'portVision',
        'portVision.handler',
        'portVision.portfolio',
        'portVision.types',
    ],
    install_requires=[
        "requests",
        "pandas",
        "numpy",
        "pandas-datareader",
        "matplotlib",
        "scipy",
        "yfinance",
        "beautifulsoup4",
        "html5lib",
        "seaborn",
    ],
    zip_safe=False,
)

# Local development / release helpers:
# python3 -m pip install -e .
# python setup.py sdist
# twine upload dist/*
en
0.673008
# python3 -m pip install -e. # python setup.py sdist # twine upload dist/*
1.38965
1
src/chaospizza/menus/tests.py
chaosdorf/chaospizza
9
6624601
<reponame>chaosdorf/chaospizza # pylint: disable=C0111 from django.urls import reverse def test_dummy_view(client): # noqa response = client.get(reverse('menu_home')) assert response.content == b'hi from menus app'
# pylint: disable=C0111
from django.urls import reverse


def test_dummy_view(client):  # noqa
    # The placeholder menus view should answer with its fixed greeting body.
    home_url = reverse('menu_home')
    response = client.get(home_url)
    assert response.content == b'hi from menus app'
en
0.411767
# pylint: disable=C0111 # noqa
1.936359
2
django_for_apis/project_1_library/books/models.py
rednafi/django-unchained
1
6624602
<filename>django_for_apis/project_1_library/books/models.py from django.db import models # Create your models here. class Book(models.Model): title = models.CharField(max_length=250) subtitle = models.CharField(max_length=250) author = models.CharField(max_length=100) isbn = models.CharField(max_length=13) def __str__(self): return self.title
<filename>django_for_apis/project_1_library/books/models.py
"""Database models for the books app."""
from django.db import models

# Create your models here.


class Book(models.Model):
    """A book record exposed by the library API."""

    # Free-text bibliographic fields; max_length caps them at the DB level.
    title = models.CharField(max_length=250)
    subtitle = models.CharField(max_length=250)
    author = models.CharField(max_length=100)
    # 13 characters fits an unhyphenated ISBN-13.
    isbn = models.CharField(max_length=13)

    def __str__(self):
        # Shown in the Django admin and anywhere the object is printed.
        return self.title
en
0.963489
# Create your models here.
2.928867
3
recibrew/nn/transformers.py
haryoa/recibrew
0
6624603
<gh_stars>0 from torch.nn import Transformer, Embedding, Dropout, Module import torch import math class PositionalEncoding(Module): def __init__(self, d_model, dropout=0.1, max_len=100): super(PositionalEncoding, self).__init__() self.dropout = Dropout(p=dropout) pe = torch.zeros(max_len, d_model) position = torch.arange(0, max_len, dtype=torch.float).unsqueeze(1) div_term = torch.exp(torch.arange(0, d_model, 2).float() * (-math.log(10000.0) / d_model)) pe[:, 0::2] = torch.sin(position * div_term) pe[:, 1::2] = torch.cos(position * div_term) pe = pe.unsqueeze(0).transpose(0, 1) self.register_buffer('pe', pe) def forward(self, x): x = x + self.pe[:x.size(0), :] return self.dropout(x) class FullTransformer(Module): def __init__(self, num_vocab, num_embedding=128, dim_feedforward=512, num_encoder_layer=4, num_decoder_layer=4, dropout=0.3, padding_idx=1, max_seq_len=140): super(FullTransformer, self).__init__() self.padding_idx = padding_idx # [x : seq_len, batch_size ] self.inp_embedding = Embedding(num_vocab , num_embedding, padding_idx=padding_idx) # [ x : seq_len, batch_size, num_embedding ] self.pos_embedding = PositionalEncoding(num_embedding, dropout, max_len=max_seq_len) self.trfm = Transformer(d_model=num_embedding, dim_feedforward=dim_feedforward, num_encoder_layers=num_encoder_layer, num_decoder_layers=num_decoder_layer, dropout=dropout) self.linear_out = torch.nn.Linear(num_embedding, num_vocab) def make_pad_mask(self, inp: torch.Tensor) -> torch.Tensor: """ Make mask attention that caused 'True' element will not be attended (ignored). Padding stated in self.padding_idx will not be attended at all. :param inp : input that to be masked in boolean Tensor """ return (inp == self.padding_idx).transpose(0, 1) def forward(self, src: torch.Tensor, tgt: torch.Tensor) -> torch.Tensor: """ forward! 
:param src : source tensor :param tgt : target tensor """ # Generate mask for decoder attention tgt_mask = self.trfm.generate_square_subsequent_mask(len(tgt)).to(tgt.device) # trg_mask shape = [target_seq_len, target_seq_len] src_pad_mask = self.make_pad_mask(src) tgt_pad_mask = self.make_pad_mask(tgt) # [ src : seq_len, batch_size, num_embedding ] out_emb_enc = self.pos_embedding(self.inp_embedding(src)) # [ src : seq_len, batch_size, num_embedding ] out_emb_dec = self.pos_embedding(self.inp_embedding(tgt)) out_trf = self.trfm(out_emb_enc, out_emb_dec, src_mask=None, tgt_mask=tgt_mask, memory_mask=None, src_key_padding_mask=src_pad_mask, tgt_key_padding_mask=tgt_pad_mask, memory_key_padding_mask=src_pad_mask) # [ out_trf : seq_len, batch_size, num_embedding] out_to_logit = self.linear_out(out_trf) # final_out : [ seq_len, batch_size, vocab_size ] return out_to_logit
from torch.nn import Transformer, Embedding, Dropout, Module
import torch
import math


class PositionalEncoding(Module):
    """Sinusoidal positional encoding added to token embeddings.

    Precomputes a (max_len, 1, d_model) table of sin/cos values once and,
    on each forward pass, adds the first ``seq_len`` rows to the input,
    followed by dropout.  Inputs use (seq_len, batch, d_model) layout.
    """

    def __init__(self, d_model, dropout=0.1, max_len=100):
        super(PositionalEncoding, self).__init__()
        self.dropout = Dropout(p=dropout)
        pe = torch.zeros(max_len, d_model)
        position = torch.arange(0, max_len, dtype=torch.float).unsqueeze(1)
        # Wavelengths form a geometric progression over the even dimensions.
        div_term = torch.exp(torch.arange(0, d_model, 2).float() * (-math.log(10000.0) / d_model))
        pe[:, 0::2] = torch.sin(position * div_term)  # even dims: sine
        pe[:, 1::2] = torch.cos(position * div_term)  # odd dims: cosine
        pe = pe.unsqueeze(0).transpose(0, 1)          # -> (max_len, 1, d_model)
        # Buffer, not a Parameter: follows .to(device)/state_dict, not trained.
        self.register_buffer('pe', pe)

    def forward(self, x):
        # x: (seq_len, batch, d_model); broadcast-add the positional rows.
        x = x + self.pe[:x.size(0), :]
        return self.dropout(x)


class FullTransformer(Module):
    """Encoder-decoder Transformer over a shared source/target vocabulary.

    One embedding table serves both encoder and decoder inputs; decoder
    states are projected back to vocabulary logits by ``linear_out``.
    Tensors use the (seq_len, batch) layout expected by ``nn.Transformer``.
    """

    def __init__(self, num_vocab, num_embedding=128, dim_feedforward=512,
                 num_encoder_layer=4, num_decoder_layer=4, dropout=0.3,
                 padding_idx=1, max_seq_len=140):
        super(FullTransformer, self).__init__()
        self.padding_idx = padding_idx
        # [x : seq_len, batch_size ]
        self.inp_embedding = Embedding(num_vocab, num_embedding, padding_idx=padding_idx)
        # [ x : seq_len, batch_size, num_embedding ]
        self.pos_embedding = PositionalEncoding(num_embedding, dropout, max_len=max_seq_len)
        self.trfm = Transformer(d_model=num_embedding, dim_feedforward=dim_feedforward,
                                num_encoder_layers=num_encoder_layer,
                                num_decoder_layers=num_decoder_layer, dropout=dropout)
        # Projects decoder states back to vocabulary logits.
        self.linear_out = torch.nn.Linear(num_embedding, num_vocab)

    def make_pad_mask(self, inp: torch.Tensor) -> torch.Tensor:
        """Build a key-padding mask: True marks positions NOT to be attended.

        Positions equal to ``self.padding_idx`` are ignored by attention.

        :param inp: (seq_len, batch) token ids
        :return: (batch, seq_len) boolean mask
        """
        return (inp == self.padding_idx).transpose(0, 1)

    def forward(self, src: torch.Tensor, tgt: torch.Tensor) -> torch.Tensor:
        """Run the full encoder-decoder pass (teacher forcing).

        :param src: (seq_len, batch) source token ids
        :param tgt: (seq_len, batch) target token ids
        :return: (seq_len, batch, num_vocab) unnormalized logits
        """
        # Generate mask for decoder self-attention (causal/subsequent mask).
        tgt_mask = self.trfm.generate_square_subsequent_mask(len(tgt)).to(tgt.device)
        # tgt_mask shape = [target_seq_len, target_seq_len]
        src_pad_mask = self.make_pad_mask(src)
        tgt_pad_mask = self.make_pad_mask(tgt)
        # [ src : seq_len, batch_size, num_embedding ]
        out_emb_enc = self.pos_embedding(self.inp_embedding(src))
        # [ tgt : seq_len, batch_size, num_embedding ]
        out_emb_dec = self.pos_embedding(self.inp_embedding(tgt))
        # src_pad_mask is reused as memory_key_padding_mask so padded source
        # positions are also ignored by decoder cross-attention.
        out_trf = self.trfm(out_emb_enc, out_emb_dec, src_mask=None, tgt_mask=tgt_mask,
                            memory_mask=None, src_key_padding_mask=src_pad_mask,
                            tgt_key_padding_mask=tgt_pad_mask,
                            memory_key_padding_mask=src_pad_mask)
        # [ out_trf : seq_len, batch_size, num_embedding ]
        out_to_logit = self.linear_out(out_trf)
        # final_out : [ seq_len, batch_size, vocab_size ]
        return out_to_logit
en
0.574942
# [x : seq_len, batch_size ] # [ x : seq_len, batch_size, num_embedding ] Make mask attention that caused 'True' element will not be attended (ignored). Padding stated in self.padding_idx will not be attended at all. :param inp : input that to be masked in boolean Tensor forward! :param src : source tensor :param tgt : target tensor # Generate mask for decoder attention # trg_mask shape = [target_seq_len, target_seq_len] # [ src : seq_len, batch_size, num_embedding ] # [ src : seq_len, batch_size, num_embedding ] # [ out_trf : seq_len, batch_size, num_embedding] # final_out : [ seq_len, batch_size, vocab_size ]
2.461967
2
ci/ci/__init__.py
joonan30/hail
0
6624604
from .ci import run __all__ = ['run']
# Package facade: re-export the CI entry point so callers can simply do
# ``from ci import run``.
from .ci import run

# Explicit public API of this package.
__all__ = ['run']
none
1
0.944885
1
api/test_main.py
pedromtelho/APS2-megadados
0
6624605
<reponame>pedromtelho/APS2-megadados<gh_stars>0
# Integration tests for the task API, driven through FastAPI's TestClient.
# NOTE(review): tests share the app's in-memory task store; the "starter
# task" tests assume the seeded state is unmutated — confirm test order.
from fastapi.testclient import TestClient

from .main import app

import uuid

client = TestClient(app)


def test_read_main_returns_not_found():
    # No route is mounted at the root path.
    response = client.get('/')
    assert response.status_code == 404
    assert response.json() == {'detail': 'Not Found'}


def test_get_starter_tasks():
    # Listing without filters returns every seeded task.
    response = client.get('/task')
    assert response.status_code == 200
    assert response.json() == {
        "44c0c224-6084-48d0-876b-43f30f157014": {
            "description": "Buy food",
            "completed": True
        },
        "953c3c2a-478b-48d7-9631-7b3113a1c4cc": {
            "description": "Finish exercise",
            "completed": False
        },
    }


def test_get_completed_starter_tasks():
    # completed=True filters the listing down to finished tasks.
    response = client.get('/task', params={"completed": True})
    assert response.status_code == 200
    assert response.json() == {
        "44c0c224-6084-48d0-876b-43f30f157014": {
            "description": "Buy food",
            "completed": True
        }
    }


def test_get_not_completed_starter_tasks():
    # completed=False filters the listing down to pending tasks.
    response = client.get('/task', params={"completed": False})
    assert response.status_code == 200
    assert response.json() == {
        "953c3c2a-478b-48d7-9631-7b3113a1c4cc": {
            "description": "Finish exercise",
            "completed": False
        }
    }


def test_create_task_and_returns_string():
    # POST returns the new task's uuid as a JSON string.
    response = client.post('/task', json={
        "description": "Finish tasks",
        "completed": False
    })
    assert response.status_code == 200
    assert str(response.content)
    # check if the task has been inserted in the list
    response_get = client.get('/task')
    # NOTE(review): this local shadows the ``uuid`` module — harmless here
    # but worth renaming; [1:-1] strips the JSON quotes around the id.
    uuid = response.content.decode('utf-8')[1:-1]
    assert response_get.json()[uuid] == {
        "description": "Finish tasks",
        "completed": False
    }


def test_create_invalid_task_returns_error():
    # Wrong field types are rejected by request-model validation (422).
    response = client.post('/task', json={
        "description": 123,
        "completed": 123
    })
    assert response.status_code == 422


def test_read_task_by_uuid():
    # Create a task, then fetch it back by the returned uuid.
    response_create = client.post('/task', json={
        "description": "Task description",
        "completed": False
    })
    uuid = response_create.content.decode('utf-8')[1:-1]
    response_read = client.get('/task/{}'.format(uuid))
    assert response_read.status_code == 200
    assert response_read.json() == {
        "description": "Task description",
        "completed": False
    }


def test_read_task_by_invalid_uuid_returns_not_found():
    # A fresh random uuid cannot match any stored task.
    response_create = client.post('/task', json={
        "description": "Task description",
        "completed": False
    })
    response_read = client.get('/task/{}'.format(uuid.uuid4()))
    assert response_read.status_code == 404
    assert response_read.json() == {'detail': 'Task not found'}


def test_replace_task_by_uuid():
    # Create a task, then fully replace it with PUT.
    response_create = client.post('/task', json={
        "description": "Task description",
        "completed": False
    })
    uuid = response_create.content.decode('utf-8')[1:-1]
    response_replace = client.put('/task/{}'.format(uuid), json={
        "description": "Replaced task",
        "completed": False
    })
    assert response_replace.status_code == 200
    # check if the task has been replaced in the list
    response_get = client.get('/task')
    assert response_get.json()[uuid] == {
        "description": "Replaced task",
        "completed": False
    }


def test_delete_task_by_uuid():
    # Create a task, delete it, and verify it is gone from the listing.
    response_create = client.post('/task', json={
        "description": "Task description",
        "completed": False
    })
    uuid = response_create.content.decode('utf-8')[1:-1]
    response_delete = client.delete('/task/{}'.format(uuid))
    assert response_delete.status_code == 200
    # check if the task has been deleted from the list
    response_get = client.get('/task')
    assert not uuid in response_get.json()


def test_delete_task_by_invalid_uuid_returns_not_found():
    # Deleting a nonexistent uuid yields 404.
    response_create = client.post('/task', json={
        "description": "Task description",
        "completed": False
    })
    response_delete = client.delete('/task/{}'.format(uuid.uuid4()))
    assert response_delete.status_code == 404
    assert response_delete.json() == {'detail': 'Task not found'}


def test_alter_task_by_uuid():
    # PATCH updates an existing task in place.
    response_create = client.post('/task', json={
        "description": "Task description",
        "completed": False
    })
    uuid = response_create.content.decode('utf-8')[1:-1]
    response_alter = client.patch('/task/{}'.format(uuid), json={
        "description": "Altered task",
        "completed": False
    })
    assert response_alter.status_code == 200
    # check if the task has been altered in the list
    response_get = client.get('/task')
    assert response_get.json()[uuid] == {
        "description": "Altered task",
        "completed": False
    }


def test_alter_task_by_invalid_uuid_returns_not_found():
    # PATCH on a nonexistent uuid yields 404.
    response_create = client.post('/task', json={
        "description": "Task description",
        "completed": False
    })
    response_alter = client.patch('/task/{}'.format(uuid.uuid4()), json={
        "description": "Altered task",
        "completed": False
    })
    assert response_alter.status_code == 404
    assert response_alter.json() == {'detail': 'Task not found'}
"""Integration tests for the task-list API, driven through FastAPI's TestClient.

Fix applied: the local variable previously named ``uuid`` shadowed the
imported ``uuid`` module inside several tests; it is renamed ``task_uuid``
so ``uuid.uuid4()`` is always the module call it looks like.
"""
from fastapi.testclient import TestClient
from .main import app
import uuid

client = TestClient(app)


def test_read_main_returns_not_found():
    """The root path is not routed, so it must answer 404."""
    response = client.get('/')
    assert response.status_code == 404
    assert response.json() == {'detail': 'Not Found'}


def test_get_starter_tasks():
    """GET /task with no filter returns both seeded tasks."""
    response = client.get('/task')
    assert response.status_code == 200
    assert response.json() == {
        "44c0c224-6084-48d0-876b-43f30f157014": {
            "description": "Buy food",
            "completed": True
        },
        "953c3c2a-478b-48d7-9631-7b3113a1c4cc": {
            "description": "Finish exercise",
            "completed": False
        },
    }


def test_get_completed_starter_tasks():
    """GET /task?completed=true returns only the completed seed task."""
    response = client.get('/task', params={"completed": True})
    assert response.status_code == 200
    assert response.json() == {
        "44c0c224-6084-48d0-876b-43f30f157014": {
            "description": "Buy food",
            "completed": True
        }
    }


def test_get_not_completed_starter_tasks():
    """GET /task?completed=false returns only the pending seed task."""
    response = client.get('/task', params={"completed": False})
    assert response.status_code == 200
    assert response.json() == {
        "953c3c2a-478b-48d7-9631-7b3113a1c4cc": {
            "description": "Finish exercise",
            "completed": False
        }
    }


def test_create_task_and_returns_string():
    """POST /task answers with the new task's UUID and stores the task."""
    response = client.post('/task', json={
        "description": "Finish tasks",
        "completed": False
    })
    assert response.status_code == 200
    assert str(response.content)
    # check if the task has been inserted in the list
    response_get = client.get('/task')
    # Body is a JSON-encoded string: strip the surrounding quotes.
    task_uuid = response.content.decode('utf-8')[1:-1]
    assert response_get.json()[task_uuid] == {
        "description": "Finish tasks",
        "completed": False
    }


def test_create_invalid_task_returns_error():
    """Non-string description / non-bool completed fail validation (422)."""
    response = client.post('/task', json={
        "description": 123,
        "completed": 123
    })
    assert response.status_code == 422


def test_read_task_by_uuid():
    """A freshly created task can be read back by its UUID."""
    response_create = client.post('/task', json={
        "description": "Task description",
        "completed": False
    })
    task_uuid = response_create.content.decode('utf-8')[1:-1]
    response_read = client.get('/task/{}'.format(task_uuid))
    assert response_read.status_code == 200
    assert response_read.json() == {
        "description": "Task description",
        "completed": False
    }


def test_read_task_by_invalid_uuid_returns_not_found():
    """Reading a random (unknown) UUID answers 404 with a detail message."""
    response_create = client.post('/task', json={
        "description": "Task description",
        "completed": False
    })
    response_read = client.get('/task/{}'.format(uuid.uuid4()))
    assert response_read.status_code == 404
    assert response_read.json() == {'detail': 'Task not found'}


def test_replace_task_by_uuid():
    """PUT /task/{uuid} replaces the stored task wholesale."""
    response_create = client.post('/task', json={
        "description": "Task description",
        "completed": False
    })
    task_uuid = response_create.content.decode('utf-8')[1:-1]
    response_replace = client.put('/task/{}'.format(task_uuid), json={
        "description": "Replaced task",
        "completed": False
    })
    assert response_replace.status_code == 200
    # check if the task has been replaced in the list
    response_get = client.get('/task')
    assert response_get.json()[task_uuid] == {
        "description": "Replaced task",
        "completed": False
    }


def test_delete_task_by_uuid():
    """DELETE /task/{uuid} removes the task from the collection."""
    response_create = client.post('/task', json={
        "description": "Task description",
        "completed": False
    })
    task_uuid = response_create.content.decode('utf-8')[1:-1]
    response_delete = client.delete('/task/{}'.format(task_uuid))
    assert response_delete.status_code == 200
    # check if the task has been deleted from the list
    response_get = client.get('/task')
    assert task_uuid not in response_get.json()


def test_delete_task_by_invalid_uuid_returns_not_found():
    """Deleting a random (unknown) UUID answers 404."""
    response_create = client.post('/task', json={
        "description": "Task description",
        "completed": False
    })
    response_delete = client.delete('/task/{}'.format(uuid.uuid4()))
    assert response_delete.status_code == 404
    assert response_delete.json() == {'detail': 'Task not found'}


def test_alter_task_by_uuid():
    """PATCH /task/{uuid} alters the stored task."""
    response_create = client.post('/task', json={
        "description": "Task description",
        "completed": False
    })
    task_uuid = response_create.content.decode('utf-8')[1:-1]
    response_alter = client.patch('/task/{}'.format(task_uuid), json={
        "description": "Altered task",
        "completed": False
    })
    assert response_alter.status_code == 200
    # check if the task has been altered in the list
    response_get = client.get('/task')
    assert response_get.json()[task_uuid] == {
        "description": "Altered task",
        "completed": False
    }


def test_alter_task_by_invalid_uuid_returns_not_found():
    """Patching a random (unknown) UUID answers 404."""
    response_create = client.post('/task', json={
        "description": "Task description",
        "completed": False
    })
    response_alter = client.patch('/task/{}'.format(uuid.uuid4()), json={
        "description": "Altered task",
        "completed": False
    })
    assert response_alter.status_code == 404
    assert response_alter.json() == {'detail': 'Task not found'}
en
0.963041
# check if the task has been inserted in the list # check if the task has been replaced in the list # check if the task has been deleted from the list # check if the task has been altered in the list
2.645193
3
topi/tests/python/test_topi_pooling.py
wix-playground/incubator-tvm
1
6624606
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
"""Test code for pooling"""
import numpy as np
import tvm
import topi
import topi.testing
import math
from topi.util import get_const_tuple
from common import get_all_backend


def verify_pool(n, ic, ih, kh, sh, padding, pool_type, ceil_mode, count_include_pad=True):
    """Build a square 2-D NCHW pool followed by relu, assert the inferred
    output shape, and compare against a NumPy reference on every backend.

    padding is [top, left, bottom, right]; width params mirror the height ones.
    """
    iw = ih
    kw = kh
    sw = sh
    pt, pl, pb, pr = padding
    layout = "NCHW"
    A = tvm.placeholder((n, ic, ih, iw), name='A')
    B = topi.nn.pool(A, kernel=[kh, kw], stride=[sh, sw], padding=padding,
                     pool_type=pool_type, ceil_mode=ceil_mode,
                     layout="NCHW", count_include_pad=count_include_pad)
    B = topi.nn.relu(B)
    dtype = A.dtype
    bshape = get_const_tuple(B.shape)
    ashape = get_const_tuple(A.shape)
    # Standard pooling output-size formula, ceil or floor per ceil_mode.
    if ceil_mode:
        assert bshape[2] == int(math.ceil(float(ashape[2] - kh + pt + pb) / sh) + 1)
        assert bshape[3] == int(math.ceil(float(ashape[3] - kw + pl + pr) / sw) + 1)
    else:
        assert bshape[2] == int(math.floor(float(ashape[2] - kh + pt + pb) / sh) + 1)
        assert bshape[3] == int(math.floor(float(ashape[3] - kw + pl + pr) / sw) + 1)

    # low=0.001 keeps values strictly positive so relu never masks results
    # and the pad-count heuristic below (> 0) identifies real elements.
    a_np = np.random.uniform(low=0.001, size=(n, ic, ih, iw)).astype(dtype)
    pad_np = np.zeros(shape=(n, ic, ih+pt+pb, iw+pl+pr)).astype(dtype)
    no_zero = (range(n), range(ic), (range(pt, ih+pt)), (range(pl, iw+pl)))
    pad_np[np.ix_(*no_zero)] = a_np
    _, oc, oh, ow = get_const_tuple(B.shape)
    b_np = np.zeros(shape=(n, oc, oh, ow)).astype(dtype)

    if pool_type == 'avg':
        for i in range(oh):
            for j in range(ow):
                if count_include_pad:
                    b_np[:,:,i,j] = np.mean(pad_np[:, :, i*sh:i*sh+kh, j*sw:j*sw+kw], axis=(2,3))
                else:
                    # Divide by the number of non-pad elements in the window.
                    pad_count = np.sum(pad_np[:, :, i*sh:i*sh+kh, j*sw:j*sw+kw] > 0, axis=(2,3))
                    b_np[:,:,i,j] = np.sum(pad_np[:, :, i*sh:i*sh+kh, j*sw:j*sw+kw], axis=(2,3)) / np.maximum(pad_count, 1)
    elif pool_type =='max':
        for i in range(oh):
            for j in range(ow):
                b_np[:,:,i,j] = np.max(pad_np[:, :, i*sh:i*sh+kh, j*sw:j*sw+kw], axis=(2,3))
    b_np = np.maximum(b_np, 0.0)

    def check_device(device):
        """Compile and run the schedule on one backend, skipping absent ones."""
        ctx = tvm.context(device, 0)
        if not ctx.exist:
            print("Skip because %s is not enabled" % device)
            return
        print("Running on target: %s" % device)
        with tvm.target.create(device):
            s = topi.generic.schedule_pool(B, layout)
        a = tvm.nd.array(a_np, ctx)
        b = tvm.nd.array(np.zeros(get_const_tuple(B.shape), dtype=dtype), ctx)
        f = tvm.build(s, [A, B], device)
        f(a, b)
        tvm.testing.assert_allclose(b.asnumpy(), b_np, rtol=1e-5)

    for device in get_all_backend():
        check_device(device)


def verify_pool_grad(n, ic, ih, kh, sh, padding, pool_type, ceil_mode,
                     count_include_pad=True, add_relu=False):
    """Build pool + pool_grad, assert the inferred pooled shape, and compare
    the gradient against topi.testing.pool_grad_nchw on every backend."""
    iw = ih
    kw = kh
    sw = sh
    pt, pl, pb, pr = padding
    # Fix: removed unused `layout = "NCHW"` local — unlike verify_pool,
    # schedule_pool_grad takes no layout argument.
    A = tvm.placeholder((n, ic, ih, iw), name='A')
    B = topi.nn.pool(A, kernel=[kh, kw], stride=[sh, sw], padding=padding,
                     pool_type=pool_type, ceil_mode=ceil_mode,
                     layout="NCHW", count_include_pad=count_include_pad)
    dtype = A.dtype
    bshape = get_const_tuple(B.shape)
    ashape = get_const_tuple(A.shape)
    if ceil_mode:
        assert bshape[2] == int(math.ceil(float(ashape[2] - kh + pt + pb) / sh) + 1)
        assert bshape[3] == int(math.ceil(float(ashape[3] - kw + pl + pr) / sw) + 1)
    else:
        assert bshape[2] == int(math.floor(float(ashape[2] - kh + pt + pb) / sh) + 1)
        assert bshape[3] == int(math.floor(float(ashape[3] - kw + pl + pr) / sw) + 1)
    OutGrad = tvm.placeholder(bshape, name='OutGrad')
    PoolGrad = topi.nn.pool_grad(OutGrad, A, kernel=[kh, kw], stride=[sh, sw],
                                 padding=padding, pool_type=pool_type,
                                 ceil_mode=ceil_mode, layout="NCHW",
                                 count_include_pad=count_include_pad)
    if add_relu:
        PoolGrad = topi.nn.relu(PoolGrad)

    a_np = np.random.uniform(low=0.001, size=(n, ic, ih, iw)).astype(dtype)
    out_grad_np = np.random.uniform(low=0.001, size=bshape).astype(dtype)
    pool_grad_np = topi.testing.pool_grad_nchw(a_np, out_grad_np, pool_size=(kh, kw),
                                               strides=(sh, sw), padding=padding,
                                               pool_type=pool_type, ceil_mode=ceil_mode,
                                               count_include_pad=count_include_pad)
    if add_relu:
        pool_grad_np = np.maximum(pool_grad_np, 0.)

    def check_device(device):
        """Compile and run the gradient schedule on one backend."""
        ctx = tvm.context(device, 0)
        if not ctx.exist:
            print("Skip because %s is not enabled" % device)
            return
        print("Running on target: %s" % device)
        with tvm.target.create(device):
            s = topi.generic.schedule_pool_grad(PoolGrad)
        a = tvm.nd.array(a_np, ctx)
        out_grad = tvm.nd.array(out_grad_np, ctx)
        pool_grad = tvm.nd.array(np.zeros(get_const_tuple(PoolGrad.shape), dtype=dtype), ctx)
        f = tvm.build(s, [A, OutGrad, PoolGrad], device)
        f(a, out_grad, pool_grad)
        tvm.testing.assert_allclose(pool_grad.asnumpy(), pool_grad_np, rtol=1e-5)

    for device in get_all_backend():
        check_device(device)


def test_pool():
    """Exercise avg/max 2-D pooling across padding and ceil-mode variants."""
    verify_pool(1, 256, 32, 2, 2, [0, 0, 0, 0], 'avg', False, True)
    verify_pool(1, 256, 31, 3, 3, [1, 2, 1, 2], 'avg', False, True)
    verify_pool(1, 256, 32, 2, 2, [1, 2, 1, 2], 'avg', False, False)
    verify_pool(1, 256, 31, 4, 4, [3, 3, 3, 3], 'avg', False, False)
    verify_pool(1, 256, 31, 4, 4, [0, 0, 0, 0], 'avg', False, False)
    verify_pool(1, 256, 32, 2, 2, [0, 0, 0, 0], 'max', False)
    verify_pool(1, 256, 31, 3, 3, [2, 1, 2, 1], 'max', False)
    verify_pool(1, 256, 31, 3, 3, [2, 1, 2, 1], 'max', True)

    verify_pool(1, 256, 31, 3, 3, [2, 1, 0, 3], 'avg', False, True)
    verify_pool(1, 256, 32, 2, 2, [0, 3, 2, 1], 'avg', False, False)
    verify_pool(1, 256, 31, 3, 3, [1, 0, 3, 2], 'max', False)
    verify_pool(1, 256, 31, 3, 3, [3, 2, 1, 0], 'max', True)


def test_pool_grad():
    """Exercise pool_grad for avg/max pooling, asymmetric padding, and relu."""
    verify_pool_grad(1, 256, 32, 3, 2, [1, 1, 1, 1], 'avg', False, False)
    verify_pool_grad(1, 256, 32, 2, 2, [0, 0, 0, 0], 'avg', False, True)
    verify_pool_grad(1, 256, 31, 3, 3, [1, 2, 1, 2], 'avg', False, True)
    verify_pool_grad(1, 256, 32, 2, 2, [1, 2, 1, 2], 'avg', False, False)
    verify_pool_grad(1, 256, 31, 4, 4, [2, 2, 2, 2], 'avg', False, False)
    verify_pool_grad(1, 256, 31, 4, 4, [0, 0, 0, 0], 'avg', False, False)
    verify_pool_grad(1, 256, 32, 2, 2, [0, 0, 0, 0], 'max', False)
    verify_pool_grad(1, 256, 31, 3, 3, [2, 1, 2, 1], 'max', False)
    verify_pool_grad(1, 256, 31, 3, 3, [2, 1, 2, 1], 'max', True)

    verify_pool_grad(1, 256, 31, 3, 3, [2, 1, 0, 3], 'avg', False, True)
    verify_pool_grad(1, 256, 32, 2, 2, [0, 3, 2, 1], 'avg', False, False)
    verify_pool_grad(1, 256, 31, 3, 3, [1, 0, 3, 2], 'max', False)
    verify_pool_grad(1, 256, 31, 3, 3, [3, 2, 1, 0], 'max', True)
    verify_pool_grad(1, 256, 32, 3, 2, [1, 1, 1, 1], 'max', False)
    verify_pool_grad(1, 256, 32, 1, 2, [1, 1, 1, 1], 'avg', False, False)

    verify_pool_grad(1, 256, 31, 4, 4, [0, 0, 0, 0], 'avg', False, False, add_relu=True)
    verify_pool_grad(1, 256, 32, 2, 2, [0, 0, 0, 0], 'max', False, add_relu=True)


def verify_global_pool(n, c, h, w, pool_type):
    """Check global pooling (+relu) against a full-spatial NumPy reduction."""
    A = tvm.placeholder((n, c, h, w), name='A')
    B = topi.nn.global_pool(A, pool_type=pool_type)
    B = topi.nn.relu(B)

    a_np = np.random.uniform(size=get_const_tuple(A.shape)).astype(A.dtype)
    if pool_type == 'avg':
        b_np = np.mean(a_np, axis=(2,3), keepdims=True)
    elif pool_type =='max':
        b_np = np.max(a_np, axis=(2,3), keepdims=True)
    b_np = np.maximum(b_np, 0.0)

    def check_device(device):
        ctx = tvm.context(device, 0)
        if not ctx.exist:
            print("Skip because %s is not enabled" % device)
            return
        print("Running on target: %s" % device)
        with tvm.target.create(device):
            s = topi.generic.schedule_adaptive_pool(B)
        a = tvm.nd.array(a_np, ctx)
        b = tvm.nd.array(np.zeros(get_const_tuple(B.shape), dtype=B.dtype), ctx)
        f = tvm.build(s, [A, B], device)
        f(a, b)
        tvm.testing.assert_allclose(b.asnumpy(), b_np, rtol=1e-5)

    for device in get_all_backend():
        check_device(device)


def test_global_pool():
    """Global avg/max pooling for batch sizes 1 and 4."""
    verify_global_pool(1, 1024, 7, 7, 'avg')
    verify_global_pool(4, 1024, 7, 7, 'avg')
    verify_global_pool(1, 1024, 7, 7, 'max')
    verify_global_pool(4, 1024, 7, 7, 'max')


def verify_adaptive_pool(dshape, out_size, pool_type, layout="NCHW", dtype="float32"):
    """Check adaptive pooling against a per-output-cell NumPy reference."""
    def start_index(index, odim, idim):
        # Left edge of the adaptive window for output cell `index`.
        return int(np.floor(index * idim / odim))

    def end_index(index, odim, idim):
        # One past the right edge of the adaptive window.
        return int(np.ceil((index + 1) * idim / odim))

    np_data = np.random.uniform(low=0, high=255, size=dshape).astype(dtype)
    n, c, h, w = dshape
    oh, ow = out_size
    oshape = (n, c) + out_size
    np_out = np.zeros(oshape).astype(dtype)
    np_op = np.mean if pool_type == "avg" else np.max
    for i in range(n):
        for j in range(c):
            for k in range(oh):
                k_start = start_index(k, oh, h)
                k_end = end_index(k, oh, h)
                k_sl = slice(k_start, k_end)
                for l in range(ow):
                    l_start = start_index(l, ow, w)
                    l_end = end_index(l, ow, w)
                    l_sl = slice(l_start, l_end)
                    np_out[i, j, k, l] = np_op(np_data[i, j, k_sl, l_sl])

    data = tvm.placeholder(dshape, name="data", dtype=dtype)
    out = topi.nn.adaptive_pool(data, out_size, pool_type, layout)

    def check_device(device):
        ctx = tvm.context(device, 0)
        if not ctx.exist:
            print("Skip because %s is not enabled" % device)
            return
        print("Running on target: %s" % device)
        with tvm.target.create(device):
            s = topi.generic.schedule_adaptive_pool(out)
        a = tvm.nd.array(np_data, ctx)
        b = tvm.nd.array(np.zeros(get_const_tuple(oshape), dtype=out.dtype), ctx)
        f = tvm.build(s, [data, out], device)
        f(a, b)
        tvm.testing.assert_allclose(b.asnumpy(), np_out, rtol=1e-5)

    for device in get_all_backend():
        check_device(device)


def test_adaptive_pool():
    """Adaptive avg/max pooling, including non-divisible output sizes."""
    verify_adaptive_pool((1, 3, 224, 224), (1, 1), "max")
    verify_adaptive_pool((1, 3, 224, 224), (1, 1), "avg")
    verify_adaptive_pool((1, 14, 56, 78), (34, 13), "max")
    verify_adaptive_pool((1, 5, 46, 97), (4, 96), "avg")


def verify_pool3d(n, ic, ih, kh, sh, padding, pool_type, ceil_mode, count_include_pad=True):
    """Build a cubic 3-D NCDHW pool followed by relu, assert the inferred
    output shape, and compare against a NumPy reference on every backend.

    padding is [front, top, left, back, bottom, right]; depth/width params
    mirror the height ones.
    """
    iz = iw = ih
    kz = kw = kh
    sz = sw = sh
    pf, pt, pl, pk, pb, pr = padding
    layout = "NCDHW"
    A = tvm.placeholder((n, ic, iz, ih, iw), name='A')
    B = topi.nn.pool3d(A, kernel=[kz, kh, kw], stride=[sz, sh, sw], padding=padding,
                       pool_type=pool_type, ceil_mode=ceil_mode,
                       layout="NCDHW", count_include_pad=count_include_pad)
    B = topi.nn.relu(B)
    dtype = A.dtype
    bshape = get_const_tuple(B.shape)
    ashape = get_const_tuple(A.shape)
    if ceil_mode:
        assert bshape[2] == int(math.ceil(float(ashape[2] - kz + pf + pk) / sz) + 1)
        assert bshape[3] == int(math.ceil(float(ashape[3] - kh + pt + pb) / sh) + 1)
        assert bshape[4] == int(math.ceil(float(ashape[4] - kw + pl + pr) / sw) + 1)
    else:
        assert bshape[2] == int(math.floor(float(ashape[2] - kz + pf + pk) / sz) + 1)
        assert bshape[3] == int(math.floor(float(ashape[3] - kh + pt + pb) / sh) + 1)
        assert bshape[4] == int(math.floor(float(ashape[4] - kw + pl + pr) / sw) + 1)

    a_np = np.random.uniform(low=0.001, size=(n, ic, iz, ih, iw)).astype(dtype)
    pad_np = np.zeros(shape=(n, ic, iz+pf+pk, ih+pt+pb, iw+pl+pr)).astype(dtype)
    no_zero = (range(n), range(ic), (range(pf, iz+pf)), (range(pt, ih+pt)), (range(pl, iw+pl)))
    pad_np[np.ix_(*no_zero)] = a_np
    _, oc, oz, oh, ow = get_const_tuple(B.shape)
    b_np = np.zeros(shape=(n, oc, oz, oh, ow)).astype(dtype)

    if pool_type == 'avg':
        for k in range(oz):
            for i in range(oh):
                for j in range(ow):
                    if count_include_pad:
                        b_np[:,:,k,i,j] = np.mean( \
                            pad_np[:, :, k*sz:k*sz+kz, i*sh:i*sh+kh, j*sw:j*sw+kw], axis=(2,3,4))
                    else:
                        pad_count = np.sum( \
                            pad_np[:, :, k*sz:k*sz+kz, i*sh:i*sh+kh, j*sw:j*sw+kw] > 0, axis=(2,3,4))
                        b_np[:,:,k,i,j] = np.sum(pad_np[:, :, k*sz:k*sz+kz, i*sh:i*sh+kh, j*sw:j*sw+kw], \
                            axis=(2,3, 4)) / np.maximum(pad_count, 1)
    elif pool_type =='max':
        for k in range(oz):
            for i in range(oh):
                for j in range(ow):
                    b_np[:,:,k,i,j] = np.max( \
                        pad_np[:, :, k*sz:k*sz+kz, i*sh:i*sh+kh, j*sw:j*sw+kw], axis=(2,3,4))
    b_np = np.maximum(b_np, 0.0)

    def check_device(device):
        ctx = tvm.context(device, 0)
        if not ctx.exist:
            print("Skip because %s is not enabled" % device)
            return
        print("Running on target: %s" % device)
        with tvm.target.create(device):
            s = topi.generic.schedule_pool(B, layout)
        a = tvm.nd.array(a_np, ctx)
        b = tvm.nd.array(np.zeros(get_const_tuple(B.shape), dtype=dtype), ctx)
        f = tvm.build(s, [A, B], device)
        f(a, b)
        tvm.testing.assert_allclose(b.asnumpy(), b_np, rtol=1e-5)

    for device in get_all_backend():
        check_device(device)


def test_pool3d():
    """Exercise avg/max 3-D pooling across padding and ceil-mode variants."""
    verify_pool3d(1, 256, 32, 2, 2, [0, 0, 0, 0, 0, 0], 'avg', False, True)
    verify_pool3d(1, 256, 31, 3, 3, [1, 1, 2, 2, 2, 1], 'avg', False, True)
    verify_pool3d(1, 256, 32, 2, 2, [1, 1, 2, 2, 2, 1], 'avg', False, False)
    verify_pool3d(1, 256, 31, 4, 4, [3, 3, 3, 3, 3, 3], 'avg', False, False)
    verify_pool3d(1, 256, 31, 4, 4, [0, 0, 0, 0, 0, 0], 'avg', False, False)
    verify_pool3d(1, 256, 32, 2, 2, [0, 0, 0, 0, 0, 0], 'max', False)
    verify_pool3d(1, 256, 31, 3, 3, [2, 2, 1, 1, 1, 2], 'max', False)
    verify_pool3d(1, 256, 31, 3, 3, [2, 2, 1, 1, 1, 2], 'max', True)

    verify_pool3d(1, 256, 31, 3, 3, [2, 1, 0, 5, 4, 3], 'avg', False, True)
    verify_pool3d(1, 256, 32, 2, 2, [0, 5, 4, 3, 2, 1], 'avg', False, False)
    verify_pool3d(1, 256, 31, 3, 3, [1, 0, 5, 4, 3, 2], 'max', False)
    verify_pool3d(1, 256, 31, 3, 3, [3, 2, 1, 0, 5, 4], 'max', True)


if __name__ == "__main__":
    test_pool()
    test_pool_grad()
    test_global_pool()
    test_adaptive_pool()
    test_pool3d()
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
"""Test code for pooling"""
import numpy as np
import tvm
import topi
import topi.testing
import math
from topi.util import get_const_tuple
from common import get_all_backend


def verify_pool(n, ic, ih, kh, sh, padding, pool_type, ceil_mode, count_include_pad=True):
    """Build a square 2-D NCHW pool followed by relu, assert the inferred
    output shape, and compare against a NumPy reference on every backend.

    padding is [top, left, bottom, right]; width params mirror the height ones.
    """
    iw = ih
    kw = kh
    sw = sh
    pt, pl, pb, pr = padding
    layout = "NCHW"
    A = tvm.placeholder((n, ic, ih, iw), name='A')
    B = topi.nn.pool(A, kernel=[kh, kw], stride=[sh, sw], padding=padding,
                     pool_type=pool_type, ceil_mode=ceil_mode,
                     layout="NCHW", count_include_pad=count_include_pad)
    B = topi.nn.relu(B)
    dtype = A.dtype
    bshape = get_const_tuple(B.shape)
    ashape = get_const_tuple(A.shape)
    # Standard pooling output-size formula, ceil or floor per ceil_mode.
    if ceil_mode:
        assert bshape[2] == int(math.ceil(float(ashape[2] - kh + pt + pb) / sh) + 1)
        assert bshape[3] == int(math.ceil(float(ashape[3] - kw + pl + pr) / sw) + 1)
    else:
        assert bshape[2] == int(math.floor(float(ashape[2] - kh + pt + pb) / sh) + 1)
        assert bshape[3] == int(math.floor(float(ashape[3] - kw + pl + pr) / sw) + 1)

    # low=0.001 keeps values strictly positive so relu never masks results
    # and the pad-count heuristic below (> 0) identifies real elements.
    a_np = np.random.uniform(low=0.001, size=(n, ic, ih, iw)).astype(dtype)
    pad_np = np.zeros(shape=(n, ic, ih+pt+pb, iw+pl+pr)).astype(dtype)
    no_zero = (range(n), range(ic), (range(pt, ih+pt)), (range(pl, iw+pl)))
    pad_np[np.ix_(*no_zero)] = a_np
    _, oc, oh, ow = get_const_tuple(B.shape)
    b_np = np.zeros(shape=(n, oc, oh, ow)).astype(dtype)

    if pool_type == 'avg':
        for i in range(oh):
            for j in range(ow):
                if count_include_pad:
                    b_np[:,:,i,j] = np.mean(pad_np[:, :, i*sh:i*sh+kh, j*sw:j*sw+kw], axis=(2,3))
                else:
                    # Divide by the number of non-pad elements in the window.
                    pad_count = np.sum(pad_np[:, :, i*sh:i*sh+kh, j*sw:j*sw+kw] > 0, axis=(2,3))
                    b_np[:,:,i,j] = np.sum(pad_np[:, :, i*sh:i*sh+kh, j*sw:j*sw+kw], axis=(2,3)) / np.maximum(pad_count, 1)
    elif pool_type =='max':
        for i in range(oh):
            for j in range(ow):
                b_np[:,:,i,j] = np.max(pad_np[:, :, i*sh:i*sh+kh, j*sw:j*sw+kw], axis=(2,3))
    b_np = np.maximum(b_np, 0.0)

    def check_device(device):
        """Compile and run the schedule on one backend, skipping absent ones."""
        ctx = tvm.context(device, 0)
        if not ctx.exist:
            print("Skip because %s is not enabled" % device)
            return
        print("Running on target: %s" % device)
        with tvm.target.create(device):
            s = topi.generic.schedule_pool(B, layout)
        a = tvm.nd.array(a_np, ctx)
        b = tvm.nd.array(np.zeros(get_const_tuple(B.shape), dtype=dtype), ctx)
        f = tvm.build(s, [A, B], device)
        f(a, b)
        tvm.testing.assert_allclose(b.asnumpy(), b_np, rtol=1e-5)

    for device in get_all_backend():
        check_device(device)


def verify_pool_grad(n, ic, ih, kh, sh, padding, pool_type, ceil_mode,
                     count_include_pad=True, add_relu=False):
    """Build pool + pool_grad, assert the inferred pooled shape, and compare
    the gradient against topi.testing.pool_grad_nchw on every backend."""
    iw = ih
    kw = kh
    sw = sh
    pt, pl, pb, pr = padding
    # Fix: removed unused `layout = "NCHW"` local — unlike verify_pool,
    # schedule_pool_grad takes no layout argument.
    A = tvm.placeholder((n, ic, ih, iw), name='A')
    B = topi.nn.pool(A, kernel=[kh, kw], stride=[sh, sw], padding=padding,
                     pool_type=pool_type, ceil_mode=ceil_mode,
                     layout="NCHW", count_include_pad=count_include_pad)
    dtype = A.dtype
    bshape = get_const_tuple(B.shape)
    ashape = get_const_tuple(A.shape)
    if ceil_mode:
        assert bshape[2] == int(math.ceil(float(ashape[2] - kh + pt + pb) / sh) + 1)
        assert bshape[3] == int(math.ceil(float(ashape[3] - kw + pl + pr) / sw) + 1)
    else:
        assert bshape[2] == int(math.floor(float(ashape[2] - kh + pt + pb) / sh) + 1)
        assert bshape[3] == int(math.floor(float(ashape[3] - kw + pl + pr) / sw) + 1)
    OutGrad = tvm.placeholder(bshape, name='OutGrad')
    PoolGrad = topi.nn.pool_grad(OutGrad, A, kernel=[kh, kw], stride=[sh, sw],
                                 padding=padding, pool_type=pool_type,
                                 ceil_mode=ceil_mode, layout="NCHW",
                                 count_include_pad=count_include_pad)
    if add_relu:
        PoolGrad = topi.nn.relu(PoolGrad)

    a_np = np.random.uniform(low=0.001, size=(n, ic, ih, iw)).astype(dtype)
    out_grad_np = np.random.uniform(low=0.001, size=bshape).astype(dtype)
    pool_grad_np = topi.testing.pool_grad_nchw(a_np, out_grad_np, pool_size=(kh, kw),
                                               strides=(sh, sw), padding=padding,
                                               pool_type=pool_type, ceil_mode=ceil_mode,
                                               count_include_pad=count_include_pad)
    if add_relu:
        pool_grad_np = np.maximum(pool_grad_np, 0.)

    def check_device(device):
        """Compile and run the gradient schedule on one backend."""
        ctx = tvm.context(device, 0)
        if not ctx.exist:
            print("Skip because %s is not enabled" % device)
            return
        print("Running on target: %s" % device)
        with tvm.target.create(device):
            s = topi.generic.schedule_pool_grad(PoolGrad)
        a = tvm.nd.array(a_np, ctx)
        out_grad = tvm.nd.array(out_grad_np, ctx)
        pool_grad = tvm.nd.array(np.zeros(get_const_tuple(PoolGrad.shape), dtype=dtype), ctx)
        f = tvm.build(s, [A, OutGrad, PoolGrad], device)
        f(a, out_grad, pool_grad)
        tvm.testing.assert_allclose(pool_grad.asnumpy(), pool_grad_np, rtol=1e-5)

    for device in get_all_backend():
        check_device(device)


def test_pool():
    """Exercise avg/max 2-D pooling across padding and ceil-mode variants."""
    verify_pool(1, 256, 32, 2, 2, [0, 0, 0, 0], 'avg', False, True)
    verify_pool(1, 256, 31, 3, 3, [1, 2, 1, 2], 'avg', False, True)
    verify_pool(1, 256, 32, 2, 2, [1, 2, 1, 2], 'avg', False, False)
    verify_pool(1, 256, 31, 4, 4, [3, 3, 3, 3], 'avg', False, False)
    verify_pool(1, 256, 31, 4, 4, [0, 0, 0, 0], 'avg', False, False)
    verify_pool(1, 256, 32, 2, 2, [0, 0, 0, 0], 'max', False)
    verify_pool(1, 256, 31, 3, 3, [2, 1, 2, 1], 'max', False)
    verify_pool(1, 256, 31, 3, 3, [2, 1, 2, 1], 'max', True)

    verify_pool(1, 256, 31, 3, 3, [2, 1, 0, 3], 'avg', False, True)
    verify_pool(1, 256, 32, 2, 2, [0, 3, 2, 1], 'avg', False, False)
    verify_pool(1, 256, 31, 3, 3, [1, 0, 3, 2], 'max', False)
    verify_pool(1, 256, 31, 3, 3, [3, 2, 1, 0], 'max', True)


def test_pool_grad():
    """Exercise pool_grad for avg/max pooling, asymmetric padding, and relu."""
    verify_pool_grad(1, 256, 32, 3, 2, [1, 1, 1, 1], 'avg', False, False)
    verify_pool_grad(1, 256, 32, 2, 2, [0, 0, 0, 0], 'avg', False, True)
    verify_pool_grad(1, 256, 31, 3, 3, [1, 2, 1, 2], 'avg', False, True)
    verify_pool_grad(1, 256, 32, 2, 2, [1, 2, 1, 2], 'avg', False, False)
    verify_pool_grad(1, 256, 31, 4, 4, [2, 2, 2, 2], 'avg', False, False)
    verify_pool_grad(1, 256, 31, 4, 4, [0, 0, 0, 0], 'avg', False, False)
    verify_pool_grad(1, 256, 32, 2, 2, [0, 0, 0, 0], 'max', False)
    verify_pool_grad(1, 256, 31, 3, 3, [2, 1, 2, 1], 'max', False)
    verify_pool_grad(1, 256, 31, 3, 3, [2, 1, 2, 1], 'max', True)

    verify_pool_grad(1, 256, 31, 3, 3, [2, 1, 0, 3], 'avg', False, True)
    verify_pool_grad(1, 256, 32, 2, 2, [0, 3, 2, 1], 'avg', False, False)
    verify_pool_grad(1, 256, 31, 3, 3, [1, 0, 3, 2], 'max', False)
    verify_pool_grad(1, 256, 31, 3, 3, [3, 2, 1, 0], 'max', True)
    verify_pool_grad(1, 256, 32, 3, 2, [1, 1, 1, 1], 'max', False)
    verify_pool_grad(1, 256, 32, 1, 2, [1, 1, 1, 1], 'avg', False, False)

    verify_pool_grad(1, 256, 31, 4, 4, [0, 0, 0, 0], 'avg', False, False, add_relu=True)
    verify_pool_grad(1, 256, 32, 2, 2, [0, 0, 0, 0], 'max', False, add_relu=True)


def verify_global_pool(n, c, h, w, pool_type):
    """Check global pooling (+relu) against a full-spatial NumPy reduction."""
    A = tvm.placeholder((n, c, h, w), name='A')
    B = topi.nn.global_pool(A, pool_type=pool_type)
    B = topi.nn.relu(B)

    a_np = np.random.uniform(size=get_const_tuple(A.shape)).astype(A.dtype)
    if pool_type == 'avg':
        b_np = np.mean(a_np, axis=(2,3), keepdims=True)
    elif pool_type =='max':
        b_np = np.max(a_np, axis=(2,3), keepdims=True)
    b_np = np.maximum(b_np, 0.0)

    def check_device(device):
        ctx = tvm.context(device, 0)
        if not ctx.exist:
            print("Skip because %s is not enabled" % device)
            return
        print("Running on target: %s" % device)
        with tvm.target.create(device):
            s = topi.generic.schedule_adaptive_pool(B)
        a = tvm.nd.array(a_np, ctx)
        b = tvm.nd.array(np.zeros(get_const_tuple(B.shape), dtype=B.dtype), ctx)
        f = tvm.build(s, [A, B], device)
        f(a, b)
        tvm.testing.assert_allclose(b.asnumpy(), b_np, rtol=1e-5)

    for device in get_all_backend():
        check_device(device)


def test_global_pool():
    """Global avg/max pooling for batch sizes 1 and 4."""
    verify_global_pool(1, 1024, 7, 7, 'avg')
    verify_global_pool(4, 1024, 7, 7, 'avg')
    verify_global_pool(1, 1024, 7, 7, 'max')
    verify_global_pool(4, 1024, 7, 7, 'max')


def verify_adaptive_pool(dshape, out_size, pool_type, layout="NCHW", dtype="float32"):
    """Check adaptive pooling against a per-output-cell NumPy reference."""
    def start_index(index, odim, idim):
        # Left edge of the adaptive window for output cell `index`.
        return int(np.floor(index * idim / odim))

    def end_index(index, odim, idim):
        # One past the right edge of the adaptive window.
        return int(np.ceil((index + 1) * idim / odim))

    np_data = np.random.uniform(low=0, high=255, size=dshape).astype(dtype)
    n, c, h, w = dshape
    oh, ow = out_size
    oshape = (n, c) + out_size
    np_out = np.zeros(oshape).astype(dtype)
    np_op = np.mean if pool_type == "avg" else np.max
    for i in range(n):
        for j in range(c):
            for k in range(oh):
                k_start = start_index(k, oh, h)
                k_end = end_index(k, oh, h)
                k_sl = slice(k_start, k_end)
                for l in range(ow):
                    l_start = start_index(l, ow, w)
                    l_end = end_index(l, ow, w)
                    l_sl = slice(l_start, l_end)
                    np_out[i, j, k, l] = np_op(np_data[i, j, k_sl, l_sl])

    data = tvm.placeholder(dshape, name="data", dtype=dtype)
    out = topi.nn.adaptive_pool(data, out_size, pool_type, layout)

    def check_device(device):
        ctx = tvm.context(device, 0)
        if not ctx.exist:
            print("Skip because %s is not enabled" % device)
            return
        print("Running on target: %s" % device)
        with tvm.target.create(device):
            s = topi.generic.schedule_adaptive_pool(out)
        a = tvm.nd.array(np_data, ctx)
        b = tvm.nd.array(np.zeros(get_const_tuple(oshape), dtype=out.dtype), ctx)
        f = tvm.build(s, [data, out], device)
        f(a, b)
        tvm.testing.assert_allclose(b.asnumpy(), np_out, rtol=1e-5)

    for device in get_all_backend():
        check_device(device)


def test_adaptive_pool():
    """Adaptive avg/max pooling, including non-divisible output sizes."""
    verify_adaptive_pool((1, 3, 224, 224), (1, 1), "max")
    verify_adaptive_pool((1, 3, 224, 224), (1, 1), "avg")
    verify_adaptive_pool((1, 14, 56, 78), (34, 13), "max")
    verify_adaptive_pool((1, 5, 46, 97), (4, 96), "avg")


def verify_pool3d(n, ic, ih, kh, sh, padding, pool_type, ceil_mode, count_include_pad=True):
    """Build a cubic 3-D NCDHW pool followed by relu, assert the inferred
    output shape, and compare against a NumPy reference on every backend.

    padding is [front, top, left, back, bottom, right]; depth/width params
    mirror the height ones.
    """
    iz = iw = ih
    kz = kw = kh
    sz = sw = sh
    pf, pt, pl, pk, pb, pr = padding
    layout = "NCDHW"
    A = tvm.placeholder((n, ic, iz, ih, iw), name='A')
    B = topi.nn.pool3d(A, kernel=[kz, kh, kw], stride=[sz, sh, sw], padding=padding,
                       pool_type=pool_type, ceil_mode=ceil_mode,
                       layout="NCDHW", count_include_pad=count_include_pad)
    B = topi.nn.relu(B)
    dtype = A.dtype
    bshape = get_const_tuple(B.shape)
    ashape = get_const_tuple(A.shape)
    if ceil_mode:
        assert bshape[2] == int(math.ceil(float(ashape[2] - kz + pf + pk) / sz) + 1)
        assert bshape[3] == int(math.ceil(float(ashape[3] - kh + pt + pb) / sh) + 1)
        assert bshape[4] == int(math.ceil(float(ashape[4] - kw + pl + pr) / sw) + 1)
    else:
        assert bshape[2] == int(math.floor(float(ashape[2] - kz + pf + pk) / sz) + 1)
        assert bshape[3] == int(math.floor(float(ashape[3] - kh + pt + pb) / sh) + 1)
        assert bshape[4] == int(math.floor(float(ashape[4] - kw + pl + pr) / sw) + 1)

    a_np = np.random.uniform(low=0.001, size=(n, ic, iz, ih, iw)).astype(dtype)
    pad_np = np.zeros(shape=(n, ic, iz+pf+pk, ih+pt+pb, iw+pl+pr)).astype(dtype)
    no_zero = (range(n), range(ic), (range(pf, iz+pf)), (range(pt, ih+pt)), (range(pl, iw+pl)))
    pad_np[np.ix_(*no_zero)] = a_np
    _, oc, oz, oh, ow = get_const_tuple(B.shape)
    b_np = np.zeros(shape=(n, oc, oz, oh, ow)).astype(dtype)

    if pool_type == 'avg':
        for k in range(oz):
            for i in range(oh):
                for j in range(ow):
                    if count_include_pad:
                        b_np[:,:,k,i,j] = np.mean( \
                            pad_np[:, :, k*sz:k*sz+kz, i*sh:i*sh+kh, j*sw:j*sw+kw], axis=(2,3,4))
                    else:
                        pad_count = np.sum( \
                            pad_np[:, :, k*sz:k*sz+kz, i*sh:i*sh+kh, j*sw:j*sw+kw] > 0, axis=(2,3,4))
                        b_np[:,:,k,i,j] = np.sum(pad_np[:, :, k*sz:k*sz+kz, i*sh:i*sh+kh, j*sw:j*sw+kw], \
                            axis=(2,3, 4)) / np.maximum(pad_count, 1)
    elif pool_type =='max':
        for k in range(oz):
            for i in range(oh):
                for j in range(ow):
                    b_np[:,:,k,i,j] = np.max( \
                        pad_np[:, :, k*sz:k*sz+kz, i*sh:i*sh+kh, j*sw:j*sw+kw], axis=(2,3,4))
    b_np = np.maximum(b_np, 0.0)

    def check_device(device):
        ctx = tvm.context(device, 0)
        if not ctx.exist:
            print("Skip because %s is not enabled" % device)
            return
        print("Running on target: %s" % device)
        with tvm.target.create(device):
            s = topi.generic.schedule_pool(B, layout)
        a = tvm.nd.array(a_np, ctx)
        b = tvm.nd.array(np.zeros(get_const_tuple(B.shape), dtype=dtype), ctx)
        f = tvm.build(s, [A, B], device)
        f(a, b)
        tvm.testing.assert_allclose(b.asnumpy(), b_np, rtol=1e-5)

    for device in get_all_backend():
        check_device(device)


def test_pool3d():
    """Exercise avg/max 3-D pooling across padding and ceil-mode variants."""
    verify_pool3d(1, 256, 32, 2, 2, [0, 0, 0, 0, 0, 0], 'avg', False, True)
    verify_pool3d(1, 256, 31, 3, 3, [1, 1, 2, 2, 2, 1], 'avg', False, True)
    verify_pool3d(1, 256, 32, 2, 2, [1, 1, 2, 2, 2, 1], 'avg', False, False)
    verify_pool3d(1, 256, 31, 4, 4, [3, 3, 3, 3, 3, 3], 'avg', False, False)
    verify_pool3d(1, 256, 31, 4, 4, [0, 0, 0, 0, 0, 0], 'avg', False, False)
    verify_pool3d(1, 256, 32, 2, 2, [0, 0, 0, 0, 0, 0], 'max', False)
    verify_pool3d(1, 256, 31, 3, 3, [2, 2, 1, 1, 1, 2], 'max', False)
    verify_pool3d(1, 256, 31, 3, 3, [2, 2, 1, 1, 1, 2], 'max', True)

    verify_pool3d(1, 256, 31, 3, 3, [2, 1, 0, 5, 4, 3], 'avg', False, True)
    verify_pool3d(1, 256, 32, 2, 2, [0, 5, 4, 3, 2, 1], 'avg', False, False)
    verify_pool3d(1, 256, 31, 3, 3, [1, 0, 5, 4, 3, 2], 'max', False)
    verify_pool3d(1, 256, 31, 3, 3, [3, 2, 1, 0, 5, 4], 'max', True)


if __name__ == "__main__":
    test_pool()
    test_pool_grad()
    test_global_pool()
    test_adaptive_pool()
    test_pool3d()
en
0.863529
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. Test code for pooling
2.165362
2
tensorflow/python/kernel_tests/pack_op_test.py
yxiong/tensorflow
6
6624607
<gh_stars>1-10 # Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Functional tests for Pack Op.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np import tensorflow as tf def np_split_sqeeze(array, axis): axis_len = array.shape[axis] return [ np.squeeze(arr, axis=(axis,)) for arr in np.split(array, axis_len, axis=axis) ] class PackOpTest(tf.test.TestCase): def testSimple(self): np.random.seed(7) for use_gpu in False, True: with self.test_session(use_gpu=use_gpu): for shape in (2,), (3,), (2, 3), (3, 2), (4, 3, 2): data = np.random.randn(*shape) # Convert [data[0], data[1], ...] 
separately to tensorflow # TODO(irving): Remove list() once we handle maps correctly xs = list(map(tf.constant, data)) # Pack back into a single tensorflow tensor c = tf.pack(xs) self.assertAllEqual(c.eval(), data) def testConst(self): np.random.seed(7) for use_gpu in False, True: with self.test_session(use_gpu=use_gpu): for shape in (2,), (3,), (2, 3), (3, 2), (4, 3, 2): data = np.random.randn(*shape).astype(np.float32) # Pack back into a single tensorflow tensor directly using np array c = tf.pack(data) # This is implemented via a Const: self.assertEqual(c.op.type, "Const") self.assertAllEqual(c.eval(), data) # Python lists also work for 1-D case: if len(shape) == 1: data_list = list(data) cl = tf.pack(data_list) self.assertEqual(cl.op.type, "Const") self.assertAllEqual(cl.eval(), data) # Verify that shape induction works with shapes produced via const pack a = tf.constant([1, 2, 3, 4, 5, 6]) b = tf.reshape(a, tf.pack([2, 3])) self.assertAllEqual(b.get_shape(), [2, 3]) def testGradientsAxis0(self): np.random.seed(7) for use_gpu in False, True: for shape in (2,), (3,), (2, 3), (3, 2), (4, 3, 2): data = np.random.randn(*shape) shapes = [shape[1:]] * shape[0] with self.test_session(use_gpu=use_gpu): # TODO(irving): Remove list() once we handle maps correctly xs = list(map(tf.constant, data)) c = tf.pack(xs) err = tf.test.compute_gradient_error(xs, shapes, c, shape) self.assertLess(err, 1e-6) def testGradientsAxis1(self): np.random.seed(7) for use_gpu in False, True: for shape in (2, 3), (3, 2), (4, 3, 2): data = np.random.randn(*shape) shapes = [shape[1:]] * shape[0] out_shape = list(shape[1:]) out_shape.insert(1, shape[0]) with self.test_session(use_gpu=use_gpu): # TODO(irving): Remove list() once we handle maps correctly xs = list(map(tf.constant, data)) c = tf.pack(xs, axis=1) err = tf.test.compute_gradient_error(xs, shapes, c, out_shape) self.assertLess(err, 1e-6) def testZeroSize(self): # Verify that pack doesn't crash for zero size inputs for use_gpu in False, 
True: with self.test_session(use_gpu=use_gpu): for shape in (0,), (3,0), (0, 3): x = np.zeros((2,) + shape) p = tf.pack(list(x)).eval() self.assertAllEqual(p, x) def testAxis0Default(self): with self.test_session(): t = [tf.constant([1, 2, 3]), tf.constant([4, 5, 6])] packed = tf.pack(t).eval() self.assertAllEqual(packed, np.array([[1, 2, 3], [4, 5, 6]])) def testAgainstNumpy(self): # For 1 to 5 dimensions. for i in range(1, 6): expected = np.random.random(np.random.permutation(i) + 1) # For all the possible axis to split it, including negative indices. for j in range(-i, i): test_arrays = np_split_sqeeze(expected, j) with self.test_session(): actual = tf.pack(test_arrays, axis=j) self.assertEqual(expected.shape, actual.get_shape()) actual = actual.eval() self.assertNDArrayNear(expected, actual, 1e-6) def testDimOutOfRange(self): t = [tf.constant([1, 2, 3]), tf.constant([4, 5, 6])] with self.assertRaisesRegexp(ValueError, r"axis = 2 not in \[-2, 2\)"): tf.unpack(t, axis=2) def testDimOutOfNegativeRange(self): t = [tf.constant([1, 2, 3]), tf.constant([4, 5, 6])] with self.assertRaisesRegexp(ValueError, r"axis = -3 not in \[-2, 2\)"): tf.unpack(t, axis=-3) class AutomaticPackingTest(tf.test.TestCase): def testSimple(self): with self.test_session(): self.assertAllEqual([1, 0, 2], tf.convert_to_tensor([1, tf.constant(0), 2]).eval()) self.assertAllEqual( [[0, 0, 0], [0, 1, 0], [0, 0, 0]], tf.convert_to_tensor([[0, 0, 0], [0, tf.constant(1), 0], [0, 0, 0]]).eval()) self.assertAllEqual( [[0, 0, 0], [0, 1, 0], [0, 0, 0]], tf.convert_to_tensor([[0, 0, 0], tf.constant([0, 1, 0]), [0, 0, 0]]).eval()) self.assertAllEqual( [[0, 0, 0], [0, 1, 0], [0, 0, 0]], tf.convert_to_tensor([tf.constant([0, 0, 0]), tf.constant([0, 1, 0]), tf.constant([0, 0, 0])]).eval()) def testWithNDArray(self): with self.test_session(): result = tf.convert_to_tensor([[[0., 0.], tf.constant([1., 1.])], np.array([[2., 2.], [3., 3.]], dtype=np.float32)]) self.assertAllEqual( [[[0., 0.], [1., 1.]], [[2., 
2.], [3., 3.]]], result.eval()) def testVariable(self): with self.test_session(): v = tf.Variable(17) result = tf.convert_to_tensor([[0, 0, 0], [0, v, 0], [0, 0, 0]]) v.initializer.run() self.assertAllEqual([[0, 0, 0], [0, 17, 0], [0, 0, 0]], result.eval()) v.assign(38).op.run() self.assertAllEqual([[0, 0, 0], [0, 38, 0], [0, 0, 0]], result.eval()) def testDtype(self): t_0 = tf.convert_to_tensor([[0., 0., 0.], [0., 0., 0.], [0., 0., 0.]]) self.assertEqual(tf.float32, t_0.dtype) t_1 = tf.convert_to_tensor([[0., 0., 0.], tf.constant([0., 0., 0.], dtype=tf.float64), [0., 0., 0.]]) self.assertEqual(tf.float64, t_1.dtype) t_2 = tf.convert_to_tensor([[0., 0., 0.], [0., 0., 0.], [0., 0., 0.]], dtype=tf.float64) self.assertEqual(tf.float64, t_2.dtype) with self.assertRaises(TypeError): tf.convert_to_tensor([tf.constant([0., 0., 0.], dtype=tf.float32), tf.constant([0., 0., 0.], dtype=tf.float64), [0., 0., 0.]]) with self.assertRaises(TypeError): tf.convert_to_tensor([[0., 0., 0.], tf.constant([0., 0., 0.], dtype=tf.float64), [0., 0., 0.]], dtype=tf.float32) with self.assertRaises(TypeError): tf.convert_to_tensor([tf.constant([0., 0., 0.], dtype=tf.float64)], dtype=tf.float32) def testPlaceholder(self): with self.test_session(): # Test using placeholder with a defined shape. ph_0 = tf.placeholder(tf.int32, shape=[]) result_0 = tf.convert_to_tensor([[0, 0, 0], [0, ph_0, 0], [0, 0, 0]]) self.assertAllEqual([[0, 0, 0], [0, 1, 0], [0, 0, 0]], result_0.eval(feed_dict={ph_0: 1})) self.assertAllEqual([[0, 0, 0], [0, 2, 0], [0, 0, 0]], result_0.eval(feed_dict={ph_0: 2})) # Test using placeholder with an undefined shape. ph_1 = tf.placeholder(tf.int32) result_1 = tf.convert_to_tensor([[0, 0, 0], [0, ph_1, 0], [0, 0, 0]]) self.assertAllEqual([[0, 0, 0], [0, 1, 0], [0, 0, 0]], result_1.eval(feed_dict={ph_1: 1})) self.assertAllEqual([[0, 0, 0], [0, 2, 0], [0, 0, 0]], result_1.eval(feed_dict={ph_1: 2})) def testShapeErrors(self): # Static shape error. 
ph_0 = tf.placeholder(tf.int32, shape=[1]) with self.assertRaises(ValueError): tf.convert_to_tensor([[0, 0, 0], [0, ph_0, 0], [0, 0, 0]]) # Dynamic shape error. ph_1 = tf.placeholder(tf.int32) result_1 = tf.convert_to_tensor([[0, 0, 0], [0, ph_1, 0], [0, 0, 0]]) with self.test_session(): with self.assertRaises(tf.errors.InvalidArgumentError): result_1.eval(feed_dict={ph_1: [1]}) if __name__ == "__main__": tf.test.main()
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Functional tests for Pack Op.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np import tensorflow as tf def np_split_sqeeze(array, axis): axis_len = array.shape[axis] return [ np.squeeze(arr, axis=(axis,)) for arr in np.split(array, axis_len, axis=axis) ] class PackOpTest(tf.test.TestCase): def testSimple(self): np.random.seed(7) for use_gpu in False, True: with self.test_session(use_gpu=use_gpu): for shape in (2,), (3,), (2, 3), (3, 2), (4, 3, 2): data = np.random.randn(*shape) # Convert [data[0], data[1], ...] 
separately to tensorflow # TODO(irving): Remove list() once we handle maps correctly xs = list(map(tf.constant, data)) # Pack back into a single tensorflow tensor c = tf.pack(xs) self.assertAllEqual(c.eval(), data) def testConst(self): np.random.seed(7) for use_gpu in False, True: with self.test_session(use_gpu=use_gpu): for shape in (2,), (3,), (2, 3), (3, 2), (4, 3, 2): data = np.random.randn(*shape).astype(np.float32) # Pack back into a single tensorflow tensor directly using np array c = tf.pack(data) # This is implemented via a Const: self.assertEqual(c.op.type, "Const") self.assertAllEqual(c.eval(), data) # Python lists also work for 1-D case: if len(shape) == 1: data_list = list(data) cl = tf.pack(data_list) self.assertEqual(cl.op.type, "Const") self.assertAllEqual(cl.eval(), data) # Verify that shape induction works with shapes produced via const pack a = tf.constant([1, 2, 3, 4, 5, 6]) b = tf.reshape(a, tf.pack([2, 3])) self.assertAllEqual(b.get_shape(), [2, 3]) def testGradientsAxis0(self): np.random.seed(7) for use_gpu in False, True: for shape in (2,), (3,), (2, 3), (3, 2), (4, 3, 2): data = np.random.randn(*shape) shapes = [shape[1:]] * shape[0] with self.test_session(use_gpu=use_gpu): # TODO(irving): Remove list() once we handle maps correctly xs = list(map(tf.constant, data)) c = tf.pack(xs) err = tf.test.compute_gradient_error(xs, shapes, c, shape) self.assertLess(err, 1e-6) def testGradientsAxis1(self): np.random.seed(7) for use_gpu in False, True: for shape in (2, 3), (3, 2), (4, 3, 2): data = np.random.randn(*shape) shapes = [shape[1:]] * shape[0] out_shape = list(shape[1:]) out_shape.insert(1, shape[0]) with self.test_session(use_gpu=use_gpu): # TODO(irving): Remove list() once we handle maps correctly xs = list(map(tf.constant, data)) c = tf.pack(xs, axis=1) err = tf.test.compute_gradient_error(xs, shapes, c, out_shape) self.assertLess(err, 1e-6) def testZeroSize(self): # Verify that pack doesn't crash for zero size inputs for use_gpu in False, 
True: with self.test_session(use_gpu=use_gpu): for shape in (0,), (3,0), (0, 3): x = np.zeros((2,) + shape) p = tf.pack(list(x)).eval() self.assertAllEqual(p, x) def testAxis0Default(self): with self.test_session(): t = [tf.constant([1, 2, 3]), tf.constant([4, 5, 6])] packed = tf.pack(t).eval() self.assertAllEqual(packed, np.array([[1, 2, 3], [4, 5, 6]])) def testAgainstNumpy(self): # For 1 to 5 dimensions. for i in range(1, 6): expected = np.random.random(np.random.permutation(i) + 1) # For all the possible axis to split it, including negative indices. for j in range(-i, i): test_arrays = np_split_sqeeze(expected, j) with self.test_session(): actual = tf.pack(test_arrays, axis=j) self.assertEqual(expected.shape, actual.get_shape()) actual = actual.eval() self.assertNDArrayNear(expected, actual, 1e-6) def testDimOutOfRange(self): t = [tf.constant([1, 2, 3]), tf.constant([4, 5, 6])] with self.assertRaisesRegexp(ValueError, r"axis = 2 not in \[-2, 2\)"): tf.unpack(t, axis=2) def testDimOutOfNegativeRange(self): t = [tf.constant([1, 2, 3]), tf.constant([4, 5, 6])] with self.assertRaisesRegexp(ValueError, r"axis = -3 not in \[-2, 2\)"): tf.unpack(t, axis=-3) class AutomaticPackingTest(tf.test.TestCase): def testSimple(self): with self.test_session(): self.assertAllEqual([1, 0, 2], tf.convert_to_tensor([1, tf.constant(0), 2]).eval()) self.assertAllEqual( [[0, 0, 0], [0, 1, 0], [0, 0, 0]], tf.convert_to_tensor([[0, 0, 0], [0, tf.constant(1), 0], [0, 0, 0]]).eval()) self.assertAllEqual( [[0, 0, 0], [0, 1, 0], [0, 0, 0]], tf.convert_to_tensor([[0, 0, 0], tf.constant([0, 1, 0]), [0, 0, 0]]).eval()) self.assertAllEqual( [[0, 0, 0], [0, 1, 0], [0, 0, 0]], tf.convert_to_tensor([tf.constant([0, 0, 0]), tf.constant([0, 1, 0]), tf.constant([0, 0, 0])]).eval()) def testWithNDArray(self): with self.test_session(): result = tf.convert_to_tensor([[[0., 0.], tf.constant([1., 1.])], np.array([[2., 2.], [3., 3.]], dtype=np.float32)]) self.assertAllEqual( [[[0., 0.], [1., 1.]], [[2., 
2.], [3., 3.]]], result.eval()) def testVariable(self): with self.test_session(): v = tf.Variable(17) result = tf.convert_to_tensor([[0, 0, 0], [0, v, 0], [0, 0, 0]]) v.initializer.run() self.assertAllEqual([[0, 0, 0], [0, 17, 0], [0, 0, 0]], result.eval()) v.assign(38).op.run() self.assertAllEqual([[0, 0, 0], [0, 38, 0], [0, 0, 0]], result.eval()) def testDtype(self): t_0 = tf.convert_to_tensor([[0., 0., 0.], [0., 0., 0.], [0., 0., 0.]]) self.assertEqual(tf.float32, t_0.dtype) t_1 = tf.convert_to_tensor([[0., 0., 0.], tf.constant([0., 0., 0.], dtype=tf.float64), [0., 0., 0.]]) self.assertEqual(tf.float64, t_1.dtype) t_2 = tf.convert_to_tensor([[0., 0., 0.], [0., 0., 0.], [0., 0., 0.]], dtype=tf.float64) self.assertEqual(tf.float64, t_2.dtype) with self.assertRaises(TypeError): tf.convert_to_tensor([tf.constant([0., 0., 0.], dtype=tf.float32), tf.constant([0., 0., 0.], dtype=tf.float64), [0., 0., 0.]]) with self.assertRaises(TypeError): tf.convert_to_tensor([[0., 0., 0.], tf.constant([0., 0., 0.], dtype=tf.float64), [0., 0., 0.]], dtype=tf.float32) with self.assertRaises(TypeError): tf.convert_to_tensor([tf.constant([0., 0., 0.], dtype=tf.float64)], dtype=tf.float32) def testPlaceholder(self): with self.test_session(): # Test using placeholder with a defined shape. ph_0 = tf.placeholder(tf.int32, shape=[]) result_0 = tf.convert_to_tensor([[0, 0, 0], [0, ph_0, 0], [0, 0, 0]]) self.assertAllEqual([[0, 0, 0], [0, 1, 0], [0, 0, 0]], result_0.eval(feed_dict={ph_0: 1})) self.assertAllEqual([[0, 0, 0], [0, 2, 0], [0, 0, 0]], result_0.eval(feed_dict={ph_0: 2})) # Test using placeholder with an undefined shape. ph_1 = tf.placeholder(tf.int32) result_1 = tf.convert_to_tensor([[0, 0, 0], [0, ph_1, 0], [0, 0, 0]]) self.assertAllEqual([[0, 0, 0], [0, 1, 0], [0, 0, 0]], result_1.eval(feed_dict={ph_1: 1})) self.assertAllEqual([[0, 0, 0], [0, 2, 0], [0, 0, 0]], result_1.eval(feed_dict={ph_1: 2})) def testShapeErrors(self): # Static shape error. 
ph_0 = tf.placeholder(tf.int32, shape=[1]) with self.assertRaises(ValueError): tf.convert_to_tensor([[0, 0, 0], [0, ph_0, 0], [0, 0, 0]]) # Dynamic shape error. ph_1 = tf.placeholder(tf.int32) result_1 = tf.convert_to_tensor([[0, 0, 0], [0, ph_1, 0], [0, 0, 0]]) with self.test_session(): with self.assertRaises(tf.errors.InvalidArgumentError): result_1.eval(feed_dict={ph_1: [1]}) if __name__ == "__main__": tf.test.main()
en
0.796732
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== Functional tests for Pack Op. # Convert [data[0], data[1], ...] separately to tensorflow # TODO(irving): Remove list() once we handle maps correctly # Pack back into a single tensorflow tensor # Pack back into a single tensorflow tensor directly using np array # This is implemented via a Const: # Python lists also work for 1-D case: # Verify that shape induction works with shapes produced via const pack # TODO(irving): Remove list() once we handle maps correctly # TODO(irving): Remove list() once we handle maps correctly # Verify that pack doesn't crash for zero size inputs # For 1 to 5 dimensions. # For all the possible axis to split it, including negative indices. # Test using placeholder with a defined shape. # Test using placeholder with an undefined shape. # Static shape error. # Dynamic shape error.
2.193501
2
setup.py
mehsoy/jaws
1
6624608
#!/usr/bin/python #-*- coding: utf-8 -*- from setuptools import setup setup(name='jaws', version='0.1.0', author=["<NAME>","<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>"], author_email=["<EMAIL>", "<EMAIL>", "<EMAIL>", "<EMAIL>", "<EMAIL>", "<EMAIL>", "<EMAIL>"], url="https://git.scc.kit.edu/az2556/jaws", package_dir = {'': 'src'}, packages=['application', 'controller', 'database', 'exceptions', 'master', 'views', 'worker','tests'], entry_points={'console_scripts': ['jaws = views.__main__:main', 'dmdcontroller = controller.__main__:main', 'dmdsearch = search.__main__:main', 'dmdworker = worker.__main__:main']},)
#!/usr/bin/python #-*- coding: utf-8 -*- from setuptools import setup setup(name='jaws', version='0.1.0', author=["<NAME>","<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>"], author_email=["<EMAIL>", "<EMAIL>", "<EMAIL>", "<EMAIL>", "<EMAIL>", "<EMAIL>", "<EMAIL>"], url="https://git.scc.kit.edu/az2556/jaws", package_dir = {'': 'src'}, packages=['application', 'controller', 'database', 'exceptions', 'master', 'views', 'worker','tests'], entry_points={'console_scripts': ['jaws = views.__main__:main', 'dmdcontroller = controller.__main__:main', 'dmdsearch = search.__main__:main', 'dmdworker = worker.__main__:main']},)
en
0.348434
#!/usr/bin/python #-*- coding: utf-8 -*-
1.193193
1
common/src/stack/command/stack/commands/report/host/bootfile/plugin_pxe.py
shivanshs9/stacki
0
6624609
# @copyright@ # Copyright (c) 2006 - 2018 Teradata # All rights reserved. Stacki(r) v5.x stacki.com # https://github.com/Teradata/stacki/blob/master/LICENSE.txt # @copyright@ import os import stack.commands class Plugin(stack.commands.Plugin): """ Generate a PXE specific configuration file """ def provides(self): return 'pxe' def run(self, ha): for host in ha: if 'interfaces' not in ha[host]: continue for interface in ha[host]['interfaces']: filename = os.path.join(os.path.sep, 'tftpboot', 'pxelinux', 'pxelinux.cfg', # IP as Hex ''.join(map(lambda x: '%02X' % int(x), interface['ip'].split('.')))) self.owner.addOutput(host, """ <stack:file stack:name="%s" stack:owner="root:apache" stack:perms="0664" stack:rcs="off"><![CDATA[""" % filename) self.owner.runImplementation("%s_pxe" % ha[host]['os'], (ha[host], interface)) self.owner.addOutput(host, ']]>\n</stack:file>')
# @copyright@ # Copyright (c) 2006 - 2018 Teradata # All rights reserved. Stacki(r) v5.x stacki.com # https://github.com/Teradata/stacki/blob/master/LICENSE.txt # @copyright@ import os import stack.commands class Plugin(stack.commands.Plugin): """ Generate a PXE specific configuration file """ def provides(self): return 'pxe' def run(self, ha): for host in ha: if 'interfaces' not in ha[host]: continue for interface in ha[host]['interfaces']: filename = os.path.join(os.path.sep, 'tftpboot', 'pxelinux', 'pxelinux.cfg', # IP as Hex ''.join(map(lambda x: '%02X' % int(x), interface['ip'].split('.')))) self.owner.addOutput(host, """ <stack:file stack:name="%s" stack:owner="root:apache" stack:perms="0664" stack:rcs="off"><![CDATA[""" % filename) self.owner.runImplementation("%s_pxe" % ha[host]['os'], (ha[host], interface)) self.owner.addOutput(host, ']]>\n</stack:file>')
en
0.569862
# @copyright@ # Copyright (c) 2006 - 2018 Teradata # All rights reserved. Stacki(r) v5.x stacki.com # https://github.com/Teradata/stacki/blob/master/LICENSE.txt # @copyright@ Generate a PXE specific configuration file # IP as Hex <stack:file stack:name="%s" stack:owner="root:apache" stack:perms="0664" stack:rcs="off"><![CDATA[
1.809764
2
treat/moc/cmm/__init__.py
tjlaboss/tasty_treat
3
6624610
<filename>treat/moc/cmm/__init__.py<gh_stars>1-10 from .cumulative import CumulativeMigrationCorrection from .corrections import CORRECTIONS
<filename>treat/moc/cmm/__init__.py<gh_stars>1-10 from .cumulative import CumulativeMigrationCorrection from .corrections import CORRECTIONS
none
1
0.96515
1
suricata-4.1.4/suricata-update/suricata/update/maps.py
runtest007/dpdk_surcata_4.1.1
77
6624611
# Copyright (C) 2017 Open Information Security Foundation # Copyright (c) 2013 <NAME> # # You can copy, redistribute or modify this Program under the terms of # the GNU General Public License version 2 as published by the Free # Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # version 2 along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. """Provide mappings from ID's to descriptions. Includes mapping classes for event ID messages and classification information. """ from __future__ import print_function import re class SignatureMap(object): """SignatureMap maps signature IDs to a signature info dict. The signature map can be build up from classification.config, gen-msg.map, and new and old-style sid-msg.map files. The dict's in the map will have at a minimum the following fields: * gid *(int)* * sid *(int)* * msg *(string)* * refs *(list of strings)* Signatures loaded from a new style sid-msg.map file will also have *rev*, *classification* and *priority* fields. 
Example:: >>> from idstools import maps >>> sigmap = maps.SignatureMap() >>> sigmap.load_generator_map(open("tests/gen-msg.map")) >>> sigmap.load_signature_map(open("tests/sid-msg-v2.map")) >>> print(sigmap.get(1, 2495)) {'classification': 'misc-attack', 'rev': 8, 'priority': 0, 'gid': 1, 'sid': 2495, 'msg': 'GPL NETBIOS SMB DCEPRC ORPCThis request flood attempt', 'ref': ['bugtraq,8811', 'cve,2003-0813', 'nessus,12206', 'url,www.microsoft.com/technet/security/bulletin/MS04-011.mspx']} """ def __init__(self): self.map = {} def size(self): return len(self.map) def get(self, generator_id, signature_id): """Get signature info by generator_id and signature_id. :param generator_id: The generator id of the signature to lookup. :param signature_id: The signature id of the signature to lookup. For convenience, if the generator_id is 3 and the signature is not found, a second lookup will be done using a generator_id of 1. """ key = (generator_id, signature_id) sig = self.map.get(key) if sig is None and generator_id == 3: return self.get(1, signature_id) return sig def load_generator_map(self, fileobj): """Load the generator message map (gen-msg.map) from a file-like object. """ for line in fileobj: line = line.strip() if not line or line.startswith("#"): continue gid, sid, msg = [part.strip() for part in line.split("||")] entry = { "gid": int(gid), "sid": int(sid), "msg": msg, "refs": [], } self.map[(entry["gid"], entry["sid"])] = entry def load_signature_map(self, fileobj, defaultgid=1): """Load signature message map (sid-msg.map) from a file-like object. """ for line in fileobj: line = line.strip() if not line or line.startswith("#"): continue parts = [p.strip() for p in line.split("||")] # If we have at least 6 parts, attempt to parse as a v2 # signature map file. 
try: entry = { "gid": int(parts[0]), "sid": int(parts[1]), "rev": int(parts[2]), "classification": parts[3], "priority": int(parts[4]), "msg": parts[5], "ref": parts[6:], } except: entry = { "gid": defaultgid, "sid": int(parts[0]), "msg": parts[1], "ref": parts[2:], } self.map[(entry["gid"], entry["sid"])] = entry class ClassificationMap(object): """ClassificationMap maps classification IDs and names to a dict object describing a classification. :param fileobj: (Optional) A file like object to load classifications from on initialization. The classification dicts stored in the map have the following fields: * name *(string)* * description *(string)* * priority *(int)* Example:: >>> from idstools import maps >>> classmap = maps.ClassificationMap() >>> classmap.load_from_file(open("tests/classification.config")) >>> classmap.get(3) {'priority': 2, 'name': 'bad-unknown', 'description': 'Potentially Bad Traffic'} >>> classmap.get_by_name("bad-unknown") {'priority': 2, 'name': 'bad-unknown', 'description': 'Potentially Bad Traffic'} """ def __init__(self, fileobj=None): self.id_map = [] self.name_map = {} if fileobj: self.load_from_file(fileobj) def size(self): return len(self.id_map) def add(self, classification): """Add a classification to the map.""" self.id_map.append(classification) self.name_map[classification["name"]] = classification def get(self, class_id): """Get a classification by ID. :param class_id: The classification ID to get. :returns: A dict describing the classification or None. """ if 0 < class_id <= len(self.id_map): return self.id_map[class_id - 1] else: return None def get_by_name(self, name): """Get a classification by name. :param name: The name of the classification :returns: A dict describing the classification or None. """ if name in self.name_map: return self.name_map[name] else: return None def load_from_file(self, fileobj): """Load classifications from a Snort style classification.config file object. 
""" pattern = "config classification: ([^,]+),([^,]+),([^,]+)" for line in fileobj: m = re.match(pattern, line.strip()) if m: self.add({ "name": m.group(1), "description": m.group(2), "priority": int(m.group(3))})
# Copyright (C) 2017 Open Information Security Foundation # Copyright (c) 2013 <NAME> # # You can copy, redistribute or modify this Program under the terms of # the GNU General Public License version 2 as published by the Free # Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # version 2 along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. """Provide mappings from ID's to descriptions. Includes mapping classes for event ID messages and classification information. """ from __future__ import print_function import re class SignatureMap(object): """SignatureMap maps signature IDs to a signature info dict. The signature map can be build up from classification.config, gen-msg.map, and new and old-style sid-msg.map files. The dict's in the map will have at a minimum the following fields: * gid *(int)* * sid *(int)* * msg *(string)* * refs *(list of strings)* Signatures loaded from a new style sid-msg.map file will also have *rev*, *classification* and *priority* fields. 
Example:: >>> from idstools import maps >>> sigmap = maps.SignatureMap() >>> sigmap.load_generator_map(open("tests/gen-msg.map")) >>> sigmap.load_signature_map(open("tests/sid-msg-v2.map")) >>> print(sigmap.get(1, 2495)) {'classification': 'misc-attack', 'rev': 8, 'priority': 0, 'gid': 1, 'sid': 2495, 'msg': 'GPL NETBIOS SMB DCEPRC ORPCThis request flood attempt', 'ref': ['bugtraq,8811', 'cve,2003-0813', 'nessus,12206', 'url,www.microsoft.com/technet/security/bulletin/MS04-011.mspx']} """ def __init__(self): self.map = {} def size(self): return len(self.map) def get(self, generator_id, signature_id): """Get signature info by generator_id and signature_id. :param generator_id: The generator id of the signature to lookup. :param signature_id: The signature id of the signature to lookup. For convenience, if the generator_id is 3 and the signature is not found, a second lookup will be done using a generator_id of 1. """ key = (generator_id, signature_id) sig = self.map.get(key) if sig is None and generator_id == 3: return self.get(1, signature_id) return sig def load_generator_map(self, fileobj): """Load the generator message map (gen-msg.map) from a file-like object. """ for line in fileobj: line = line.strip() if not line or line.startswith("#"): continue gid, sid, msg = [part.strip() for part in line.split("||")] entry = { "gid": int(gid), "sid": int(sid), "msg": msg, "refs": [], } self.map[(entry["gid"], entry["sid"])] = entry def load_signature_map(self, fileobj, defaultgid=1): """Load signature message map (sid-msg.map) from a file-like object. """ for line in fileobj: line = line.strip() if not line or line.startswith("#"): continue parts = [p.strip() for p in line.split("||")] # If we have at least 6 parts, attempt to parse as a v2 # signature map file. 
try: entry = { "gid": int(parts[0]), "sid": int(parts[1]), "rev": int(parts[2]), "classification": parts[3], "priority": int(parts[4]), "msg": parts[5], "ref": parts[6:], } except: entry = { "gid": defaultgid, "sid": int(parts[0]), "msg": parts[1], "ref": parts[2:], } self.map[(entry["gid"], entry["sid"])] = entry class ClassificationMap(object): """ClassificationMap maps classification IDs and names to a dict object describing a classification. :param fileobj: (Optional) A file like object to load classifications from on initialization. The classification dicts stored in the map have the following fields: * name *(string)* * description *(string)* * priority *(int)* Example:: >>> from idstools import maps >>> classmap = maps.ClassificationMap() >>> classmap.load_from_file(open("tests/classification.config")) >>> classmap.get(3) {'priority': 2, 'name': 'bad-unknown', 'description': 'Potentially Bad Traffic'} >>> classmap.get_by_name("bad-unknown") {'priority': 2, 'name': 'bad-unknown', 'description': 'Potentially Bad Traffic'} """ def __init__(self, fileobj=None): self.id_map = [] self.name_map = {} if fileobj: self.load_from_file(fileobj) def size(self): return len(self.id_map) def add(self, classification): """Add a classification to the map.""" self.id_map.append(classification) self.name_map[classification["name"]] = classification def get(self, class_id): """Get a classification by ID. :param class_id: The classification ID to get. :returns: A dict describing the classification or None. """ if 0 < class_id <= len(self.id_map): return self.id_map[class_id - 1] else: return None def get_by_name(self, name): """Get a classification by name. :param name: The name of the classification :returns: A dict describing the classification or None. """ if name in self.name_map: return self.name_map[name] else: return None def load_from_file(self, fileobj): """Load classifications from a Snort style classification.config file object. 
""" pattern = "config classification: ([^,]+),([^,]+),([^,]+)" for line in fileobj: m = re.match(pattern, line.strip()) if m: self.add({ "name": m.group(1), "description": m.group(2), "priority": int(m.group(3))})
en
0.655409
# Copyright (C) 2017 Open Information Security Foundation # Copyright (c) 2013 <NAME> # # You can copy, redistribute or modify this Program under the terms of # the GNU General Public License version 2 as published by the Free # Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # version 2 along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Provide mappings from ID's to descriptions. Includes mapping classes for event ID messages and classification information. SignatureMap maps signature IDs to a signature info dict. The signature map can be build up from classification.config, gen-msg.map, and new and old-style sid-msg.map files. The dict's in the map will have at a minimum the following fields: * gid *(int)* * sid *(int)* * msg *(string)* * refs *(list of strings)* Signatures loaded from a new style sid-msg.map file will also have *rev*, *classification* and *priority* fields. Example:: >>> from idstools import maps >>> sigmap = maps.SignatureMap() >>> sigmap.load_generator_map(open("tests/gen-msg.map")) >>> sigmap.load_signature_map(open("tests/sid-msg-v2.map")) >>> print(sigmap.get(1, 2495)) {'classification': 'misc-attack', 'rev': 8, 'priority': 0, 'gid': 1, 'sid': 2495, 'msg': 'GPL NETBIOS SMB DCEPRC ORPCThis request flood attempt', 'ref': ['bugtraq,8811', 'cve,2003-0813', 'nessus,12206', 'url,www.microsoft.com/technet/security/bulletin/MS04-011.mspx']} Get signature info by generator_id and signature_id. :param generator_id: The generator id of the signature to lookup. :param signature_id: The signature id of the signature to lookup. 
For convenience, if the generator_id is 3 and the signature is not found, a second lookup will be done using a generator_id of 1. Load the generator message map (gen-msg.map) from a file-like object. Load signature message map (sid-msg.map) from a file-like object. # If we have at least 6 parts, attempt to parse as a v2 # signature map file. ClassificationMap maps classification IDs and names to a dict object describing a classification. :param fileobj: (Optional) A file like object to load classifications from on initialization. The classification dicts stored in the map have the following fields: * name *(string)* * description *(string)* * priority *(int)* Example:: >>> from idstools import maps >>> classmap = maps.ClassificationMap() >>> classmap.load_from_file(open("tests/classification.config")) >>> classmap.get(3) {'priority': 2, 'name': 'bad-unknown', 'description': 'Potentially Bad Traffic'} >>> classmap.get_by_name("bad-unknown") {'priority': 2, 'name': 'bad-unknown', 'description': 'Potentially Bad Traffic'} Add a classification to the map. Get a classification by ID. :param class_id: The classification ID to get. :returns: A dict describing the classification or None. Get a classification by name. :param name: The name of the classification :returns: A dict describing the classification or None. Load classifications from a Snort style classification.config file object.
2.111374
2
sunpy/tests/setup_command.py
ajeytiwary/sunpy
1
6624612
<reponame>ajeytiwary/sunpy # -*- coding: utf-8 -*- """ Created on Sat Jun 7 19:36:08 2014 @author: <NAME> This file is designed to be imported and ran only via setup.py, hence it's dependancy on astropy_helpers which will be availible in that context. """ from astropy_helpers.test_helpers import AstropyTest from astropy_helpers.compat import _fix_user_options class SunPyTest(AstropyTest): description = 'Run the tests for this package' user_options = [ # Package to test ('package=', 'P', "The name of a specific package to test, e.g. 'io' or 'utils'. " "If nothing is specified, all default tests are run."), # Print all the things ('verbose-results', 'V', 'Turn on verbose output from pytest.'), # plugins to enable ('plugins=', 'p', 'Plugins to enable when running pytest.'), # Run only offline tests? ('offline-only', None, 'Only run test that do not require a internet connection.'), # Run only offline tests? ('online-only', None, 'Only run test that do require a internet connection.'), # Calculate test coverage ('coverage', 'c', 'Create a coverage report. Requires the coverage package.'), ('cov-report=', None, 'Specify the type of coverage report to generate. (Default terminal)'), # Run tests in parallel ('parallel=', 'j', 'Run the tests in parallel on the specified number of ' 'CPUs. If negative, all the cores on the machine will be ' 'used. Requires the pytest-xdist plugin.'), # Pass additional cli args to pytest ('args=', 'a', 'Additional arguments to be passed to pytest.') ] user_options = _fix_user_options(user_options) package_name = '' def initialize_options(self): self.package = '' #self.test_path = None self.verbose_results = False self.plugins = None self.args = None self.online_only = False self.offline_only = False self.coverage = False self.cov_report = 'term' if self.coverage else None self.docs_path = None self.parallel = 0 def _validate_required_deps(self): """ This method checks that any required modules are installed before running the tests. 
""" try: import sunpy except ImportError: raise ImportError( "The 'test' command requires the sunpy package to be " "installed and importable.") def generate_testing_command(self): """ Build a Python script to run the tests. """ cmd_pre = '' # Commands to run before the test function cmd_post = '' # Commands to run after the test function online = not self.offline_only offline = not self.online_only cmd = ('{cmd_pre}{0}; import {1.package_name}, sys; result = (' '{1.package_name}.self_test(' 'modulename={1.package!r}, ' 'args={1.args!r}, ' 'verbose={1.verbose_results!r}, ' 'parallel={1.parallel!r}, ' 'online={online!r}, ' 'offline={offline!r}, ' 'coverage={1.coverage!r}, ' 'cov_report={1.cov_report!r})); ' '{cmd_post}' 'sys.exit(result)') x = cmd.format('pass', self, online=online, offline=offline, cmd_pre=cmd_pre, cmd_post=cmd_post) return x
# -*- coding: utf-8 -*- """ Created on Sat Jun 7 19:36:08 2014 @author: <NAME> This file is designed to be imported and ran only via setup.py, hence it's dependancy on astropy_helpers which will be availible in that context. """ from astropy_helpers.test_helpers import AstropyTest from astropy_helpers.compat import _fix_user_options class SunPyTest(AstropyTest): description = 'Run the tests for this package' user_options = [ # Package to test ('package=', 'P', "The name of a specific package to test, e.g. 'io' or 'utils'. " "If nothing is specified, all default tests are run."), # Print all the things ('verbose-results', 'V', 'Turn on verbose output from pytest.'), # plugins to enable ('plugins=', 'p', 'Plugins to enable when running pytest.'), # Run only offline tests? ('offline-only', None, 'Only run test that do not require a internet connection.'), # Run only offline tests? ('online-only', None, 'Only run test that do require a internet connection.'), # Calculate test coverage ('coverage', 'c', 'Create a coverage report. Requires the coverage package.'), ('cov-report=', None, 'Specify the type of coverage report to generate. (Default terminal)'), # Run tests in parallel ('parallel=', 'j', 'Run the tests in parallel on the specified number of ' 'CPUs. If negative, all the cores on the machine will be ' 'used. Requires the pytest-xdist plugin.'), # Pass additional cli args to pytest ('args=', 'a', 'Additional arguments to be passed to pytest.') ] user_options = _fix_user_options(user_options) package_name = '' def initialize_options(self): self.package = '' #self.test_path = None self.verbose_results = False self.plugins = None self.args = None self.online_only = False self.offline_only = False self.coverage = False self.cov_report = 'term' if self.coverage else None self.docs_path = None self.parallel = 0 def _validate_required_deps(self): """ This method checks that any required modules are installed before running the tests. 
""" try: import sunpy except ImportError: raise ImportError( "The 'test' command requires the sunpy package to be " "installed and importable.") def generate_testing_command(self): """ Build a Python script to run the tests. """ cmd_pre = '' # Commands to run before the test function cmd_post = '' # Commands to run after the test function online = not self.offline_only offline = not self.online_only cmd = ('{cmd_pre}{0}; import {1.package_name}, sys; result = (' '{1.package_name}.self_test(' 'modulename={1.package!r}, ' 'args={1.args!r}, ' 'verbose={1.verbose_results!r}, ' 'parallel={1.parallel!r}, ' 'online={online!r}, ' 'offline={offline!r}, ' 'coverage={1.coverage!r}, ' 'cov_report={1.cov_report!r})); ' '{cmd_post}' 'sys.exit(result)') x = cmd.format('pass', self, online=online, offline=offline, cmd_pre=cmd_pre, cmd_post=cmd_post) return x
en
0.860465
# -*- coding: utf-8 -*- Created on Sat Jun 7 19:36:08 2014 @author: <NAME> This file is designed to be imported and ran only via setup.py, hence it's dependancy on astropy_helpers which will be availible in that context. # Package to test # Print all the things # plugins to enable # Run only offline tests? # Run only offline tests? # Calculate test coverage # Run tests in parallel # Pass additional cli args to pytest #self.test_path = None This method checks that any required modules are installed before running the tests. Build a Python script to run the tests. # Commands to run before the test function # Commands to run after the test function
1.885016
2
src/cfnlint/rules/resources/DependsOn.py
amabowilli/cfn-python-lint
1
6624613
""" Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ import six from cfnlint import CloudFormationLintRule from cfnlint import RuleMatch class DependsOn(CloudFormationLintRule): """Check Base Resource Configuration""" id = 'E3005' shortdesc = 'Check DependsOn values for Resources' description = 'Check that the DependsOn values are valid' source_url = 'https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-attribute-dependson.html' tags = ['resources', 'dependson'] def check_value(self, key, path, resources): """Check resource names for DependsOn""" matches = [] if not isinstance(key, (six.text_type, six.string_types)): message = 'DependsOn values should be of string at {0}' matches.append(RuleMatch(path, message.format('/'.join(map(str, path))))) return matches if key not in resources: message = 'DependsOn should reference other resources at {0}' matches.append(RuleMatch(path, message.format('/'.join(map(str, path))))) return matches def match(self, cfn): """Check CloudFormation Resources""" matches = [] resources = cfn.get_resources() for resource_name, resource_values in 
resources.items(): depends_ons = resource_values.get('DependsOn') if depends_ons: path = ['Resources', resource_name, 'DependsOn'] self.logger.debug('Validating DependsOn for %s base configuration', resource_name) if isinstance(depends_ons, list): for index, depends_on in enumerate(depends_ons): matches.extend(self.check_value(depends_on, path[:] + [index], resources)) else: matches.extend(self.check_value(depends_ons, path, resources)) return matches
""" Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ import six from cfnlint import CloudFormationLintRule from cfnlint import RuleMatch class DependsOn(CloudFormationLintRule): """Check Base Resource Configuration""" id = 'E3005' shortdesc = 'Check DependsOn values for Resources' description = 'Check that the DependsOn values are valid' source_url = 'https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-attribute-dependson.html' tags = ['resources', 'dependson'] def check_value(self, key, path, resources): """Check resource names for DependsOn""" matches = [] if not isinstance(key, (six.text_type, six.string_types)): message = 'DependsOn values should be of string at {0}' matches.append(RuleMatch(path, message.format('/'.join(map(str, path))))) return matches if key not in resources: message = 'DependsOn should reference other resources at {0}' matches.append(RuleMatch(path, message.format('/'.join(map(str, path))))) return matches def match(self, cfn): """Check CloudFormation Resources""" matches = [] resources = cfn.get_resources() for resource_name, resource_values in 
resources.items(): depends_ons = resource_values.get('DependsOn') if depends_ons: path = ['Resources', resource_name, 'DependsOn'] self.logger.debug('Validating DependsOn for %s base configuration', resource_name) if isinstance(depends_ons, list): for index, depends_on in enumerate(depends_ons): matches.extend(self.check_value(depends_on, path[:] + [index], resources)) else: matches.extend(self.check_value(depends_ons, path, resources)) return matches
en
0.74052
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. Check Base Resource Configuration Check resource names for DependsOn Check CloudFormation Resources
2.042618
2
mlflow/entities/file_info.py
freefrag/mlflow
1,825
6624614
<filename>mlflow/entities/file_info.py from mlflow.entities._mlflow_object import _MLflowObject from mlflow.protos.service_pb2 import FileInfo as ProtoFileInfo class FileInfo(_MLflowObject): """ Metadata about a file or directory. """ def __init__(self, path, is_dir, file_size): self._path = path self._is_dir = is_dir self._bytes = file_size @property def path(self): """String path of the file or directory.""" return self._path @property def is_dir(self): """Whether the FileInfo corresponds to a directory.""" return self._is_dir @property def file_size(self): """Size of the file or directory. If the FileInfo is a directory, returns None.""" return self._bytes def to_proto(self): proto = ProtoFileInfo() proto.path = self.path proto.is_dir = self.is_dir if self.file_size: proto.file_size = self.file_size return proto @classmethod def from_proto(cls, proto): return cls(proto.path, proto.is_dir, proto.file_size)
<filename>mlflow/entities/file_info.py from mlflow.entities._mlflow_object import _MLflowObject from mlflow.protos.service_pb2 import FileInfo as ProtoFileInfo class FileInfo(_MLflowObject): """ Metadata about a file or directory. """ def __init__(self, path, is_dir, file_size): self._path = path self._is_dir = is_dir self._bytes = file_size @property def path(self): """String path of the file or directory.""" return self._path @property def is_dir(self): """Whether the FileInfo corresponds to a directory.""" return self._is_dir @property def file_size(self): """Size of the file or directory. If the FileInfo is a directory, returns None.""" return self._bytes def to_proto(self): proto = ProtoFileInfo() proto.path = self.path proto.is_dir = self.is_dir if self.file_size: proto.file_size = self.file_size return proto @classmethod def from_proto(cls, proto): return cls(proto.path, proto.is_dir, proto.file_size)
en
0.836121
Metadata about a file or directory. String path of the file or directory. Whether the FileInfo corresponds to a directory. Size of the file or directory. If the FileInfo is a directory, returns None.
2.649911
3
CONFIG/StoryAPP/API/urls.py
Brktrlw/Instagram-Clone-Django-and-React
0
6624615
<filename>CONFIG/StoryAPP/API/urls.py from django.urls import path from .views import UserCurrentStoriesListAPIView,HomePageStoriesListAPIView,OwnStoriesListAPIView,StorySeeingCreateAPIView,UsersBySeeingStoryListAPIView app_name="stories" urlpatterns = [ path('username/<str:user__username>/',UserCurrentStoriesListAPIView.as_view()), path('homepage/', HomePageStoriesListAPIView.as_view()), path("own/",OwnStoriesListAPIView.as_view()), path("read/",StorySeeingCreateAPIView.as_view()), path("watchedusers/<unique_id>",UsersBySeeingStoryListAPIView.as_view()) ]
<filename>CONFIG/StoryAPP/API/urls.py from django.urls import path from .views import UserCurrentStoriesListAPIView,HomePageStoriesListAPIView,OwnStoriesListAPIView,StorySeeingCreateAPIView,UsersBySeeingStoryListAPIView app_name="stories" urlpatterns = [ path('username/<str:user__username>/',UserCurrentStoriesListAPIView.as_view()), path('homepage/', HomePageStoriesListAPIView.as_view()), path("own/",OwnStoriesListAPIView.as_view()), path("read/",StorySeeingCreateAPIView.as_view()), path("watchedusers/<unique_id>",UsersBySeeingStoryListAPIView.as_view()) ]
none
1
2.156149
2
mqtt-db/mqtt-postgres.py
rpoisel/AbbB23Energymeter
0
6624616
<gh_stars>0 #!/usr/bin/env python # -*- coding: utf-8 -*- from mqttlib import MeasurementBroker import psycopg2 class PostgresWriter(object): def __init__(self, connect_arg): self.__conn = psycopg2.connect(connect_arg) def on_measurement(self, solar, total): cur = self.__conn.cursor() cur.execute( "INSERT INTO public.power (solar, total) VALUES (%s, %s)", (solar, total)) self.__conn.commit() cur.close() def __del__(self): self.__conn.close() def main(): postgres = PostgresWriter( "dbname=power user=power_rw password=<PASSWORD> host=localhost") measurementBroker = MeasurementBroker(postgres) measurementBroker.run() if __name__ == "__main__": try: main() except KeyboardInterrupt: pass
#!/usr/bin/env python # -*- coding: utf-8 -*- from mqttlib import MeasurementBroker import psycopg2 class PostgresWriter(object): def __init__(self, connect_arg): self.__conn = psycopg2.connect(connect_arg) def on_measurement(self, solar, total): cur = self.__conn.cursor() cur.execute( "INSERT INTO public.power (solar, total) VALUES (%s, %s)", (solar, total)) self.__conn.commit() cur.close() def __del__(self): self.__conn.close() def main(): postgres = PostgresWriter( "dbname=power user=power_rw password=<PASSWORD> host=localhost") measurementBroker = MeasurementBroker(postgres) measurementBroker.run() if __name__ == "__main__": try: main() except KeyboardInterrupt: pass
en
0.352855
#!/usr/bin/env python # -*- coding: utf-8 -*-
2.676782
3
blackswan/eodata.py
thaos/blackswan
0
6624617
<reponame>thaos/blackswan from tempfile import mkstemp from osgeo import gdal, osr import cartopy.crs as ccrs import matplotlib.pyplot as plt from flyingpigeon import visualisation as vs import logging LOGGER = logging.getLogger("PYWPS") def get_RGB(DIR, false_color=False): """ Extracts the files for RGB bands of Sentinel2 directory tree, scales and merge the values. Output is a merged tif including 3 bands. :param DIR: base directory of Sentinel2 directory tree :param false_color: if set to True the near infrared band (B08) will be taken as red band :returns geotif: merged geotiff """ import glob import subprocess # from subprocess import CalledProcessError jps = [] fname = DIR.split('/')[-1] ID = fname.replace('.SAVE','') for filename in glob.glob(DIR + '/GRANULE/*/IMG_DATA/*jp2'): jps.append(filename) jp_b = [jp for jp in jps if '_B02.jp2' in jp][0] jp_g = [jp for jp in jps if '_B03.jp2' in jp][0] if false_color: jp_r = [jp for jp in jps if '_B08.jp2' in jp][0] else: jp_r = [jp for jp in jps if '_B04.jp2' in jp][0] # scaling the color values and trasform from jp2 to tif try: # response.update_status('execution of CASTf90', 50) red = 'RED_{0}.tif'.format(ID) cmd = ['gdal_translate', '-scale', jp_r, red ] # LOGGER.debug("translate command: %s", cmd) output, error = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate() # output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) LOGGER.info('translate output:\n %s', output) green = 'GREEN_{0}.tif'.format(ID) cmd = ['gdal_translate', '-scale', jp_g, green ] LOGGER.debug("translate command: %s", cmd) output, error = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate() LOGGER.info('translate output:\n %s', output) blue = 'BLUE_{0}.tif'.format(ID) cmd = ['gdal_translate', '-scale', jp_b, blue ] LOGGER.debug("translate command: %s", cmd) output, error = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate() 
LOGGER.info('translate output:\n %s', output) # response.update_status('**** scaling suceeded', 20) except: msg = 'scaleing failed:\n{0}'.format(error) LOGGER.exception(msg) # merge tree files to one geotiff with tree seperated bands try: merged_RGB = 'RGB_{0}.tif'.format(ID) cmd = ['gdal_merge.py', '-seperate', '-co', 'PHOTOMETRIC=RGB', '-o', merged_RGB , red , green, blue ] output, error = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate() except: msg = 'merging failed:\n{0}'.format(error) # LOGGER.exception(msg) return merged_RGB def get_timestamp(tile): """ returns the creation timestamp of a tile image as datetime. :param tile: path to geotiff confom to gdal metadata http://www.gdal.org/gdal_datamodel.html :return datetime: timestamp """ from datetime import datetime as dt try: ds = gdal.Open(tile, 0) ts = ds.GetMetadataItem("TIFFTAG_DATETIME") LOGGER.debug("timestamp: %s " % ts) ds = None # to close the dataset timestamp = dt.strptime(ts, '%Y:%m:%d %H:%M:%S') except: LOGGER.exception('failed to get timestamp for: %s' % tile) return timestamp def plot_products(products, extend=[10, 20, 5, 15]): """ plot the products extends of the search result :param products: output of sentinel api search :return graphic: map of extents """ import numpy as np from matplotlib.patches import Polygon import matplotlib.patches as mpatches from matplotlib.collections import PatchCollection from cartopy import config as cartopy_config import cartopy.feature as cfeature from cartopy.util import add_cyclic_point import re fig = plt.figure(dpi=90, facecolor='w', edgecolor='k') projection = ccrs.PlateCarree() ax = plt.axes(projection=projection) ax.set_extent(extend) ax.stock_img() ax.coastlines() ax.add_feature(cfeature.BORDERS) pat = re.compile(r'''(-*\d+\.\d+ -*\d+\.\d+);*''') for key in products.keys(): polygon = str(products[key]['footprint']) # s = 'POLYGON ((15.71888453311329 9.045763865974665,15.7018748825589 8.97110837227606,15.66795226563288 
8.822558900399137,15.639498612331632 8.69721920092792,15.63428409805786 8.674303514900869,15.600477269179995 8.525798537094156,15.566734239298787 8.377334323160321,15.53315342410745 8.228822837291709,15.499521168391912 8.080353481086165,15.493321895031096 8.052970059354971,14.999818486685434 8.053569047879877,14.999818016115439 9.046743365203026,15.71888453311329 9.045763865974665))' matches = pat.findall(polygon) if matches: xy = np.array([map(float, m.split()) for m in matches]) ax.add_patch(mpatches.Polygon(xy, closed=True, transform=ccrs.PlateCarree(), alpha=0.4)) # color='coral' # ccrs.Geodetic() ax.gridlines(draw_labels=True,) img = vs.fig2plot(fig, output_dir='.') return img def plot_ndvi(geotif, file_extension='jpg', dpi=150, figsize=(10,10)): """ plots a NDVI image :param geotif: geotif file containning one band with NDVI values :param file_extension: format of the output graphic. default='png' :result str: path to graphic file """ # https://ocefpaf.github.io/python4oceanographers/blog/2015/03/02/geotiff/ gdal.UseExceptions() norm = vs.MidpointNormalize(midpoint=0) ds = gdal.Open(geotif) gt = ds.GetGeoTransform() proj = ds.GetProjection() inproj = osr.SpatialReference() inproj.ImportFromWkt(proj) projcs = inproj.GetAuthorityCode('PROJCS') projection = ccrs.epsg(projcs) # print("Projection: %s " % projection) subplot_kw = dict(projection=projection) fig, ax = plt.subplots( subplot_kw=subplot_kw) extent = (gt[0], gt[0] + ds.RasterXSize * gt[1], gt[3] + ds.RasterYSize * gt[5], gt[3]) bnd1 = ds.GetRasterBand(1) data = bnd1.ReadAsArray(0, 0, ds.RasterXSize, ds.RasterYSize) # buf_xsize=ds.RasterXSize/10, buf_ysize=ds.RasterYSize/10, img_ndvi = ax.imshow(data, extent=extent,origin='upper', norm=norm, vmin=-1, vmax=1, cmap=plt.cm.BrBG, transform=projection) # img_ndvi = ax.imshow(data, extent=extent, # [:3, :, :].transpose((1, 2, 0)) # origin='upper',norm=norm, vmin=-1, vmax=1, cmap=plt.cm.summer) plt.title('NDVI') plt.colorbar(img_ndvi) ax.gridlines() 
#draw_labels=True, ndvi_plot = vs.fig2plot(fig, output_dir='.', file_extension=file_extension, dpi=dpi, figsize=figsize) return ndvi_plot # ndvi_plot def plot_RGB(geotif, rgb_bands=[1,2,3], file_extension='jpg', dpi=150, figsize=(10,10)): """ Calculates a RGB image (True color composite) based on red, greed, and blue bands. :param geotif: geotif file containning one band with NDVI values :param file_extension: format of the output graphic. default='png' :param rgb_bands: order of bands storing red, green and blue values default=[1,2,3] :result str: path to graphic file """ from numpy import dstack gdal.UseExceptions() ds = gdal.Open(geotif) data = ds.ReadAsArray() gt = ds.GetGeoTransform() proj = ds.GetProjection() inproj = osr.SpatialReference() inproj.ImportFromWkt(proj) projcs = inproj.GetAuthorityCode('PROJCS') projection = ccrs.epsg(projcs) # print(projection) subplot_kw = dict(projection=projection) fig, ax = plt.subplots( subplot_kw=subplot_kw) extent = (gt[0], gt[0] + ds.RasterXSize * gt[1], gt[3] + ds.RasterYSize * gt[5], gt[3]) red = ds.GetRasterBand(rgb_bands[0]) green = ds.GetRasterBand(rgb_bands[1]) blue = ds.GetRasterBand(rgb_bands[2]) # band 1 PSSCINE4Band blue img_r = red.ReadAsArray(0, 0, ds.RasterXSize, ds.RasterYSize) img_g = green.ReadAsArray(0, 0, ds.RasterXSize, ds.RasterYSize) img_b = blue.ReadAsArray(0, 0, ds.RasterXSize, ds.RasterYSize) # rgb = dstack((data[0, :, :], data[1, :, :], data[2, :, :])) rgb = dstack([img_r, img_g, img_b]) img = ax.imshow(rgb, extent=extent, origin='upper', transform=projection) # img = ax.imshow(rgb.transpose((1, 2, 0)), extent=extent, # origin='upper') ax.gridlines(color='lightgrey', linestyle='-') # ax.set_xticks() tcc_plot = vs.fig2plot(fig, dpi=dpi, figsize=figsize, file_extension='jpg') plt.close() ds = None return tcc_plot def merge(tiles, prefix="mosaic_"): """ merging a given list of files with gdal_merge.py :param tiles: list of geotiffs to be merged_tiles :return geotiff: mosaic of merged files """ from 
flyingpigeon import gdal_merge as gm from os.path import join, basename import subprocess from subprocess import CalledProcessError from flyingpigeon.config import _PATH try: LOGGER.debug('start merging of %s files' % len(tiles)) # prefix = dt.strftime(date, "%Y%m%d") _, filename = mkstemp(dir='.', prefix=prefix, suffix='.tif') gdal_merge = '%s/gdal_merge.py' % _PATH cmd = ['python', gdal_merge, '-o', filename, '-of', 'GTiff', '-v'] for tile in tiles: LOGGER.debug('extent tile %s ', tile) cmd.append(tile) LOGGER.debug('cmd: %s' % cmd) output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) LOGGER.debug('gdal_merge log: \n %s', output) except CalledProcessError as e: LOGGER.exception('failed to merge tiles:\n{0}'.format(e.output)) # import sys # try: # LOGGER.debug('start merging') # # prefix = dt.strftime(date, "%Y%m%d") # _, filename = mkstemp(dir='.', prefix=prefix, suffix='.tif') # call = ['-o', "%s" % filename, '-of', 'GTiff', '-v'] # # # # tiles_day = [tile for tile in tiles if date.date() == get_timestamp(tile).date()] # # for tile in tiles: # call.extend([tile]) # sys.argv[1:] = call # gm.main() # # LOGGER.debug("files merged for %s tiles " % len(tiles)) # except: # LOGGER.exception("failed to merge tiles") return filename def ndvi(tiles, product='PlanetScope'): """ :param tiles: list of tiles including appropriate metadata files :param product: EO product e.g. 
"PlanetScope" (default) :retrun files, plots : list of calculated files and plots """ import rasterio import numpy from xml.dom import minidom import matplotlib.pyplot as plt ndvifiles = [] ndviplots = [] if product == 'PlanetScope': tiles_dic = ndvi_sorttiles(tiles, product=product) for key in tiles_dic.keys(): try: LOGGER.debug("NDVI for %s" % key) if len(tiles_dic[key]) == 2: tile = next(x for x in tiles_dic[key] if ".tif" in x) meta = next(x for x in tiles_dic[key] if ".xml" in x) else: LOGGER.debug('Key %s data are not complete' % key) continue # continue with next key # Load red and NIR bands - note all PlanetScope 4-band images have band order BGRN with rasterio.open(tile) as src: band_red = src.read(3) with rasterio.open(tile) as src: band_nir = src.read(4) LOGGER.debug("data read in memory") xmldoc = minidom.parse(meta) nodes = xmldoc.getElementsByTagName("ps:bandSpecificMetadata") # XML parser refers to bands by numbers 1-4 coeffs = {} for node in nodes: bn = node.getElementsByTagName("ps:bandNumber")[0].firstChild.data if bn in ['1', '2', '3', '4']: i = int(bn) value = node.getElementsByTagName("ps:reflectanceCoefficient")[0].firstChild.data coeffs[i] = float(value) # Multiply by corresponding coefficients band_red = band_red * coeffs[3] band_nir = band_nir * coeffs[4] LOGGER.debug("data athmospheric corrected") # Allow division by zero numpy.seterr(divide='ignore', invalid='ignore') # Calculate NDVI bn_ndvi = (band_nir.astype(float) - band_red.astype(float)) / (band_nir + band_red) # Set spatial characteristics of the output object to mirror the input kwargs = src.meta kwargs.update( dtype=rasterio.float32, count=1) # Create the file _, ndvifile = mkstemp(dir='.', prefix="ndvi_%s" % key, suffix='.tif') with rasterio.open(ndvifile, 'w', **kwargs) as dst: dst.write_band(1, bn_ndvi.astype(rasterio.float32)) LOGGER.debug("NDVI calculated for %s " % key) ndvifiles.extend([ndvifile]) LOGGER.debug("NDVI calculated: %s " % ndvifile) except: 
LOGGER.exception("Failed to Calculate NDVI for %s " % key) return ndvifiles def ndvi_sorttiles(tiles, product="PlanetScope"): """ sort un list fo files to calculate the NDVI. red nivr and metadata are sorted in an dictionary :param tiles: list of scene files and metadata :param product: EO data product e.g. "PlanetScope" (default) :return dictionary: sorted files ordered in a dictionary """ from os.path import splitext, basename if product == "PlanetScope": ids = [] for tile in tiles: bn, _ = splitext(basename(tile)) ids.extend([bn]) tiles_dic = {key: None for key in ids} for key in tiles_dic.keys(): tm = [t for t in tiles if key in t] tiles_dic[key] = tm # LOGGER.debug("files sorted in dictionary %s" % tiles_dic) return tiles_dic
from tempfile import mkstemp from osgeo import gdal, osr import cartopy.crs as ccrs import matplotlib.pyplot as plt from flyingpigeon import visualisation as vs import logging LOGGER = logging.getLogger("PYWPS") def get_RGB(DIR, false_color=False): """ Extracts the files for RGB bands of Sentinel2 directory tree, scales and merge the values. Output is a merged tif including 3 bands. :param DIR: base directory of Sentinel2 directory tree :param false_color: if set to True the near infrared band (B08) will be taken as red band :returns geotif: merged geotiff """ import glob import subprocess # from subprocess import CalledProcessError jps = [] fname = DIR.split('/')[-1] ID = fname.replace('.SAVE','') for filename in glob.glob(DIR + '/GRANULE/*/IMG_DATA/*jp2'): jps.append(filename) jp_b = [jp for jp in jps if '_B02.jp2' in jp][0] jp_g = [jp for jp in jps if '_B03.jp2' in jp][0] if false_color: jp_r = [jp for jp in jps if '_B08.jp2' in jp][0] else: jp_r = [jp for jp in jps if '_B04.jp2' in jp][0] # scaling the color values and trasform from jp2 to tif try: # response.update_status('execution of CASTf90', 50) red = 'RED_{0}.tif'.format(ID) cmd = ['gdal_translate', '-scale', jp_r, red ] # LOGGER.debug("translate command: %s", cmd) output, error = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate() # output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) LOGGER.info('translate output:\n %s', output) green = 'GREEN_{0}.tif'.format(ID) cmd = ['gdal_translate', '-scale', jp_g, green ] LOGGER.debug("translate command: %s", cmd) output, error = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate() LOGGER.info('translate output:\n %s', output) blue = 'BLUE_{0}.tif'.format(ID) cmd = ['gdal_translate', '-scale', jp_b, blue ] LOGGER.debug("translate command: %s", cmd) output, error = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate() LOGGER.info('translate output:\n %s', output) 
# response.update_status('**** scaling suceeded', 20) except: msg = 'scaleing failed:\n{0}'.format(error) LOGGER.exception(msg) # merge tree files to one geotiff with tree seperated bands try: merged_RGB = 'RGB_{0}.tif'.format(ID) cmd = ['gdal_merge.py', '-seperate', '-co', 'PHOTOMETRIC=RGB', '-o', merged_RGB , red , green, blue ] output, error = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate() except: msg = 'merging failed:\n{0}'.format(error) # LOGGER.exception(msg) return merged_RGB def get_timestamp(tile): """ returns the creation timestamp of a tile image as datetime. :param tile: path to geotiff confom to gdal metadata http://www.gdal.org/gdal_datamodel.html :return datetime: timestamp """ from datetime import datetime as dt try: ds = gdal.Open(tile, 0) ts = ds.GetMetadataItem("TIFFTAG_DATETIME") LOGGER.debug("timestamp: %s " % ts) ds = None # to close the dataset timestamp = dt.strptime(ts, '%Y:%m:%d %H:%M:%S') except: LOGGER.exception('failed to get timestamp for: %s' % tile) return timestamp def plot_products(products, extend=[10, 20, 5, 15]): """ plot the products extends of the search result :param products: output of sentinel api search :return graphic: map of extents """ import numpy as np from matplotlib.patches import Polygon import matplotlib.patches as mpatches from matplotlib.collections import PatchCollection from cartopy import config as cartopy_config import cartopy.feature as cfeature from cartopy.util import add_cyclic_point import re fig = plt.figure(dpi=90, facecolor='w', edgecolor='k') projection = ccrs.PlateCarree() ax = plt.axes(projection=projection) ax.set_extent(extend) ax.stock_img() ax.coastlines() ax.add_feature(cfeature.BORDERS) pat = re.compile(r'''(-*\d+\.\d+ -*\d+\.\d+);*''') for key in products.keys(): polygon = str(products[key]['footprint']) # s = 'POLYGON ((15.71888453311329 9.045763865974665,15.7018748825589 8.97110837227606,15.66795226563288 8.822558900399137,15.639498612331632 
8.69721920092792,15.63428409805786 8.674303514900869,15.600477269179995 8.525798537094156,15.566734239298787 8.377334323160321,15.53315342410745 8.228822837291709,15.499521168391912 8.080353481086165,15.493321895031096 8.052970059354971,14.999818486685434 8.053569047879877,14.999818016115439 9.046743365203026,15.71888453311329 9.045763865974665))' matches = pat.findall(polygon) if matches: xy = np.array([map(float, m.split()) for m in matches]) ax.add_patch(mpatches.Polygon(xy, closed=True, transform=ccrs.PlateCarree(), alpha=0.4)) # color='coral' # ccrs.Geodetic() ax.gridlines(draw_labels=True,) img = vs.fig2plot(fig, output_dir='.') return img def plot_ndvi(geotif, file_extension='jpg', dpi=150, figsize=(10,10)): """ plots a NDVI image :param geotif: geotif file containning one band with NDVI values :param file_extension: format of the output graphic. default='png' :result str: path to graphic file """ # https://ocefpaf.github.io/python4oceanographers/blog/2015/03/02/geotiff/ gdal.UseExceptions() norm = vs.MidpointNormalize(midpoint=0) ds = gdal.Open(geotif) gt = ds.GetGeoTransform() proj = ds.GetProjection() inproj = osr.SpatialReference() inproj.ImportFromWkt(proj) projcs = inproj.GetAuthorityCode('PROJCS') projection = ccrs.epsg(projcs) # print("Projection: %s " % projection) subplot_kw = dict(projection=projection) fig, ax = plt.subplots( subplot_kw=subplot_kw) extent = (gt[0], gt[0] + ds.RasterXSize * gt[1], gt[3] + ds.RasterYSize * gt[5], gt[3]) bnd1 = ds.GetRasterBand(1) data = bnd1.ReadAsArray(0, 0, ds.RasterXSize, ds.RasterYSize) # buf_xsize=ds.RasterXSize/10, buf_ysize=ds.RasterYSize/10, img_ndvi = ax.imshow(data, extent=extent,origin='upper', norm=norm, vmin=-1, vmax=1, cmap=plt.cm.BrBG, transform=projection) # img_ndvi = ax.imshow(data, extent=extent, # [:3, :, :].transpose((1, 2, 0)) # origin='upper',norm=norm, vmin=-1, vmax=1, cmap=plt.cm.summer) plt.title('NDVI') plt.colorbar(img_ndvi) ax.gridlines() #draw_labels=True, ndvi_plot = vs.fig2plot(fig, 
output_dir='.', file_extension=file_extension, dpi=dpi, figsize=figsize) return ndvi_plot # ndvi_plot def plot_RGB(geotif, rgb_bands=[1,2,3], file_extension='jpg', dpi=150, figsize=(10,10)): """ Calculates a RGB image (True color composite) based on red, greed, and blue bands. :param geotif: geotif file containning one band with NDVI values :param file_extension: format of the output graphic. default='png' :param rgb_bands: order of bands storing red, green and blue values default=[1,2,3] :result str: path to graphic file """ from numpy import dstack gdal.UseExceptions() ds = gdal.Open(geotif) data = ds.ReadAsArray() gt = ds.GetGeoTransform() proj = ds.GetProjection() inproj = osr.SpatialReference() inproj.ImportFromWkt(proj) projcs = inproj.GetAuthorityCode('PROJCS') projection = ccrs.epsg(projcs) # print(projection) subplot_kw = dict(projection=projection) fig, ax = plt.subplots( subplot_kw=subplot_kw) extent = (gt[0], gt[0] + ds.RasterXSize * gt[1], gt[3] + ds.RasterYSize * gt[5], gt[3]) red = ds.GetRasterBand(rgb_bands[0]) green = ds.GetRasterBand(rgb_bands[1]) blue = ds.GetRasterBand(rgb_bands[2]) # band 1 PSSCINE4Band blue img_r = red.ReadAsArray(0, 0, ds.RasterXSize, ds.RasterYSize) img_g = green.ReadAsArray(0, 0, ds.RasterXSize, ds.RasterYSize) img_b = blue.ReadAsArray(0, 0, ds.RasterXSize, ds.RasterYSize) # rgb = dstack((data[0, :, :], data[1, :, :], data[2, :, :])) rgb = dstack([img_r, img_g, img_b]) img = ax.imshow(rgb, extent=extent, origin='upper', transform=projection) # img = ax.imshow(rgb.transpose((1, 2, 0)), extent=extent, # origin='upper') ax.gridlines(color='lightgrey', linestyle='-') # ax.set_xticks() tcc_plot = vs.fig2plot(fig, dpi=dpi, figsize=figsize, file_extension='jpg') plt.close() ds = None return tcc_plot def merge(tiles, prefix="mosaic_"): """ merging a given list of files with gdal_merge.py :param tiles: list of geotiffs to be merged_tiles :return geotiff: mosaic of merged files """ from flyingpigeon import gdal_merge as gm from 
os.path import join, basename import subprocess from subprocess import CalledProcessError from flyingpigeon.config import _PATH try: LOGGER.debug('start merging of %s files' % len(tiles)) # prefix = dt.strftime(date, "%Y%m%d") _, filename = mkstemp(dir='.', prefix=prefix, suffix='.tif') gdal_merge = '%s/gdal_merge.py' % _PATH cmd = ['python', gdal_merge, '-o', filename, '-of', 'GTiff', '-v'] for tile in tiles: LOGGER.debug('extent tile %s ', tile) cmd.append(tile) LOGGER.debug('cmd: %s' % cmd) output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) LOGGER.debug('gdal_merge log: \n %s', output) except CalledProcessError as e: LOGGER.exception('failed to merge tiles:\n{0}'.format(e.output)) # import sys # try: # LOGGER.debug('start merging') # # prefix = dt.strftime(date, "%Y%m%d") # _, filename = mkstemp(dir='.', prefix=prefix, suffix='.tif') # call = ['-o', "%s" % filename, '-of', 'GTiff', '-v'] # # # # tiles_day = [tile for tile in tiles if date.date() == get_timestamp(tile).date()] # # for tile in tiles: # call.extend([tile]) # sys.argv[1:] = call # gm.main() # # LOGGER.debug("files merged for %s tiles " % len(tiles)) # except: # LOGGER.exception("failed to merge tiles") return filename def ndvi(tiles, product='PlanetScope'): """ :param tiles: list of tiles including appropriate metadata files :param product: EO product e.g. 
"PlanetScope" (default) :retrun files, plots : list of calculated files and plots """ import rasterio import numpy from xml.dom import minidom import matplotlib.pyplot as plt ndvifiles = [] ndviplots = [] if product == 'PlanetScope': tiles_dic = ndvi_sorttiles(tiles, product=product) for key in tiles_dic.keys(): try: LOGGER.debug("NDVI for %s" % key) if len(tiles_dic[key]) == 2: tile = next(x for x in tiles_dic[key] if ".tif" in x) meta = next(x for x in tiles_dic[key] if ".xml" in x) else: LOGGER.debug('Key %s data are not complete' % key) continue # continue with next key # Load red and NIR bands - note all PlanetScope 4-band images have band order BGRN with rasterio.open(tile) as src: band_red = src.read(3) with rasterio.open(tile) as src: band_nir = src.read(4) LOGGER.debug("data read in memory") xmldoc = minidom.parse(meta) nodes = xmldoc.getElementsByTagName("ps:bandSpecificMetadata") # XML parser refers to bands by numbers 1-4 coeffs = {} for node in nodes: bn = node.getElementsByTagName("ps:bandNumber")[0].firstChild.data if bn in ['1', '2', '3', '4']: i = int(bn) value = node.getElementsByTagName("ps:reflectanceCoefficient")[0].firstChild.data coeffs[i] = float(value) # Multiply by corresponding coefficients band_red = band_red * coeffs[3] band_nir = band_nir * coeffs[4] LOGGER.debug("data athmospheric corrected") # Allow division by zero numpy.seterr(divide='ignore', invalid='ignore') # Calculate NDVI bn_ndvi = (band_nir.astype(float) - band_red.astype(float)) / (band_nir + band_red) # Set spatial characteristics of the output object to mirror the input kwargs = src.meta kwargs.update( dtype=rasterio.float32, count=1) # Create the file _, ndvifile = mkstemp(dir='.', prefix="ndvi_%s" % key, suffix='.tif') with rasterio.open(ndvifile, 'w', **kwargs) as dst: dst.write_band(1, bn_ndvi.astype(rasterio.float32)) LOGGER.debug("NDVI calculated for %s " % key) ndvifiles.extend([ndvifile]) LOGGER.debug("NDVI calculated: %s " % ndvifile) except: 
LOGGER.exception("Failed to Calculate NDVI for %s " % key) return ndvifiles def ndvi_sorttiles(tiles, product="PlanetScope"): """ sort un list fo files to calculate the NDVI. red nivr and metadata are sorted in an dictionary :param tiles: list of scene files and metadata :param product: EO data product e.g. "PlanetScope" (default) :return dictionary: sorted files ordered in a dictionary """ from os.path import splitext, basename if product == "PlanetScope": ids = [] for tile in tiles: bn, _ = splitext(basename(tile)) ids.extend([bn]) tiles_dic = {key: None for key in ids} for key in tiles_dic.keys(): tm = [t for t in tiles if key in t] tiles_dic[key] = tm # LOGGER.debug("files sorted in dictionary %s" % tiles_dic) return tiles_dic
en
0.576395
Extracts the files for RGB bands of Sentinel2 directory tree, scales and merge the values. Output is a merged tif including 3 bands. :param DIR: base directory of Sentinel2 directory tree :param false_color: if set to True the near infrared band (B08) will be taken as red band :returns geotif: merged geotiff # from subprocess import CalledProcessError # scaling the color values and trasform from jp2 to tif # response.update_status('execution of CASTf90', 50) # LOGGER.debug("translate command: %s", cmd) # output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) # response.update_status('**** scaling suceeded', 20) # merge tree files to one geotiff with tree seperated bands # LOGGER.exception(msg) returns the creation timestamp of a tile image as datetime. :param tile: path to geotiff confom to gdal metadata http://www.gdal.org/gdal_datamodel.html :return datetime: timestamp # to close the dataset plot the products extends of the search result :param products: output of sentinel api search :return graphic: map of extents (-*\d+\.\d+ -*\d+\.\d+);* # s = 'POLYGON ((15.71888453311329 9.045763865974665,15.7018748825589 8.97110837227606,15.66795226563288 8.822558900399137,15.639498612331632 8.69721920092792,15.63428409805786 8.674303514900869,15.600477269179995 8.525798537094156,15.566734239298787 8.377334323160321,15.53315342410745 8.228822837291709,15.499521168391912 8.080353481086165,15.493321895031096 8.052970059354971,14.999818486685434 8.053569047879877,14.999818016115439 9.046743365203026,15.71888453311329 9.045763865974665))' # color='coral' # ccrs.Geodetic() plots a NDVI image :param geotif: geotif file containning one band with NDVI values :param file_extension: format of the output graphic. 
default='png' :result str: path to graphic file # https://ocefpaf.github.io/python4oceanographers/blog/2015/03/02/geotiff/ # print("Projection: %s " % projection) # buf_xsize=ds.RasterXSize/10, buf_ysize=ds.RasterYSize/10, # img_ndvi = ax.imshow(data, extent=extent, # [:3, :, :].transpose((1, 2, 0)) # origin='upper',norm=norm, vmin=-1, vmax=1, cmap=plt.cm.summer) #draw_labels=True, # ndvi_plot Calculates a RGB image (True color composite) based on red, greed, and blue bands. :param geotif: geotif file containning one band with NDVI values :param file_extension: format of the output graphic. default='png' :param rgb_bands: order of bands storing red, green and blue values default=[1,2,3] :result str: path to graphic file # print(projection) # band 1 PSSCINE4Band blue # rgb = dstack((data[0, :, :], data[1, :, :], data[2, :, :])) # img = ax.imshow(rgb.transpose((1, 2, 0)), extent=extent, # origin='upper') # ax.set_xticks() merging a given list of files with gdal_merge.py :param tiles: list of geotiffs to be merged_tiles :return geotiff: mosaic of merged files # prefix = dt.strftime(date, "%Y%m%d") # import sys # try: # LOGGER.debug('start merging') # # prefix = dt.strftime(date, "%Y%m%d") # _, filename = mkstemp(dir='.', prefix=prefix, suffix='.tif') # call = ['-o', "%s" % filename, '-of', 'GTiff', '-v'] # # # # tiles_day = [tile for tile in tiles if date.date() == get_timestamp(tile).date()] # # for tile in tiles: # call.extend([tile]) # sys.argv[1:] = call # gm.main() # # LOGGER.debug("files merged for %s tiles " % len(tiles)) # except: # LOGGER.exception("failed to merge tiles") :param tiles: list of tiles including appropriate metadata files :param product: EO product e.g. 
"PlanetScope" (default) :retrun files, plots : list of calculated files and plots # continue with next key # Load red and NIR bands - note all PlanetScope 4-band images have band order BGRN # XML parser refers to bands by numbers 1-4 # Multiply by corresponding coefficients # Allow division by zero # Calculate NDVI # Set spatial characteristics of the output object to mirror the input # Create the file sort un list fo files to calculate the NDVI. red nivr and metadata are sorted in an dictionary :param tiles: list of scene files and metadata :param product: EO data product e.g. "PlanetScope" (default) :return dictionary: sorted files ordered in a dictionary # LOGGER.debug("files sorted in dictionary %s" % tiles_dic)
2.674844
3
wiggin/actions/interactions.py
golobor/plugychrom
0
6624618
<gh_stars>0 from dataclasses import dataclass import numbers import numpy as np from typing import Optional, Any, Union import polychrom from ..core import SimAction from .. import forces @dataclass class Chains(SimAction): chains: Any = ((0, None, False),) bond_length: float = 1.0 wiggle_dist: float = 0.25 stiffness_k: Optional[float] = None repulsion_e: Optional[float] = 1.5 attraction_e: Optional[float] = None attraction_r: Optional[float] = None except_bonds: Union[bool, int] = False _writes_shared = ['chains'] def configure(self): out_shared = {} if hasattr(self.chains, "__iter__") and hasattr( self.chains[0], "__iter__" ): out_shared["chains"] = self.chains elif hasattr(self.chains, "__iter__") and isinstance( self.chains[0], numbers.Number ): edges = np.r_[0, np.cumsum(self.chains)] chains = [(st, end, False) for st, end in zip(edges[:-1], edges[1:])] self.chains = chains out_shared['chains'] = chains return out_shared def run_init(self, sim): # do not use self.args! # only use parameters from self.ndonfig.shared nonbonded_force_func = None nonbonded_force_kwargs = {} if self.repulsion_e: if self.attraction_e and self.attraction_r: nonbonded_force_func = forces.quartic_repulsive_attractive nonbonded_force_kwargs = dict( repulsionEnergy=self.repulsion_e, repulsionRadius=1.0, attractionEnergy=self.attraction_e, attractionRadius=self.attraction_r, ) else: nonbonded_force_func = forces.quartic_repulsive nonbonded_force_kwargs = {"trunc": self.repulsion_e} sim.add_force( polychrom.forcekits.polymer_chains( sim, chains=self.chains, bond_force_func=polychrom.forces.harmonic_bonds, bond_force_kwargs={ "bondLength": self.bond_length, "bondWiggleDistance": self.wiggle_dist, }, angle_force_func=( None if self.stiffness_k is None else polychrom.forces.angle_force ), angle_force_kwargs={"k": self.stiffness_k}, nonbonded_force_func=nonbonded_force_func, nonbonded_force_kwargs=nonbonded_force_kwargs, except_bonds=self.except_bonds, ) )
from dataclasses import dataclass import numbers import numpy as np from typing import Optional, Any, Union import polychrom from ..core import SimAction from .. import forces @dataclass class Chains(SimAction): chains: Any = ((0, None, False),) bond_length: float = 1.0 wiggle_dist: float = 0.25 stiffness_k: Optional[float] = None repulsion_e: Optional[float] = 1.5 attraction_e: Optional[float] = None attraction_r: Optional[float] = None except_bonds: Union[bool, int] = False _writes_shared = ['chains'] def configure(self): out_shared = {} if hasattr(self.chains, "__iter__") and hasattr( self.chains[0], "__iter__" ): out_shared["chains"] = self.chains elif hasattr(self.chains, "__iter__") and isinstance( self.chains[0], numbers.Number ): edges = np.r_[0, np.cumsum(self.chains)] chains = [(st, end, False) for st, end in zip(edges[:-1], edges[1:])] self.chains = chains out_shared['chains'] = chains return out_shared def run_init(self, sim): # do not use self.args! # only use parameters from self.ndonfig.shared nonbonded_force_func = None nonbonded_force_kwargs = {} if self.repulsion_e: if self.attraction_e and self.attraction_r: nonbonded_force_func = forces.quartic_repulsive_attractive nonbonded_force_kwargs = dict( repulsionEnergy=self.repulsion_e, repulsionRadius=1.0, attractionEnergy=self.attraction_e, attractionRadius=self.attraction_r, ) else: nonbonded_force_func = forces.quartic_repulsive nonbonded_force_kwargs = {"trunc": self.repulsion_e} sim.add_force( polychrom.forcekits.polymer_chains( sim, chains=self.chains, bond_force_func=polychrom.forces.harmonic_bonds, bond_force_kwargs={ "bondLength": self.bond_length, "bondWiggleDistance": self.wiggle_dist, }, angle_force_func=( None if self.stiffness_k is None else polychrom.forces.angle_force ), angle_force_kwargs={"k": self.stiffness_k}, nonbonded_force_func=nonbonded_force_func, nonbonded_force_kwargs=nonbonded_force_kwargs, except_bonds=self.except_bonds, ) )
en
0.122589
# do not use self.args! # only use parameters from self.ndonfig.shared
2.649276
3
examples/LaTeX/4Derr.py
waldyrious/galgebra
151
6624619
<gh_stars>100-1000 from __future__ import print_function from sympy import * from galgebra.printer import Format,xpdf,xdvi from galgebra.ga import Ga def main(): Format() snr=1 g = '0 0 1 0 ,0 0 0 1 ,1 0 0 0 ,0 1 0 0' sk4coords = (e1,e2,e3,e4) = symbols('e1 e2 e3 e4') sk4 = Ga('e_1 e_2 e_3 e_4', g=g, coords=sk4coords) (e1,e2,e3,e4) = sk4.mv() print('g_{ii} =',sk4.g) v = symbols('v', real=True) x1=(e1+e3)/sqrt(2) x2=(e2+e4)/sqrt(2) print('x_1<x_1==',x1<x1) print('x_1<x_2==',x1<x2) print('x_2<x_1==',x2<x1) print('x_2<x_2==',x2<x2) print(r'#$-\infty < v < \infty$') print('(-v*(x_1^x_2)/2).exp()==',(-v*(x1^x2)/2).exp()) v = symbols('v', real=True, positive=True) print(r'#$0\le v < \infty$') print('(-v*(x_1^x_2)/2).exp()==',(-v*(x1^x2)/2).exp()) xpdf(pdfprog=None) return if __name__ == "__main__": main()
from __future__ import print_function from sympy import * from galgebra.printer import Format,xpdf,xdvi from galgebra.ga import Ga def main(): Format() snr=1 g = '0 0 1 0 ,0 0 0 1 ,1 0 0 0 ,0 1 0 0' sk4coords = (e1,e2,e3,e4) = symbols('e1 e2 e3 e4') sk4 = Ga('e_1 e_2 e_3 e_4', g=g, coords=sk4coords) (e1,e2,e3,e4) = sk4.mv() print('g_{ii} =',sk4.g) v = symbols('v', real=True) x1=(e1+e3)/sqrt(2) x2=(e2+e4)/sqrt(2) print('x_1<x_1==',x1<x1) print('x_1<x_2==',x1<x2) print('x_2<x_1==',x2<x1) print('x_2<x_2==',x2<x2) print(r'#$-\infty < v < \infty$') print('(-v*(x_1^x_2)/2).exp()==',(-v*(x1^x2)/2).exp()) v = symbols('v', real=True, positive=True) print(r'#$0\le v < \infty$') print('(-v*(x_1^x_2)/2).exp()==',(-v*(x1^x2)/2).exp()) xpdf(pdfprog=None) return if __name__ == "__main__": main()
none
1
2.205395
2
src/lambda/ppe-detector-function/test/graphql/test_mutation.py
gai6948/video-analytics-for-ppe-compliance
0
6624620
import os import json from main.graphql.mutation import make_mutation def test_mutation_execution(): src_dirname = os.path.dirname(__file__) src_filename = os.path.join(src_dirname, '../data/mutation-variables.json') with open(src_filename, 'r') as fd: variables = json.load(fd) mutation = """ mutation InjestFrame( $cameraId: String!, $ts: String!, $s3url: String, $ppeResult: PPEResultInput $ppeViolationCount: Int $pplCount: Int) { injestFrame( cameraId: $cameraId, ts: $ts, s3url: $s3url, ppeResult: $ppeResult ppeViolationCount: $ppeViolationCount pplCount: $pplCount ) { cameraId ts s3url } } """ variables = { "cameraId": 'test-laptop-01', "s3url": 'prod-frameprocessorstack-framebucket6a445548-1o2vm6nfi5pyq/test-laptop-1-2021-01-08-08:27:53:985000.jpg', "ts": '1000000', "ppeResult": { "personsWithRequiredEquipment": {}, "personsWithoutRequiredEquipment": {} }, "ppeViolationCount": 0, "pplCount": 1 } res = make_mutation(mutation, variables) assert res is not None
import os import json from main.graphql.mutation import make_mutation def test_mutation_execution(): src_dirname = os.path.dirname(__file__) src_filename = os.path.join(src_dirname, '../data/mutation-variables.json') with open(src_filename, 'r') as fd: variables = json.load(fd) mutation = """ mutation InjestFrame( $cameraId: String!, $ts: String!, $s3url: String, $ppeResult: PPEResultInput $ppeViolationCount: Int $pplCount: Int) { injestFrame( cameraId: $cameraId, ts: $ts, s3url: $s3url, ppeResult: $ppeResult ppeViolationCount: $ppeViolationCount pplCount: $pplCount ) { cameraId ts s3url } } """ variables = { "cameraId": 'test-laptop-01', "s3url": 'prod-frameprocessorstack-framebucket6a445548-1o2vm6nfi5pyq/test-laptop-1-2021-01-08-08:27:53:985000.jpg', "ts": '1000000', "ppeResult": { "personsWithRequiredEquipment": {}, "personsWithoutRequiredEquipment": {} }, "ppeViolationCount": 0, "pplCount": 1 } res = make_mutation(mutation, variables) assert res is not None
en
0.367715
mutation InjestFrame( $cameraId: String!, $ts: String!, $s3url: String, $ppeResult: PPEResultInput $ppeViolationCount: Int $pplCount: Int) { injestFrame( cameraId: $cameraId, ts: $ts, s3url: $s3url, ppeResult: $ppeResult ppeViolationCount: $ppeViolationCount pplCount: $pplCount ) { cameraId ts s3url } }
2.478608
2
weibo_comments_crawler.py
KeithYue/weibo-keywords-crawler
16
6624621
# coding=utf-8 class WeiboCommentsCrawler(): ''' A spider which is used to crawl all comments given a weibo item ''' def __init__(self, client, weibo_mid): ''' client: the weibo api client weibo_mid: the specific weibo whose comments would be crawled ''' self.client = client self.weibo_id = weibo_mid def crawl(self): c = self.client results = [] page = 1 while True: comments = c.get('comments/show', id=int(self.weibo_id), count = 200, page = page) # print(comments) # print(type(comments)) if type(comments) is dict: if len(comments['comments']) == 0: break results.extend(comments['comments']) page += 1 else: break # print('comments number', len(results)) return results def save(self): pass if __name__ == '__main__': pass
# coding=utf-8 class WeiboCommentsCrawler(): ''' A spider which is used to crawl all comments given a weibo item ''' def __init__(self, client, weibo_mid): ''' client: the weibo api client weibo_mid: the specific weibo whose comments would be crawled ''' self.client = client self.weibo_id = weibo_mid def crawl(self): c = self.client results = [] page = 1 while True: comments = c.get('comments/show', id=int(self.weibo_id), count = 200, page = page) # print(comments) # print(type(comments)) if type(comments) is dict: if len(comments['comments']) == 0: break results.extend(comments['comments']) page += 1 else: break # print('comments number', len(results)) return results def save(self): pass if __name__ == '__main__': pass
en
0.807607
# coding=utf-8 A spider which is used to crawl all comments given a weibo item client: the weibo api client weibo_mid: the specific weibo whose comments would be crawled # print(comments) # print(type(comments)) # print('comments number', len(results))
3.213267
3
src/tests/ftest/util/job_manager_utils.py
kmajzero/daos
2
6624622
#!/usr/bin/python """ (C) Copyright 2020-2021 Intel Corporation. SPDX-License-Identifier: BSD-2-Clause-Patent """ # pylint: disable=too-many-lines from datetime import datetime from distutils.spawn import find_executable import os import re import time from ClusterShell.NodeSet import NodeSet from command_utils import ExecutableCommand, SystemctlCommand from command_utils_base import FormattedParameter, EnvironmentVariables from command_utils_base import CommandFailure from env_modules import load_mpi from general_utils import pcmd, stop_processes, run_pcmd from write_host_file import write_host_file class JobManager(ExecutableCommand): """A class for commands with parameters that manage other commands.""" def __init__(self, namespace, command, job, path="", subprocess=False): """Create a JobManager object. Args: namespace (str): yaml namespace (path to parameters) command (str): string of the command to be executed. job (ExecutableCommand): command object to manage. path (str, optional): path to location of command binary file. Defaults to "". subprocess (bool, optional): whether the command is run as a subprocess. Defaults to False. """ super().__init__(namespace, command, path, subprocess) self.job = job self._hosts = None @property def hosts(self): """Get the list of hosts associated with this command.""" return self._hosts def __str__(self): """Return the command with all of its defined parameters as a string. Returns: str: the command with all the defined parameters """ commands = [super().__str__(), str(self.job)] return " ".join(commands) def check_subprocess_status(self, sub_process): """Verify command status when called in a subprocess. Args: sub_process (process.SubProcess): subprocess used to run the command Returns: bool: whether or not the command progress has been detected """ return self.job.check_subprocess_status(sub_process) def assign_hosts(self, hosts, path=None, slots=None): """Assign the hosts to use with the command. 
Set the appropriate command line parameter with the specified value. Args: hosts (list): list of hosts to specify on the command line path (str, optional): path to use when specifying the hosts through a hostfile. Defaults to None. slots (int, optional): number of slots per host to specify in the optional hostfile. Defaults to None. """ def assign_processes(self, processes): """Assign the number of processes per node. Set the appropriate command line parameter with the specified value. Args: processes (int): number of processes per node """ def assign_environment(self, env_vars, append=False): """Assign or add environment variables to the command. Args: env_vars (EnvironmentVariables): the environment variables to use assign or add to the command append (bool): whether to assign (False) or append (True) the specified environment variables """ def assign_environment_default(self, env_vars): """Assign the default environment variables for the command. Args: env_vars (EnvironmentVariables): the environment variables to assign as the default """ def get_subprocess_state(self, message=None): """Display the state of the subprocess. Args: message (str, optional): additional text to include in output. Defaults to None. Returns: list: a list of states for the process found. If the local job manager command is running its state will be the first in the list. Additional states in the list can typically indicate that remote processes were also found to be active. Active remote processes will be indicated by a 'R' state at the end of the list. """ # Get/display the state of the local job manager process state = super().get_subprocess_state(message) if self._process is not None and self._hosts: # Determine if the status of the remote job processes on each host remote_state = self._get_remote_process_state(message) if remote_state: # Add a running state to the list of process states if any # remote process was found to be active. 
                if not state:
                    state = ["?"]
                state.append(remote_state)
        return state

    def _get_remote_process_state(self, message=None):
        """Display the state of the processes running on remote hosts.

        Args:
            message (str, optional): additional text to include in output.
                Defaults to None.

        Returns:
            str: a "R" if any remote processes are found to be active
                otherwise None.

        """
        # Display the status of the remote job processes on each host
        command = "/usr/bin/pgrep -a {}".format(self.job.command_regex)
        self.log.debug(
            "%s processes still running remotely%s:", self.command,
            " {}".format(message) if message else "")
        self.log.debug("Running (on %s): %s", self._hosts, command)
        results = pcmd(self._hosts, command, True, 10, None)

        # The pcmd method will return a dictionary with a single key, e.g.
        # {1: <NodeSet>}, if there are no remote processes running on any of the
        # hosts. If this value is not returned, indicate there are remote
        # processes running by returning a "R" state.
        return "R" if 1 not in results or len(results) > 1 else None

    def kill(self):
        """Forcibly terminate any job processes running on hosts."""
        regex = self.job.command_regex
        result = stop_processes(self._hosts, regex)
        # stop_processes returns {0: <NodeSet>} alone when nothing matched the
        # regex on any host; any other key means a process had to be killed.
        if 0 in result and len(result) == 1:
            self.log.info(
                "No remote %s processes killed (none found), done.", regex)
        else:
            self.log.info(
                "***At least one remote %s process needed to be killed! Please "
                "investigate/report.***", regex)


class Orterun(JobManager):
    """A class for the orterun job manager command."""

    def __init__(self, job, subprocess=False):
        """Create a Orterun object.

        Args:
            job (ExecutableCommand): command object to manage.
            subprocess (bool, optional): whether the command is run as a
                subprocess. Defaults to False.

        """
        if not load_mpi("openmpi"):
            raise CommandFailure("Failed to load openmpi")

        path = os.path.dirname(find_executable("orterun"))
        super().__init__("/run/orterun/*", "orterun", job, path, subprocess)

        # Default mca values to avoid queue pair errors
        mca_default = {
            "btl_openib_warn_default_gid_prefix": "0",
            "btl": "tcp,self",
            "oob": "tcp",
            "pml": "ob1",
            "btl_tcp_if_include": "eth0",
        }

        self.hostfile = FormattedParameter("--hostfile {}", None)
        self.processes = FormattedParameter("--np {}", 1)
        self.display_map = FormattedParameter("--display-map", False)
        self.map_by = FormattedParameter("--map-by {}", "node")
        self.export = FormattedParameter("-x {}", None)
        self.enable_recovery = FormattedParameter("--enable-recovery", True)
        self.report_uri = FormattedParameter("--report-uri {}", None)
        self.allow_run_as_root = FormattedParameter("--allow-run-as-root", None)
        self.mca = FormattedParameter("--mca {}", mca_default)
        self.pprnode = FormattedParameter("--map-by ppr:{}:node", None)
        self.tag_output = FormattedParameter("--tag-output", True)
        self.ompi_server = FormattedParameter("--ompi-server {}", None)
        self.working_dir = FormattedParameter("-wdir {}", None)

    def assign_hosts(self, hosts, path=None, slots=None):
        """Assign the hosts to use with the command (--hostfile).

        Args:
            hosts (list): list of hosts to specify in the hostfile
            path (str, optional): hostfile path. Defaults to None.
            slots (int, optional): number of slots per host to specify in the
                hostfile. Defaults to None.
        """
        self._hosts = hosts
        kwargs = {"hostlist": self._hosts, "slots": slots}
        if path is not None:
            kwargs["path"] = path
        self.hostfile.value = write_host_file(**kwargs)

    def assign_processes(self, processes):
        """Assign the number of processes per node (-np).

        Args:
            processes (int): number of processes per node
        """
        self.processes.value = processes

    def assign_environment(self, env_vars, append=False):
        """Assign or add environment variables to the command.

        Args:
            env_vars (EnvironmentVariables): the environment variables to use
                assign or add to the command
            append (bool): whether to assign (False) or append (True) the
                specified environment variables
        """
        if append and self.export.value is not None:
            # Convert the current list of environmental variable assignments
            # into an EnvironmentVariables (dict) object. Then update the
            # dictionary keys with the specified values or add new key value
            # pairs to the dictionary. Finally convert the updated dictionary
            # back to a list for the parameter assignment.
            # NOTE(review): split("=") without maxsplit truncates values that
            # themselves contain "=" (e.g. "FOO=a=b" -> "a"); consider
            # item.split("=", 1) — TODO confirm no caller relies on this.
            original = EnvironmentVariables({
                item.split("=")[0]: item.split("=")[1] if "=" in item else None
                for item in self.export.value})
            original.update(env_vars)
            self.export.value = original.get_list()
        else:
            # Overwrite the environmental variable assignment
            self.export.value = env_vars.get_list()

    def assign_environment_default(self, env_vars):
        """Assign the default environment variables for the command.

        Args:
            env_vars (EnvironmentVariables): the environment variables to
                assign as the default
        """
        self.export.update_default(env_vars.get_list())

    def run(self):
        """Run the orterun command.

        Raises:
            CommandFailure: if there is an error running the command
        """
        # Re-load the MPI module in case another test/step swapped it out
        # after __init__ ran.
        if not load_mpi("openmpi"):
            raise CommandFailure("Failed to load openmpi")
        return super().run()


class Mpirun(JobManager):
    """A class for the mpirun job manager command."""

    def __init__(self, job, subprocess=False, mpitype="openmpi"):
        """Create a Mpirun object.

        Args:
            job (ExecutableCommand): command object to manage.
            subprocess (bool, optional): whether the command is run as a
                subprocess. Defaults to False.
            mpitype (str, optional): MPI implementation module to load.
                Defaults to "openmpi".

        """
        if not load_mpi(mpitype):
            raise CommandFailure("Failed to load {}".format(mpitype))

        path = os.path.dirname(find_executable("mpirun"))
        super().__init__("/run/mpirun", "mpirun", job, path, subprocess)

        mca_default = None
        if mpitype == "openmpi":
            # Default mca values to avoid queue pair errors w/ OpenMPI
            mca_default = {
                "btl_openib_warn_default_gid_prefix": "0",
                "btl": "tcp,self",
                "oob": "tcp",
                "pml": "ob1",
                "btl_tcp_if_include": "eth0",
            }

        self.hostfile = FormattedParameter("-hostfile {}", None)
        self.processes = FormattedParameter("-np {}", 1)
        self.ppn = FormattedParameter("-ppn {}", None)
        self.envlist = FormattedParameter("-envlist {}", None)
        self.mca = FormattedParameter("--mca {}", mca_default)
        self.working_dir = FormattedParameter("-wdir {}", None)
        self.mpitype = mpitype

    def assign_hosts(self, hosts, path=None, slots=None):
        """Assign the hosts to use with the command (-f).

        Args:
            hosts (list): list of hosts to specify in the hostfile
            path (str, optional): hostfile path. Defaults to None.
            slots (int, optional): number of slots per host to specify in the
                hostfile. Defaults to None.
        """
        self._hosts = hosts
        kwargs = {"hostlist": self._hosts, "slots": slots}
        if path is not None:
            kwargs["path"] = path
        self.hostfile.value = write_host_file(**kwargs)

    def assign_processes(self, processes):
        """Assign the number of processes per node (-np).

        Args:
            processes (int): number of processes per node
        """
        self.processes.value = processes

    def assign_environment(self, env_vars, append=False):
        """Assign or add environment variables to the command.

        Args:
            env_vars (EnvironmentVariables): the environment variables to use
                assign or add to the command
            append (bool): whether to assign (False) or append (True) the
                specified environment variables
        """
        # Pass the environment variables via the process.run method env argument
        if append and self.env is not None:
            # Update the existing dictionary with the new values
            self.env.update(env_vars)
        else:
            # Overwrite/create the dictionary of environment variables
            self.env = EnvironmentVariables(env_vars)

    def assign_environment_default(self, env_vars):
        """Assign the default environment variables for the command.

        Args:
            env_vars (EnvironmentVariables): the environment variables to
                assign as the default
        """
        self.envlist.update_default(env_vars.get_list())

    def run(self):
        """Run the mpirun command.

        Raises:
            CommandFailure: if there is an error running the command
        """
        # Re-load the MPI module in case another test/step swapped it out
        # after __init__ ran.
        if not load_mpi(self.mpitype):
            raise CommandFailure("Failed to load {}".format(self.mpitype))
        return super().run()


class Srun(JobManager):
    """A class for the srun job manager command."""

    def __init__(self, job, path="", subprocess=False):
        """Create a Srun object.

        Args:
            job (ExecutableCommand): command object to manage.
            path (str, optional): path to location of command binary file.
                Defaults to "".
            subprocess (bool, optional): whether the command is run as a
                subprocess. Defaults to False.

        """
        super().__init__("/run/srun", "srun", job, path, subprocess)

        self.label = FormattedParameter("--label", True)
        self.mpi = FormattedParameter("--mpi={}", "pmi2")
        self.export = FormattedParameter("--export={}", "ALL")
        self.ntasks = FormattedParameter("--ntasks={}", None)
        self.distribution = FormattedParameter("--distribution={}", None)
        self.nodefile = FormattedParameter("--nodefile={}", None)
        self.nodelist = FormattedParameter("--nodelist={}", None)
        self.ntasks_per_node = FormattedParameter("--ntasks-per-node={}", None)
        self.nodes = FormattedParameter("--nodes={}", None)
        self.reservation = FormattedParameter("--reservation={}", None)
        self.partition = FormattedParameter("--partition={}", None)
        self.output = FormattedParameter("--output={}", None)

    def assign_hosts(self, hosts, path=None, slots=None):
        """Assign the hosts to use with the command (-f).

        Args:
            hosts (list): list of hosts to specify in the hostfile
            path (str, optional): hostfile path. Defaults to None.
            slots (int, optional): number of slots per host to specify in the
                hostfile. Defaults to None.
        """
        self._hosts = hosts
        # Slots are passed via --ntasks-per-node instead of the hostfile here.
        kwargs = {"hostlist": self._hosts, "slots": None}
        if path is not None:
            kwargs["path"] = path
        self.nodefile.value = write_host_file(**kwargs)
        self.ntasks_per_node.value = slots

    def assign_processes(self, processes):
        """Assign the number of processes per node (--ntasks).

        Args:
            processes (int): number of processes per node
        """
        self.ntasks.value = processes
        self.distribution.value = "cyclic"

    def assign_environment(self, env_vars, append=False):
        """Assign or add environment variables to the command.

        Args:
            env_vars (EnvironmentVariables): the environment variables to use
                assign or add to the command
            append (bool): whether to assign (False) or append (True) the
                specified environment variables
        """
        if append and self.export.value is not None:
            # Convert the current list of environmental variable assignments
            # into an EnvironmentVariables (dict) object. Then update the
            # dictionary keys with the specified values or add new key value
            # pairs to the dictionary. Finally convert the updated dictionary
            # back to a string for the parameter assignment.
            # NOTE(review): split("=") without maxsplit truncates values that
            # themselves contain "=" — consider item.split("=", 1).
            original = EnvironmentVariables({
                item.split("=")[0]: item.split("=")[1] if "=" in item else None
                for item in self.export.value.split(",")})
            original.update(env_vars)
            self.export.value = ",".join(original.get_list())
        else:
            # Overwrite the environmental variable assignment
            self.export.value = ",".join(env_vars.get_list())

    def assign_environment_default(self, env_vars):
        """Assign the default environment variables for the command.

        Args:
            env_vars (EnvironmentVariables): the environment variables to
                assign as the default
        """
        self.export.update_default(env_vars.get_list())


class Systemctl(JobManager):
    # pylint: disable=too-many-public-methods
    """A class for the systemctl job manager command."""

    def __init__(self, job):
        """Create a Systemctl object.

        Args:
            job (SubProcessCommand): command object to manage.
        """
        # path = os.path.dirname(find_executable("systemctl"))
        super().__init__("/run/systemctl/*", "systemd", job)
        self.job = job
        self._systemctl = SystemctlCommand()
        self._systemctl.service.value = self.job.service_name

        # When each systemctl unit command was last issued; used to scope
        # journalctl queries to the relevant time window.
        self.timestamps = {
            "enable": None,
            "disable": None,
            "start": None,
            "running": None,
            "verified": None,
            "stop": None,
            "restart": None,
        }

    @property
    def hosts(self):
        """Get the list of hosts associated with this command."""
        return list(self._hosts) if self._hosts else None

    def __str__(self):
        """Return the command with all of its defined parameters as a string.

        Returns:
            str: the command with all the defined parameters

        """
        return self._systemctl.__str__()

    def run(self):
        """Start the job's service via the systemctl command.

        Enable the service, start the service, and report the status of the
        service. If an error occurs with any of these commands also display
        the journalctl output for the service.
        Raises:
            CommandFailure: if unable to enable or start the service

        Returns:
            dict: a dictionary of return codes keys and accompanying NodeSet
                values indicating which hosts yielded the return code.

        """
        # Start the daos_server.service
        self.service_enable()
        result = self.service_start()

        # result = self.service_status()

        # Determine if the command has launched correctly using its
        # check_subprocess_status() method.
        if not self.check_subprocess_status(None):
            msg = "Command '{}' did not launch correctly".format(self)
            self.log.error(msg)
            raise CommandFailure(msg)

        return result

    def stop(self):
        """Stop the job's service via the systemctl command.

        Stop the service, disable the service, and report the status of the
        service. If an error occurs with any of these commands also display
        the journalctl output for the service.

        Raises:
            CommandFailure: if unable to stop or disable the service

        Returns:
            dict: a dictionary of return codes keys and accompanying NodeSet
                values indicating which hosts yielded the return code.

        """
        self.service_stop()
        return self.service_disable()

    def wait(self):
        """Wait for the sub process to complete."""
        raise NotImplementedError()

    def kill(self):
        """Forcibly terminate any job processes running on hosts."""
        # Best-effort stop/disable first; fall through to pgrep-based kill
        # even if the systemctl commands fail.
        try:
            self.stop()
        except CommandFailure as error:
            self.log.info(
                "Error stopping/disabling %s: %s", self.job.service_name,
                error)
        # NOTE(review): legacy two-argument super() form; the rest of the file
        # uses the zero-argument super().
        super(Systemctl, self).kill()

    def check_subprocess_status(self, sub_process):
        """Verify command status when called in a subprocess.

        Args:
            sub_process (process.SubProcess): subprocess used to run the
                command

        Returns:
            bool: whether or not the command progress has been detected

        """
        return self.check_logs(
            self.job.pattern, self.timestamps["start"], None,
            self.job.pattern_count, self.job.pattern_timeout.value)

    def assign_hosts(self, hosts, path=None, slots=None):
        """Assign the hosts to use with the command.

        Set the appropriate command line parameter with the specified value.

        Args:
            hosts (list): list of hosts to specify on the command line
            path (str, optional): path to use when specifying the hosts
                through a hostfile. Defaults to None. Not used.
            slots (int, optional): number of slots per host to specify in
                the optional hostfile. Defaults to None. Not used.
        """
        self._hosts = NodeSet.fromlist(hosts)

    def assign_environment(self, env_vars, append=False):
        """Assign or add environment variables to the command.

        Not used by systemd-managed services; the service unit defines its
        own environment.

        Args:
            env_vars (EnvironmentVariables): the environment variables to use
                assign or add to the command
            append (bool): whether to assign (False) or append (True) the
                specified environment variables
        """

    def assign_environment_default(self, env_vars):
        """Assign the default environment variables for the command.

        Not used by systemd-managed services.

        Args:
            env_vars (EnvironmentVariables): the environment variables to
                assign as the default
        """

    def get_subprocess_state(self, message=None):
        """Display the state of the subprocess.

        Args:
            message (str, optional): additional text to include in output.
                Defaults to None.

        Returns:
            list: a list of states for the process found. Any active remote
                processes will be indicated by a 'R' state at the end of the
                list.

        """
        state = None
        remote_state = self._get_remote_process_state(message)
        if remote_state:
            state = [remote_state]
        return state

    def _run_unit_command(self, command):
        """Run the systemctl command.

        Args:
            command (str): systemctl unit command

        Raises:
            CommandFailure: if there is an issue running the command

        Returns:
            dict: a dictionary of return codes keys and accompanying NodeSet
                values indicating which hosts yielded the return code.

        """
        self._systemctl.unit_command.value = command
        # Record when the unit command ran so journalctl queries can be
        # scoped to this point in time.
        self.timestamps[command] = datetime.now().strftime(
            "%Y-%m-%d %H:%M:%S")
        result = pcmd(self._hosts, self.__str__(), self.verbose, self.timeout)
        # pcmd reports a 255 key for hosts that timed out
        if 255 in result:
            raise CommandFailure(
                "Timeout detected running '{}' with a {}s timeout on "
                "{}".format(
                    self.__str__(), self.timeout,
                    NodeSet.fromlist(result[255])))

        if 0 not in result or len(result) > 1:
            failed = []
            for item, value in list(result.items()):
                if item != 0:
                    failed.extend(value)
            raise CommandFailure("Error occurred running '{}' on {}".format(
                self.__str__(), NodeSet.fromlist(failed)))
        return result

    def _report_unit_command(self, command):
        """Run the systemctl command and report the log data on an error.

        Args:
            command (str): systemctl unit command

        Raises:
            CommandFailure: if there is an issue running the command

        Returns:
            dict: a dictionary of return codes keys and accompanying NodeSet
                values indicating which hosts yielded the return code.

        """
        try:
            return self._run_unit_command(command)
        except CommandFailure as error:
            self.log.info(error)
            # Show the service journal since the failed command was issued
            self.display_log_data(
                self.get_log_data(self._hosts, self.timestamps[command]))
            raise CommandFailure(error) from error

    def service_enable(self):
        """Enable the job's service via the systemctl command.

        Raises:
            CommandFailure: if unable to enable

        Returns:
            dict: a dictionary of return codes keys and accompanying NodeSet
                values indicating which hosts yielded the return code.

        """
        return self._report_unit_command("enable")

    def service_disable(self):
        """Disable the job's service via the systemctl command.

        Raises:
            CommandFailure: if unable to disable

        Returns:
            dict: a dictionary of return codes keys and accompanying NodeSet
                values indicating which hosts yielded the return code.

        """
        return self._report_unit_command("disable")

    def service_start(self):
        """Start the job's service via the systemctl command.

        Raises:
            CommandFailure: if unable to start

        Returns:
            dict: a dictionary of return codes keys and accompanying NodeSet
                values indicating which hosts yielded the return code.

        """
        return self._report_unit_command("start")

    def service_stop(self):
        """Stop the job's service via the systemctl command.

        Raises:
            CommandFailure: if unable to stop

        Returns:
            dict: a dictionary of return codes keys and accompanying NodeSet
                values indicating which hosts yielded the return code.

        """
        return self._report_unit_command("stop")

    def service_status(self):
        """Get the status of the job's service via the systemctl command.

        Raises:
            CommandFailure: if unable to get the status

        Returns:
            dict: a dictionary of return codes keys and accompanying NodeSet
                values indicating which hosts yielded the return code.

        """
        return self._report_unit_command("status")

    def service_running(self):
        """Determine if the job's service is active via the systemctl command.

        The 'systemctl is-active <service>' command will return a string
        indicating one of the following states:
            active, inactive, activating, deactivating, failed, unknown
        If the <service> is "active" or "activating" return True.

        Returns:
            bool: True if the service is running, False otherwise

        """
        status = True
        states = {}
        valid_states = ["active", "activating"]
        self._systemctl.unit_command.value = "is-active"
        results = run_pcmd(
            self._hosts, self.__str__(), False, self.timeout, None)
        for result in results:
            if result["interrupted"]:
                states["timeout"] = result["hosts"]
                status = False
            else:
                # The unit state is the last line of is-active output
                output = result["stdout"][-1]
                if output not in states:
                    states[output] = NodeSet()
                states[output].add(result["hosts"])
                # All hosts must report an active/activating state
                status &= output in valid_states
        data = ["=".join([key, str(states[key])]) for key in sorted(states)]
        self.log.info(
            "  Detected %s states: %s",
            self._systemctl.service.value, ", ".join(data))
        return status

    def get_log_data(self, hosts, since, until=None, timeout=60):
        """Gather log output for the command running on each host.

        Note (from journalctl man page):
            Date specifications should be of the format "2012-10-30 18:17:16".
            If the time part is omitted, "00:00:00" is assumed. If only the
            seconds component is omitted, ":00" is assumed. If the date
            component is omitted, the current day is assumed. Alternatively
            the strings "yesterday", "today", "tomorrow" are understood, which
            refer to 00:00:00 of the day before the current day, the current
            day, or the day after the current day, respectively. "now" refers
            to the current time. Finally, relative times may be specified,
            prefixed with "-" or "+", referring to times before or after the
            current time, respectively.

        Args:
            hosts (list): list of hosts from which to gather log data.
            since (str): show log entries from this date.
            until (str, optional): show log entries up to this date. Defaults
                to None, in which case it is not utilized.
            timeout (int, optional): timeout for issuing the command. Defaults
                to 60 seconds.

        Returns:
            list: a list of dictionaries including:
                "hosts": <NodeSet() of hosts with this data>
                "data": <journalctl output>

        """
        # Setup the journalctl command to capture all unit activity from the
        # specified start date to now or a specified end date
        #   --output=json?
        command = [
            "sudo",
            "journalctl",
            "--unit={}".format(self._systemctl.service.value),
            "--since=\"{}\"".format(since),
        ]
        if until:
            command.append("--until=\"{}\"".format(until))
        self.log.info(
            "Gathering log data on %s: %s", str(hosts), " ".join(command))

        # Gather the log information per host
        results = run_pcmd(hosts, " ".join(command), False, timeout, None)

        # Determine if the command completed successfully without a timeout
        status = True
        for result in results:
            if result["interrupted"]:
                self.log.info("  Errors detected running \"%s\":", command)
                self.log.info(
                    "    %s: timeout detected after %s seconds",
                    str(result["hosts"]), timeout)
                status = False
            elif result["exit_status"] != 0:
                self.log.info("  Errors detected running \"%s\":", command)
                status = False
            if not status:
                break

        # Display/return the command output
        log_data = []
        for result in results:
            if result["exit_status"] == 0 and not result["interrupted"]:
                # Add the successful output from each node to the dictionary
                log_data.append(
                    {"hosts": result["hosts"], "data": result["stdout"]})
            else:
                # Display all of the results in the case of an error
                if len(result["stdout"]) > 1:
                    self.log.info(
                        "  %s: rc=%s, output:",
                        str(result["hosts"]), result["exit_status"])
                    for line in result["stdout"]:
                        self.log.info("    %s", line)
                else:
                    self.log.info(
                        "  %s: rc=%s, output: %s",
                        str(result["hosts"]), result["exit_status"],
                        result["stdout"][0])

        # Report any errors through an exception
        if not status:
            raise CommandFailure(
                "Error(s) detected gathering {} log data on {}".format(
                    self._systemctl.service.value, NodeSet.fromlist(hosts)))

        # Return the successful command output per set of hosts
        return log_data

    def display_log_data(self, log_data):
        """Display the journalctl log data.

        Args:
            log_data (dict): dictionary of journalctl log output.
        """
        self.log.info("Journalctl output:")
        for line in self.str_log_data(log_data).split("\n"):
            self.log.info(line)

    @staticmethod
    def str_log_data(log_data):
        """Get the journalctl log data as a string.

        Args:
            log_data (dict): dictionary of journalctl log output.

        Returns:
            str: the journalctl log data

        """
        data = []
        for entry in log_data:
            data.append("  {}:".format(entry["hosts"]))
            for line in entry["data"]:
                data.append("    {}".format(line))
        return "\n".join(data)

    def check_logs(self, pattern, since, until, quantity=1, timeout=60):
        """Check the command logs on each host for a specified string.

        Args:
            pattern (str): regular expression to search for in the logs
            since (str): search log entries from this date.
            until (str, optional): search log entries up to this date.
                Defaults to None, in which case it is not utilized.
            quantity (int, optional): number of times to expect the search
                pattern per host. Defaults to 1.
            timeout (int, optional): maximum number of seconds to wait to
                detect the specified pattern. Defaults to 60.

        Returns:
            bool: whether or not the search string was found in the logs on
                each host

        """
        self.log.info(
            "Searching for '%s' in '%s' output on %s",
            pattern, self._systemctl, self._hosts)

        log_data = None
        detected = 0
        complete = False
        timed_out = False
        start = time.time()

        # Search for patterns in the subprocess output until:
        #   - the expected number of pattern matches are detected (success)
        #   - the time out is reached (failure)
        #   - the service is no longer running (failure)
        while not complete and not timed_out and self.service_running():
            detected = 0
            log_data = self.get_log_data(self._hosts, since, until, timeout)
            for entry in log_data:
                match = re.findall(pattern, "\n".join(entry["data"]))
                detected += len(match) if match else 0

            complete = detected == quantity
            timed_out = time.time() - start > timeout

        if complete:
            # Remember when the expected startup pattern was confirmed so
            # later journal queries (dump_logs) can start from this point.
            self.timestamps["running"] = datetime.now().strftime(
                "%Y-%m-%d %H:%M:%S")

        # Summarize results
        msg = "{}/{} '{}' messages detected in".format(
            detected, quantity, pattern)
        runtime = "{}/{} seconds".format(time.time() - start, timeout)

        if not complete:
            # Report the error / timeout
            reason = "ERROR detected"
            details = ""
            if timed_out:
                reason = "TIMEOUT detected, exceeded {} seconds".format(
                    timeout)
                runtime = "{} seconds".format(time.time() - start)
            if log_data:
                details = ":\n{}".format(self.str_log_data(log_data))
            self.log.info("%s - %s %s%s", reason, msg, runtime, details)
            if timed_out:
                self.log.debug(
                    "If needed the %s second timeout can be adjusted via "
                    "the 'pattern_timeout' test yaml parameter under %s",
                    timeout, self.namespace)
        else:
            # Report the successful start
            # self.display_log_data(log_data)
            self.log.info(
                "%s subprocess startup detected - %s %s",
                self._command, msg, runtime)

        return complete

    def dump_logs(self, hosts=None):
        """Display the journalctl log data since detecting server start.

        Args:
            hosts (list, optional): list of hosts from which to display the
                journalctl log data. Defaults to None which will log the
                journalctl log data from all of the hosts.
        """
        timestamp = None
        if self.timestamps["running"]:
            timestamp = self.timestamps["running"]
        elif self.timestamps["verified"]:
            timestamp = self.timestamps["verified"]
        if timestamp:
            if hosts is None:
                hosts = self._hosts
            self.display_log_data(self.get_log_data(hosts, timestamp))
#!/usr/bin/python
"""
(C) Copyright 2020-2021 Intel Corporation.

SPDX-License-Identifier: BSD-2-Clause-Patent
"""
# pylint: disable=too-many-lines
from datetime import datetime
from distutils.spawn import find_executable
import os
import re
import time

from ClusterShell.NodeSet import NodeSet

from command_utils import ExecutableCommand, SystemctlCommand
from command_utils_base import FormattedParameter, EnvironmentVariables
from command_utils_base import CommandFailure
from env_modules import load_mpi
from general_utils import pcmd, stop_processes, run_pcmd
from write_host_file import write_host_file


class JobManager(ExecutableCommand):
    """A class for commands with parameters that manage other commands."""

    def __init__(self, namespace, command, job, path="", subprocess=False):
        """Create a JobManager object.

        Args:
            namespace (str): yaml namespace (path to parameters)
            command (str): string of the command to be executed.
            job (ExecutableCommand): command object to manage.
            path (str, optional): path to location of command binary file.
                Defaults to "".
            subprocess (bool, optional): whether the command is run as a
                subprocess. Defaults to False.
        """
        super().__init__(namespace, command, path, subprocess)
        self.job = job
        # Hosts on which the managed job runs; set via assign_hosts()
        self._hosts = None

    @property
    def hosts(self):
        """Get the list of hosts associated with this command."""
        return self._hosts

    def __str__(self):
        """Return the command with all of its defined parameters as a string.

        Returns:
            str: the command with all the defined parameters

        """
        # The job manager command line is followed by the managed job's
        commands = [super().__str__(), str(self.job)]
        return " ".join(commands)

    def check_subprocess_status(self, sub_process):
        """Verify command status when called in a subprocess.

        Args:
            sub_process (process.SubProcess): subprocess used to run the
                command

        Returns:
            bool: whether or not the command progress has been detected

        """
        # Delegate to the managed job's own status check
        return self.job.check_subprocess_status(sub_process)

    def assign_hosts(self, hosts, path=None, slots=None):
        """Assign the hosts to use with the command.

        Set the appropriate command line parameter with the specified value.

        Args:
            hosts (list): list of hosts to specify on the command line
            path (str, optional): path to use when specifying the hosts
                through a hostfile. Defaults to None.
            slots (int, optional): number of slots per host to specify in
                the optional hostfile. Defaults to None.
        """

    def assign_processes(self, processes):
        """Assign the number of processes per node.

        Set the appropriate command line parameter with the specified value.

        Args:
            processes (int): number of processes per node
        """

    def assign_environment(self, env_vars, append=False):
        """Assign or add environment variables to the command.

        Args:
            env_vars (EnvironmentVariables): the environment variables to use
                assign or add to the command
            append (bool): whether to assign (False) or append (True) the
                specified environment variables
        """

    def assign_environment_default(self, env_vars):
        """Assign the default environment variables for the command.

        Args:
            env_vars (EnvironmentVariables): the environment variables to
                assign as the default
        """

    def get_subprocess_state(self, message=None):
        """Display the state of the subprocess.

        Args:
            message (str, optional): additional text to include in output.
                Defaults to None.

        Returns:
            list: a list of states for the process found. If the local job
                manager command is running its state will be the first in the
                list. Additional states in the list can typically indicate
                that remote processes were also found to be active. Active
                remote processes will be indicated by a 'R' state at the end
                of the list.

        """
        # Get/display the state of the local job manager process
        state = super().get_subprocess_state(message)
        if self._process is not None and self._hosts:
            # Determine the status of the remote job processes on each host
            remote_state = self._get_remote_process_state(message)
            if remote_state:
                # Add a running state to the list of process states if any
                # remote process was found to be active.
                if not state:
                    state = ["?"]
                state.append(remote_state)
        return state

    def _get_remote_process_state(self, message=None):
        """Display the state of the processes running on remote hosts.

        Args:
            message (str, optional): additional text to include in output.
                Defaults to None.

        Returns:
            str: a "R" if any remote processes are found to be active
                otherwise None.

        """
        # Display the status of the remote job processes on each host
        command = "/usr/bin/pgrep -a {}".format(self.job.command_regex)
        self.log.debug(
            "%s processes still running remotely%s:", self.command,
            " {}".format(message) if message else "")
        self.log.debug("Running (on %s): %s", self._hosts, command)
        results = pcmd(self._hosts, command, True, 10, None)

        # The pcmd method will return a dictionary with a single key, e.g.
        # {1: <NodeSet>}, if there are no remote processes running on any of
        # the hosts. If this value is not returned, indicate there are remote
        # processes running by returning a "R" state.
        return "R" if 1 not in results or len(results) > 1 else None

    def kill(self):
        """Forcibly terminate any job processes running on hosts."""
        regex = self.job.command_regex
        result = stop_processes(self._hosts, regex)
        # stop_processes returns {0: <NodeSet>} alone when nothing matched the
        # regex on any host; any other key means a process had to be killed.
        if 0 in result and len(result) == 1:
            self.log.info(
                "No remote %s processes killed (none found), done.", regex)
        else:
            self.log.info(
                "***At least one remote %s process needed to be killed! Please "
                "investigate/report.***", regex)


class Orterun(JobManager):
    """A class for the orterun job manager command."""

    def __init__(self, job, subprocess=False):
        """Create a Orterun object.

        Args:
            job (ExecutableCommand): command object to manage.
            subprocess (bool, optional): whether the command is run as a
                subprocess. Defaults to False.
""" if not load_mpi("openmpi"): raise CommandFailure("Failed to load openmpi") path = os.path.dirname(find_executable("orterun")) super().__init__("/run/orterun/*", "orterun", job, path, subprocess) # Default mca values to avoid queue pair errors mca_default = { "btl_openib_warn_default_gid_prefix": "0", "btl": "tcp,self", "oob": "tcp", "pml": "ob1", "btl_tcp_if_include": "eth0", } self.hostfile = FormattedParameter("--hostfile {}", None) self.processes = FormattedParameter("--np {}", 1) self.display_map = FormattedParameter("--display-map", False) self.map_by = FormattedParameter("--map-by {}", "node") self.export = FormattedParameter("-x {}", None) self.enable_recovery = FormattedParameter("--enable-recovery", True) self.report_uri = FormattedParameter("--report-uri {}", None) self.allow_run_as_root = FormattedParameter("--allow-run-as-root", None) self.mca = FormattedParameter("--mca {}", mca_default) self.pprnode = FormattedParameter("--map-by ppr:{}:node", None) self.tag_output = FormattedParameter("--tag-output", True) self.ompi_server = FormattedParameter("--ompi-server {}", None) self.working_dir = FormattedParameter("-wdir {}", None) def assign_hosts(self, hosts, path=None, slots=None): """Assign the hosts to use with the command (--hostfile). Args: hosts (list): list of hosts to specify in the hostfile path (str, optional): hostfile path. Defaults to None. slots (int, optional): number of slots per host to specify in the hostfile. Defaults to None. """ self._hosts = hosts kwargs = {"hostlist": self._hosts, "slots": slots} if path is not None: kwargs["path"] = path self.hostfile.value = write_host_file(**kwargs) def assign_processes(self, processes): """Assign the number of processes per node (-np). Args: processes (int): number of processes per node """ self.processes.value = processes def assign_environment(self, env_vars, append=False): """Assign or add environment variables to the command. 
Args: env_vars (EnvironmentVariables): the environment variables to use assign or add to the command append (bool): whether to assign (False) or append (True) the specified environment variables """ if append and self.export.value is not None: # Convert the current list of environmental variable assignments # into an EnvironmentVariables (dict) object. Then update the # dictionary keys with the specified values or add new key value # pairs to the dictionary. Finally convert the updated dictionary # back to a list for the parameter assignment. original = EnvironmentVariables({ item.split("=")[0]: item.split("=")[1] if "=" in item else None for item in self.export.value}) original.update(env_vars) self.export.value = original.get_list() else: # Overwrite the environmental variable assignment self.export.value = env_vars.get_list() def assign_environment_default(self, env_vars): """Assign the default environment variables for the command. Args: env_vars (EnvironmentVariables): the environment variables to assign as the default """ self.export.update_default(env_vars.get_list()) def run(self): """Run the orterun command. Raises: CommandFailure: if there is an error running the command """ if not load_mpi("openmpi"): raise CommandFailure("Failed to load openmpi") return super().run() class Mpirun(JobManager): """A class for the mpirun job manager command.""" def __init__(self, job, subprocess=False, mpitype="openmpi"): """Create a Mpirun object. Args: job (ExecutableCommand): command object to manage. subprocess (bool, optional): whether the command is run as a subprocess. Defaults to False. 
""" if not load_mpi(mpitype): raise CommandFailure("Failed to load {}".format(mpitype)) path = os.path.dirname(find_executable("mpirun")) super().__init__("/run/mpirun", "mpirun", job, path, subprocess) mca_default = None if mpitype == "openmpi": # Default mca values to avoid queue pair errors w/ OpenMPI mca_default = { "btl_openib_warn_default_gid_prefix": "0", "btl": "tcp,self", "oob": "tcp", "pml": "ob1", "btl_tcp_if_include": "eth0", } self.hostfile = FormattedParameter("-hostfile {}", None) self.processes = FormattedParameter("-np {}", 1) self.ppn = FormattedParameter("-ppn {}", None) self.envlist = FormattedParameter("-envlist {}", None) self.mca = FormattedParameter("--mca {}", mca_default) self.working_dir = FormattedParameter("-wdir {}", None) self.mpitype = mpitype def assign_hosts(self, hosts, path=None, slots=None): """Assign the hosts to use with the command (-f). Args: hosts (list): list of hosts to specify in the hostfile path (str, optional): hostfile path. Defaults to None. slots (int, optional): number of slots per host to specify in the hostfile. Defaults to None. """ self._hosts = hosts kwargs = {"hostlist": self._hosts, "slots": slots} if path is not None: kwargs["path"] = path self.hostfile.value = write_host_file(**kwargs) def assign_processes(self, processes): """Assign the number of processes per node (-np). Args: processes (int): number of processes per node """ self.processes.value = processes def assign_environment(self, env_vars, append=False): """Assign or add environment variables to the command. 
Args: env_vars (EnvironmentVariables): the environment variables to use assign or add to the command append (bool): whether to assign (False) or append (True) the specified environment variables """ # Pass the environment variables via the process.run method env argument if append and self.env is not None: # Update the existing dictionary with the new values self.env.update(env_vars) else: # Overwrite/create the dictionary of environment variables self.env = EnvironmentVariables(env_vars) def assign_environment_default(self, env_vars): """Assign the default environment variables for the command. Args: env_vars (EnvironmentVariables): the environment variables to assign as the default """ self.envlist.update_default(env_vars.get_list()) def run(self): """Run the mpirun command. Raises: CommandFailure: if there is an error running the command """ if not load_mpi(self.mpitype): raise CommandFailure("Failed to load {}".format(self.mpitype)) return super().run() class Srun(JobManager): """A class for the srun job manager command.""" def __init__(self, job, path="", subprocess=False): """Create a Srun object. Args: job (ExecutableCommand): command object to manage. path (str, optional): path to location of command binary file. Defaults to "". subprocess (bool, optional): whether the command is run as a subprocess. Defaults to False. 
""" super().__init__("/run/srun", "srun", job, path, subprocess) self.label = FormattedParameter("--label", True) self.mpi = FormattedParameter("--mpi={}", "pmi2") self.export = FormattedParameter("--export={}", "ALL") self.ntasks = FormattedParameter("--ntasks={}", None) self.distribution = FormattedParameter("--distribution={}", None) self.nodefile = FormattedParameter("--nodefile={}", None) self.nodelist = FormattedParameter("--nodelist={}", None) self.ntasks_per_node = FormattedParameter("--ntasks-per-node={}", None) self.nodes = FormattedParameter("--nodes={}", None) self.reservation = FormattedParameter("--reservation={}", None) self.partition = FormattedParameter("--partition={}", None) self.output = FormattedParameter("--output={}", None) def assign_hosts(self, hosts, path=None, slots=None): """Assign the hosts to use with the command (-f). Args: hosts (list): list of hosts to specify in the hostfile path (str, optional): hostfile path. Defaults to None. slots (int, optional): number of slots per host to specify in the hostfile. Defaults to None. """ self._hosts = hosts kwargs = {"hostlist": self._hosts, "slots": None} if path is not None: kwargs["path"] = path self.nodefile.value = write_host_file(**kwargs) self.ntasks_per_node.value = slots def assign_processes(self, processes): """Assign the number of processes per node (--ntasks). Args: processes (int): number of processes per node """ self.ntasks.value = processes self.distribution.value = "cyclic" def assign_environment(self, env_vars, append=False): """Assign or add environment variables to the command. Args: env_vars (EnvironmentVariables): the environment variables to use assign or add to the command append (bool): whether to assign (False) or append (True) the specified environment variables """ if append and self.export.value is not None: # Convert the current list of environmental variable assignments # into an EnvironmentVariables (dict) object. 
Then update the # dictionary keys with the specified values or add new key value # pairs to the dictionary. Finally convert the updated dictionary # back to a string for the parameter assignment. original = EnvironmentVariables({ item.split("=")[0]: item.split("=")[1] if "=" in item else None for item in self.export.value.split(",")}) original.update(env_vars) self.export.value = ",".join(original.get_list()) else: # Overwrite the environmental variable assignment self.export.value = ",".join(env_vars.get_list()) def assign_environment_default(self, env_vars): """Assign the default environment variables for the command. Args: env_vars (EnvironmentVariables): the environment variables to assign as the default """ self.export.update_default(env_vars.get_list()) class Systemctl(JobManager): # pylint: disable=too-many-public-methods,too-many-public-methods """A class for the systemctl job manager command.""" def __init__(self, job): """Create a Orterun object. Args: job (SubProcessCommand): command object to manage. """ # path = os.path.dirname(find_executable("systemctl")) super().__init__("/run/systemctl/*", "systemd", job) self.job = job self._systemctl = SystemctlCommand() self._systemctl.service.value = self.job.service_name self.timestamps = { "enable": None, "disable": None, "start": None, "running": None, "verified": None, "stop": None, "restart": None, } @property def hosts(self): """Get the list of hosts associated with this command.""" return list(self._hosts) if self._hosts else None def __str__(self): """Return the command with all of its defined parameters as a string. Returns: str: the command with all the defined parameters """ return self._systemctl.__str__() def run(self): """Start the job's service via the systemctl command. Enable the service, start the service, and report the status of the service. If an error occurs with any of these commands also display the journalctl output for the service. 
Raises: CommandFailure: if unable to enable or start the service Returns: dict: a dictionary of return codes keys and accompanying NodeSet values indicating which hosts yielded the return code. """ # Start the daos_server.service self.service_enable() result = self.service_start() # result = self.service_status() # Determine if the command has launched correctly using its # check_subprocess_status() method. if not self.check_subprocess_status(None): msg = "Command '{}' did not launch correctly".format(self) self.log.error(msg) raise CommandFailure(msg) return result def stop(self): """Stop the job's service via the systemctl command. Stop the service, disable the service, and report the status of the service. If an error occurs with any of these commands also display the journalctl output for the service. Raises: CommandFailure: if unable to stop or disable the service Returns: dict: a dictionary of return codes keys and accompanying NodeSet values indicating which hosts yielded the return code. """ self.service_stop() return self.service_disable() def wait(self): """Wait for the sub process to complete.""" raise NotImplementedError() def kill(self): """Forcibly terminate any job processes running on hosts.""" try: self.stop() except CommandFailure as error: self.log.info( "Error stopping/disabling %s: %s", self.job.service_name, error) super(Systemctl, self).kill() def check_subprocess_status(self, sub_process): """Verify command status when called in a subprocess. Args: sub_process (process.SubProcess): subprocess used to run the command Returns: bool: whether or not the command progress has been detected """ return self.check_logs( self.job.pattern, self.timestamps["start"], None, self.job.pattern_count, self.job.pattern_timeout.value) def assign_hosts(self, hosts, path=None, slots=None): """Assign the hosts to use with the command. Set the appropriate command line parameter with the specified value. 
Args: hosts (list): list of hosts to specify on the command line path (str, optional): path to use when specifying the hosts through a hostfile. Defaults to None. Not used. slots (int, optional): number of slots per host to specify in the optional hostfile. Defaults to None. Not used. """ self._hosts = NodeSet.fromlist(hosts) def assign_environment(self, env_vars, append=False): """Assign or add environment variables to the command. Args: env_vars (EnvironmentVariables): the environment variables to use assign or add to the command append (bool): whether to assign (False) or append (True) the specified environment variables """ def assign_environment_default(self, env_vars): """Assign the default environment variables for the command. Args: env_vars (EnvironmentVariables): the environment variables to assign as the default """ def get_subprocess_state(self, message=None): """Display the state of the subprocess. Args: message (str, optional): additional text to include in output. Defaults to None. Returns: list: a list of states for the process found. Any active remote processes will be indicated by a 'R' state at the end of the list. """ state = None remote_state = self._get_remote_process_state(message) if remote_state: state = [remote_state] return state def _run_unit_command(self, command): """Run the systemctl command. Args: command (str): systemctl unit command Raises: CommandFailure: if there is an issue running the command Returns: dict: a dictionary of return codes keys and accompanying NodeSet values indicating which hosts yielded the return code. 
""" self._systemctl.unit_command.value = command self.timestamps[command] = datetime.now().strftime("%Y-%m-%d %H:%M:%S") result = pcmd(self._hosts, self.__str__(), self.verbose, self.timeout) if 255 in result: raise CommandFailure( "Timeout detected running '{}' with a {}s timeout on {}".format( self.__str__(), self.timeout, NodeSet.fromlist(result[255])) ) if 0 not in result or len(result) > 1: failed = [] for item, value in list(result.items()): if item != 0: failed.extend(value) raise CommandFailure("Error occurred running '{}' on {}".format( self.__str__(), NodeSet.fromlist(failed))) return result def _report_unit_command(self, command): """Run the systemctl command and report the log data on an error. Args: command (str): systemctl unit command Raises: CommandFailure: if there is an issue running the command Returns: dict: a dictionary of return codes keys and accompanying NodeSet values indicating which hosts yielded the return code. """ try: return self._run_unit_command(command) except CommandFailure as error: self.log.info(error) self.display_log_data( self.get_log_data(self._hosts, self.timestamps[command])) raise CommandFailure(error) from error def service_enable(self): """Enable the job's service via the systemctl command. Raises: CommandFailure: if unable to enable Returns: dict: a dictionary of return codes keys and accompanying NodeSet values indicating which hosts yielded the return code. """ return self._report_unit_command("enable") def service_disable(self): """Disable the job's service via the systemctl command. Raises: CommandFailure: if unable to disable Returns: dict: a dictionary of return codes keys and accompanying NodeSet values indicating which hosts yielded the return code. """ return self._report_unit_command("disable") def service_start(self): """Start the job's service via the systemctl command. 
Raises: CommandFailure: if unable to start Returns: dict: a dictionary of return codes keys and accompanying NodeSet values indicating which hosts yielded the return code. """ return self._report_unit_command("start") def service_stop(self): """Stop the job's service via the systemctl command. Raises: CommandFailure: if unable to stop Returns: dict: a dictionary of return codes keys and accompanying NodeSet values indicating which hosts yielded the return code. """ return self._report_unit_command("stop") def service_status(self): """Get the status of the job's service via the systemctl command. Raises: CommandFailure: if unable to get the status Returns: dict: a dictionary of return codes keys and accompanying NodeSet values indicating which hosts yielded the return code. """ return self._report_unit_command("status") def service_running(self): """Determine if the job's service is active via the systemctl command. The 'systemctl is-active <service>' command will return a string indicating one of the following states: active, inactive, activating, deactivating, failed, unknown If the <service> is "active" or "activating" return True. Returns: bool: True id the service is running, False otherwise """ status = True states = {} valid_states = ["active", "activating"] self._systemctl.unit_command.value = "is-active" results = run_pcmd( self._hosts, self.__str__(), False, self.timeout, None) for result in results: if result["interrupted"]: states["timeout"] = result["hosts"] status = False else: output = result["stdout"][-1] if output not in states: states[output] = NodeSet() states[output].add(result["hosts"]) status &= output in valid_states data = ["=".join([key, str(states[key])]) for key in sorted(states)] self.log.info( " Detected %s states: %s", self._systemctl.service.value, ", ".join(data)) return status def get_log_data(self, hosts, since, until=None, timeout=60): """Gather log output for the command running on each host. 
Note (from journalctl man page): Date specifications should be of the format "2012-10-30 18:17:16". If the time part is omitted, "00:00:00" is assumed. If only the seconds component is omitted, ":00" is assumed. If the date component is omitted, the current day is assumed. Alternatively the strings "yesterday", "today", "tomorrow" are understood, which refer to 00:00:00 of the day before the current day, the current day, or the day after the current day, respectively. "now" refers to the current time. Finally, relative times may be specified, prefixed with "-" or "+", referring to times before or after the current time, respectively. Args: hosts (list): list of hosts from which to gather log data. since (str): show log entries from this date. until (str, optional): show log entries up to this date. Defaults to None, in which case it is not utilized. timeout (int, optional): timeout for issuing the command. Defaults to 60 seconds. Returns: list: a list of dictionaries including: "hosts": <NodeSet() of hosts with this data> "data": <journalctl output> """ # Setup the journalctl command to capture all unit activity from the # specified start date to now or a specified end date # --output=json? 
command = [ "sudo", "journalctl", "--unit={}".format(self._systemctl.service.value), "--since=\"{}\"".format(since), ] if until: command.append("--until=\"{}\"".format(until)) self.log.info( "Gathering log data on %s: %s", str(hosts), " ".join(command)) # Gather the log information per host results = run_pcmd(hosts, " ".join(command), False, timeout, None) # Determine if the command completed successfully without a timeout status = True for result in results: if result["interrupted"]: self.log.info(" Errors detected running \"%s\":", command) self.log.info( " %s: timeout detected after %s seconds", str(result["hosts"]), timeout) status = False elif result["exit_status"] != 0: self.log.info(" Errors detected running \"%s\":", command) status = False if not status: break # Display/return the command output log_data = [] for result in results: if result["exit_status"] == 0 and not result["interrupted"]: # Add the successful output from each node to the dictionary log_data.append( {"hosts": result["hosts"], "data": result["stdout"]}) else: # Display all of the results in the case of an error if len(result["stdout"]) > 1: self.log.info( " %s: rc=%s, output:", str(result["hosts"]), result["exit_status"]) for line in result["stdout"]: self.log.info(" %s", line) else: self.log.info( " %s: rc=%s, output: %s", str(result["hosts"]), result["exit_status"], result["stdout"][0]) # Report any errors through an exception if not status: raise CommandFailure( "Error(s) detected gathering {} log data on {}".format( self._systemctl.service.value, NodeSet.fromlist(hosts))) # Return the successful command output per set of hosts return log_data def display_log_data(self, log_data): """Display the journalctl log data. Args: log_data (dict): dictionary of journalctl log output. """ self.log.info("Journalctl output:") for line in self.str_log_data(log_data).split("\n"): self.log.info(line) @staticmethod def str_log_data(log_data): """Get the journalctl log data as a string. 
Args: log_data (dict): dictionary of journalctl log output. Returns: str: the journalctl log data """ data = [] for entry in log_data: data.append(" {}:".format(entry["hosts"])) for line in entry["data"]: data.append(" {}".format(line)) return "\n".join(data) def check_logs(self, pattern, since, until, quantity=1, timeout=60): """Check the command logs on each host for a specified string. Args: pattern (str): regular expression to search for in the logs since (str): search log entries from this date. until (str, optional): search log entries up to this date. Defaults to None, in which case it is not utilized. quantity (int, optional): number of times to expect the search pattern per host. Defaults to 1. timeout (int, optional): maximum number of seconds to wait to detect the specified pattern. Defaults to 60. Returns: bool: whether or not the search string was found in the logs on each host """ self.log.info( "Searching for '%s' in '%s' output on %s", pattern, self._systemctl, self._hosts) log_data = None detected = 0 complete = False timed_out = False start = time.time() # Search for patterns in the subprocess output until: # - the expected number of pattern matches are detected (success) # - the time out is reached (failure) # - the service is no longer running (failure) while not complete and not timed_out and self.service_running(): detected = 0 log_data = self.get_log_data(self._hosts, since, until, timeout) for entry in log_data: match = re.findall(pattern, "\n".join(entry["data"])) detected += len(match) if match else 0 complete = detected == quantity timed_out = time.time() - start > timeout if complete: self.timestamps["running"] = datetime.now().strftime( "%Y-%m-%d %H:%M:%S") # Summarize results msg = "{}/{} '{}' messages detected in".format( detected, quantity, pattern) runtime = "{}/{} seconds".format(time.time() - start, timeout) if not complete: # Report the error / timeout reason = "ERROR detected" details = "" if timed_out: reason = "TIMEOUT 
detected, exceeded {} seconds".format(timeout) runtime = "{} seconds".format(time.time() - start) if log_data: details = ":\n{}".format(self.str_log_data(log_data)) self.log.info("%s - %s %s%s", reason, msg, runtime, details) if timed_out: self.log.debug( "If needed the %s second timeout can be adjusted via " "the 'pattern_timeout' test yaml parameter under %s", timeout, self.namespace) else: # Report the successful start # self.display_log_data(log_data) self.log.info( "%s subprocess startup detected - %s %s", self._command, msg, runtime) return complete def dump_logs(self, hosts=None): """Display the journalctl log data since detecting server start. Args: hosts (list, optional): list of hosts from which to display the journalctl log data. Defaults to None which will log the journalctl log data from all of the hosts. """ timestamp = None if self.timestamps["running"]: timestamp = self.timestamps["running"] elif self.timestamps["verified"]: timestamp = self.timestamps["verified"] if timestamp: if hosts is None: hosts = self._hosts self.display_log_data(self.get_log_data(hosts, timestamp))
en
0.763111
#!/usr/bin/python (C) Copyright 2020-2021 Intel Corporation. SPDX-License-Identifier: BSD-2-Clause-Patent # pylint: disable=too-many-lines A class for commands with parameters that manage other commands. Create a JobManager object. Args: namespace (str): yaml namespace (path to parameters) command (str): string of the command to be executed. job (ExecutableCommand): command object to manage. path (str, optional): path to location of command binary file. Defaults to "". subprocess (bool, optional): whether the command is run as a subprocess. Defaults to False. Get the list of hosts associated with this command. Return the command with all of its defined parameters as a string. Returns: str: the command with all the defined parameters Verify command status when called in a subprocess. Args: sub_process (process.SubProcess): subprocess used to run the command Returns: bool: whether or not the command progress has been detected Assign the hosts to use with the command. Set the appropriate command line parameter with the specified value. Args: hosts (list): list of hosts to specify on the command line path (str, optional): path to use when specifying the hosts through a hostfile. Defaults to None. slots (int, optional): number of slots per host to specify in the optional hostfile. Defaults to None. Assign the number of processes per node. Set the appropriate command line parameter with the specified value. Args: processes (int): number of processes per node Assign or add environment variables to the command. Args: env_vars (EnvironmentVariables): the environment variables to use assign or add to the command append (bool): whether to assign (False) or append (True) the specified environment variables Assign the default environment variables for the command. Args: env_vars (EnvironmentVariables): the environment variables to assign as the default Display the state of the subprocess. Args: message (str, optional): additional text to include in output. Defaults to None. 
Returns: list: a list of states for the process found. If the local job manager command is running its state will be the first in the list. Additional states in the list can typically indicate that remote processes were also found to be active. Active remote processes will be indicated by a 'R' state at the end of the list. # Get/display the state of the local job manager process # Determine if the status of the remote job processes on each host # Add a running state to the list of process states if any # remote process was found to be active. Display the state of the processes running on remote hosts. Args: message (str, optional): additional text to include in output. Defaults to None. Returns: str: a "R" if any remote processes are found to be active otherwise None. # Display the status of the remote job processes on each host # The pcmd method will return a dictionary with a single key, e.g. # {1: <NodeSet>}, if there are no remote processes running on any of the # hosts. If this value is not returned, indicate there are remote # processes running by returning a "R" state. Forcibly terminate any job processes running on hosts. A class for the orterun job manager command. Create a Orterun object. Args: job (ExecutableCommand): command object to manage. subprocess (bool, optional): whether the command is run as a subprocess. Defaults to False. # Default mca values to avoid queue pair errors Assign the hosts to use with the command (--hostfile). Args: hosts (list): list of hosts to specify in the hostfile path (str, optional): hostfile path. Defaults to None. slots (int, optional): number of slots per host to specify in the hostfile. Defaults to None. Assign the number of processes per node (-np). Args: processes (int): number of processes per node Assign or add environment variables to the command. 
Args: env_vars (EnvironmentVariables): the environment variables to use assign or add to the command append (bool): whether to assign (False) or append (True) the specified environment variables # Convert the current list of environmental variable assignments # into an EnvironmentVariables (dict) object. Then update the # dictionary keys with the specified values or add new key value # pairs to the dictionary. Finally convert the updated dictionary # back to a list for the parameter assignment. # Overwrite the environmental variable assignment Assign the default environment variables for the command. Args: env_vars (EnvironmentVariables): the environment variables to assign as the default Run the orterun command. Raises: CommandFailure: if there is an error running the command A class for the mpirun job manager command. Create a Mpirun object. Args: job (ExecutableCommand): command object to manage. subprocess (bool, optional): whether the command is run as a subprocess. Defaults to False. # Default mca values to avoid queue pair errors w/ OpenMPI Assign the hosts to use with the command (-f). Args: hosts (list): list of hosts to specify in the hostfile path (str, optional): hostfile path. Defaults to None. slots (int, optional): number of slots per host to specify in the hostfile. Defaults to None. Assign the number of processes per node (-np). Args: processes (int): number of processes per node Assign or add environment variables to the command. Args: env_vars (EnvironmentVariables): the environment variables to use assign or add to the command append (bool): whether to assign (False) or append (True) the specified environment variables # Pass the environment variables via the process.run method env argument # Update the existing dictionary with the new values # Overwrite/create the dictionary of environment variables Assign the default environment variables for the command. 
Args: env_vars (EnvironmentVariables): the environment variables to assign as the default Run the mpirun command. Raises: CommandFailure: if there is an error running the command A class for the srun job manager command. Create a Srun object. Args: job (ExecutableCommand): command object to manage. path (str, optional): path to location of command binary file. Defaults to "". subprocess (bool, optional): whether the command is run as a subprocess. Defaults to False. Assign the hosts to use with the command (-f). Args: hosts (list): list of hosts to specify in the hostfile path (str, optional): hostfile path. Defaults to None. slots (int, optional): number of slots per host to specify in the hostfile. Defaults to None. Assign the number of processes per node (--ntasks). Args: processes (int): number of processes per node Assign or add environment variables to the command. Args: env_vars (EnvironmentVariables): the environment variables to use assign or add to the command append (bool): whether to assign (False) or append (True) the specified environment variables # Convert the current list of environmental variable assignments # into an EnvironmentVariables (dict) object. Then update the # dictionary keys with the specified values or add new key value # pairs to the dictionary. Finally convert the updated dictionary # back to a string for the parameter assignment. # Overwrite the environmental variable assignment Assign the default environment variables for the command. Args: env_vars (EnvironmentVariables): the environment variables to assign as the default # pylint: disable=too-many-public-methods,too-many-public-methods A class for the systemctl job manager command. Create a Orterun object. Args: job (SubProcessCommand): command object to manage. # path = os.path.dirname(find_executable("systemctl")) Get the list of hosts associated with this command. Return the command with all of its defined parameters as a string. 
Returns: str: the command with all the defined parameters Start the job's service via the systemctl command. Enable the service, start the service, and report the status of the service. If an error occurs with any of these commands also display the journalctl output for the service. Raises: CommandFailure: if unable to enable or start the service Returns: dict: a dictionary of return codes keys and accompanying NodeSet values indicating which hosts yielded the return code. # Start the daos_server.service # result = self.service_status() # Determine if the command has launched correctly using its # check_subprocess_status() method. Stop the job's service via the systemctl command. Stop the service, disable the service, and report the status of the service. If an error occurs with any of these commands also display the journalctl output for the service. Raises: CommandFailure: if unable to stop or disable the service Returns: dict: a dictionary of return codes keys and accompanying NodeSet values indicating which hosts yielded the return code. Wait for the sub process to complete. Forcibly terminate any job processes running on hosts. Verify command status when called in a subprocess. Args: sub_process (process.SubProcess): subprocess used to run the command Returns: bool: whether or not the command progress has been detected Assign the hosts to use with the command. Set the appropriate command line parameter with the specified value. Args: hosts (list): list of hosts to specify on the command line path (str, optional): path to use when specifying the hosts through a hostfile. Defaults to None. Not used. slots (int, optional): number of slots per host to specify in the optional hostfile. Defaults to None. Not used. Assign or add environment variables to the command. 
Args: env_vars (EnvironmentVariables): the environment variables to use assign or add to the command append (bool): whether to assign (False) or append (True) the specified environment variables Assign the default environment variables for the command. Args: env_vars (EnvironmentVariables): the environment variables to assign as the default Display the state of the subprocess. Args: message (str, optional): additional text to include in output. Defaults to None. Returns: list: a list of states for the process found. Any active remote processes will be indicated by a 'R' state at the end of the list. Run the systemctl command. Args: command (str): systemctl unit command Raises: CommandFailure: if there is an issue running the command Returns: dict: a dictionary of return codes keys and accompanying NodeSet values indicating which hosts yielded the return code. Run the systemctl command and report the log data on an error. Args: command (str): systemctl unit command Raises: CommandFailure: if there is an issue running the command Returns: dict: a dictionary of return codes keys and accompanying NodeSet values indicating which hosts yielded the return code. Enable the job's service via the systemctl command. Raises: CommandFailure: if unable to enable Returns: dict: a dictionary of return codes keys and accompanying NodeSet values indicating which hosts yielded the return code. Disable the job's service via the systemctl command. Raises: CommandFailure: if unable to disable Returns: dict: a dictionary of return codes keys and accompanying NodeSet values indicating which hosts yielded the return code. Start the job's service via the systemctl command. Raises: CommandFailure: if unable to start Returns: dict: a dictionary of return codes keys and accompanying NodeSet values indicating which hosts yielded the return code. Stop the job's service via the systemctl command. 
Raises: CommandFailure: if unable to stop Returns: dict: a dictionary of return codes keys and accompanying NodeSet values indicating which hosts yielded the return code. Get the status of the job's service via the systemctl command. Raises: CommandFailure: if unable to get the status Returns: dict: a dictionary of return codes keys and accompanying NodeSet values indicating which hosts yielded the return code. Determine if the job's service is active via the systemctl command. The 'systemctl is-active <service>' command will return a string indicating one of the following states: active, inactive, activating, deactivating, failed, unknown If the <service> is "active" or "activating" return True. Returns: bool: True id the service is running, False otherwise Gather log output for the command running on each host. Note (from journalctl man page): Date specifications should be of the format "2012-10-30 18:17:16". If the time part is omitted, "00:00:00" is assumed. If only the seconds component is omitted, ":00" is assumed. If the date component is omitted, the current day is assumed. Alternatively the strings "yesterday", "today", "tomorrow" are understood, which refer to 00:00:00 of the day before the current day, the current day, or the day after the current day, respectively. "now" refers to the current time. Finally, relative times may be specified, prefixed with "-" or "+", referring to times before or after the current time, respectively. Args: hosts (list): list of hosts from which to gather log data. since (str): show log entries from this date. until (str, optional): show log entries up to this date. Defaults to None, in which case it is not utilized. timeout (int, optional): timeout for issuing the command. Defaults to 60 seconds. 
Returns: list: a list of dictionaries including: "hosts": <NodeSet() of hosts with this data> "data": <journalctl output> # Setup the journalctl command to capture all unit activity from the # specified start date to now or a specified end date # --output=json? # Gather the log information per host # Determine if the command completed successfully without a timeout # Display/return the command output # Add the successful output from each node to the dictionary # Display all of the results in the case of an error # Report any errors through an exception # Return the successful command output per set of hosts Display the journalctl log data. Args: log_data (dict): dictionary of journalctl log output. Get the journalctl log data as a string. Args: log_data (dict): dictionary of journalctl log output. Returns: str: the journalctl log data Check the command logs on each host for a specified string. Args: pattern (str): regular expression to search for in the logs since (str): search log entries from this date. until (str, optional): search log entries up to this date. Defaults to None, in which case it is not utilized. quantity (int, optional): number of times to expect the search pattern per host. Defaults to 1. timeout (int, optional): maximum number of seconds to wait to detect the specified pattern. Defaults to 60. Returns: bool: whether or not the search string was found in the logs on each host # Search for patterns in the subprocess output until: # - the expected number of pattern matches are detected (success) # - the time out is reached (failure) # - the service is no longer running (failure) # Summarize results # Report the error / timeout # Report the successful start # self.display_log_data(log_data) Display the journalctl log data since detecting server start. Args: hosts (list, optional): list of hosts from which to display the journalctl log data. Defaults to None which will log the journalctl log data from all of the hosts.
2.306018
2
lale/schemas.py
ariffyasri/lale
1
6624623
# Copyright 2019 IBM Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from typing import cast, TypeVar, Any, Dict, List, Tuple, Optional, Union class Undefined(): pass undefined = Undefined() T = TypeVar('T') Option = Union[Undefined, T] class Schema: schema: Dict[str, Any] def __init__(self, desc: Option[str] = undefined, default: Option[Any] = undefined, forOptimizer: bool = True): self.schema: Dict[str, Any] = {} if not isinstance(default, Undefined): self.schema['default'] = default if not isinstance(desc, Undefined): self.schema['description'] = desc if not forOptimizer: self.schema['forOptimizer'] = forOptimizer def set(self, prop: str, value: Option[Any]): if not isinstance(value, Undefined): self.schema[prop] = value #Base Type class Bool(Schema): def __init__(self, desc: Option[str] = undefined, default: Option[str] = undefined, forOptimizer: bool = True): super().__init__(desc, default, forOptimizer) self.set('type', 'boolean') class Enum(Schema): def __init__(self, values: List[str] = [], desc: Option[str] = undefined, default: Option[str] = undefined, forOptimizer: bool = True): super().__init__(desc, default, forOptimizer) self.set('enum', values) class Float(Schema): def __init__(self, desc: Option[str] = undefined, default: Option[str] = undefined, forOptimizer: bool = True, min: Option[float] = undefined, exclusiveMin: Option[bool] = undefined, minForOptimizer: Option[bool] = undefined, max: Option[float] = undefined, exclusiveMax: Option[bool] = 
undefined, maxForOptimizer: Option[bool] = undefined, distribution: Option[str] = undefined): super().__init__(desc, default, forOptimizer) self.set('type', 'number') self.set('minimum', min) self.set('exclusiveMinimum', exclusiveMin) self.set('minimumForOptimizer', minForOptimizer) self.set('maximum', max) self.set('exclusiveMaximum', exclusiveMax) self.set('maximumForOptimizer', maxForOptimizer) self.set('distribution', distribution) class Int(Schema): def __init__(self, desc: Option[str] = undefined, default: Option[str] = undefined, forOptimizer: bool = True, min: Option[int] = undefined, exclusiveMin: Option[bool] = undefined, max: Option[int] = undefined, exclusiveMax: Option[bool] = undefined, distribution: Option[str] = undefined): super().__init__(desc, default, forOptimizer) self.set('type', 'integer') self.set('minimum', min) self.set('exclusiveMinimum', exclusiveMin) self.set('maximum', max) self.set('exclusiveMaximum', exclusiveMax) self.set('distribution', distribution) class Null(Schema): def __init__(self, desc: Option[str] = undefined, forOptimizer: bool = True): super().__init__(desc=desc, forOptimizer=forOptimizer) self.set('enum', [None]) class Not(Schema): def __init__(self, body: Schema): super().__init__() self.schema = {'not': body.schema} class JSON(Schema): def __init__(self, body: Dict[str, Any]): super().__init__() self.schema = body # Combinator class AnyOf(Schema): def __init__(self, types: List[Schema] = [], desc: Option[str] = undefined, default: Option[Any] = undefined): super().__init__(desc, default) self.set('anyOf', [t.schema for t in types]) class AllOf(Schema): def __init__(self, types: List[Schema] = [], desc: Option[str] = undefined, default: Option[Any] = undefined): super().__init__(desc, default) self.set('allOf', [t.schema for t in types]) class Array(Schema): def __init__(self, items: Schema, desc: Option[str] = undefined, default: Option[List[Any]] = undefined, forOptimizer: bool = True, minItems: Option[int] = 
undefined, minItemsForOptimizer: Option[int] = undefined, maxItems: Option[int] = undefined, maxItemsForOptimizer: Option[int] = undefined, laleType: Option[str] = undefined,): super().__init__(desc, default, forOptimizer) self.set('type', 'array') self.set('items', items.schema) self.set('minItems', minItems) self.set('minItemsForOptimizer', minItemsForOptimizer) self.set('maxItems', maxItems) self.set('maxItemsForOptimizer', maxItemsForOptimizer) self.set('laleType', laleType) class Object(Schema): def __init__(self, default: Option[Any] = undefined, desc: Option[str] = undefined, forOptimizer: bool = True, required: Option[List[str]] = undefined, additionalProperties: Option[bool] = undefined, **kwargs: Schema): super().__init__(desc, default, forOptimizer) self.set('type', 'object') self.set('required', required) self.set('additionalProperties', additionalProperties) self.set('properties', {k: p.schema for (k, p) in kwargs.items()})
# Copyright 2019 IBM Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from typing import cast, TypeVar, Any, Dict, List, Tuple, Optional, Union class Undefined(): pass undefined = Undefined() T = TypeVar('T') Option = Union[Undefined, T] class Schema: schema: Dict[str, Any] def __init__(self, desc: Option[str] = undefined, default: Option[Any] = undefined, forOptimizer: bool = True): self.schema: Dict[str, Any] = {} if not isinstance(default, Undefined): self.schema['default'] = default if not isinstance(desc, Undefined): self.schema['description'] = desc if not forOptimizer: self.schema['forOptimizer'] = forOptimizer def set(self, prop: str, value: Option[Any]): if not isinstance(value, Undefined): self.schema[prop] = value #Base Type class Bool(Schema): def __init__(self, desc: Option[str] = undefined, default: Option[str] = undefined, forOptimizer: bool = True): super().__init__(desc, default, forOptimizer) self.set('type', 'boolean') class Enum(Schema): def __init__(self, values: List[str] = [], desc: Option[str] = undefined, default: Option[str] = undefined, forOptimizer: bool = True): super().__init__(desc, default, forOptimizer) self.set('enum', values) class Float(Schema): def __init__(self, desc: Option[str] = undefined, default: Option[str] = undefined, forOptimizer: bool = True, min: Option[float] = undefined, exclusiveMin: Option[bool] = undefined, minForOptimizer: Option[bool] = undefined, max: Option[float] = undefined, exclusiveMax: Option[bool] = 
undefined, maxForOptimizer: Option[bool] = undefined, distribution: Option[str] = undefined): super().__init__(desc, default, forOptimizer) self.set('type', 'number') self.set('minimum', min) self.set('exclusiveMinimum', exclusiveMin) self.set('minimumForOptimizer', minForOptimizer) self.set('maximum', max) self.set('exclusiveMaximum', exclusiveMax) self.set('maximumForOptimizer', maxForOptimizer) self.set('distribution', distribution) class Int(Schema): def __init__(self, desc: Option[str] = undefined, default: Option[str] = undefined, forOptimizer: bool = True, min: Option[int] = undefined, exclusiveMin: Option[bool] = undefined, max: Option[int] = undefined, exclusiveMax: Option[bool] = undefined, distribution: Option[str] = undefined): super().__init__(desc, default, forOptimizer) self.set('type', 'integer') self.set('minimum', min) self.set('exclusiveMinimum', exclusiveMin) self.set('maximum', max) self.set('exclusiveMaximum', exclusiveMax) self.set('distribution', distribution) class Null(Schema): def __init__(self, desc: Option[str] = undefined, forOptimizer: bool = True): super().__init__(desc=desc, forOptimizer=forOptimizer) self.set('enum', [None]) class Not(Schema): def __init__(self, body: Schema): super().__init__() self.schema = {'not': body.schema} class JSON(Schema): def __init__(self, body: Dict[str, Any]): super().__init__() self.schema = body # Combinator class AnyOf(Schema): def __init__(self, types: List[Schema] = [], desc: Option[str] = undefined, default: Option[Any] = undefined): super().__init__(desc, default) self.set('anyOf', [t.schema for t in types]) class AllOf(Schema): def __init__(self, types: List[Schema] = [], desc: Option[str] = undefined, default: Option[Any] = undefined): super().__init__(desc, default) self.set('allOf', [t.schema for t in types]) class Array(Schema): def __init__(self, items: Schema, desc: Option[str] = undefined, default: Option[List[Any]] = undefined, forOptimizer: bool = True, minItems: Option[int] = 
undefined, minItemsForOptimizer: Option[int] = undefined, maxItems: Option[int] = undefined, maxItemsForOptimizer: Option[int] = undefined, laleType: Option[str] = undefined,): super().__init__(desc, default, forOptimizer) self.set('type', 'array') self.set('items', items.schema) self.set('minItems', minItems) self.set('minItemsForOptimizer', minItemsForOptimizer) self.set('maxItems', maxItems) self.set('maxItemsForOptimizer', maxItemsForOptimizer) self.set('laleType', laleType) class Object(Schema): def __init__(self, default: Option[Any] = undefined, desc: Option[str] = undefined, forOptimizer: bool = True, required: Option[List[str]] = undefined, additionalProperties: Option[bool] = undefined, **kwargs: Schema): super().__init__(desc, default, forOptimizer) self.set('type', 'object') self.set('required', required) self.set('additionalProperties', additionalProperties) self.set('properties', {k: p.schema for (k, p) in kwargs.items()})
en
0.8428
# Copyright 2019 IBM Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. #Base Type # Combinator
2.252419
2
tests/test_reproducibility.py
AnotherSamWilson/miceForest
3
6624624
from sklearn.datasets import load_boston import pandas as pd import numpy as np import miceforest as mf from datetime import datetime from miceforest.mean_matching_functions import mean_match_kdtree_classification from matplotlib.pyplot import close # Make random state and load data # Define data random_state = np.random.RandomState(5) boston = pd.DataFrame(load_boston(return_X_y=True)[0]) rows = boston.shape[0] boston.columns = [str(i) for i in boston.columns] boston["3"] = boston["3"].map({0: 'a', 1: 'b'}).astype('category') boston["8"] = boston["8"].astype("category") boston_amp = mf.ampute_data(boston, perc=0.25, random_state=random_state) random_seed_array = np.random.choice( range(1000), size=rows, replace=False ).astype("int32") def test_pandas_reproducibility(): datasets = 2 kernel = mf.ImputationKernel( data=boston_amp, datasets=datasets, initialization="random", save_models=2, random_state=2 ) kernel2 = mf.ImputationKernel( data=boston_amp, datasets=datasets, initialization="random", save_models=2, random_state=2 ) assert kernel.complete_data(0).equals(kernel2.complete_data(0)), ( "random_state initialization failed to be deterministic" ) # Run mice for 2 iterations kernel.mice(2) kernel2.mice(2) assert kernel.complete_data(0).equals(kernel2.complete_data(0)), ( "random_state after mice() failed to be deterministic" ) kernel_imputed_as_new = kernel.impute_new_data( boston_amp, random_state=4, random_seed_array=random_seed_array ) # Generate and impute new data as a reordering of original new_order = np.arange(rows) random_state.shuffle(new_order) new_data = boston_amp.loc[new_order] new_seeds = random_seed_array[new_order] new_imputed = kernel.impute_new_data( new_data, random_state=4, random_seed_array=new_seeds ) # Expect deterministic imputations at the record level, since seeds were passed. 
for i in range(datasets): reordered_kernel_completed = kernel_imputed_as_new.complete_data(dataset=0).loc[new_order] new_data_completed = new_imputed.complete_data(dataset=0) assert (reordered_kernel_completed == new_data_completed).all().all(), ( "Seeds did not cause deterministic imputations when data was reordered." ) # Generate and impute new data as a subset of original new_ind = [0,1,4,7,8,10] new_data = boston_amp.loc[new_ind] new_seeds = random_seed_array[new_ind] new_imputed = kernel.impute_new_data( new_data, random_state=4, random_seed_array=new_seeds ) # Expect deterministic imputations at the record level, since seeds were passed. for i in range(datasets): reordered_kernel_completed = kernel_imputed_as_new.complete_data(dataset=0).loc[new_ind] new_data_completed = new_imputed.complete_data(dataset=0) assert (reordered_kernel_completed == new_data_completed).all().all(), ( "Seeds did not cause deterministic imputations when data was reordered." ) # Generate and impute new data as a reordering of original new_order = np.arange(rows) random_state.shuffle(new_order) new_data = boston_amp.loc[new_order] new_imputed = kernel.impute_new_data( new_data, random_state=4, random_seed_array=random_seed_array ) # Expect deterministic imputations at the record level, since seeds were passed. for i in range(datasets): reordered_kernel_completed = kernel_imputed_as_new.complete_data(dataset=0).loc[new_order] new_data_completed = new_imputed.complete_data(dataset=0) assert not (reordered_kernel_completed == new_data_completed).all().all(), ( "Different seeds caused deterministic imputations for all rows / columns." )
from sklearn.datasets import load_boston import pandas as pd import numpy as np import miceforest as mf from datetime import datetime from miceforest.mean_matching_functions import mean_match_kdtree_classification from matplotlib.pyplot import close # Make random state and load data # Define data random_state = np.random.RandomState(5) boston = pd.DataFrame(load_boston(return_X_y=True)[0]) rows = boston.shape[0] boston.columns = [str(i) for i in boston.columns] boston["3"] = boston["3"].map({0: 'a', 1: 'b'}).astype('category') boston["8"] = boston["8"].astype("category") boston_amp = mf.ampute_data(boston, perc=0.25, random_state=random_state) random_seed_array = np.random.choice( range(1000), size=rows, replace=False ).astype("int32") def test_pandas_reproducibility(): datasets = 2 kernel = mf.ImputationKernel( data=boston_amp, datasets=datasets, initialization="random", save_models=2, random_state=2 ) kernel2 = mf.ImputationKernel( data=boston_amp, datasets=datasets, initialization="random", save_models=2, random_state=2 ) assert kernel.complete_data(0).equals(kernel2.complete_data(0)), ( "random_state initialization failed to be deterministic" ) # Run mice for 2 iterations kernel.mice(2) kernel2.mice(2) assert kernel.complete_data(0).equals(kernel2.complete_data(0)), ( "random_state after mice() failed to be deterministic" ) kernel_imputed_as_new = kernel.impute_new_data( boston_amp, random_state=4, random_seed_array=random_seed_array ) # Generate and impute new data as a reordering of original new_order = np.arange(rows) random_state.shuffle(new_order) new_data = boston_amp.loc[new_order] new_seeds = random_seed_array[new_order] new_imputed = kernel.impute_new_data( new_data, random_state=4, random_seed_array=new_seeds ) # Expect deterministic imputations at the record level, since seeds were passed. 
for i in range(datasets): reordered_kernel_completed = kernel_imputed_as_new.complete_data(dataset=0).loc[new_order] new_data_completed = new_imputed.complete_data(dataset=0) assert (reordered_kernel_completed == new_data_completed).all().all(), ( "Seeds did not cause deterministic imputations when data was reordered." ) # Generate and impute new data as a subset of original new_ind = [0,1,4,7,8,10] new_data = boston_amp.loc[new_ind] new_seeds = random_seed_array[new_ind] new_imputed = kernel.impute_new_data( new_data, random_state=4, random_seed_array=new_seeds ) # Expect deterministic imputations at the record level, since seeds were passed. for i in range(datasets): reordered_kernel_completed = kernel_imputed_as_new.complete_data(dataset=0).loc[new_ind] new_data_completed = new_imputed.complete_data(dataset=0) assert (reordered_kernel_completed == new_data_completed).all().all(), ( "Seeds did not cause deterministic imputations when data was reordered." ) # Generate and impute new data as a reordering of original new_order = np.arange(rows) random_state.shuffle(new_order) new_data = boston_amp.loc[new_order] new_imputed = kernel.impute_new_data( new_data, random_state=4, random_seed_array=random_seed_array ) # Expect deterministic imputations at the record level, since seeds were passed. for i in range(datasets): reordered_kernel_completed = kernel_imputed_as_new.complete_data(dataset=0).loc[new_order] new_data_completed = new_imputed.complete_data(dataset=0) assert not (reordered_kernel_completed == new_data_completed).all().all(), ( "Different seeds caused deterministic imputations for all rows / columns." )
en
0.960583
# Make random state and load data # Define data # Run mice for 2 iterations # Generate and impute new data as a reordering of original # Expect deterministic imputations at the record level, since seeds were passed. # Generate and impute new data as a subset of original # Expect deterministic imputations at the record level, since seeds were passed. # Generate and impute new data as a reordering of original # Expect deterministic imputations at the record level, since seeds were passed.
2.735638
3
scilla-benchmarks/evm-benchmark/benchmark_plans.py
AmritKumar/scilla-benchmarks
1
6624625
import sys import random import utils from utils import ContractFunction, get_addresses, get_random_address,\ get_random_number, addresses, SENDER_ADDRESS from evm_tools import perform_transaction total_token_supply = 1000000 * 10**16 TRANSACTION_LIMIT = 100 TEST_ITERATIONS = 100 if len(sys.argv) != 4: print('Invalid number of arguments, wanted 3') sys.exit() if sys.argv[1] == 'multi': TRANSACTION_LIMIT = int(sys.argv[2]) TEST_ITERATIONS = int(sys.argv[3]) elif sys.argv[1] == 'single': print('Manually running a single test for EVM is not implemented yet') print('Using {:,} state entries'.format(TRANSACTION_LIMIT)) contracts_benchmark_plans = [ { 'contract_filename': 'fungible-token.sol', 'contract_name': 'ERC20', 'constructor': ( # ('uint256', 'string', 'string'), # (total_token_supply, 'Test', 'TEST'), ('uint256', 'string', 'string', 'address[]'), (total_token_supply, 'Test', 'TEST', addresses[:TRANSACTION_LIMIT]), ), 'transactions': [ # { # 'function': ContractFunction('transfer', ('address', 'uint256', 'address[]')), # 'values': (addr, 1*(10**16), addresses), # 'caller': SENDER_ADDRESS, # } # for addr in addresses[:TRANSACTION_LIMIT] ], 'tests': [ { 'test_name': 'transfer', 'transactions': [ { 'function': ContractFunction( 'transfer', ('address', 'uint256')), 'values': (get_random_address, get_random_number), 'caller': SENDER_ADDRESS } for iteration in range(TEST_ITERATIONS) ] }, # { # 'test_name': 'approve', # 'transactions': [ # { # 'function': ContractFunction( # 'approve', ('address', 'uint256')), # 'caller': SENDER_ADDRESS, # 'values': (get_random_address, get_random_number), # } # for iteration in range(TEST_ITERATIONS) # ] # } ] }, { 'contract_filename': 'non-fungible-token.sol', 'contract_name': 'ERC721', 'constructor': ( ('uint256',), (TRANSACTION_LIMIT,) # ('uint256', 'string', 'string'), # (total_token_supply, 'Test', 'TEST'), ), 'transactions': [ # { # 'function': ContractFunction('safeTransferFrom', # ('address', 'address', 'uint256')), # 'values': 
(SENDER_ADDRESS, addr, index), # 'caller': SENDER_ADDRESS # } # for index, addr in enumerate(addresses[:TRANSACTION_LIMIT]) ], 'tests': [ { 'test_name': 'setApprovalForAll', 'transactions': [ { 'function': ContractFunction( 'setApprovalForAll', ('address', 'uint256')), 'values': (get_random_address, utils.get_random_token_id), 'caller': SENDER_ADDRESS, } for iteration in range(TEST_ITERATIONS) ] }, # { # 'test_name': 'safeTransferFrom', # 'transactions': [ # { # 'function': ContractFunction( # 'safeTransferFrom', ('address', 'address', 'uint256')), # 'values': (SENDER_ADDRESS, get_random_address, utils.get_random_token_id), # 'caller': SENDER_ADDRESS, # } # for iteration in range(TEST_ITERATIONS) # ] # }, # { # 'test_name': 'approve', # 'transactions': [ # { # 'function': ContractFunction( # 'approve', ('address', 'uint256')), # 'values': (SENDER_ADDRESS, utils.get_random_token_id), # 'caller': SENDER_ADDRESS # } # for iteration in range(TEST_ITERATIONS) # ] # } ] }, { 'contract_filename': 'auction.sol', 'contract_name': 'SimpleAuction', 'constructor': ( ('uint256', 'address', 'address[]'), (1000, SENDER_ADDRESS, addresses[:TRANSACTION_LIMIT]) ), 'transactions': [ # { # 'function': ContractFunction('bid', ()), # 'values': (), # 'amount': 1*index, # 'caller': addr # } # for index, addr in enumerate(addresses[:TRANSACTION_LIMIT]) ], 'tests': [ { # increment the bid each iteration # so we can do the withdraw function for the losers # there can only be 1 winning bid, so the total number of bids is n+1 'test_name': 'bid', 'transactions': [ { 'function': ContractFunction('bid', ()), 'values': (), 'amount': 1000*index, 'caller': addr, } for index, addr in enumerate(addresses[:TEST_ITERATIONS]) ] }, # { # 'test_name': 'withdraw', # 'transactions': [ # { # 'function': ContractFunction('withdraw', ()), # 'values': (), # 'caller': addr, # } # for index, addr in enumerate(addresses[:TEST_ITERATIONS]) # ] # }, ] }, { 'contract_filename': 'crowdfunding.sol', 'contract_name': 
'Crowdfunding', 'constructor': ( ('uint256', 'uint256', 'address[]'), (1, 1000, addresses[:TRANSACTION_LIMIT]), ), 'transactions': [ # { # 'function': ContractFunction('pledge', ('uint256',)), # 'values': (1,), # 'caller': addr, # 'amount': 1 # } # for addr in addresses[:TRANSACTION_LIMIT] ], 'tests': [ { 'test_name': 'pledge', 'transactions': [ { 'function': ContractFunction('pledge', ('uint256',)), 'values': (100,), 'caller': addr, 'amount': 1 } for addr in addresses[:TEST_ITERATIONS] ] }, # { # 'test_name': 'claimFunds', # 'transactions': [ # { # 'function': ContractFunction('claimFunds', ()), # 'values': (), # 'caller': SENDER_ADDRESS, # 'amount': 0 # } # for addr in addresses[:TEST_ITERATIONS] # ] # }, # { # 'test_name': 'getRefund', # 'transactions': [ # { # 'function': ContractFunction('getRefund', ()), # 'values': (), # 'caller': addr, # 'time': 9547698860 # } # for addr in addresses[:TEST_ITERATIONS] # ] # }, ] }, ]
import sys import random import utils from utils import ContractFunction, get_addresses, get_random_address,\ get_random_number, addresses, SENDER_ADDRESS from evm_tools import perform_transaction total_token_supply = 1000000 * 10**16 TRANSACTION_LIMIT = 100 TEST_ITERATIONS = 100 if len(sys.argv) != 4: print('Invalid number of arguments, wanted 3') sys.exit() if sys.argv[1] == 'multi': TRANSACTION_LIMIT = int(sys.argv[2]) TEST_ITERATIONS = int(sys.argv[3]) elif sys.argv[1] == 'single': print('Manually running a single test for EVM is not implemented yet') print('Using {:,} state entries'.format(TRANSACTION_LIMIT)) contracts_benchmark_plans = [ { 'contract_filename': 'fungible-token.sol', 'contract_name': 'ERC20', 'constructor': ( # ('uint256', 'string', 'string'), # (total_token_supply, 'Test', 'TEST'), ('uint256', 'string', 'string', 'address[]'), (total_token_supply, 'Test', 'TEST', addresses[:TRANSACTION_LIMIT]), ), 'transactions': [ # { # 'function': ContractFunction('transfer', ('address', 'uint256', 'address[]')), # 'values': (addr, 1*(10**16), addresses), # 'caller': SENDER_ADDRESS, # } # for addr in addresses[:TRANSACTION_LIMIT] ], 'tests': [ { 'test_name': 'transfer', 'transactions': [ { 'function': ContractFunction( 'transfer', ('address', 'uint256')), 'values': (get_random_address, get_random_number), 'caller': SENDER_ADDRESS } for iteration in range(TEST_ITERATIONS) ] }, # { # 'test_name': 'approve', # 'transactions': [ # { # 'function': ContractFunction( # 'approve', ('address', 'uint256')), # 'caller': SENDER_ADDRESS, # 'values': (get_random_address, get_random_number), # } # for iteration in range(TEST_ITERATIONS) # ] # } ] }, { 'contract_filename': 'non-fungible-token.sol', 'contract_name': 'ERC721', 'constructor': ( ('uint256',), (TRANSACTION_LIMIT,) # ('uint256', 'string', 'string'), # (total_token_supply, 'Test', 'TEST'), ), 'transactions': [ # { # 'function': ContractFunction('safeTransferFrom', # ('address', 'address', 'uint256')), # 'values': 
(SENDER_ADDRESS, addr, index), # 'caller': SENDER_ADDRESS # } # for index, addr in enumerate(addresses[:TRANSACTION_LIMIT]) ], 'tests': [ { 'test_name': 'setApprovalForAll', 'transactions': [ { 'function': ContractFunction( 'setApprovalForAll', ('address', 'uint256')), 'values': (get_random_address, utils.get_random_token_id), 'caller': SENDER_ADDRESS, } for iteration in range(TEST_ITERATIONS) ] }, # { # 'test_name': 'safeTransferFrom', # 'transactions': [ # { # 'function': ContractFunction( # 'safeTransferFrom', ('address', 'address', 'uint256')), # 'values': (SENDER_ADDRESS, get_random_address, utils.get_random_token_id), # 'caller': SENDER_ADDRESS, # } # for iteration in range(TEST_ITERATIONS) # ] # }, # { # 'test_name': 'approve', # 'transactions': [ # { # 'function': ContractFunction( # 'approve', ('address', 'uint256')), # 'values': (SENDER_ADDRESS, utils.get_random_token_id), # 'caller': SENDER_ADDRESS # } # for iteration in range(TEST_ITERATIONS) # ] # } ] }, { 'contract_filename': 'auction.sol', 'contract_name': 'SimpleAuction', 'constructor': ( ('uint256', 'address', 'address[]'), (1000, SENDER_ADDRESS, addresses[:TRANSACTION_LIMIT]) ), 'transactions': [ # { # 'function': ContractFunction('bid', ()), # 'values': (), # 'amount': 1*index, # 'caller': addr # } # for index, addr in enumerate(addresses[:TRANSACTION_LIMIT]) ], 'tests': [ { # increment the bid each iteration # so we can do the withdraw function for the losers # there can only be 1 winning bid, so the total number of bids is n+1 'test_name': 'bid', 'transactions': [ { 'function': ContractFunction('bid', ()), 'values': (), 'amount': 1000*index, 'caller': addr, } for index, addr in enumerate(addresses[:TEST_ITERATIONS]) ] }, # { # 'test_name': 'withdraw', # 'transactions': [ # { # 'function': ContractFunction('withdraw', ()), # 'values': (), # 'caller': addr, # } # for index, addr in enumerate(addresses[:TEST_ITERATIONS]) # ] # }, ] }, { 'contract_filename': 'crowdfunding.sol', 'contract_name': 
'Crowdfunding', 'constructor': ( ('uint256', 'uint256', 'address[]'), (1, 1000, addresses[:TRANSACTION_LIMIT]), ), 'transactions': [ # { # 'function': ContractFunction('pledge', ('uint256',)), # 'values': (1,), # 'caller': addr, # 'amount': 1 # } # for addr in addresses[:TRANSACTION_LIMIT] ], 'tests': [ { 'test_name': 'pledge', 'transactions': [ { 'function': ContractFunction('pledge', ('uint256',)), 'values': (100,), 'caller': addr, 'amount': 1 } for addr in addresses[:TEST_ITERATIONS] ] }, # { # 'test_name': 'claimFunds', # 'transactions': [ # { # 'function': ContractFunction('claimFunds', ()), # 'values': (), # 'caller': SENDER_ADDRESS, # 'amount': 0 # } # for addr in addresses[:TEST_ITERATIONS] # ] # }, # { # 'test_name': 'getRefund', # 'transactions': [ # { # 'function': ContractFunction('getRefund', ()), # 'values': (), # 'caller': addr, # 'time': 9547698860 # } # for addr in addresses[:TEST_ITERATIONS] # ] # }, ] }, ]
en
0.21959
# ('uint256', 'string', 'string'), # (total_token_supply, 'Test', 'TEST'), # { # 'function': ContractFunction('transfer', ('address', 'uint256', 'address[]')), # 'values': (addr, 1*(10**16), addresses), # 'caller': SENDER_ADDRESS, # } # for addr in addresses[:TRANSACTION_LIMIT] # { # 'test_name': 'approve', # 'transactions': [ # { # 'function': ContractFunction( # 'approve', ('address', 'uint256')), # 'caller': SENDER_ADDRESS, # 'values': (get_random_address, get_random_number), # } # for iteration in range(TEST_ITERATIONS) # ] # } # ('uint256', 'string', 'string'), # (total_token_supply, 'Test', 'TEST'), # { # 'function': ContractFunction('safeTransferFrom', # ('address', 'address', 'uint256')), # 'values': (SENDER_ADDRESS, addr, index), # 'caller': SENDER_ADDRESS # } # for index, addr in enumerate(addresses[:TRANSACTION_LIMIT]) # { # 'test_name': 'safeTransferFrom', # 'transactions': [ # { # 'function': ContractFunction( # 'safeTransferFrom', ('address', 'address', 'uint256')), # 'values': (SENDER_ADDRESS, get_random_address, utils.get_random_token_id), # 'caller': SENDER_ADDRESS, # } # for iteration in range(TEST_ITERATIONS) # ] # }, # { # 'test_name': 'approve', # 'transactions': [ # { # 'function': ContractFunction( # 'approve', ('address', 'uint256')), # 'values': (SENDER_ADDRESS, utils.get_random_token_id), # 'caller': SENDER_ADDRESS # } # for iteration in range(TEST_ITERATIONS) # ] # } # { # 'function': ContractFunction('bid', ()), # 'values': (), # 'amount': 1*index, # 'caller': addr # } # for index, addr in enumerate(addresses[:TRANSACTION_LIMIT]) # increment the bid each iteration # so we can do the withdraw function for the losers # there can only be 1 winning bid, so the total number of bids is n+1 # { # 'test_name': 'withdraw', # 'transactions': [ # { # 'function': ContractFunction('withdraw', ()), # 'values': (), # 'caller': addr, # } # for index, addr in enumerate(addresses[:TEST_ITERATIONS]) # ] # }, # { # 'function': ContractFunction('pledge', 
('uint256',)), # 'values': (1,), # 'caller': addr, # 'amount': 1 # } # for addr in addresses[:TRANSACTION_LIMIT] # { # 'test_name': 'claimFunds', # 'transactions': [ # { # 'function': ContractFunction('claimFunds', ()), # 'values': (), # 'caller': SENDER_ADDRESS, # 'amount': 0 # } # for addr in addresses[:TEST_ITERATIONS] # ] # }, # { # 'test_name': 'getRefund', # 'transactions': [ # { # 'function': ContractFunction('getRefund', ()), # 'values': (), # 'caller': addr, # 'time': 9547698860 # } # for addr in addresses[:TEST_ITERATIONS] # ] # },
2.088713
2
hw5-kde-plot-density.py
ardihikaru/mlsp
0
6624626
# library: pip install KDEpy # Source: https://github.com/tommyod/KDEpy # Docs: https://kdepy.readthedocs.io/en/latest/ ''' By performing PCA, we can analyze why reducing PCA component can reduce the accuracy of KDE calculation. ''' from sklearn.decomposition import PCA from KDEpy import FFTKDE from hw5.libs.common.dataset import Dataset from hw5.libs.common.util import plot_digit_data import matplotlib.pyplot as plt def index_digit_ones(Y_train): idx_ones = [] for i in range(len(Y_train)): if int(Y_train[i]) == 1: idx_ones.append(i) return idx_ones if __name__ == '__main__': # dataset = Dataset(train_data=1000, test_data=100) # dataset = Dataset(train_data=40, test_data=10) dataset = Dataset(train_data=80, test_data=20) # dataset = Dataset() X_train, Y_train, X_test, Y_test = dataset.get_dataset() print("before PCA = ", X_train.shape) # Dimensional reduction # pca = PCA(n_components=2, whiten=False) # pca = PCA(n_components=64, whiten=False) pca = PCA(n_components=80, whiten=False) X_train = pca.fit_transform(X_train) print("after PCA = ", X_train.shape) # print(X_train[1].shape) print(Y_train) idx_ones = index_digit_ones(Y_train) print("> idx_ones = ", idx_ones) idxs = [12, 3, 6, 14, 23, 24, 40, 59, 67] # this is extracted indices of digit=1; with idx=12 as digit=3 data = [] for idx in idxs: data.append(X_train[idx]) fig = plt.figure() # more styles: https://matplotlib.org/gallery/lines_bars_and_markers/line_styles_reference.html line_styles = ['--', '-', ':', ':', '-', ':', ':', ':', ':'] for i in range(len(idxs)): estimator = FFTKDE(kernel='gaussian', bw='silverman') # x[i], y[i] = estimator[i].fit(data[i], weights=None).evaluate() x, y = estimator.fit(data[i], weights=None).evaluate() # plt.plot(x[i], y[i], label='Digit='+str(Y_train[idxs[i]])) plt.plot(x, y, linestyle=line_styles[i], label='IDX='+str(idxs[i])+'; Digit='+str(Y_train[idxs[i]])) plt.legend() plt.show() fig.savefig('hw5/results/visualize_kde.png', dpi=fig.dpi) new_data = 
pca.inverse_transform(data) plot_digit_data(new_data, 'test_kde_plot_digits')
# library: pip install KDEpy # Source: https://github.com/tommyod/KDEpy # Docs: https://kdepy.readthedocs.io/en/latest/ ''' By performing PCA, we can analyze why reducing PCA component can reduce the accuracy of KDE calculation. ''' from sklearn.decomposition import PCA from KDEpy import FFTKDE from hw5.libs.common.dataset import Dataset from hw5.libs.common.util import plot_digit_data import matplotlib.pyplot as plt def index_digit_ones(Y_train): idx_ones = [] for i in range(len(Y_train)): if int(Y_train[i]) == 1: idx_ones.append(i) return idx_ones if __name__ == '__main__': # dataset = Dataset(train_data=1000, test_data=100) # dataset = Dataset(train_data=40, test_data=10) dataset = Dataset(train_data=80, test_data=20) # dataset = Dataset() X_train, Y_train, X_test, Y_test = dataset.get_dataset() print("before PCA = ", X_train.shape) # Dimensional reduction # pca = PCA(n_components=2, whiten=False) # pca = PCA(n_components=64, whiten=False) pca = PCA(n_components=80, whiten=False) X_train = pca.fit_transform(X_train) print("after PCA = ", X_train.shape) # print(X_train[1].shape) print(Y_train) idx_ones = index_digit_ones(Y_train) print("> idx_ones = ", idx_ones) idxs = [12, 3, 6, 14, 23, 24, 40, 59, 67] # this is extracted indices of digit=1; with idx=12 as digit=3 data = [] for idx in idxs: data.append(X_train[idx]) fig = plt.figure() # more styles: https://matplotlib.org/gallery/lines_bars_and_markers/line_styles_reference.html line_styles = ['--', '-', ':', ':', '-', ':', ':', ':', ':'] for i in range(len(idxs)): estimator = FFTKDE(kernel='gaussian', bw='silverman') # x[i], y[i] = estimator[i].fit(data[i], weights=None).evaluate() x, y = estimator.fit(data[i], weights=None).evaluate() # plt.plot(x[i], y[i], label='Digit='+str(Y_train[idxs[i]])) plt.plot(x, y, linestyle=line_styles[i], label='IDX='+str(idxs[i])+'; Digit='+str(Y_train[idxs[i]])) plt.legend() plt.show() fig.savefig('hw5/results/visualize_kde.png', dpi=fig.dpi) new_data = 
pca.inverse_transform(data) plot_digit_data(new_data, 'test_kde_plot_digits')
en
0.558269
# library: pip install KDEpy # Source: https://github.com/tommyod/KDEpy # Docs: https://kdepy.readthedocs.io/en/latest/ By performing PCA, we can analyze why reducing PCA component can reduce the accuracy of KDE calculation. # dataset = Dataset(train_data=1000, test_data=100) # dataset = Dataset(train_data=40, test_data=10) # dataset = Dataset() # Dimensional reduction # pca = PCA(n_components=2, whiten=False) # pca = PCA(n_components=64, whiten=False) # print(X_train[1].shape) # this is extracted indices of digit=1; with idx=12 as digit=3 # more styles: https://matplotlib.org/gallery/lines_bars_and_markers/line_styles_reference.html # x[i], y[i] = estimator[i].fit(data[i], weights=None).evaluate() # plt.plot(x[i], y[i], label='Digit='+str(Y_train[idxs[i]]))
3.593008
4
airzone/utils.py
Mike-de-bike/python-airzone
8
6624627
<gh_stars>1-10 import base64 def bitfield(n): ''' Obtains the binary array from the number ''' return [1 if digit=='1' else 0 for digit in bin(n)[2:]] def shifting(bitlist): ''' Obtain the number from the binary array ''' out = 0 for bit in bitlist: out = (out << 1) | bit return out def true_in_list(l): return [i for i,v in enumerate(l) if v] def pad_left_list(l, size, pad_value): for n in range(len(l), size): l = [pad_value] + l return l def pad_right_list(l, size, pad_value): for n in range(len(l), size): l = l + [pad_value] return l # toggleBit() returns an integer with the bit at 'offset' inverted, 0 -> 1 and 1 -> 0. def toggleBit(int_type, offset): mask = 1 << offset return(int_type ^ mask) # setBit() returns an integer with the bit at 'offset' set to 1. def setBit(int_type, offset): mask = 1 << offset return(int_type | mask) # clearBit() returns an integer with the bit at 'offset' cleared. def clearBit(int_type, offset): mask = ~(1 << offset) return(int_type & mask)
import base64 def bitfield(n): ''' Obtains the binary array from the number ''' return [1 if digit=='1' else 0 for digit in bin(n)[2:]] def shifting(bitlist): ''' Obtain the number from the binary array ''' out = 0 for bit in bitlist: out = (out << 1) | bit return out def true_in_list(l): return [i for i,v in enumerate(l) if v] def pad_left_list(l, size, pad_value): for n in range(len(l), size): l = [pad_value] + l return l def pad_right_list(l, size, pad_value): for n in range(len(l), size): l = l + [pad_value] return l # toggleBit() returns an integer with the bit at 'offset' inverted, 0 -> 1 and 1 -> 0. def toggleBit(int_type, offset): mask = 1 << offset return(int_type ^ mask) # setBit() returns an integer with the bit at 'offset' set to 1. def setBit(int_type, offset): mask = 1 << offset return(int_type | mask) # clearBit() returns an integer with the bit at 'offset' cleared. def clearBit(int_type, offset): mask = ~(1 << offset) return(int_type & mask)
en
0.716642
Obtains the binary array from the number Obtain the number from the binary array # toggleBit() returns an integer with the bit at 'offset' inverted, 0 -> 1 and 1 -> 0. # setBit() returns an integer with the bit at 'offset' set to 1. # clearBit() returns an integer with the bit at 'offset' cleared.
3.645576
4
elysianfields/__init__.py
readmodifywrite/elysianfields
0
6624628
from .fields import *
from .fields import *
none
1
1.171345
1
python/tvm/auto_scheduler/workload_registry.py
maxtnuk/incubator-tvm
2
6624629
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """ Workload registration and serialization. We use a json string to represent a workload (a computation graph). The format of the string is `[func_name, [args...]]`. The dag should be the return value of this `func_name(*args)`. Rationale: The workload is actually a compute dag defined by tvm dsl. But serializing compute dags and matching them efficiently is not easy. Therefore, we use the above string to encode a compute dag. These strings are efficient for serialization/matching and won't be too long. When we need the dag, we decode the string and call the function, which will return the dag. """ import pickle import json import tvm._ffi from .utils import serialize_args, deserialize_args, get_func_name # Global workload function and hash key registry # It stores two types of workload: # 1. User registered tasks. This type of workload is registered # by the decorator "register_workload" # 2. Extracted tasks from a relay program. This type of workload is # registered by function "register_workload_tensors". 
# # For 1, the dictionary maps a function name to its function pointer # For 2, the dictionary maps a hash key to a list of input/output tensors WORKLOAD_FUNC_REGISTRY = {} def register_workload(func_name, f=None, override=False): """Register a function that generates a certain workload. The input function should take hashable and jsonable arguments (int, float, tuple of int, tvm.tensor.Tensor, ...) and return a list of tvm.tensor.Tensor. Parameters ---------- func_name : Union[Function, str] The generation function that returns the compute declaration Tensors or its function name. f : Optional[Function] The generation function to be registered. override : boolean = False Whether override existing entry. Examples -------- .. code-block:: python @auto_scheduler.register_workload def matmul(N, M, K): A = te.placeholder((N, K), name='A') B = te.placeholder((K, M), name='B') k = te.reduce_axis((0, K), name='k') C = te.compute((N, M), lambda i, j: tvm.sum(A[i][k] * B[k][j], axis=[k]), name='C') return [A, B, C] """ global WORKLOAD_FUNC_REGISTRY if callable(func_name): f = func_name func_name = get_func_name(f) if not isinstance(func_name, str): raise ValueError("expect string function name") def register(myf): """internal register function""" if func_name in WORKLOAD_FUNC_REGISTRY and not override: raise RuntimeError("%s has been registered already" % func_name) WORKLOAD_FUNC_REGISTRY[func_name] = myf return myf if f: return register(f) return register def register_workload_tensors(tensors): """Register a workload by provding input/output tensors Parameters ---------- tensors: List[Tensor] The input/output tensors of a compute DAG Returns ------- key: str The workload key """ # pylint: disable=import-outside-toplevel from .compute_dag import ComputeDAG key = ComputeDAG(tensors).hash_key() WORKLOAD_FUNC_REGISTRY[key] = tensors return json.dumps((key,)) def make_workload_key(func, args): """Make a workload key by function and arguments. 
Parameters ---------- func : Union[Function, str] The function that returns the compute declaration Tensors. Can be the a function or the function name. args : Args The args of the function. Returns ------- workload_key : str The workload key of the function. """ global WORKLOAD_FUNC_REGISTRY if callable(func): func_name = get_func_name(func) elif isinstance(func, str): func_name = func else: raise ValueError( "Invalid function: " + str(func) + " . `make_workload_key` expects a callable function or its function name" ) if not func_name in WORKLOAD_FUNC_REGISTRY: raise ValueError( "%s is not registered. " % func, "Please register it with @auto_scheduler.register_workload", ) args = serialize_args(args) return json.dumps((func_name,) + args) @tvm._ffi.register_func("auto_scheduler.workload_key_to_tensors") def workload_key_to_tensors(workload_key): """Get the input/output tensors from the workload key. This method is usually used to create a ComputeDAG by workload key. Parameters ---------- workload_key : str The input workload key. Returns ------- tensors : List[Tensor] The registered compute declaration Tensors. """ global WORKLOAD_FUNC_REGISTRY workload = json.loads(workload_key) name = workload[0] value = WORKLOAD_FUNC_REGISTRY[name] # "value" can be either a function or a list of tensors if callable(value): # if it is a func args = deserialize_args(workload[1:]) return value(*args) # otherwise, it is a list of tensors return value def serialize_workload_registry_entry(workload_key): """ Serialize a workload registry entry. This is used when the start method of multiprocessing is spawn. We need to serialize the entry and register it in the new processes. 
Parameters ---------- workload_key : str The workload key Returns ------- data: Tuple The serialized pickable data """ global WORKLOAD_FUNC_REGISTRY workload = json.loads(workload_key) name = workload[0] value = WORKLOAD_FUNC_REGISTRY[name] return name, value def deserialize_workload_registry_entry(data): """ Deserialize a workload registry entry. This should be used along with :code:`serialize_workload_registry_entry` Parameters ---------- data: Tuple The return value of :code:`serialize_workload_registry_entry` """ global WORKLOAD_FUNC_REGISTRY name, value = data if name not in WORKLOAD_FUNC_REGISTRY: WORKLOAD_FUNC_REGISTRY[name] = value def save_workload_func_registry(filename): """Dump workload function registry to a pickle binary file. Parameters ---------- filename : str The filename to dump workload function registry to. """ global WORKLOAD_FUNC_REGISTRY pickle.dump(WORKLOAD_FUNC_REGISTRY, open(filename, "wb")) def load_workload_func_registry(filename): """Load workload function registry from a pickle binary file. Parameters ---------- filename : str The filename to load workload function registry from. """ global WORKLOAD_FUNC_REGISTRY WORKLOAD_FUNC_REGISTRY = pickle.load(open(filename, "rb"))
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """ Workload registration and serialization. We use a json string to represent a workload (a computation graph). The format of the string is `[func_name, [args...]]`. The dag should be the return value of this `func_name(*args)`. Rationale: The workload is actually a compute dag defined by tvm dsl. But serializing compute dags and matching them efficiently is not easy. Therefore, we use the above string to encode a compute dag. These strings are efficient for serialization/matching and won't be too long. When we need the dag, we decode the string and call the function, which will return the dag. """ import pickle import json import tvm._ffi from .utils import serialize_args, deserialize_args, get_func_name # Global workload function and hash key registry # It stores two types of workload: # 1. User registered tasks. This type of workload is registered # by the decorator "register_workload" # 2. Extracted tasks from a relay program. This type of workload is # registered by function "register_workload_tensors". 
# # For 1, the dictionary maps a function name to its function pointer # For 2, the dictionary maps a hash key to a list of input/output tensors WORKLOAD_FUNC_REGISTRY = {} def register_workload(func_name, f=None, override=False): """Register a function that generates a certain workload. The input function should take hashable and jsonable arguments (int, float, tuple of int, tvm.tensor.Tensor, ...) and return a list of tvm.tensor.Tensor. Parameters ---------- func_name : Union[Function, str] The generation function that returns the compute declaration Tensors or its function name. f : Optional[Function] The generation function to be registered. override : boolean = False Whether override existing entry. Examples -------- .. code-block:: python @auto_scheduler.register_workload def matmul(N, M, K): A = te.placeholder((N, K), name='A') B = te.placeholder((K, M), name='B') k = te.reduce_axis((0, K), name='k') C = te.compute((N, M), lambda i, j: tvm.sum(A[i][k] * B[k][j], axis=[k]), name='C') return [A, B, C] """ global WORKLOAD_FUNC_REGISTRY if callable(func_name): f = func_name func_name = get_func_name(f) if not isinstance(func_name, str): raise ValueError("expect string function name") def register(myf): """internal register function""" if func_name in WORKLOAD_FUNC_REGISTRY and not override: raise RuntimeError("%s has been registered already" % func_name) WORKLOAD_FUNC_REGISTRY[func_name] = myf return myf if f: return register(f) return register def register_workload_tensors(tensors): """Register a workload by provding input/output tensors Parameters ---------- tensors: List[Tensor] The input/output tensors of a compute DAG Returns ------- key: str The workload key """ # pylint: disable=import-outside-toplevel from .compute_dag import ComputeDAG key = ComputeDAG(tensors).hash_key() WORKLOAD_FUNC_REGISTRY[key] = tensors return json.dumps((key,)) def make_workload_key(func, args): """Make a workload key by function and arguments. 
Parameters ---------- func : Union[Function, str] The function that returns the compute declaration Tensors. Can be the a function or the function name. args : Args The args of the function. Returns ------- workload_key : str The workload key of the function. """ global WORKLOAD_FUNC_REGISTRY if callable(func): func_name = get_func_name(func) elif isinstance(func, str): func_name = func else: raise ValueError( "Invalid function: " + str(func) + " . `make_workload_key` expects a callable function or its function name" ) if not func_name in WORKLOAD_FUNC_REGISTRY: raise ValueError( "%s is not registered. " % func, "Please register it with @auto_scheduler.register_workload", ) args = serialize_args(args) return json.dumps((func_name,) + args) @tvm._ffi.register_func("auto_scheduler.workload_key_to_tensors") def workload_key_to_tensors(workload_key): """Get the input/output tensors from the workload key. This method is usually used to create a ComputeDAG by workload key. Parameters ---------- workload_key : str The input workload key. Returns ------- tensors : List[Tensor] The registered compute declaration Tensors. """ global WORKLOAD_FUNC_REGISTRY workload = json.loads(workload_key) name = workload[0] value = WORKLOAD_FUNC_REGISTRY[name] # "value" can be either a function or a list of tensors if callable(value): # if it is a func args = deserialize_args(workload[1:]) return value(*args) # otherwise, it is a list of tensors return value def serialize_workload_registry_entry(workload_key): """ Serialize a workload registry entry. This is used when the start method of multiprocessing is spawn. We need to serialize the entry and register it in the new processes. 
Parameters ---------- workload_key : str The workload key Returns ------- data: Tuple The serialized pickable data """ global WORKLOAD_FUNC_REGISTRY workload = json.loads(workload_key) name = workload[0] value = WORKLOAD_FUNC_REGISTRY[name] return name, value def deserialize_workload_registry_entry(data): """ Deserialize a workload registry entry. This should be used along with :code:`serialize_workload_registry_entry` Parameters ---------- data: Tuple The return value of :code:`serialize_workload_registry_entry` """ global WORKLOAD_FUNC_REGISTRY name, value = data if name not in WORKLOAD_FUNC_REGISTRY: WORKLOAD_FUNC_REGISTRY[name] = value def save_workload_func_registry(filename): """Dump workload function registry to a pickle binary file. Parameters ---------- filename : str The filename to dump workload function registry to. """ global WORKLOAD_FUNC_REGISTRY pickle.dump(WORKLOAD_FUNC_REGISTRY, open(filename, "wb")) def load_workload_func_registry(filename): """Load workload function registry from a pickle binary file. Parameters ---------- filename : str The filename to load workload function registry from. """ global WORKLOAD_FUNC_REGISTRY WORKLOAD_FUNC_REGISTRY = pickle.load(open(filename, "rb"))
en
0.657889
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. Workload registration and serialization. We use a json string to represent a workload (a computation graph). The format of the string is `[func_name, [args...]]`. The dag should be the return value of this `func_name(*args)`. Rationale: The workload is actually a compute dag defined by tvm dsl. But serializing compute dags and matching them efficiently is not easy. Therefore, we use the above string to encode a compute dag. These strings are efficient for serialization/matching and won't be too long. When we need the dag, we decode the string and call the function, which will return the dag. # Global workload function and hash key registry # It stores two types of workload: # 1. User registered tasks. This type of workload is registered # by the decorator "register_workload" # 2. Extracted tasks from a relay program. This type of workload is # registered by function "register_workload_tensors". # # For 1, the dictionary maps a function name to its function pointer # For 2, the dictionary maps a hash key to a list of input/output tensors Register a function that generates a certain workload. 
The input function should take hashable and jsonable arguments (int, float, tuple of int, tvm.tensor.Tensor, ...) and return a list of tvm.tensor.Tensor. Parameters ---------- func_name : Union[Function, str] The generation function that returns the compute declaration Tensors or its function name. f : Optional[Function] The generation function to be registered. override : boolean = False Whether override existing entry. Examples -------- .. code-block:: python @auto_scheduler.register_workload def matmul(N, M, K): A = te.placeholder((N, K), name='A') B = te.placeholder((K, M), name='B') k = te.reduce_axis((0, K), name='k') C = te.compute((N, M), lambda i, j: tvm.sum(A[i][k] * B[k][j], axis=[k]), name='C') return [A, B, C] internal register function Register a workload by provding input/output tensors Parameters ---------- tensors: List[Tensor] The input/output tensors of a compute DAG Returns ------- key: str The workload key # pylint: disable=import-outside-toplevel Make a workload key by function and arguments. Parameters ---------- func : Union[Function, str] The function that returns the compute declaration Tensors. Can be the a function or the function name. args : Args The args of the function. Returns ------- workload_key : str The workload key of the function. Get the input/output tensors from the workload key. This method is usually used to create a ComputeDAG by workload key. Parameters ---------- workload_key : str The input workload key. Returns ------- tensors : List[Tensor] The registered compute declaration Tensors. # "value" can be either a function or a list of tensors # if it is a func # otherwise, it is a list of tensors Serialize a workload registry entry. This is used when the start method of multiprocessing is spawn. We need to serialize the entry and register it in the new processes. Parameters ---------- workload_key : str The workload key Returns ------- data: Tuple The serialized pickable data Deserialize a workload registry entry. 
This should be used along with :code:`serialize_workload_registry_entry` Parameters ---------- data: Tuple The return value of :code:`serialize_workload_registry_entry` Dump workload function registry to a pickle binary file. Parameters ---------- filename : str The filename to dump workload function registry to. Load workload function registry from a pickle binary file. Parameters ---------- filename : str The filename to load workload function registry from.
2.079905
2
GmailWrapper_JE/venv/Lib/site-packages/pyasn1_modules/rfc3281.py
JE-Chen/je_old_repo
2
6624630
<filename>GmailWrapper_JE/venv/Lib/site-packages/pyasn1_modules/rfc3281.py # coding: utf-8 # # This file is part of pyasn1-modules software. # # Created by <NAME> with asn1ate tool. # Copyright (c) 2005-2019, <NAME> <<EMAIL>> # License: http://snmplabs.com/pyasn1/license.html # # An Internet Attribute Certificate Profile for Authorization # # ASN.1 source from: # http://www.ietf.org/rfc/rfc3281.txt # from pyasn1.type import char from pyasn1.type import constraint from pyasn1.type import namedtype from pyasn1.type import namedval from pyasn1.type import tag from pyasn1.type import univ from pyasn1.type import useful from pyasn1_modules import rfc3280 MAX = float('inf') def _buildOid(*components): output = [] for x in tuple(components): if isinstance(x, univ.ObjectIdentifier): output.extend(list(x)) else: output.append(int(x)) return univ.ObjectIdentifier(output) class ObjectDigestInfo(univ.Sequence): pass ObjectDigestInfo.componentType = namedtype.NamedTypes( namedtype.NamedType('digestedObjectType', univ.Enumerated( namedValues=namedval.NamedValues(('publicKey', 0), ('publicKeyCert', 1), ('otherObjectTypes', 2)))), namedtype.OptionalNamedType('otherObjectTypeID', univ.ObjectIdentifier()), namedtype.NamedType('digestAlgorithm', rfc3280.AlgorithmIdentifier()), namedtype.NamedType('objectDigest', univ.BitString()) ) class IssuerSerial(univ.Sequence): pass IssuerSerial.componentType = namedtype.NamedTypes( namedtype.NamedType('issuer', rfc3280.GeneralNames()), namedtype.NamedType('serial', rfc3280.CertificateSerialNumber()), namedtype.OptionalNamedType('issuerUID', rfc3280.UniqueIdentifier()) ) class TargetCert(univ.Sequence): pass TargetCert.componentType = namedtype.NamedTypes( namedtype.NamedType('targetCertificate', IssuerSerial()), namedtype.OptionalNamedType('targetName', rfc3280.GeneralName()), namedtype.OptionalNamedType('certDigestInfo', ObjectDigestInfo()) ) class Target(univ.Choice): pass Target.componentType = namedtype.NamedTypes( 
namedtype.NamedType('targetName', rfc3280.GeneralName().subtype( implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), namedtype.NamedType('targetGroup', rfc3280.GeneralName().subtype( implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), namedtype.NamedType('targetCert', TargetCert().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))) ) class Targets(univ.SequenceOf): pass Targets.componentType = Target() class ProxyInfo(univ.SequenceOf): pass ProxyInfo.componentType = Targets() id_at_role = _buildOid(rfc3280.id_at, 72) id_pe_aaControls = _buildOid(rfc3280.id_pe, 6) id_ce_targetInformation = _buildOid(rfc3280.id_ce, 55) id_pe_ac_auditIdentity = _buildOid(rfc3280.id_pe, 4) class ClassList(univ.BitString): pass ClassList.namedValues = namedval.NamedValues( ('unmarked', 0), ('unclassified', 1), ('restricted', 2), ('confidential', 3), ('secret', 4), ('topSecret', 5) ) class SecurityCategory(univ.Sequence): pass SecurityCategory.componentType = namedtype.NamedTypes( namedtype.NamedType('type', univ.ObjectIdentifier().subtype( implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), namedtype.NamedType('value', univ.Any().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))) ) class Clearance(univ.Sequence): pass Clearance.componentType = namedtype.NamedTypes( namedtype.NamedType('policyId', univ.ObjectIdentifier().subtype( implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), namedtype.DefaultedNamedType('classList', ClassList().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)).subtype( value="unclassified")), namedtype.OptionalNamedType('securityCategories', univ.SetOf(componentType=SecurityCategory()).subtype( implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))) ) class AttCertVersion(univ.Integer): pass AttCertVersion.namedValues = namedval.NamedValues( ('v2', 1) ) id_aca = _buildOid(rfc3280.id_pkix, 10) id_at_clearance = 
_buildOid(2, 5, 1, 5, 55) class AttrSpec(univ.SequenceOf): pass AttrSpec.componentType = univ.ObjectIdentifier() class AAControls(univ.Sequence): pass AAControls.componentType = namedtype.NamedTypes( namedtype.OptionalNamedType('pathLenConstraint', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, MAX))), namedtype.OptionalNamedType('permittedAttrs', AttrSpec().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), namedtype.OptionalNamedType('excludedAttrs', AttrSpec().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), namedtype.DefaultedNamedType('permitUnSpecified', univ.Boolean().subtype(value=1)) ) class AttCertValidityPeriod(univ.Sequence): pass AttCertValidityPeriod.componentType = namedtype.NamedTypes( namedtype.NamedType('notBeforeTime', useful.GeneralizedTime()), namedtype.NamedType('notAfterTime', useful.GeneralizedTime()) ) id_aca_authenticationInfo = _buildOid(id_aca, 1) class V2Form(univ.Sequence): pass V2Form.componentType = namedtype.NamedTypes( namedtype.OptionalNamedType('issuerName', rfc3280.GeneralNames()), namedtype.OptionalNamedType('baseCertificateID', IssuerSerial().subtype( implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), namedtype.OptionalNamedType('objectDigestInfo', ObjectDigestInfo().subtype( implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))) ) class AttCertIssuer(univ.Choice): pass AttCertIssuer.componentType = namedtype.NamedTypes( namedtype.NamedType('v1Form', rfc3280.GeneralNames()), namedtype.NamedType('v2Form', V2Form().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))) ) class Holder(univ.Sequence): pass Holder.componentType = namedtype.NamedTypes( namedtype.OptionalNamedType('baseCertificateID', IssuerSerial().subtype( implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), namedtype.OptionalNamedType('entityName', rfc3280.GeneralNames().subtype( 
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), namedtype.OptionalNamedType('objectDigestInfo', ObjectDigestInfo().subtype( implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))) ) class AttributeCertificateInfo(univ.Sequence): pass AttributeCertificateInfo.componentType = namedtype.NamedTypes( namedtype.NamedType('version', AttCertVersion()), namedtype.NamedType('holder', Holder()), namedtype.NamedType('issuer', AttCertIssuer()), namedtype.NamedType('signature', rfc3280.AlgorithmIdentifier()), namedtype.NamedType('serialNumber', rfc3280.CertificateSerialNumber()), namedtype.NamedType('attrCertValidityPeriod', AttCertValidityPeriod()), namedtype.NamedType('attributes', univ.SequenceOf(componentType=rfc3280.Attribute())), namedtype.OptionalNamedType('issuerUniqueID', rfc3280.UniqueIdentifier()), namedtype.OptionalNamedType('extensions', rfc3280.Extensions()) ) class AttributeCertificate(univ.Sequence): pass AttributeCertificate.componentType = namedtype.NamedTypes( namedtype.NamedType('acinfo', AttributeCertificateInfo()), namedtype.NamedType('signatureAlgorithm', rfc3280.AlgorithmIdentifier()), namedtype.NamedType('signatureValue', univ.BitString()) ) id_mod = _buildOid(rfc3280.id_pkix, 0) id_mod_attribute_cert = _buildOid(id_mod, 12) id_aca_accessIdentity = _buildOid(id_aca, 2) class RoleSyntax(univ.Sequence): pass RoleSyntax.componentType = namedtype.NamedTypes( namedtype.OptionalNamedType('roleAuthority', rfc3280.GeneralNames().subtype( implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), namedtype.NamedType('roleName', rfc3280.GeneralName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))) ) id_aca_chargingIdentity = _buildOid(id_aca, 3) class ACClearAttrs(univ.Sequence): pass ACClearAttrs.componentType = namedtype.NamedTypes( namedtype.NamedType('acIssuer', rfc3280.GeneralName()), namedtype.NamedType('acSerial', univ.Integer()), namedtype.NamedType('attrs', 
univ.SequenceOf(componentType=rfc3280.Attribute())) ) id_aca_group = _buildOid(id_aca, 4) id_pe_ac_proxying = _buildOid(rfc3280.id_pe, 10) class SvceAuthInfo(univ.Sequence): pass SvceAuthInfo.componentType = namedtype.NamedTypes( namedtype.NamedType('service', rfc3280.GeneralName()), namedtype.NamedType('ident', rfc3280.GeneralName()), namedtype.OptionalNamedType('authInfo', univ.OctetString()) ) class IetfAttrSyntax(univ.Sequence): pass IetfAttrSyntax.componentType = namedtype.NamedTypes( namedtype.OptionalNamedType( 'policyAuthority', rfc3280.GeneralNames().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)) ), namedtype.NamedType( 'values', univ.SequenceOf( componentType=univ.Choice( componentType=namedtype.NamedTypes( namedtype.NamedType('octets', univ.OctetString()), namedtype.NamedType('oid', univ.ObjectIdentifier()), namedtype.NamedType('string', char.UTF8String()) ) ) ) ) ) id_aca_encAttrs = _buildOid(id_aca, 6)
<filename>GmailWrapper_JE/venv/Lib/site-packages/pyasn1_modules/rfc3281.py # coding: utf-8 # # This file is part of pyasn1-modules software. # # Created by <NAME> with asn1ate tool. # Copyright (c) 2005-2019, <NAME> <<EMAIL>> # License: http://snmplabs.com/pyasn1/license.html # # An Internet Attribute Certificate Profile for Authorization # # ASN.1 source from: # http://www.ietf.org/rfc/rfc3281.txt # from pyasn1.type import char from pyasn1.type import constraint from pyasn1.type import namedtype from pyasn1.type import namedval from pyasn1.type import tag from pyasn1.type import univ from pyasn1.type import useful from pyasn1_modules import rfc3280 MAX = float('inf') def _buildOid(*components): output = [] for x in tuple(components): if isinstance(x, univ.ObjectIdentifier): output.extend(list(x)) else: output.append(int(x)) return univ.ObjectIdentifier(output) class ObjectDigestInfo(univ.Sequence): pass ObjectDigestInfo.componentType = namedtype.NamedTypes( namedtype.NamedType('digestedObjectType', univ.Enumerated( namedValues=namedval.NamedValues(('publicKey', 0), ('publicKeyCert', 1), ('otherObjectTypes', 2)))), namedtype.OptionalNamedType('otherObjectTypeID', univ.ObjectIdentifier()), namedtype.NamedType('digestAlgorithm', rfc3280.AlgorithmIdentifier()), namedtype.NamedType('objectDigest', univ.BitString()) ) class IssuerSerial(univ.Sequence): pass IssuerSerial.componentType = namedtype.NamedTypes( namedtype.NamedType('issuer', rfc3280.GeneralNames()), namedtype.NamedType('serial', rfc3280.CertificateSerialNumber()), namedtype.OptionalNamedType('issuerUID', rfc3280.UniqueIdentifier()) ) class TargetCert(univ.Sequence): pass TargetCert.componentType = namedtype.NamedTypes( namedtype.NamedType('targetCertificate', IssuerSerial()), namedtype.OptionalNamedType('targetName', rfc3280.GeneralName()), namedtype.OptionalNamedType('certDigestInfo', ObjectDigestInfo()) ) class Target(univ.Choice): pass Target.componentType = namedtype.NamedTypes( 
namedtype.NamedType('targetName', rfc3280.GeneralName().subtype( implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), namedtype.NamedType('targetGroup', rfc3280.GeneralName().subtype( implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), namedtype.NamedType('targetCert', TargetCert().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))) ) class Targets(univ.SequenceOf): pass Targets.componentType = Target() class ProxyInfo(univ.SequenceOf): pass ProxyInfo.componentType = Targets() id_at_role = _buildOid(rfc3280.id_at, 72) id_pe_aaControls = _buildOid(rfc3280.id_pe, 6) id_ce_targetInformation = _buildOid(rfc3280.id_ce, 55) id_pe_ac_auditIdentity = _buildOid(rfc3280.id_pe, 4) class ClassList(univ.BitString): pass ClassList.namedValues = namedval.NamedValues( ('unmarked', 0), ('unclassified', 1), ('restricted', 2), ('confidential', 3), ('secret', 4), ('topSecret', 5) ) class SecurityCategory(univ.Sequence): pass SecurityCategory.componentType = namedtype.NamedTypes( namedtype.NamedType('type', univ.ObjectIdentifier().subtype( implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), namedtype.NamedType('value', univ.Any().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))) ) class Clearance(univ.Sequence): pass Clearance.componentType = namedtype.NamedTypes( namedtype.NamedType('policyId', univ.ObjectIdentifier().subtype( implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), namedtype.DefaultedNamedType('classList', ClassList().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)).subtype( value="unclassified")), namedtype.OptionalNamedType('securityCategories', univ.SetOf(componentType=SecurityCategory()).subtype( implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))) ) class AttCertVersion(univ.Integer): pass AttCertVersion.namedValues = namedval.NamedValues( ('v2', 1) ) id_aca = _buildOid(rfc3280.id_pkix, 10) id_at_clearance = 
_buildOid(2, 5, 1, 5, 55) class AttrSpec(univ.SequenceOf): pass AttrSpec.componentType = univ.ObjectIdentifier() class AAControls(univ.Sequence): pass AAControls.componentType = namedtype.NamedTypes( namedtype.OptionalNamedType('pathLenConstraint', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, MAX))), namedtype.OptionalNamedType('permittedAttrs', AttrSpec().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), namedtype.OptionalNamedType('excludedAttrs', AttrSpec().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), namedtype.DefaultedNamedType('permitUnSpecified', univ.Boolean().subtype(value=1)) ) class AttCertValidityPeriod(univ.Sequence): pass AttCertValidityPeriod.componentType = namedtype.NamedTypes( namedtype.NamedType('notBeforeTime', useful.GeneralizedTime()), namedtype.NamedType('notAfterTime', useful.GeneralizedTime()) ) id_aca_authenticationInfo = _buildOid(id_aca, 1) class V2Form(univ.Sequence): pass V2Form.componentType = namedtype.NamedTypes( namedtype.OptionalNamedType('issuerName', rfc3280.GeneralNames()), namedtype.OptionalNamedType('baseCertificateID', IssuerSerial().subtype( implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), namedtype.OptionalNamedType('objectDigestInfo', ObjectDigestInfo().subtype( implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))) ) class AttCertIssuer(univ.Choice): pass AttCertIssuer.componentType = namedtype.NamedTypes( namedtype.NamedType('v1Form', rfc3280.GeneralNames()), namedtype.NamedType('v2Form', V2Form().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))) ) class Holder(univ.Sequence): pass Holder.componentType = namedtype.NamedTypes( namedtype.OptionalNamedType('baseCertificateID', IssuerSerial().subtype( implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), namedtype.OptionalNamedType('entityName', rfc3280.GeneralNames().subtype( 
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), namedtype.OptionalNamedType('objectDigestInfo', ObjectDigestInfo().subtype( implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))) ) class AttributeCertificateInfo(univ.Sequence): pass AttributeCertificateInfo.componentType = namedtype.NamedTypes( namedtype.NamedType('version', AttCertVersion()), namedtype.NamedType('holder', Holder()), namedtype.NamedType('issuer', AttCertIssuer()), namedtype.NamedType('signature', rfc3280.AlgorithmIdentifier()), namedtype.NamedType('serialNumber', rfc3280.CertificateSerialNumber()), namedtype.NamedType('attrCertValidityPeriod', AttCertValidityPeriod()), namedtype.NamedType('attributes', univ.SequenceOf(componentType=rfc3280.Attribute())), namedtype.OptionalNamedType('issuerUniqueID', rfc3280.UniqueIdentifier()), namedtype.OptionalNamedType('extensions', rfc3280.Extensions()) ) class AttributeCertificate(univ.Sequence): pass AttributeCertificate.componentType = namedtype.NamedTypes( namedtype.NamedType('acinfo', AttributeCertificateInfo()), namedtype.NamedType('signatureAlgorithm', rfc3280.AlgorithmIdentifier()), namedtype.NamedType('signatureValue', univ.BitString()) ) id_mod = _buildOid(rfc3280.id_pkix, 0) id_mod_attribute_cert = _buildOid(id_mod, 12) id_aca_accessIdentity = _buildOid(id_aca, 2) class RoleSyntax(univ.Sequence): pass RoleSyntax.componentType = namedtype.NamedTypes( namedtype.OptionalNamedType('roleAuthority', rfc3280.GeneralNames().subtype( implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), namedtype.NamedType('roleName', rfc3280.GeneralName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))) ) id_aca_chargingIdentity = _buildOid(id_aca, 3) class ACClearAttrs(univ.Sequence): pass ACClearAttrs.componentType = namedtype.NamedTypes( namedtype.NamedType('acIssuer', rfc3280.GeneralName()), namedtype.NamedType('acSerial', univ.Integer()), namedtype.NamedType('attrs', 
univ.SequenceOf(componentType=rfc3280.Attribute())) ) id_aca_group = _buildOid(id_aca, 4) id_pe_ac_proxying = _buildOid(rfc3280.id_pe, 10) class SvceAuthInfo(univ.Sequence): pass SvceAuthInfo.componentType = namedtype.NamedTypes( namedtype.NamedType('service', rfc3280.GeneralName()), namedtype.NamedType('ident', rfc3280.GeneralName()), namedtype.OptionalNamedType('authInfo', univ.OctetString()) ) class IetfAttrSyntax(univ.Sequence): pass IetfAttrSyntax.componentType = namedtype.NamedTypes( namedtype.OptionalNamedType( 'policyAuthority', rfc3280.GeneralNames().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)) ), namedtype.NamedType( 'values', univ.SequenceOf( componentType=univ.Choice( componentType=namedtype.NamedTypes( namedtype.NamedType('octets', univ.OctetString()), namedtype.NamedType('oid', univ.ObjectIdentifier()), namedtype.NamedType('string', char.UTF8String()) ) ) ) ) ) id_aca_encAttrs = _buildOid(id_aca, 6)
en
0.688066
# coding: utf-8 # # This file is part of pyasn1-modules software. # # Created by <NAME> with asn1ate tool. # Copyright (c) 2005-2019, <NAME> <<EMAIL>> # License: http://snmplabs.com/pyasn1/license.html # # An Internet Attribute Certificate Profile for Authorization # # ASN.1 source from: # http://www.ietf.org/rfc/rfc3281.txt #
2.050237
2
skill_discovery/hierarchical_agglomoration.py
UT-Austin-RPL/BUDS
9
6624631
<filename>skill_discovery/hierarchical_agglomoration.py """Hierarchical agglomoration""" import os import argparse import h5py import numpy as np import simplejson as json from sklearn.cluster import SpectralClustering from PIL import Image import cv2 import shutil import pickle from collections import namedtuple import networkx as nx from networkx.drawing.nx_agraph import graphviz_layout import matplotlib.pyplot as plt from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas import init_path from skill_discovery.hierarchical_agglomoration_utils import Node, HierarchicalAgglomorativeTree, save_agglomorative_tree from models.args_utils import get_common_args import hydra from omegaconf import OmegaConf, DictConfig import yaml from easydict import EasyDict def filter_labels(labels): for i in range(len(labels)): # In the beginning if i < 3: if labels[i+1] == labels[i+2] == labels[i+3] and labels[i] != labels[i+1]: labels[i] = labels[i+1] # At tail elif len(labels)-3 < i < len(labels) - 1: if labels[i-1] == labels[i-2] == labels[i-3] and labels[i] != labels[i-1]: labels[i] = labels[i-1] elif 3 <= i <= len(labels) - 3: # label = find_most_frequent_element(labels) if (labels[i-1] == labels[i-2] == labels[i+1] or labels[i-1] == labels[i+1] == labels[i+2]) and (labels[i-1] != labels[i]): labels[i] = labels[i-1] return labels @hydra.main(config_path="../conf", config_name="config") def main(hydra_cfg: DictConfig): yaml_config = OmegaConf.to_yaml(hydra_cfg, resolve=True) cfg = EasyDict(yaml.load(yaml_config)) print(f"Footprint mode: {cfg.agglomoration.footprint}, Dist mode: {cfg.agglomoration.dist}") # if args.skip: # if os.path.exists(f"skill_classification/trees/{args.dataset_name}_trees_{modality_str}_{args.footprint}_{args.dist}.pkl"): # print("Already constructed, skipping") # exit() modalities = cfg.repr.modalities modality_str = modalities[0] for modality in modalities[1:]: modality_str += f"_{modality}" modality_str += f"_{cfg.repr.alpha_kl}" if 
cfg.repr.no_skip: modality_str += "_no_skip" output_parent_dir = f"datasets/{cfg.data.dataset_name}/" demo_file = f"{output_parent_dir}/demo.hdf5" h5py_file = h5py.File(demo_file, "r") print(demo_file) num_eps = h5py_file["data"].attrs["num_eps"] embedding_hdf5_path = os.path.join(output_parent_dir, f"embedding_{modality_str}_{cfg.repr.z_dim}.hdf5") embedding_h5py_f = h5py.File(embedding_hdf5_path, "r") save_data = {"dataset_name": cfg.data.dataset_name} total_len = 0 X = [] image_names_list = [] ep_indices_list = [] indices_list = [] step = cfg.agglomoration.agglomoration_step try: shutil.rmtree("skill_classification/initial_clustering") except: pass initial_segments = {} init_X = [] X = [] X_indices = [] num_segments = 0 trees = {"trees": {}} for ep_idx in range(num_eps): embeddings = embedding_h5py_f[f"data/ep_{ep_idx}/embedding"][()] agentview_image_names_list = h5py_file[f"data/ep_{ep_idx}/agentview_image_names"][()] # print(embeddings.shape) image_names_list.append(agentview_image_names_list) init_X.append(embeddings) agglomorative_tree = HierarchicalAgglomorativeTree() agglomorative_tree.agglomoration(embeddings, step, footprint_mode=cfg.agglomoration.footprint, dist_mode=cfg.agglomoration.dist) agglomorative_tree.create_root_node() trees["trees"][ep_idx] = agglomorative_tree # Visualization save_agglomorative_tree(agglomorative_tree, agentview_image_names_list, ep_idx, cfg.data.dataset_name, footprint_mode=cfg.agglomoration.footprint, dist_mode=cfg.agglomoration.dist, modality_mode=modality_str) trees["info"] = {"dataset_name": cfg.data.dataset_name, "num_eps": num_eps} with open(f"skill_classification/trees/{cfg.data.dataset_name}_trees_{modality_str}_{cfg.agglomoration.footprint}_{cfg.agglomoration.dist}.pkl", "wb") as f: pickle.dump(trees, f) h5py_file.close() embedding_h5py_f.close() if __name__ == "__main__": main()
<filename>skill_discovery/hierarchical_agglomoration.py """Hierarchical agglomoration""" import os import argparse import h5py import numpy as np import simplejson as json from sklearn.cluster import SpectralClustering from PIL import Image import cv2 import shutil import pickle from collections import namedtuple import networkx as nx from networkx.drawing.nx_agraph import graphviz_layout import matplotlib.pyplot as plt from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas import init_path from skill_discovery.hierarchical_agglomoration_utils import Node, HierarchicalAgglomorativeTree, save_agglomorative_tree from models.args_utils import get_common_args import hydra from omegaconf import OmegaConf, DictConfig import yaml from easydict import EasyDict def filter_labels(labels): for i in range(len(labels)): # In the beginning if i < 3: if labels[i+1] == labels[i+2] == labels[i+3] and labels[i] != labels[i+1]: labels[i] = labels[i+1] # At tail elif len(labels)-3 < i < len(labels) - 1: if labels[i-1] == labels[i-2] == labels[i-3] and labels[i] != labels[i-1]: labels[i] = labels[i-1] elif 3 <= i <= len(labels) - 3: # label = find_most_frequent_element(labels) if (labels[i-1] == labels[i-2] == labels[i+1] or labels[i-1] == labels[i+1] == labels[i+2]) and (labels[i-1] != labels[i]): labels[i] = labels[i-1] return labels @hydra.main(config_path="../conf", config_name="config") def main(hydra_cfg: DictConfig): yaml_config = OmegaConf.to_yaml(hydra_cfg, resolve=True) cfg = EasyDict(yaml.load(yaml_config)) print(f"Footprint mode: {cfg.agglomoration.footprint}, Dist mode: {cfg.agglomoration.dist}") # if args.skip: # if os.path.exists(f"skill_classification/trees/{args.dataset_name}_trees_{modality_str}_{args.footprint}_{args.dist}.pkl"): # print("Already constructed, skipping") # exit() modalities = cfg.repr.modalities modality_str = modalities[0] for modality in modalities[1:]: modality_str += f"_{modality}" modality_str += f"_{cfg.repr.alpha_kl}" if 
cfg.repr.no_skip: modality_str += "_no_skip" output_parent_dir = f"datasets/{cfg.data.dataset_name}/" demo_file = f"{output_parent_dir}/demo.hdf5" h5py_file = h5py.File(demo_file, "r") print(demo_file) num_eps = h5py_file["data"].attrs["num_eps"] embedding_hdf5_path = os.path.join(output_parent_dir, f"embedding_{modality_str}_{cfg.repr.z_dim}.hdf5") embedding_h5py_f = h5py.File(embedding_hdf5_path, "r") save_data = {"dataset_name": cfg.data.dataset_name} total_len = 0 X = [] image_names_list = [] ep_indices_list = [] indices_list = [] step = cfg.agglomoration.agglomoration_step try: shutil.rmtree("skill_classification/initial_clustering") except: pass initial_segments = {} init_X = [] X = [] X_indices = [] num_segments = 0 trees = {"trees": {}} for ep_idx in range(num_eps): embeddings = embedding_h5py_f[f"data/ep_{ep_idx}/embedding"][()] agentview_image_names_list = h5py_file[f"data/ep_{ep_idx}/agentview_image_names"][()] # print(embeddings.shape) image_names_list.append(agentview_image_names_list) init_X.append(embeddings) agglomorative_tree = HierarchicalAgglomorativeTree() agglomorative_tree.agglomoration(embeddings, step, footprint_mode=cfg.agglomoration.footprint, dist_mode=cfg.agglomoration.dist) agglomorative_tree.create_root_node() trees["trees"][ep_idx] = agglomorative_tree # Visualization save_agglomorative_tree(agglomorative_tree, agentview_image_names_list, ep_idx, cfg.data.dataset_name, footprint_mode=cfg.agglomoration.footprint, dist_mode=cfg.agglomoration.dist, modality_mode=modality_str) trees["info"] = {"dataset_name": cfg.data.dataset_name, "num_eps": num_eps} with open(f"skill_classification/trees/{cfg.data.dataset_name}_trees_{modality_str}_{cfg.agglomoration.footprint}_{cfg.agglomoration.dist}.pkl", "wb") as f: pickle.dump(trees, f) h5py_file.close() embedding_h5py_f.close() if __name__ == "__main__": main()
en
0.402482
Hierarchical agglomoration # In the beginning # At tail # label = find_most_frequent_element(labels) # if args.skip: # if os.path.exists(f"skill_classification/trees/{args.dataset_name}_trees_{modality_str}_{args.footprint}_{args.dist}.pkl"): # print("Already constructed, skipping") # exit() # print(embeddings.shape) # Visualization
2.372501
2
setup-build.py
lights-a5/a-rtfc
0
6624632
#!/usr/bin/python3 import configparser from os import walk class Setup_preparer: translate_dict = {} def __init__(self, translate_dict_filepath): config = configparser.ConfigParser() config.read(translate_dict_filepath) for key in config["build_config"]: self.translate_dict[key] = config["build_config"][key] def setup_file(self, file_path): read_file = '' with open(file_path) as file: read_file = file.read() for key in self.translate_dict: read_file = read_file.replace(key, self.translate_dict[key]) with open(file_path, 'w') as file: file.write(read_file) setup_preparer = Setup_preparer("./setup_vars.ini") folders = ['api', 'client', 'config', 'mobile', 'overlay'] for folder in folders: for (dirpath, dirnames, filenames) in walk("./{}".format(folder)): if "node_modules" not in dirpath and "dist" not in dirpath: for filename in filenames: if '.js' in filename or '.vue' in filename: setup_preparer.setup_file(dirpath + "/{}".format(filename))
#!/usr/bin/python3 import configparser from os import walk class Setup_preparer: translate_dict = {} def __init__(self, translate_dict_filepath): config = configparser.ConfigParser() config.read(translate_dict_filepath) for key in config["build_config"]: self.translate_dict[key] = config["build_config"][key] def setup_file(self, file_path): read_file = '' with open(file_path) as file: read_file = file.read() for key in self.translate_dict: read_file = read_file.replace(key, self.translate_dict[key]) with open(file_path, 'w') as file: file.write(read_file) setup_preparer = Setup_preparer("./setup_vars.ini") folders = ['api', 'client', 'config', 'mobile', 'overlay'] for folder in folders: for (dirpath, dirnames, filenames) in walk("./{}".format(folder)): if "node_modules" not in dirpath and "dist" not in dirpath: for filename in filenames: if '.js' in filename or '.vue' in filename: setup_preparer.setup_file(dirpath + "/{}".format(filename))
fr
0.386793
#!/usr/bin/python3
2.369756
2
backend/accounts/api.py
aibek79/Django-React-knboard
665
6624633
<reponame>aibek79/Django-React-knboard import uuid import shortuuid from dj_rest_auth.registration.views import RegisterView from django.conf import settings from django.contrib.auth import get_user_model from rest_framework import filters from rest_framework import mixins from rest_framework.decorators import action from rest_framework.exceptions import PermissionDenied from rest_framework.generics import ListAPIView from rest_framework.permissions import IsAuthenticated from rest_framework.response import Response from rest_framework.status import HTTP_400_BAD_REQUEST from rest_framework.views import APIView from rest_framework.viewsets import GenericViewSet from rest_framework.viewsets import ReadOnlyModelViewSet from accounts.models import Avatar from boards.demo import create_demo_board, get_random_avatar from boards.models import Board from .permissions import IsSelf from .serializers import ( AvatarSerializer, UserSerializer, UserDetailSerializer, UserSearchSerializer, ) User = get_user_model() class ExcludeBoardMembersFilter(filters.BaseFilterBackend): """ Filter that only shows members that are not a member of board. 
""" result_limit = 8 filter_param = "board" def filter_queryset(self, request, queryset, view): board_id = request.query_params.get(self.filter_param) try: board = Board.objects.get(id=board_id) except (Board.DoesNotExist, ValueError): return queryset return queryset.exclude(id__in=board.members.all())[: self.result_limit] class UserViewSet( mixins.RetrieveModelMixin, mixins.UpdateModelMixin, GenericViewSet, ): serializer_class = UserSerializer queryset = User.objects.all() permission_classes = [IsAuthenticated, IsSelf] def get_serializer_class(self): if self.action == "retrieve" or self.action == "update": return UserDetailSerializer return super().get_serializer_class() @action(detail=True, methods=["post"]) def update_avatar(self, request, pk): avatar_id = request.data.get("id") avatar = Avatar.objects.get(id=avatar_id) user = self.get_object() user.avatar = avatar user.save() return Response(AvatarSerializer(instance=avatar).data) class UserSearchView(ListAPIView): queryset = User.objects.filter(is_active=True).all() serializer_class = UserSearchSerializer filter_backends = [filters.SearchFilter, ExcludeBoardMembersFilter] permission_classes = [IsAuthenticated] search_fields = ["username"] def get(self, request, *args, **kwargs): """ Don't want to make scraping users too easy. So there are some limits for this search endpoint. 
1) The search must have at least 3 characters 2) A fixed low amount of users are returned at most """ params = request.query_params board_id = params.get("board", "") search = params.get("search", "") if not board_id.isdigit() or not Board.objects.filter(id=board_id).exists(): return Response(status=HTTP_400_BAD_REQUEST) if len(search) < 3: return Response([]) return super().get(request, *args, **kwargs) class AvatarViewSet(ReadOnlyModelViewSet): serializer_class = AvatarSerializer queryset = Avatar.objects.all() permission_classes = [IsAuthenticated] class AuthSetup(APIView): def get(self, request, *args, **kwargs): return Response({"ALLOW_GUEST_ACCESS": settings.ALLOW_GUEST_ACCESS}) class GuestRegistration(RegisterView): def create(self, request, *args, **kwargs): if not settings.ALLOW_GUEST_ACCESS: raise PermissionDenied password = str(<PASSWORD>()) guest_id = str(shortuuid.uuid())[:10] request.data.update( { "username": f"Guest-{guest_id}", "email": f"{<EMAIL>", "password1": password, "password2": password, } ) return super().create(request, *args, **kwargs) def perform_create(self, serializer): user = super().perform_create(serializer) user.is_guest = True user.avatar = get_random_avatar() user.save() create_demo_board(user) return user
import uuid import shortuuid from dj_rest_auth.registration.views import RegisterView from django.conf import settings from django.contrib.auth import get_user_model from rest_framework import filters from rest_framework import mixins from rest_framework.decorators import action from rest_framework.exceptions import PermissionDenied from rest_framework.generics import ListAPIView from rest_framework.permissions import IsAuthenticated from rest_framework.response import Response from rest_framework.status import HTTP_400_BAD_REQUEST from rest_framework.views import APIView from rest_framework.viewsets import GenericViewSet from rest_framework.viewsets import ReadOnlyModelViewSet from accounts.models import Avatar from boards.demo import create_demo_board, get_random_avatar from boards.models import Board from .permissions import IsSelf from .serializers import ( AvatarSerializer, UserSerializer, UserDetailSerializer, UserSearchSerializer, ) User = get_user_model() class ExcludeBoardMembersFilter(filters.BaseFilterBackend): """ Filter that only shows members that are not a member of board. 
""" result_limit = 8 filter_param = "board" def filter_queryset(self, request, queryset, view): board_id = request.query_params.get(self.filter_param) try: board = Board.objects.get(id=board_id) except (Board.DoesNotExist, ValueError): return queryset return queryset.exclude(id__in=board.members.all())[: self.result_limit] class UserViewSet( mixins.RetrieveModelMixin, mixins.UpdateModelMixin, GenericViewSet, ): serializer_class = UserSerializer queryset = User.objects.all() permission_classes = [IsAuthenticated, IsSelf] def get_serializer_class(self): if self.action == "retrieve" or self.action == "update": return UserDetailSerializer return super().get_serializer_class() @action(detail=True, methods=["post"]) def update_avatar(self, request, pk): avatar_id = request.data.get("id") avatar = Avatar.objects.get(id=avatar_id) user = self.get_object() user.avatar = avatar user.save() return Response(AvatarSerializer(instance=avatar).data) class UserSearchView(ListAPIView): queryset = User.objects.filter(is_active=True).all() serializer_class = UserSearchSerializer filter_backends = [filters.SearchFilter, ExcludeBoardMembersFilter] permission_classes = [IsAuthenticated] search_fields = ["username"] def get(self, request, *args, **kwargs): """ Don't want to make scraping users too easy. So there are some limits for this search endpoint. 
1) The search must have at least 3 characters 2) A fixed low amount of users are returned at most """ params = request.query_params board_id = params.get("board", "") search = params.get("search", "") if not board_id.isdigit() or not Board.objects.filter(id=board_id).exists(): return Response(status=HTTP_400_BAD_REQUEST) if len(search) < 3: return Response([]) return super().get(request, *args, **kwargs) class AvatarViewSet(ReadOnlyModelViewSet): serializer_class = AvatarSerializer queryset = Avatar.objects.all() permission_classes = [IsAuthenticated] class AuthSetup(APIView): def get(self, request, *args, **kwargs): return Response({"ALLOW_GUEST_ACCESS": settings.ALLOW_GUEST_ACCESS}) class GuestRegistration(RegisterView): def create(self, request, *args, **kwargs): if not settings.ALLOW_GUEST_ACCESS: raise PermissionDenied password = str(<PASSWORD>()) guest_id = str(shortuuid.uuid())[:10] request.data.update( { "username": f"Guest-{guest_id}", "email": f"{<EMAIL>", "password1": password, "password2": password, } ) return super().create(request, *args, **kwargs) def perform_create(self, serializer): user = super().perform_create(serializer) user.is_guest = True user.avatar = get_random_avatar() user.save() create_demo_board(user) return user
en
0.973785
Filter that only shows members that are not a member of board. Don't want to make scraping users too easy. So there are some limits for this search endpoint. 1) The search must have at least 3 characters 2) A fixed low amount of users are returned at most
2.034262
2
server/app/util/data_locator.py
hy395/cellxgene
0
6624634
<filename>server/app/util/data_locator.py<gh_stars>0 import os import tempfile import fsspec from datetime import datetime class DataLocator(): """ DataLocator is a simple wrapper around fsspec functionality, and provides a set of functions to encapsulate a data location (URI or path), interogate metadata about the object at that location (size, existance, etc) and access the underlying data. https://filesystem-spec.readthedocs.io/en/latest/index.html Example: dl = DataLocator("/tmp/foo.h5ad") if dl.exists(): print(dl.size()) with dl.open() as f: thecontents = f.read() DataLocator will accept a URI or native path. Error handling is as defined in fsspec. """ def __init__(self, uri_or_path): self.uri_or_path = uri_or_path self.protocol, self.path = DataLocator._get_protocol_and_path(uri_or_path) # work-around for LocalFileSystem not treating file: and None as the same scheme/protocol self.cname = self.path if self.protocol == 'file' else self.uri_or_path # will throw RuntimeError if the protocol is unsupported self.fs = fsspec.filesystem(self.protocol) @staticmethod def _get_protocol_and_path(uri_or_path): if "://" in uri_or_path: protocol, path = uri_or_path.split("://", 1) # windows!!! 
Ignore single letter drive identifiers, # eg, G:\foo.txt if len(protocol) > 1: return protocol, path return None, uri_or_path def exists(self): return self.fs.exists(self.cname) def size(self): return self.fs.size(self.cname) def lastmodtime(self): """ return datetime object representing last modification time, or None if unavailable """ info = self.fs.info(self.cname) if self.islocal() and info is not None: return datetime.fromtimestamp(info['mtime']) else: return getattr(info, 'LastModified', None) def abspath(self): """ return the absolute path for the locator - only really does something for file: protocol, as all others are already absolute """ if self.islocal(): return os.path.abspath(self.path) else: return self.uri_or_path def isfile(self): return self.fs.isfile(self.cname) def open(self, *args): return self.fs.open(self.uri_or_path, *args) def islocal(self): return self.protocol is None or self.protocol == 'file' def local_handle(self): if self.islocal(): return LocalFilePath(self.path) # if not local, create a tmp file system object to contain the data, # and clean it up when done. with self.open() as src, tempfile.NamedTemporaryFile(prefix="cellxgene_", delete=False) as tmp: tmp.write(src.read()) tmp.close() src.close() tmp_path = tmp.name return LocalFilePath(tmp_path, delete=True) class LocalFilePath(): def __init__(self, tmp_path, delete=False): self.tmp_path = tmp_path self.delete = delete def __enter__(self): return self.tmp_path def __exit__(self, *args): if self.delete: os.unlink(self.tmp_path)
<filename>server/app/util/data_locator.py<gh_stars>0 import os import tempfile import fsspec from datetime import datetime class DataLocator(): """ DataLocator is a simple wrapper around fsspec functionality, and provides a set of functions to encapsulate a data location (URI or path), interogate metadata about the object at that location (size, existance, etc) and access the underlying data. https://filesystem-spec.readthedocs.io/en/latest/index.html Example: dl = DataLocator("/tmp/foo.h5ad") if dl.exists(): print(dl.size()) with dl.open() as f: thecontents = f.read() DataLocator will accept a URI or native path. Error handling is as defined in fsspec. """ def __init__(self, uri_or_path): self.uri_or_path = uri_or_path self.protocol, self.path = DataLocator._get_protocol_and_path(uri_or_path) # work-around for LocalFileSystem not treating file: and None as the same scheme/protocol self.cname = self.path if self.protocol == 'file' else self.uri_or_path # will throw RuntimeError if the protocol is unsupported self.fs = fsspec.filesystem(self.protocol) @staticmethod def _get_protocol_and_path(uri_or_path): if "://" in uri_or_path: protocol, path = uri_or_path.split("://", 1) # windows!!! 
Ignore single letter drive identifiers, # eg, G:\foo.txt if len(protocol) > 1: return protocol, path return None, uri_or_path def exists(self): return self.fs.exists(self.cname) def size(self): return self.fs.size(self.cname) def lastmodtime(self): """ return datetime object representing last modification time, or None if unavailable """ info = self.fs.info(self.cname) if self.islocal() and info is not None: return datetime.fromtimestamp(info['mtime']) else: return getattr(info, 'LastModified', None) def abspath(self): """ return the absolute path for the locator - only really does something for file: protocol, as all others are already absolute """ if self.islocal(): return os.path.abspath(self.path) else: return self.uri_or_path def isfile(self): return self.fs.isfile(self.cname) def open(self, *args): return self.fs.open(self.uri_or_path, *args) def islocal(self): return self.protocol is None or self.protocol == 'file' def local_handle(self): if self.islocal(): return LocalFilePath(self.path) # if not local, create a tmp file system object to contain the data, # and clean it up when done. with self.open() as src, tempfile.NamedTemporaryFile(prefix="cellxgene_", delete=False) as tmp: tmp.write(src.read()) tmp.close() src.close() tmp_path = tmp.name return LocalFilePath(tmp_path, delete=True) class LocalFilePath(): def __init__(self, tmp_path, delete=False): self.tmp_path = tmp_path self.delete = delete def __enter__(self): return self.tmp_path def __exit__(self, *args): if self.delete: os.unlink(self.tmp_path)
en
0.78462
DataLocator is a simple wrapper around fsspec functionality, and provides a set of functions to encapsulate a data location (URI or path), interogate metadata about the object at that location (size, existance, etc) and access the underlying data. https://filesystem-spec.readthedocs.io/en/latest/index.html Example: dl = DataLocator("/tmp/foo.h5ad") if dl.exists(): print(dl.size()) with dl.open() as f: thecontents = f.read() DataLocator will accept a URI or native path. Error handling is as defined in fsspec. # work-around for LocalFileSystem not treating file: and None as the same scheme/protocol # will throw RuntimeError if the protocol is unsupported # windows!!! Ignore single letter drive identifiers, # eg, G:\foo.txt return datetime object representing last modification time, or None if unavailable return the absolute path for the locator - only really does something for file: protocol, as all others are already absolute # if not local, create a tmp file system object to contain the data, # and clean it up when done.
3.329668
3
stumpy/aamp_motifs.py
TDAmeritrade/stumpy
2,296
6624635
<reponame>TDAmeritrade/stumpy<gh_stars>1000+ # STUMPY # Copyright 2019 TD Ameritrade. Released under the terms of the 3-Clause BSD license. # STUMPY is a trademark of TD Ameritrade IP Company, Inc. All rights reserved. import logging import numpy as np from . import core, config logger = logging.getLogger(__name__) def _aamp_motifs( T, P, T_subseq_isfinite, T_squared, excl_zone, min_neighbors, max_distance, cutoff, max_matches, max_motifs, ): """ Find the top non-normalized motifs (i.e., without z-normalization) for time series `T`. A subsequence, `Q`, becomes a candidate motif if there are at least `min_neighbor` number of other subsequence matches in `T` (outside the exclusion zone) with a distance less or equal to `max_distance`. Parameters ---------- T : numpy.ndarray The time series or sequence P : numpy.ndarray Matrix Profile of `T` T_subseq_isfinite : numpy.ndarray A boolean array that indicates whether a subsequence in `T` contains a `np.nan`/`np.inf` value (False) T_squared : numpy.ndarray Squared time series or sequence excl_zone : int Size of the exclusion zone min_neighbors : int The minimum number of similar matches a subsequence needs to have in order to be considered a motif. max_distance : float or function For a candidate motif, `Q`, and a non-trivial subsequence, `S`, `max_distance` is the maximum distance allowed between `Q` and `S` so that `S` is considered a match of `Q`. If `max_distance` is a function, then it must be a function that accepts a single parameter, `D`, in its function signature, which is the distance profile between `Q` and `T`. cutoff : float The largest matrix profile value (distance) that a candidate motif is allowed to have. max_matches : int The maximum number of similar matches to be returned. The resulting matches are sorted by distance (starting with the most similar). Note that the first match is always the self-match/trivial-match for each motif. max_motifs : int The maximum number of motifs to return. 
Return ------ motif_distances : numpy.ndarray The distances corresponding to a set of subsequence matches for each motif. Note that the first column always corresponds to the distance for the self-match/trivial-match for each motif. motif_indices : numpy.ndarray The indices corresponding to a set of subsequences matches for each motif Note that the first column always corresponds to the index for the self-match/trivial-match for each motif """ n = T.shape[1] l = P.shape[1] m = n - l + 1 motif_indices = [] motif_distances = [] candidate_idx = np.argmin(P[-1]) while len(motif_indices) < max_motifs: profile_value = P[-1, candidate_idx] if profile_value > cutoff: # pragma: no cover break # If max_distance is a constant (independent of the distance profile D of Q # and T), then we can stop the iteration if the matrix profile value of Q is # larger than the maximum distance. if ( isinstance(max_distance, float) and profile_value > max_distance ): # pragma: no cover break Q = T[:, candidate_idx : candidate_idx + m] query_matches = aamp_match( Q, T, max_matches=None, max_distance=max_distance, ) if len(query_matches) > min_neighbors: motif_distances.append(query_matches[:max_matches, 0]) motif_indices.append(query_matches[:max_matches, 1]) for idx in query_matches[:, 1]: core.apply_exclusion_zone(P, int(idx), excl_zone) candidate_idx = np.argmin(P[-1]) motif_distances = core._jagged_list_to_array( motif_distances, fill_value=np.nan, dtype="float64" ) motif_indices = core._jagged_list_to_array( motif_indices, fill_value=-1, dtype="int64" ) return motif_distances, motif_indices def aamp_motifs( T, P, min_neighbors=1, max_distance=None, cutoff=None, max_matches=10, max_motifs=1, ): """ Discover the top non-normalized motifs (i.e., without z-normalization) for time series `T`. A subsequence, `Q`, becomes a candidate motif if there are at least `min_neighbor` number of other subsequence matches in `T` (outside the exclusion zone) with a distance less or equal to `max_distance`. 
Parameters ---------- T : numpy.ndarray The time series or sequence P : numpy.ndarray Matrix Profile of `T` min_neighbors : int, default 1 The minimum number of similar matches a subsequence needs to have in order to be considered a motif. This defaults to `1`, which means that a subsequence must have at least one similar match in order to be considered a motif. max_distance : float or function, default None For a candidate motif, `Q`, and a non-trivial subsequence, `S`, `max_distance` is the maximum distance allowed between `Q` and `S` so that `S` is considered a match of `Q`. If `max_distance` is a function, then it must be a function that accepts a single parameter, `D`, in its function signature, which is the distance profile between `Q` and `T`. If None, this defaults to `np.nanmax([np.nanmean(D) - 2.0 * np.nanstd(D), np.nanmin(D)])`. cutoff : float, default None The largest matrix profile value (distance) that a candidate motif is allowed to have. If `None`, this defaults to `np.nanmax([np.nanmean(P) - 2.0 * np.nanstd(P), np.nanmin(P)])` max_matches : int, default 10 The maximum amount of similar matches of a motif representative to be returned. The resulting matches are sorted by distance, so a value of `10` means that the indices of the most similar `10` subsequences is returned. If `None`, all matches within `max_distance` of the motif representative will be returned. Note that the first match is always the self-match/trivial-match for each motif. max_motifs : int, default 1 The maximum number of motifs to return. Return ------ motif_distances : numpy.ndarray The distances corresponding to a set of subsequence matches for each motif. Note that the first column always corresponds to the distance for the self-match/trivial-match for each motif. motif_indices : numpy.ndarray The indices corresponding to a set of subsequences matches for each motif Note that the first column always corresponds to the index for the self-match/trivial-match for each motif. 
""" if max_motifs < 1: # pragma: no cover logger.warn( "The maximum number of motifs, `max_motifs`, " "must be greater than or equal to 1" ) logger.warn("`max_motifs` has been set to `1`") max_motifs = 1 if T.ndim != 1: # pragma: no cover raise ValueError( f"T is {T.ndim}-dimensional and must be 1-dimensional. " "Multidimensional motif discovery is not yet supported." ) if P.ndim != 1: # pragma: no cover raise ValueError( f"T is {P.ndim}-dimensional and must be 1-dimensional. " "Multidimensional motif discovery is not yet supported." ) m = T.shape[-1] - P.shape[-1] + 1 excl_zone = int(np.ceil(m / config.STUMPY_EXCL_ZONE_DENOM)) if max_matches is None: # pragma: no cover max_matches = np.inf if cutoff is None: # pragma: no cover P_copy = P.copy().astype(np.float64) P_copy[np.isinf(P_copy)] = np.nan cutoff = np.nanmax( [np.nanmean(P_copy) - 2.0 * np.nanstd(P_copy), np.nanmin(P_copy)] ) T, T_subseq_isfinite = core.preprocess_non_normalized(T[np.newaxis, :], m) T_squared = np.sum(core.rolling_window(T * T, m), axis=-1) P = P[np.newaxis, :].astype(np.float64) motif_distances, motif_indices = _aamp_motifs( T, P, T_subseq_isfinite, T_squared, excl_zone, min_neighbors, max_distance, cutoff, max_matches, max_motifs, ) return motif_distances, motif_indices def aamp_match( Q, T, T_subseq_isfinite=None, T_squared=None, max_distance=None, max_matches=None, ): """ Find all matches of a query `Q` in a time series `T`, i.e. the indices of subsequences whose distances to `Q` are less or equal to `max_distance`, sorted by distance (lowest to highest). Around each occurrence an exclusion zone is applied before searching for the next. Parameters ---------- Q : numpy.ndarray The query sequence. It doesn't have to be a subsequence of `T` T : numpy.ndarray The time series of interest max_distance : float or function, default None Maximum distance between `Q` and a subsequence `S` for `S` to be considered a match. 
If a function, then it has to be a function of one argument `D`, which will be the distance profile of `Q` with `T` (a 1D numpy array of size `n-m+1`). If None, defaults to `np.nanmax([np.nanmean(D) - 2 * np.nanstd(D), np.nanmin(D)])` (i.e. at least the closest match will be returned). max_matches : int, default None The maximum amount of similar occurrences to be returned. The resulting occurrences are sorted by distance, so a value of `10` means that the indices of the most similar `10` subsequences is returned. If `None`, then all occurrences are returned. Returns ------- out : numpy.ndarray The first column consists of distances of subsequences of `T` whose distances to `Q` are smaller than `max_distance`, sorted by distance (lowest to highest). The second column consists of the corresponding indices in `T`. """ if len(Q.shape) == 1: Q = Q[np.newaxis, :] if len(T.shape) == 1: T = T[np.newaxis, :] d, n = T.shape m = Q.shape[1] excl_zone = int(np.ceil(m / config.STUMPY_EXCL_ZONE_DENOM)) if max_matches is None: # pragma: no cover max_matches = np.inf if np.any(np.isnan(Q)) or np.any(np.isinf(Q)): # pragma: no cover raise ValueError("Q contains illegal values (NaN or inf)") if max_distance is None: # pragma: no cover def max_distance(D): D_copy = D.copy().astype(np.float64) D_copy[np.isinf(D_copy)] = np.nan return np.nanmax( [np.nanmean(D_copy) - 2.0 * np.nanstd(D_copy), np.nanmin(D_copy)] ) if T_subseq_isfinite is None or T_squared is None: T, T_subseq_isfinite = core.preprocess_non_normalized(T, m) T_squared = np.sum(core.rolling_window(T * T, m), axis=-1) D = [ core.mass_absolute(Q[i], T[i], T_subseq_isfinite[i], T_squared[i]) for i in range(d) ] D = np.sum(D, axis=0) / d if not isinstance(max_distance, float): max_distance = max_distance(D) matches = [] candidate_idx = np.argmin(D) while D[candidate_idx] <= max_distance and len(matches) < max_matches: matches.append([D[candidate_idx], candidate_idx]) core.apply_exclusion_zone(D, candidate_idx, excl_zone) 
candidate_idx = np.argmin(D) return np.array(matches, dtype=object)
# STUMPY # Copyright 2019 TD Ameritrade. Released under the terms of the 3-Clause BSD license. # STUMPY is a trademark of TD Ameritrade IP Company, Inc. All rights reserved. import logging import numpy as np from . import core, config logger = logging.getLogger(__name__) def _aamp_motifs( T, P, T_subseq_isfinite, T_squared, excl_zone, min_neighbors, max_distance, cutoff, max_matches, max_motifs, ): """ Find the top non-normalized motifs (i.e., without z-normalization) for time series `T`. A subsequence, `Q`, becomes a candidate motif if there are at least `min_neighbor` number of other subsequence matches in `T` (outside the exclusion zone) with a distance less or equal to `max_distance`. Parameters ---------- T : numpy.ndarray The time series or sequence P : numpy.ndarray Matrix Profile of `T` T_subseq_isfinite : numpy.ndarray A boolean array that indicates whether a subsequence in `T` contains a `np.nan`/`np.inf` value (False) T_squared : numpy.ndarray Squared time series or sequence excl_zone : int Size of the exclusion zone min_neighbors : int The minimum number of similar matches a subsequence needs to have in order to be considered a motif. max_distance : float or function For a candidate motif, `Q`, and a non-trivial subsequence, `S`, `max_distance` is the maximum distance allowed between `Q` and `S` so that `S` is considered a match of `Q`. If `max_distance` is a function, then it must be a function that accepts a single parameter, `D`, in its function signature, which is the distance profile between `Q` and `T`. cutoff : float The largest matrix profile value (distance) that a candidate motif is allowed to have. max_matches : int The maximum number of similar matches to be returned. The resulting matches are sorted by distance (starting with the most similar). Note that the first match is always the self-match/trivial-match for each motif. max_motifs : int The maximum number of motifs to return. 
Return ------ motif_distances : numpy.ndarray The distances corresponding to a set of subsequence matches for each motif. Note that the first column always corresponds to the distance for the self-match/trivial-match for each motif. motif_indices : numpy.ndarray The indices corresponding to a set of subsequences matches for each motif Note that the first column always corresponds to the index for the self-match/trivial-match for each motif """ n = T.shape[1] l = P.shape[1] m = n - l + 1 motif_indices = [] motif_distances = [] candidate_idx = np.argmin(P[-1]) while len(motif_indices) < max_motifs: profile_value = P[-1, candidate_idx] if profile_value > cutoff: # pragma: no cover break # If max_distance is a constant (independent of the distance profile D of Q # and T), then we can stop the iteration if the matrix profile value of Q is # larger than the maximum distance. if ( isinstance(max_distance, float) and profile_value > max_distance ): # pragma: no cover break Q = T[:, candidate_idx : candidate_idx + m] query_matches = aamp_match( Q, T, max_matches=None, max_distance=max_distance, ) if len(query_matches) > min_neighbors: motif_distances.append(query_matches[:max_matches, 0]) motif_indices.append(query_matches[:max_matches, 1]) for idx in query_matches[:, 1]: core.apply_exclusion_zone(P, int(idx), excl_zone) candidate_idx = np.argmin(P[-1]) motif_distances = core._jagged_list_to_array( motif_distances, fill_value=np.nan, dtype="float64" ) motif_indices = core._jagged_list_to_array( motif_indices, fill_value=-1, dtype="int64" ) return motif_distances, motif_indices def aamp_motifs( T, P, min_neighbors=1, max_distance=None, cutoff=None, max_matches=10, max_motifs=1, ): """ Discover the top non-normalized motifs (i.e., without z-normalization) for time series `T`. A subsequence, `Q`, becomes a candidate motif if there are at least `min_neighbor` number of other subsequence matches in `T` (outside the exclusion zone) with a distance less or equal to `max_distance`. 
Parameters ---------- T : numpy.ndarray The time series or sequence P : numpy.ndarray Matrix Profile of `T` min_neighbors : int, default 1 The minimum number of similar matches a subsequence needs to have in order to be considered a motif. This defaults to `1`, which means that a subsequence must have at least one similar match in order to be considered a motif. max_distance : float or function, default None For a candidate motif, `Q`, and a non-trivial subsequence, `S`, `max_distance` is the maximum distance allowed between `Q` and `S` so that `S` is considered a match of `Q`. If `max_distance` is a function, then it must be a function that accepts a single parameter, `D`, in its function signature, which is the distance profile between `Q` and `T`. If None, this defaults to `np.nanmax([np.nanmean(D) - 2.0 * np.nanstd(D), np.nanmin(D)])`. cutoff : float, default None The largest matrix profile value (distance) that a candidate motif is allowed to have. If `None`, this defaults to `np.nanmax([np.nanmean(P) - 2.0 * np.nanstd(P), np.nanmin(P)])` max_matches : int, default 10 The maximum amount of similar matches of a motif representative to be returned. The resulting matches are sorted by distance, so a value of `10` means that the indices of the most similar `10` subsequences is returned. If `None`, all matches within `max_distance` of the motif representative will be returned. Note that the first match is always the self-match/trivial-match for each motif. max_motifs : int, default 1 The maximum number of motifs to return. Return ------ motif_distances : numpy.ndarray The distances corresponding to a set of subsequence matches for each motif. Note that the first column always corresponds to the distance for the self-match/trivial-match for each motif. motif_indices : numpy.ndarray The indices corresponding to a set of subsequences matches for each motif Note that the first column always corresponds to the index for the self-match/trivial-match for each motif. 
""" if max_motifs < 1: # pragma: no cover logger.warn( "The maximum number of motifs, `max_motifs`, " "must be greater than or equal to 1" ) logger.warn("`max_motifs` has been set to `1`") max_motifs = 1 if T.ndim != 1: # pragma: no cover raise ValueError( f"T is {T.ndim}-dimensional and must be 1-dimensional. " "Multidimensional motif discovery is not yet supported." ) if P.ndim != 1: # pragma: no cover raise ValueError( f"T is {P.ndim}-dimensional and must be 1-dimensional. " "Multidimensional motif discovery is not yet supported." ) m = T.shape[-1] - P.shape[-1] + 1 excl_zone = int(np.ceil(m / config.STUMPY_EXCL_ZONE_DENOM)) if max_matches is None: # pragma: no cover max_matches = np.inf if cutoff is None: # pragma: no cover P_copy = P.copy().astype(np.float64) P_copy[np.isinf(P_copy)] = np.nan cutoff = np.nanmax( [np.nanmean(P_copy) - 2.0 * np.nanstd(P_copy), np.nanmin(P_copy)] ) T, T_subseq_isfinite = core.preprocess_non_normalized(T[np.newaxis, :], m) T_squared = np.sum(core.rolling_window(T * T, m), axis=-1) P = P[np.newaxis, :].astype(np.float64) motif_distances, motif_indices = _aamp_motifs( T, P, T_subseq_isfinite, T_squared, excl_zone, min_neighbors, max_distance, cutoff, max_matches, max_motifs, ) return motif_distances, motif_indices def aamp_match( Q, T, T_subseq_isfinite=None, T_squared=None, max_distance=None, max_matches=None, ): """ Find all matches of a query `Q` in a time series `T`, i.e. the indices of subsequences whose distances to `Q` are less or equal to `max_distance`, sorted by distance (lowest to highest). Around each occurrence an exclusion zone is applied before searching for the next. Parameters ---------- Q : numpy.ndarray The query sequence. It doesn't have to be a subsequence of `T` T : numpy.ndarray The time series of interest max_distance : float or function, default None Maximum distance between `Q` and a subsequence `S` for `S` to be considered a match. 
If a function, then it has to be a function of one argument `D`, which will be the distance profile of `Q` with `T` (a 1D numpy array of size `n-m+1`). If None, defaults to `np.nanmax([np.nanmean(D) - 2 * np.nanstd(D), np.nanmin(D)])` (i.e. at least the closest match will be returned). max_matches : int, default None The maximum amount of similar occurrences to be returned. The resulting occurrences are sorted by distance, so a value of `10` means that the indices of the most similar `10` subsequences is returned. If `None`, then all occurrences are returned. Returns ------- out : numpy.ndarray The first column consists of distances of subsequences of `T` whose distances to `Q` are smaller than `max_distance`, sorted by distance (lowest to highest). The second column consists of the corresponding indices in `T`. """ if len(Q.shape) == 1: Q = Q[np.newaxis, :] if len(T.shape) == 1: T = T[np.newaxis, :] d, n = T.shape m = Q.shape[1] excl_zone = int(np.ceil(m / config.STUMPY_EXCL_ZONE_DENOM)) if max_matches is None: # pragma: no cover max_matches = np.inf if np.any(np.isnan(Q)) or np.any(np.isinf(Q)): # pragma: no cover raise ValueError("Q contains illegal values (NaN or inf)") if max_distance is None: # pragma: no cover def max_distance(D): D_copy = D.copy().astype(np.float64) D_copy[np.isinf(D_copy)] = np.nan return np.nanmax( [np.nanmean(D_copy) - 2.0 * np.nanstd(D_copy), np.nanmin(D_copy)] ) if T_subseq_isfinite is None or T_squared is None: T, T_subseq_isfinite = core.preprocess_non_normalized(T, m) T_squared = np.sum(core.rolling_window(T * T, m), axis=-1) D = [ core.mass_absolute(Q[i], T[i], T_subseq_isfinite[i], T_squared[i]) for i in range(d) ] D = np.sum(D, axis=0) / d if not isinstance(max_distance, float): max_distance = max_distance(D) matches = [] candidate_idx = np.argmin(D) while D[candidate_idx] <= max_distance and len(matches) < max_matches: matches.append([D[candidate_idx], candidate_idx]) core.apply_exclusion_zone(D, candidate_idx, excl_zone) 
candidate_idx = np.argmin(D) return np.array(matches, dtype=object)
en
0.815879
# STUMPY # Copyright 2019 TD Ameritrade. Released under the terms of the 3-Clause BSD license. # STUMPY is a trademark of TD Ameritrade IP Company, Inc. All rights reserved. Find the top non-normalized motifs (i.e., without z-normalization) for time series `T`. A subsequence, `Q`, becomes a candidate motif if there are at least `min_neighbor` number of other subsequence matches in `T` (outside the exclusion zone) with a distance less or equal to `max_distance`. Parameters ---------- T : numpy.ndarray The time series or sequence P : numpy.ndarray Matrix Profile of `T` T_subseq_isfinite : numpy.ndarray A boolean array that indicates whether a subsequence in `T` contains a `np.nan`/`np.inf` value (False) T_squared : numpy.ndarray Squared time series or sequence excl_zone : int Size of the exclusion zone min_neighbors : int The minimum number of similar matches a subsequence needs to have in order to be considered a motif. max_distance : float or function For a candidate motif, `Q`, and a non-trivial subsequence, `S`, `max_distance` is the maximum distance allowed between `Q` and `S` so that `S` is considered a match of `Q`. If `max_distance` is a function, then it must be a function that accepts a single parameter, `D`, in its function signature, which is the distance profile between `Q` and `T`. cutoff : float The largest matrix profile value (distance) that a candidate motif is allowed to have. max_matches : int The maximum number of similar matches to be returned. The resulting matches are sorted by distance (starting with the most similar). Note that the first match is always the self-match/trivial-match for each motif. max_motifs : int The maximum number of motifs to return. Return ------ motif_distances : numpy.ndarray The distances corresponding to a set of subsequence matches for each motif. Note that the first column always corresponds to the distance for the self-match/trivial-match for each motif. 
motif_indices : numpy.ndarray The indices corresponding to a set of subsequences matches for each motif Note that the first column always corresponds to the index for the self-match/trivial-match for each motif # pragma: no cover # If max_distance is a constant (independent of the distance profile D of Q # and T), then we can stop the iteration if the matrix profile value of Q is # larger than the maximum distance. # pragma: no cover Discover the top non-normalized motifs (i.e., without z-normalization) for time series `T`. A subsequence, `Q`, becomes a candidate motif if there are at least `min_neighbor` number of other subsequence matches in `T` (outside the exclusion zone) with a distance less or equal to `max_distance`. Parameters ---------- T : numpy.ndarray The time series or sequence P : numpy.ndarray Matrix Profile of `T` min_neighbors : int, default 1 The minimum number of similar matches a subsequence needs to have in order to be considered a motif. This defaults to `1`, which means that a subsequence must have at least one similar match in order to be considered a motif. max_distance : float or function, default None For a candidate motif, `Q`, and a non-trivial subsequence, `S`, `max_distance` is the maximum distance allowed between `Q` and `S` so that `S` is considered a match of `Q`. If `max_distance` is a function, then it must be a function that accepts a single parameter, `D`, in its function signature, which is the distance profile between `Q` and `T`. If None, this defaults to `np.nanmax([np.nanmean(D) - 2.0 * np.nanstd(D), np.nanmin(D)])`. cutoff : float, default None The largest matrix profile value (distance) that a candidate motif is allowed to have. If `None`, this defaults to `np.nanmax([np.nanmean(P) - 2.0 * np.nanstd(P), np.nanmin(P)])` max_matches : int, default 10 The maximum amount of similar matches of a motif representative to be returned. 
The resulting matches are sorted by distance, so a value of `10` means that the indices of the most similar `10` subsequences is returned. If `None`, all matches within `max_distance` of the motif representative will be returned. Note that the first match is always the self-match/trivial-match for each motif. max_motifs : int, default 1 The maximum number of motifs to return. Return ------ motif_distances : numpy.ndarray The distances corresponding to a set of subsequence matches for each motif. Note that the first column always corresponds to the distance for the self-match/trivial-match for each motif. motif_indices : numpy.ndarray The indices corresponding to a set of subsequences matches for each motif Note that the first column always corresponds to the index for the self-match/trivial-match for each motif. # pragma: no cover # pragma: no cover # pragma: no cover # pragma: no cover # pragma: no cover Find all matches of a query `Q` in a time series `T`, i.e. the indices of subsequences whose distances to `Q` are less or equal to `max_distance`, sorted by distance (lowest to highest). Around each occurrence an exclusion zone is applied before searching for the next. Parameters ---------- Q : numpy.ndarray The query sequence. It doesn't have to be a subsequence of `T` T : numpy.ndarray The time series of interest max_distance : float or function, default None Maximum distance between `Q` and a subsequence `S` for `S` to be considered a match. If a function, then it has to be a function of one argument `D`, which will be the distance profile of `Q` with `T` (a 1D numpy array of size `n-m+1`). If None, defaults to `np.nanmax([np.nanmean(D) - 2 * np.nanstd(D), np.nanmin(D)])` (i.e. at least the closest match will be returned). max_matches : int, default None The maximum amount of similar occurrences to be returned. The resulting occurrences are sorted by distance, so a value of `10` means that the indices of the most similar `10` subsequences is returned. 
If `None`, then all occurrences are returned. Returns ------- out : numpy.ndarray The first column consists of distances of subsequences of `T` whose distances to `Q` are smaller than `max_distance`, sorted by distance (lowest to highest). The second column consists of the corresponding indices in `T`. # pragma: no cover # pragma: no cover # pragma: no cover
2.139571
2
fastapi_mail/msg.py
gogoku/fastapi-mail
3
6624636
import time
import sys
from email.mime.text import MIMEText
from email.mime.base import MIMEBase
from email.mime.multipart import MIMEMultipart
from email.utils import formatdate, make_msgid
from email.encoders import encode_base64

# Kept for backward compatibility with older importers of this module;
# the module itself already requires Python 3 (it uses ``async def``).
PY3 = sys.version_info[0] == 3


class MailMsg:
    """
    Preparation of the email message.

    :param subject: email subject header
    :param recipients: list of email addresses
    :param body: plain text message
    :param html: HTML message
    :param subtype: type of body parameter - "plain" or "html". Ignored if
        the html parameter is explicitly specified
    :param sender: email sender address
    :param cc: CC list
    :param bcc: BCC list
    :param reply_to: Reply-To list
    :param attachments: list of Attachment instances
    :param multipart_subtype: MultipartSubtypeEnum instance. Determines the
        nature of the parts of the message and their relationship to each
        other according to the MIME standard
    """

    def __init__(self, **entries):
        # Every keyword argument becomes an attribute (subject, recipients,
        # charset, ...); the class docstring lists the expected keys.
        self.__dict__.update(entries)
        self.msgId = make_msgid()

    def _mimetext(self, text, subtype="plain"):
        """Create a MIMEText part using the configured charset."""
        return MIMEText(text, _subtype=subtype, _charset=self.charset)

    async def attach_file(self, message, attachment):
        """Base64-encode each upload in *attachment* and attach it to *message*.

        BUGFIX(review): the original ignored the ``message`` parameter and
        attached to ``self.message`` instead (identical for the internal
        caller, wrong for any external one), and leaked a debug ``print``.
        """
        for file in attachment:
            part = MIMEBase(_maintype="application", _subtype="octet-stream")
            part.set_payload(await file.read())
            encode_base64(part)

            filename = file.filename
            try:
                # Probe whether the name is pure ASCII; non-ASCII names are
                # wrapped as an RFC 2231 (charset, language, value) triple.
                filename and filename.encode('ascii')
            except UnicodeEncodeError:
                filename = ('UTF8', '', filename)

            part.add_header(
                'Content-Disposition', "attachment", filename=filename)
            message.attach(part)

    async def _message(self, sender):
        """Build the MIME message, cache it on ``self.message`` and return it."""
        self.message = MIMEMultipart(self.multipart_subtype.value)
        self.message.set_charset(self.charset)
        self.message['Date'] = formatdate(time.time(), localtime=True)
        self.message['Message-ID'] = self.msgId
        self.message["To"] = ', '.join(self.recipients)
        self.message["From"] = sender

        if self.subject:
            self.message["Subject"] = (self.subject)
        if self.cc:
            self.message["Cc"] = ', '.join(self.cc)
        if self.bcc:
            self.message["Bcc"] = ', '.join(self.bcc)
        if self.reply_to:
            self.message["Reply-To"] = ', '.join(self.reply_to)

        if self.body:
            # An explicit html part wins; otherwise honour the declared subtype.
            if not self.html and self.subtype:
                self.message.attach(self._mimetext(self.body, self.subtype))
            else:
                self.message.attach(self._mimetext(self.body))
        if self.html:
            self.message.attach(self._mimetext(self.html, "html"))

        if self.attachments:
            await self.attach_file(self.message, self.attachments)

        return self.message

    async def as_string(self):
        """Build the message and return it as a flat string.

        BUGFIX(review): the original called ``self._message()`` without the
        required ``sender`` argument and invoked ``.as_string()`` on the
        un-awaited coroutine; both were guaranteed errors.  ``sender`` is
        expected among the ``**entries`` (see class docstring) — confirm
        against callers.
        """
        return (await self._message(self.sender)).as_string()

    async def as_bytes(self):
        """Build the message and return its raw bytes.

        BUGFIX(review): same defects as ``as_string``; additionally made
        ``async`` because ``_message`` is a coroutine — the synchronous
        original could never have returned bytes.
        """
        return (await self._message(self.sender)).as_bytes()

    def __str__(self):
        # ``__str__`` must stay synchronous, so render the message built by
        # the most recent ``_message()`` call (the original returned a
        # coroutine here, which always made ``str(msg)`` fail).
        try:
            return self.message.as_string()
        except AttributeError:
            raise RuntimeError(
                "MailMsg must be built with _message()/as_string() "
                "before it can be rendered")

    def __bytes__(self):
        # Synchronous counterpart of __str__; see the note there.
        try:
            return self.message.as_bytes()
        except AttributeError:
            raise RuntimeError(
                "MailMsg must be built with _message()/as_bytes() "
                "before it can be rendered")
import time
import sys
from email.mime.text import MIMEText
from email.mime.base import MIMEBase
from email.mime.multipart import MIMEMultipart
from email.utils import formatdate, make_msgid
from email.encoders import encode_base64

# Kept for backward compatibility with older importers of this module;
# the module itself already requires Python 3 (it uses ``async def``).
PY3 = sys.version_info[0] == 3


class MailMsg:
    """
    Preparation of the email message.

    :param subject: email subject header
    :param recipients: list of email addresses
    :param body: plain text message
    :param html: HTML message
    :param subtype: type of body parameter - "plain" or "html". Ignored if
        the html parameter is explicitly specified
    :param sender: email sender address
    :param cc: CC list
    :param bcc: BCC list
    :param reply_to: Reply-To list
    :param attachments: list of Attachment instances
    :param multipart_subtype: MultipartSubtypeEnum instance. Determines the
        nature of the parts of the message and their relationship to each
        other according to the MIME standard
    """

    def __init__(self, **entries):
        # Every keyword argument becomes an attribute (subject, recipients,
        # charset, ...); the class docstring lists the expected keys.
        self.__dict__.update(entries)
        self.msgId = make_msgid()

    def _mimetext(self, text, subtype="plain"):
        """Create a MIMEText part using the configured charset."""
        return MIMEText(text, _subtype=subtype, _charset=self.charset)

    async def attach_file(self, message, attachment):
        """Base64-encode each upload in *attachment* and attach it to *message*.

        BUGFIX(review): the original ignored the ``message`` parameter and
        attached to ``self.message`` instead (identical for the internal
        caller, wrong for any external one), and leaked a debug ``print``.
        """
        for file in attachment:
            part = MIMEBase(_maintype="application", _subtype="octet-stream")
            part.set_payload(await file.read())
            encode_base64(part)

            filename = file.filename
            try:
                # Probe whether the name is pure ASCII; non-ASCII names are
                # wrapped as an RFC 2231 (charset, language, value) triple.
                filename and filename.encode('ascii')
            except UnicodeEncodeError:
                filename = ('UTF8', '', filename)

            part.add_header(
                'Content-Disposition', "attachment", filename=filename)
            message.attach(part)

    async def _message(self, sender):
        """Build the MIME message, cache it on ``self.message`` and return it."""
        self.message = MIMEMultipart(self.multipart_subtype.value)
        self.message.set_charset(self.charset)
        self.message['Date'] = formatdate(time.time(), localtime=True)
        self.message['Message-ID'] = self.msgId
        self.message["To"] = ', '.join(self.recipients)
        self.message["From"] = sender

        if self.subject:
            self.message["Subject"] = (self.subject)
        if self.cc:
            self.message["Cc"] = ', '.join(self.cc)
        if self.bcc:
            self.message["Bcc"] = ', '.join(self.bcc)
        if self.reply_to:
            self.message["Reply-To"] = ', '.join(self.reply_to)

        if self.body:
            # An explicit html part wins; otherwise honour the declared subtype.
            if not self.html and self.subtype:
                self.message.attach(self._mimetext(self.body, self.subtype))
            else:
                self.message.attach(self._mimetext(self.body))
        if self.html:
            self.message.attach(self._mimetext(self.html, "html"))

        if self.attachments:
            await self.attach_file(self.message, self.attachments)

        return self.message

    async def as_string(self):
        """Build the message and return it as a flat string.

        BUGFIX(review): the original called ``self._message()`` without the
        required ``sender`` argument and invoked ``.as_string()`` on the
        un-awaited coroutine; both were guaranteed errors.  ``sender`` is
        expected among the ``**entries`` (see class docstring) — confirm
        against callers.
        """
        return (await self._message(self.sender)).as_string()

    async def as_bytes(self):
        """Build the message and return its raw bytes.

        BUGFIX(review): same defects as ``as_string``; additionally made
        ``async`` because ``_message`` is a coroutine — the synchronous
        original could never have returned bytes.
        """
        return (await self._message(self.sender)).as_bytes()

    def __str__(self):
        # ``__str__`` must stay synchronous, so render the message built by
        # the most recent ``_message()`` call (the original returned a
        # coroutine here, which always made ``str(msg)`` fail).
        try:
            return self.message.as_string()
        except AttributeError:
            raise RuntimeError(
                "MailMsg must be built with _message()/as_string() "
                "before it can be rendered")

    def __bytes__(self):
        # Synchronous counterpart of __str__; see the note there.
        try:
            return self.message.as_bytes()
        except AttributeError:
            raise RuntimeError(
                "MailMsg must be built with _message()/as_bytes() "
                "before it can be rendered")
en
0.562406
Preparation of the class for email text :param subject: email subject header :param recipients: list of email addresses :param body: plain text message :param html: HTML message :param subtype: type of body parameter - "plain" or "html". Ignored if the html parameter is explicitly specified :param sender: email sender address :param cc: CC list :param bcc: BCC list :param reply_to: Reply-To list :param attachments: list of Attachment instances :param multipart_subtype: MultipartSubtypeEnum instance. Determines the nature of the parts of the message and their relationship to each other according to the MIME standard Creates a MIMEText object Creates the email message
2.744729
3
tests/postgres_tests/test_json.py
Robin2555/Django_Bib
4
6624637
"""
Tests for ``django.contrib.postgres`` JSONField: model round-trips,
jsonb querying and key transforms, field system checks, serialization,
validation, and the corresponding form field.
"""
import datetime
import operator
import uuid
from decimal import Decimal

from django.core import checks, exceptions, serializers
from django.core.serializers.json import DjangoJSONEncoder
from django.db import connection
from django.db.models import Count, F, OuterRef, Q, Subquery
from django.db.models.expressions import RawSQL
from django.db.models.functions import Cast
from django.forms import CharField, Form, widgets
from django.test.utils import CaptureQueriesContext, isolate_apps
from django.utils.html import escape

from . import PostgreSQLSimpleTestCase, PostgreSQLTestCase
from .models import JSONModel, PostgreSQLModel

try:
    from django.contrib.postgres import forms
    from django.contrib.postgres.fields import JSONField
    from django.contrib.postgres.fields.jsonb import KeyTextTransform, KeyTransform
except ImportError:
    pass


class TestSaveLoad(PostgreSQLTestCase):
    """Round-trip a variety of JSON values through the database."""

    def test_null(self):
        instance = JSONModel()
        instance.save()
        loaded = JSONModel.objects.get()
        self.assertIsNone(loaded.field)

    def test_empty_object(self):
        instance = JSONModel(field={})
        instance.save()
        loaded = JSONModel.objects.get()
        self.assertEqual(loaded.field, {})

    def test_empty_list(self):
        instance = JSONModel(field=[])
        instance.save()
        loaded = JSONModel.objects.get()
        self.assertEqual(loaded.field, [])

    def test_boolean(self):
        instance = JSONModel(field=True)
        instance.save()
        loaded = JSONModel.objects.get()
        self.assertIs(loaded.field, True)

    def test_string(self):
        instance = JSONModel(field='why?')
        instance.save()
        loaded = JSONModel.objects.get()
        self.assertEqual(loaded.field, 'why?')

    def test_number(self):
        instance = JSONModel(field=1)
        instance.save()
        loaded = JSONModel.objects.get()
        self.assertEqual(loaded.field, 1)

    def test_realistic_object(self):
        obj = {
            'a': 'b',
            'c': 1,
            'd': ['e', {'f': 'g'}],
            'h': True,
            'i': False,
            'j': None,
        }
        instance = JSONModel(field=obj)
        instance.save()
        loaded = JSONModel.objects.get()
        self.assertEqual(loaded.field, obj)

    def test_custom_encoding(self):
        """
        JSONModel.field_custom has a custom DjangoJSONEncoder.
        """
        some_uuid = uuid.uuid4()
        obj_before = {
            'date': datetime.date(2016, 8, 12),
            'datetime': datetime.datetime(2016, 8, 12, 13, 44, 47, 575981),
            'decimal': Decimal('10.54'),
            'uuid': some_uuid,
        }
        obj_after = {
            'date': '2016-08-12',
            'datetime': '2016-08-12T13:44:47.575',
            'decimal': '10.54',
            'uuid': str(some_uuid),
        }
        JSONModel.objects.create(field_custom=obj_before)
        loaded = JSONModel.objects.get()
        self.assertEqual(loaded.field_custom, obj_after)


class TestQuerying(PostgreSQLTestCase):
    """Exercise jsonb lookups and key transforms against fixed fixtures."""

    @classmethod
    def setUpTestData(cls):
        cls.objs = JSONModel.objects.bulk_create([
            JSONModel(field=None),
            JSONModel(field=True),
            JSONModel(field=False),
            JSONModel(field='yes'),
            JSONModel(field=7),
            JSONModel(field=[]),
            JSONModel(field={}),
            JSONModel(field={
                'a': 'b',
                'c': 1,
            }),
            JSONModel(field={
                'a': 'b',
                'c': 1,
                'd': ['e', {'f': 'g'}],
                'h': True,
                'i': False,
                'j': None,
                'k': {'l': 'm'},
            }),
            JSONModel(field=[1, [2]]),
            JSONModel(field={
                'k': True,
                'l': False,
            }),
            JSONModel(field={
                'foo': 'bar',
                'baz': {'a': 'b', 'c': 'd'},
                'bar': ['foo', 'bar'],
                'bax': {'foo': 'bar'},
            }),
        ])

    def test_exact(self):
        self.assertSequenceEqual(
            JSONModel.objects.filter(field__exact={}),
            [self.objs[6]]
        )

    def test_exact_complex(self):
        self.assertSequenceEqual(
            JSONModel.objects.filter(field__exact={'a': 'b', 'c': 1}),
            [self.objs[7]]
        )

    def test_isnull(self):
        self.assertSequenceEqual(
            JSONModel.objects.filter(field__isnull=True),
            [self.objs[0]]
        )

    def test_ordering_by_transform(self):
        objs = [
            JSONModel.objects.create(field={'ord': 93, 'name': 'bar'}),
            JSONModel.objects.create(field={'ord': 22.1, 'name': 'foo'}),
            JSONModel.objects.create(field={'ord': -1, 'name': 'baz'}),
            JSONModel.objects.create(field={'ord': 21.931902, 'name': 'spam'}),
            JSONModel.objects.create(field={'ord': -100291029, 'name': 'eggs'}),
        ]
        query = JSONModel.objects.filter(field__name__isnull=False).order_by('field__ord')
        self.assertSequenceEqual(query, [objs[4], objs[2], objs[3], objs[1], objs[0]])

    def test_ordering_grouping_by_key_transform(self):
        base_qs = JSONModel.objects.filter(field__d__0__isnull=False)
        for qs in (
            base_qs.order_by('field__d__0'),
            base_qs.annotate(key=KeyTransform('0', KeyTransform('d', 'field'))).order_by('key'),
        ):
            self.assertSequenceEqual(qs, [self.objs[8]])
        qs = JSONModel.objects.filter(field__isnull=False)
        self.assertQuerysetEqual(
            qs.values('field__d__0').annotate(count=Count('field__d__0')).order_by('count'),
            [1, 10],
            operator.itemgetter('count'),
        )
        self.assertQuerysetEqual(
            qs.filter(field__isnull=False).annotate(
                key=KeyTextTransform('f', KeyTransform('1', KeyTransform('d', 'field'))),
            ).values('key').annotate(count=Count('key')).order_by('count'),
            [(None, 0), ('g', 1)],
            operator.itemgetter('key', 'count'),
        )

    def test_key_transform_raw_expression(self):
        expr = RawSQL('%s::jsonb', ['{"x": "bar"}'])
        self.assertSequenceEqual(
            JSONModel.objects.filter(field__foo=KeyTransform('x', expr)),
            [self.objs[-1]],
        )

    def test_key_transform_expression(self):
        self.assertSequenceEqual(
            JSONModel.objects.filter(field__d__0__isnull=False).annotate(
                key=KeyTransform('d', 'field'),
            ).annotate(
                chain=KeyTransform('0', 'key'),
                expr=KeyTransform('0', Cast('key', JSONField())),
            ).filter(chain=F('expr')),
            [self.objs[8]],
        )

    def test_deep_values(self):
        query = JSONModel.objects.values_list('field__k__l')
        self.assertSequenceEqual(
            query,
            [
                (None,), (None,), (None,), (None,),
                (None,), (None,), (None,), (None,),
                ('m',), (None,), (None,), (None,),
            ]
        )

    def test_deep_distinct(self):
        query = JSONModel.objects.distinct('field__k__l').values_list('field__k__l')
        self.assertSequenceEqual(query, [('m',), (None,)])

    def test_isnull_key(self):
        # key__isnull works the same as has_key='key'.
        self.assertSequenceEqual(
            JSONModel.objects.filter(field__a__isnull=True),
            self.objs[:7] + self.objs[9:]
        )
        self.assertSequenceEqual(
            JSONModel.objects.filter(field__a__isnull=False),
            [self.objs[7], self.objs[8]]
        )

    def test_none_key(self):
        self.assertSequenceEqual(JSONModel.objects.filter(field__j=None), [self.objs[8]])

    def test_none_key_exclude(self):
        obj = JSONModel.objects.create(field={'j': 1})
        self.assertSequenceEqual(JSONModel.objects.exclude(field__j=None), [obj])

    def test_isnull_key_or_none(self):
        obj = JSONModel.objects.create(field={'a': None})
        self.assertSequenceEqual(
            JSONModel.objects.filter(Q(field__a__isnull=True) | Q(field__a=None)),
            self.objs[:7] + self.objs[9:] + [obj]
        )

    def test_contains(self):
        self.assertSequenceEqual(
            JSONModel.objects.filter(field__contains={'a': 'b'}),
            [self.objs[7], self.objs[8]]
        )

    def test_contained_by(self):
        self.assertSequenceEqual(
            JSONModel.objects.filter(field__contained_by={'a': 'b', 'c': 1, 'h': True}),
            [self.objs[6], self.objs[7]]
        )

    def test_has_key(self):
        self.assertSequenceEqual(
            JSONModel.objects.filter(field__has_key='a'),
            [self.objs[7], self.objs[8]]
        )

    def test_has_keys(self):
        self.assertSequenceEqual(
            JSONModel.objects.filter(field__has_keys=['a', 'c', 'h']),
            [self.objs[8]]
        )

    def test_has_any_keys(self):
        self.assertSequenceEqual(
            JSONModel.objects.filter(field__has_any_keys=['c', 'l']),
            [self.objs[7], self.objs[8], self.objs[10]]
        )

    def test_shallow_list_lookup(self):
        self.assertSequenceEqual(
            JSONModel.objects.filter(field__0=1),
            [self.objs[9]]
        )

    def test_shallow_obj_lookup(self):
        self.assertSequenceEqual(
            JSONModel.objects.filter(field__a='b'),
            [self.objs[7], self.objs[8]]
        )

    def test_obj_subquery_lookup(self):
        qs = JSONModel.objects.annotate(
            value=Subquery(JSONModel.objects.filter(pk=OuterRef('pk')).values('field')),
        ).filter(value__a='b')
        self.assertSequenceEqual(qs, [self.objs[7], self.objs[8]])

    def test_deep_lookup_objs(self):
        self.assertSequenceEqual(
            JSONModel.objects.filter(field__k__l='m'),
            [self.objs[8]]
        )

    def test_shallow_lookup_obj_target(self):
        self.assertSequenceEqual(
            JSONModel.objects.filter(field__k={'l': 'm'}),
            [self.objs[8]]
        )

    def test_deep_lookup_array(self):
        self.assertSequenceEqual(
            JSONModel.objects.filter(field__1__0=2),
            [self.objs[9]]
        )

    def test_deep_lookup_mixed(self):
        self.assertSequenceEqual(
            JSONModel.objects.filter(field__d__1__f='g'),
            [self.objs[8]]
        )

    def test_deep_lookup_transform(self):
        self.assertSequenceEqual(
            JSONModel.objects.filter(field__c__gt=1),
            []
        )
        self.assertSequenceEqual(
            JSONModel.objects.filter(field__c__lt=5),
            [self.objs[7], self.objs[8]]
        )

    def test_usage_in_subquery(self):
        self.assertSequenceEqual(
            JSONModel.objects.filter(id__in=JSONModel.objects.filter(field__c=1)),
            self.objs[7:9]
        )

    def test_iexact(self):
        self.assertTrue(JSONModel.objects.filter(field__foo__iexact='BaR').exists())
        self.assertFalse(JSONModel.objects.filter(field__foo__iexact='"BaR"').exists())

    def test_icontains(self):
        self.assertFalse(JSONModel.objects.filter(field__foo__icontains='"bar"').exists())

    def test_startswith(self):
        self.assertTrue(JSONModel.objects.filter(field__foo__startswith='b').exists())

    def test_istartswith(self):
        self.assertTrue(JSONModel.objects.filter(field__foo__istartswith='B').exists())

    def test_endswith(self):
        self.assertTrue(JSONModel.objects.filter(field__foo__endswith='r').exists())

    def test_iendswith(self):
        self.assertTrue(JSONModel.objects.filter(field__foo__iendswith='R').exists())

    def test_regex(self):
        self.assertTrue(JSONModel.objects.filter(field__foo__regex=r'^bar$').exists())

    def test_iregex(self):
        self.assertTrue(JSONModel.objects.filter(field__foo__iregex=r'^bAr$').exists())

    def test_key_sql_injection(self):
        with CaptureQueriesContext(connection) as queries:
            self.assertFalse(
                JSONModel.objects.filter(**{
                    """field__test' = '"a"') OR 1 = 1 OR ('d""": 'x',
                }).exists()
            )
        self.assertIn(
            """."field" -> 'test'' = ''"a"'') OR 1 = 1 OR (''d') = '"x"' """,
            queries[0]['sql'],
        )

    def test_lookups_with_key_transform(self):
        tests = (
            ('field__d__contains', 'e'),
            ('field__baz__contained_by', {'a': 'b', 'c': 'd', 'e': 'f'}),
            ('field__baz__has_key', 'c'),
            ('field__baz__has_keys', ['a', 'c']),
            ('field__baz__has_any_keys', ['a', 'x']),
            ('field__contains', KeyTransform('bax', 'field')),
            (
                'field__contained_by',
                KeyTransform('x', RawSQL('%s::jsonb', ['{"x": {"a": "b", "c": 1, "d": "e"}}'])),
            ),
            ('field__has_key', KeyTextTransform('foo', 'field')),
        )
        for lookup, value in tests:
            with self.subTest(lookup=lookup):
                self.assertTrue(JSONModel.objects.filter(
                    **{lookup: value},
                ).exists())


@isolate_apps('postgres_tests')
class TestChecks(PostgreSQLSimpleTestCase):
    """System-check behavior of JSONField defaults."""

    def test_invalid_default(self):
        class MyModel(PostgreSQLModel):
            field = JSONField(default={})

        model = MyModel()
        self.assertEqual(model.check(), [
            checks.Warning(
                msg=(
                    "JSONField default should be a callable instead of an "
                    "instance so that it's not shared between all field "
                    "instances."
                ),
                hint='Use a callable instead, e.g., use `dict` instead of `{}`.',
                obj=MyModel._meta.get_field('field'),
                id='postgres.E003',
            )
        ])

    def test_valid_default(self):
        class MyModel(PostgreSQLModel):
            field = JSONField(default=dict)

        model = MyModel()
        self.assertEqual(model.check(), [])

    def test_valid_default_none(self):
        class MyModel(PostgreSQLModel):
            field = JSONField(default=None)

        model = MyModel()
        self.assertEqual(model.check(), [])


class TestSerialization(PostgreSQLSimpleTestCase):
    """Dump/load JSONField values through the JSON serializer."""

    test_data = (
        '[{"fields": {"field": %s, "field_custom": null}, '
        '"model": "postgres_tests.jsonmodel", "pk": null}]'
    )
    test_values = (
        # (Python value, serialized value),
        ({'a': 'b', 'c': None}, '{"a": "b", "c": null}'),
        ('abc', '"abc"'),
        ('{"a": "a"}', '"{\\"a\\": \\"a\\"}"'),
    )

    def test_dumping(self):
        for value, serialized in self.test_values:
            with self.subTest(value=value):
                instance = JSONModel(field=value)
                data = serializers.serialize('json', [instance])
                self.assertJSONEqual(data, self.test_data % serialized)

    def test_loading(self):
        for value, serialized in self.test_values:
            with self.subTest(value=value):
                instance = list(serializers.deserialize('json', self.test_data % serialized))[0].object
                self.assertEqual(instance.field, value)


class TestValidation(PostgreSQLSimpleTestCase):
    """Model-field validation and custom encoder handling."""

    def test_not_serializable(self):
        field = JSONField()
        with self.assertRaises(exceptions.ValidationError) as cm:
            field.clean(datetime.timedelta(days=1), None)
        self.assertEqual(cm.exception.code, 'invalid')
        self.assertEqual(cm.exception.message % cm.exception.params, "Value must be valid JSON.")

    def test_custom_encoder(self):
        with self.assertRaisesMessage(ValueError, "The encoder parameter must be a callable object."):
            field = JSONField(encoder=DjangoJSONEncoder())
        field = JSONField(encoder=DjangoJSONEncoder)
        self.assertEqual(field.clean(datetime.timedelta(days=1), None), datetime.timedelta(days=1))


class TestFormField(PostgreSQLSimpleTestCase):
    """Behavior of the postgres forms.JSONField."""

    def test_valid(self):
        field = forms.JSONField()
        value = field.clean('{"a": "b"}')
        self.assertEqual(value, {'a': 'b'})

    def test_valid_empty(self):
        field = forms.JSONField(required=False)
        value = field.clean('')
        self.assertIsNone(value)

    def test_invalid(self):
        field = forms.JSONField()
        with self.assertRaises(exceptions.ValidationError) as cm:
            field.clean('{some badly formed: json}')
        self.assertEqual(cm.exception.messages[0], "'{some badly formed: json}' value must be valid JSON.")

    def test_formfield(self):
        model_field = JSONField()
        form_field = model_field.formfield()
        self.assertIsInstance(form_field, forms.JSONField)

    def test_formfield_disabled(self):
        class JsonForm(Form):
            name = CharField()
            jfield = forms.JSONField(disabled=True)

        form = JsonForm({'name': 'xyz', 'jfield': '["bar"]'}, initial={'jfield': ['foo']})
        self.assertIn('[&quot;foo&quot;]</textarea>', form.as_p())

    def test_prepare_value(self):
        field = forms.JSONField()
        self.assertEqual(field.prepare_value({'a': 'b'}), '{"a": "b"}')
        self.assertEqual(field.prepare_value(None), 'null')
        self.assertEqual(field.prepare_value('foo'), '"foo"')

    def test_redisplay_wrong_input(self):
        """
        When displaying a bound form (typically due to invalid input), the
        form should not overquote JSONField inputs.
        """
        class JsonForm(Form):
            name = CharField(max_length=2)
            jfield = forms.JSONField()

        # JSONField input is fine, name is too long
        form = JsonForm({'name': 'xyz', 'jfield': '["foo"]'})
        self.assertIn('[&quot;foo&quot;]</textarea>', form.as_p())
        # This time, the JSONField input is wrong
        form = JsonForm({'name': 'xy', 'jfield': '{"foo"}'})
        # Appears once in the textarea and once in the error message
        self.assertEqual(form.as_p().count(escape('{"foo"}')), 2)

    def test_widget(self):
        """The default widget of a JSONField is a Textarea."""
        field = forms.JSONField()
        self.assertIsInstance(field.widget, widgets.Textarea)

    def test_custom_widget_kwarg(self):
        """The widget can be overridden with a kwarg."""
        field = forms.JSONField(widget=widgets.Input)
        self.assertIsInstance(field.widget, widgets.Input)

    def test_custom_widget_attribute(self):
        """The widget can be overridden with an attribute."""
        class CustomJSONField(forms.JSONField):
            widget = widgets.Input

        field = CustomJSONField()
        self.assertIsInstance(field.widget, widgets.Input)

    def test_already_converted_value(self):
        field = forms.JSONField(required=False)
        tests = [
            '["a", "b", "c"]', '{"a": 1, "b": 2}', '1', '1.5', '"foo"',
            'true', 'false', 'null',
        ]
        for json_string in tests:
            val = field.clean(json_string)
            self.assertEqual(field.clean(val), val)

    def test_has_changed(self):
        field = forms.JSONField()
        self.assertIs(field.has_changed({'a': True}, '{"a": 1}'), True)
        self.assertIs(field.has_changed({'a': 1, 'b': 2}, '{"b": 2, "a": 1}'), False)
import datetime import operator import uuid from decimal import Decimal from django.core import checks, exceptions, serializers from django.core.serializers.json import DjangoJSONEncoder from django.db import connection from django.db.models import Count, F, OuterRef, Q, Subquery from django.db.models.expressions import RawSQL from django.db.models.functions import Cast from django.forms import CharField, Form, widgets from django.test.utils import CaptureQueriesContext, isolate_apps from django.utils.html import escape from . import PostgreSQLSimpleTestCase, PostgreSQLTestCase from .models import JSONModel, PostgreSQLModel try: from django.contrib.postgres import forms from django.contrib.postgres.fields import JSONField from django.contrib.postgres.fields.jsonb import KeyTextTransform, KeyTransform except ImportError: pass class TestSaveLoad(PostgreSQLTestCase): def test_null(self): instance = JSONModel() instance.save() loaded = JSONModel.objects.get() self.assertIsNone(loaded.field) def test_empty_object(self): instance = JSONModel(field={}) instance.save() loaded = JSONModel.objects.get() self.assertEqual(loaded.field, {}) def test_empty_list(self): instance = JSONModel(field=[]) instance.save() loaded = JSONModel.objects.get() self.assertEqual(loaded.field, []) def test_boolean(self): instance = JSONModel(field=True) instance.save() loaded = JSONModel.objects.get() self.assertIs(loaded.field, True) def test_string(self): instance = JSONModel(field='why?') instance.save() loaded = JSONModel.objects.get() self.assertEqual(loaded.field, 'why?') def test_number(self): instance = JSONModel(field=1) instance.save() loaded = JSONModel.objects.get() self.assertEqual(loaded.field, 1) def test_realistic_object(self): obj = { 'a': 'b', 'c': 1, 'd': ['e', {'f': 'g'}], 'h': True, 'i': False, 'j': None, } instance = JSONModel(field=obj) instance.save() loaded = JSONModel.objects.get() self.assertEqual(loaded.field, obj) def test_custom_encoding(self): """ 
JSONModel.field_custom has a custom DjangoJSONEncoder. """ some_uuid = uuid.uuid4() obj_before = { 'date': datetime.date(2016, 8, 12), 'datetime': datetime.datetime(2016, 8, 12, 13, 44, 47, 575981), 'decimal': Decimal('10.54'), 'uuid': some_uuid, } obj_after = { 'date': '2016-08-12', 'datetime': '2016-08-12T13:44:47.575', 'decimal': '10.54', 'uuid': str(some_uuid), } JSONModel.objects.create(field_custom=obj_before) loaded = JSONModel.objects.get() self.assertEqual(loaded.field_custom, obj_after) class TestQuerying(PostgreSQLTestCase): @classmethod def setUpTestData(cls): cls.objs = JSONModel.objects.bulk_create([ JSONModel(field=None), JSONModel(field=True), JSONModel(field=False), JSONModel(field='yes'), JSONModel(field=7), JSONModel(field=[]), JSONModel(field={}), JSONModel(field={ 'a': 'b', 'c': 1, }), JSONModel(field={ 'a': 'b', 'c': 1, 'd': ['e', {'f': 'g'}], 'h': True, 'i': False, 'j': None, 'k': {'l': 'm'}, }), JSONModel(field=[1, [2]]), JSONModel(field={ 'k': True, 'l': False, }), JSONModel(field={ 'foo': 'bar', 'baz': {'a': 'b', 'c': 'd'}, 'bar': ['foo', 'bar'], 'bax': {'foo': 'bar'}, }), ]) def test_exact(self): self.assertSequenceEqual( JSONModel.objects.filter(field__exact={}), [self.objs[6]] ) def test_exact_complex(self): self.assertSequenceEqual( JSONModel.objects.filter(field__exact={'a': 'b', 'c': 1}), [self.objs[7]] ) def test_isnull(self): self.assertSequenceEqual( JSONModel.objects.filter(field__isnull=True), [self.objs[0]] ) def test_ordering_by_transform(self): objs = [ JSONModel.objects.create(field={'ord': 93, 'name': 'bar'}), JSONModel.objects.create(field={'ord': 22.1, 'name': 'foo'}), JSONModel.objects.create(field={'ord': -1, 'name': 'baz'}), JSONModel.objects.create(field={'ord': 21.931902, 'name': 'spam'}), JSONModel.objects.create(field={'ord': -100291029, 'name': 'eggs'}), ] query = JSONModel.objects.filter(field__name__isnull=False).order_by('field__ord') self.assertSequenceEqual(query, [objs[4], objs[2], objs[3], objs[1], 
objs[0]]) def test_ordering_grouping_by_key_transform(self): base_qs = JSONModel.objects.filter(field__d__0__isnull=False) for qs in ( base_qs.order_by('field__d__0'), base_qs.annotate(key=KeyTransform('0', KeyTransform('d', 'field'))).order_by('key'), ): self.assertSequenceEqual(qs, [self.objs[8]]) qs = JSONModel.objects.filter(field__isnull=False) self.assertQuerysetEqual( qs.values('field__d__0').annotate(count=Count('field__d__0')).order_by('count'), [1, 10], operator.itemgetter('count'), ) self.assertQuerysetEqual( qs.filter(field__isnull=False).annotate( key=KeyTextTransform('f', KeyTransform('1', KeyTransform('d', 'field'))), ).values('key').annotate(count=Count('key')).order_by('count'), [(None, 0), ('g', 1)], operator.itemgetter('key', 'count'), ) def test_key_transform_raw_expression(self): expr = RawSQL('%s::jsonb', ['{"x": "bar"}']) self.assertSequenceEqual( JSONModel.objects.filter(field__foo=KeyTransform('x', expr)), [self.objs[-1]], ) def test_key_transform_expression(self): self.assertSequenceEqual( JSONModel.objects.filter(field__d__0__isnull=False).annotate( key=KeyTransform('d', 'field'), ).annotate( chain=KeyTransform('0', 'key'), expr=KeyTransform('0', Cast('key', JSONField())), ).filter(chain=F('expr')), [self.objs[8]], ) def test_deep_values(self): query = JSONModel.objects.values_list('field__k__l') self.assertSequenceEqual( query, [ (None,), (None,), (None,), (None,), (None,), (None,), (None,), (None,), ('m',), (None,), (None,), (None,), ] ) def test_deep_distinct(self): query = JSONModel.objects.distinct('field__k__l').values_list('field__k__l') self.assertSequenceEqual(query, [('m',), (None,)]) def test_isnull_key(self): # key__isnull works the same as has_key='key'. 
self.assertSequenceEqual( JSONModel.objects.filter(field__a__isnull=True), self.objs[:7] + self.objs[9:] ) self.assertSequenceEqual( JSONModel.objects.filter(field__a__isnull=False), [self.objs[7], self.objs[8]] ) def test_none_key(self): self.assertSequenceEqual(JSONModel.objects.filter(field__j=None), [self.objs[8]]) def test_none_key_exclude(self): obj = JSONModel.objects.create(field={'j': 1}) self.assertSequenceEqual(JSONModel.objects.exclude(field__j=None), [obj]) def test_isnull_key_or_none(self): obj = JSONModel.objects.create(field={'a': None}) self.assertSequenceEqual( JSONModel.objects.filter(Q(field__a__isnull=True) | Q(field__a=None)), self.objs[:7] + self.objs[9:] + [obj] ) def test_contains(self): self.assertSequenceEqual( JSONModel.objects.filter(field__contains={'a': 'b'}), [self.objs[7], self.objs[8]] ) def test_contained_by(self): self.assertSequenceEqual( JSONModel.objects.filter(field__contained_by={'a': 'b', 'c': 1, 'h': True}), [self.objs[6], self.objs[7]] ) def test_has_key(self): self.assertSequenceEqual( JSONModel.objects.filter(field__has_key='a'), [self.objs[7], self.objs[8]] ) def test_has_keys(self): self.assertSequenceEqual( JSONModel.objects.filter(field__has_keys=['a', 'c', 'h']), [self.objs[8]] ) def test_has_any_keys(self): self.assertSequenceEqual( JSONModel.objects.filter(field__has_any_keys=['c', 'l']), [self.objs[7], self.objs[8], self.objs[10]] ) def test_shallow_list_lookup(self): self.assertSequenceEqual( JSONModel.objects.filter(field__0=1), [self.objs[9]] ) def test_shallow_obj_lookup(self): self.assertSequenceEqual( JSONModel.objects.filter(field__a='b'), [self.objs[7], self.objs[8]] ) def test_obj_subquery_lookup(self): qs = JSONModel.objects.annotate( value=Subquery(JSONModel.objects.filter(pk=OuterRef('pk')).values('field')), ).filter(value__a='b') self.assertSequenceEqual(qs, [self.objs[7], self.objs[8]]) def test_deep_lookup_objs(self): self.assertSequenceEqual( JSONModel.objects.filter(field__k__l='m'), 
[self.objs[8]] ) def test_shallow_lookup_obj_target(self): self.assertSequenceEqual( JSONModel.objects.filter(field__k={'l': 'm'}), [self.objs[8]] ) def test_deep_lookup_array(self): self.assertSequenceEqual( JSONModel.objects.filter(field__1__0=2), [self.objs[9]] ) def test_deep_lookup_mixed(self): self.assertSequenceEqual( JSONModel.objects.filter(field__d__1__f='g'), [self.objs[8]] ) def test_deep_lookup_transform(self): self.assertSequenceEqual( JSONModel.objects.filter(field__c__gt=1), [] ) self.assertSequenceEqual( JSONModel.objects.filter(field__c__lt=5), [self.objs[7], self.objs[8]] ) def test_usage_in_subquery(self): self.assertSequenceEqual( JSONModel.objects.filter(id__in=JSONModel.objects.filter(field__c=1)), self.objs[7:9] ) def test_iexact(self): self.assertTrue(JSONModel.objects.filter(field__foo__iexact='BaR').exists()) self.assertFalse(JSONModel.objects.filter(field__foo__iexact='"BaR"').exists()) def test_icontains(self): self.assertFalse(JSONModel.objects.filter(field__foo__icontains='"bar"').exists()) def test_startswith(self): self.assertTrue(JSONModel.objects.filter(field__foo__startswith='b').exists()) def test_istartswith(self): self.assertTrue(JSONModel.objects.filter(field__foo__istartswith='B').exists()) def test_endswith(self): self.assertTrue(JSONModel.objects.filter(field__foo__endswith='r').exists()) def test_iendswith(self): self.assertTrue(JSONModel.objects.filter(field__foo__iendswith='R').exists()) def test_regex(self): self.assertTrue(JSONModel.objects.filter(field__foo__regex=r'^bar$').exists()) def test_iregex(self): self.assertTrue(JSONModel.objects.filter(field__foo__iregex=r'^bAr$').exists()) def test_key_sql_injection(self): with CaptureQueriesContext(connection) as queries: self.assertFalse( JSONModel.objects.filter(**{ """field__test' = '"a"') OR 1 = 1 OR ('d""": 'x', }).exists() ) self.assertIn( """."field" -> 'test'' = ''"a"'') OR 1 = 1 OR (''d') = '"x"' """, queries[0]['sql'], ) def 
test_lookups_with_key_transform(self): tests = ( ('field__d__contains', 'e'), ('field__baz__contained_by', {'a': 'b', 'c': 'd', 'e': 'f'}), ('field__baz__has_key', 'c'), ('field__baz__has_keys', ['a', 'c']), ('field__baz__has_any_keys', ['a', 'x']), ('field__contains', KeyTransform('bax', 'field')), ( 'field__contained_by', KeyTransform('x', RawSQL('%s::jsonb', ['{"x": {"a": "b", "c": 1, "d": "e"}}'])), ), ('field__has_key', KeyTextTransform('foo', 'field')), ) for lookup, value in tests: with self.subTest(lookup=lookup): self.assertTrue(JSONModel.objects.filter( **{lookup: value}, ).exists()) @isolate_apps('postgres_tests') class TestChecks(PostgreSQLSimpleTestCase): def test_invalid_default(self): class MyModel(PostgreSQLModel): field = JSONField(default={}) model = MyModel() self.assertEqual(model.check(), [ checks.Warning( msg=( "JSONField default should be a callable instead of an " "instance so that it's not shared between all field " "instances." ), hint='Use a callable instead, e.g., use `dict` instead of `{}`.', obj=MyModel._meta.get_field('field'), id='postgres.E003', ) ]) def test_valid_default(self): class MyModel(PostgreSQLModel): field = JSONField(default=dict) model = MyModel() self.assertEqual(model.check(), []) def test_valid_default_none(self): class MyModel(PostgreSQLModel): field = JSONField(default=None) model = MyModel() self.assertEqual(model.check(), []) class TestSerialization(PostgreSQLSimpleTestCase): test_data = ( '[{"fields": {"field": %s, "field_custom": null}, ' '"model": "postgres_tests.jsonmodel", "pk": null}]' ) test_values = ( # (Python value, serialized value), ({'a': 'b', 'c': None}, '{"a": "b", "c": null}'), ('abc', '"abc"'), ('{"a": "a"}', '"{\\"a\\": \\"a\\"}"'), ) def test_dumping(self): for value, serialized in self.test_values: with self.subTest(value=value): instance = JSONModel(field=value) data = serializers.serialize('json', [instance]) self.assertJSONEqual(data, self.test_data % serialized) def test_loading(self): for 
value, serialized in self.test_values: with self.subTest(value=value): instance = list(serializers.deserialize('json', self.test_data % serialized))[0].object self.assertEqual(instance.field, value) class TestValidation(PostgreSQLSimpleTestCase): def test_not_serializable(self): field = JSONField() with self.assertRaises(exceptions.ValidationError) as cm: field.clean(datetime.timedelta(days=1), None) self.assertEqual(cm.exception.code, 'invalid') self.assertEqual(cm.exception.message % cm.exception.params, "Value must be valid JSON.") def test_custom_encoder(self): with self.assertRaisesMessage(ValueError, "The encoder parameter must be a callable object."): field = JSONField(encoder=DjangoJSONEncoder()) field = JSONField(encoder=DjangoJSONEncoder) self.assertEqual(field.clean(datetime.timedelta(days=1), None), datetime.timedelta(days=1)) class TestFormField(PostgreSQLSimpleTestCase): def test_valid(self): field = forms.JSONField() value = field.clean('{"a": "b"}') self.assertEqual(value, {'a': 'b'}) def test_valid_empty(self): field = forms.JSONField(required=False) value = field.clean('') self.assertIsNone(value) def test_invalid(self): field = forms.JSONField() with self.assertRaises(exceptions.ValidationError) as cm: field.clean('{some badly formed: json}') self.assertEqual(cm.exception.messages[0], "'{some badly formed: json}' value must be valid JSON.") def test_formfield(self): model_field = JSONField() form_field = model_field.formfield() self.assertIsInstance(form_field, forms.JSONField) def test_formfield_disabled(self): class JsonForm(Form): name = CharField() jfield = forms.JSONField(disabled=True) form = JsonForm({'name': 'xyz', 'jfield': '["bar"]'}, initial={'jfield': ['foo']}) self.assertIn('[&quot;foo&quot;]</textarea>', form.as_p()) def test_prepare_value(self): field = forms.JSONField() self.assertEqual(field.prepare_value({'a': 'b'}), '{"a": "b"}') self.assertEqual(field.prepare_value(None), 'null') self.assertEqual(field.prepare_value('foo'), 
'"foo"') def test_redisplay_wrong_input(self): """ When displaying a bound form (typically due to invalid input), the form should not overquote JSONField inputs. """ class JsonForm(Form): name = CharField(max_length=2) jfield = forms.JSONField() # JSONField input is fine, name is too long form = JsonForm({'name': 'xyz', 'jfield': '["foo"]'}) self.assertIn('[&quot;foo&quot;]</textarea>', form.as_p()) # This time, the JSONField input is wrong form = JsonForm({'name': 'xy', 'jfield': '{"foo"}'}) # Appears once in the textarea and once in the error message self.assertEqual(form.as_p().count(escape('{"foo"}')), 2) def test_widget(self): """The default widget of a JSONField is a Textarea.""" field = forms.JSONField() self.assertIsInstance(field.widget, widgets.Textarea) def test_custom_widget_kwarg(self): """The widget can be overridden with a kwarg.""" field = forms.JSONField(widget=widgets.Input) self.assertIsInstance(field.widget, widgets.Input) def test_custom_widget_attribute(self): """The widget can be overridden with an attribute.""" class CustomJSONField(forms.JSONField): widget = widgets.Input field = CustomJSONField() self.assertIsInstance(field.widget, widgets.Input) def test_already_converted_value(self): field = forms.JSONField(required=False) tests = [ '["a", "b", "c"]', '{"a": 1, "b": 2}', '1', '1.5', '"foo"', 'true', 'false', 'null', ] for json_string in tests: val = field.clean(json_string) self.assertEqual(field.clean(val), val) def test_has_changed(self): field = forms.JSONField() self.assertIs(field.has_changed({'a': True}, '{"a": 1}'), True) self.assertIs(field.has_changed({'a': 1, 'b': 2}, '{"b": 2, "a": 1}'), False)
en
0.688887
JSONModel.field_custom has a custom DjangoJSONEncoder. # key__isnull works the same as has_key='key'. field__test' = '"a"') OR 1 = 1 OR ('d ."field" -> 'test'' = ''"a"'') OR 1 = 1 OR (''d') = '"x"' # (Python value, serialized value), When displaying a bound form (typically due to invalid input), the form should not overquote JSONField inputs. # JSONField input is fine, name is too long # This time, the JSONField input is wrong # Appears once in the textarea and once in the error message The default widget of a JSONField is a Textarea. The widget can be overridden with a kwarg. The widget can be overridden with an attribute.
2.203234
2
resources/tests/test_reservation_api.py
haltu/hmlvaraus-backend
1
6624638
import pytest import datetime import re from django.contrib.auth import get_user_model from django.core.urlresolvers import reverse from django.core import mail from django.test.utils import override_settings from django.utils import dateparse, timezone from guardian.shortcuts import assign_perm from freezegun import freeze_time from resources.models import (Period, Day, Reservation, Resource, ReservationMetadataField, ReservationMetadataSet) from users.models import User from .utils import check_disallowed_methods, assert_non_field_errors_contain, check_received_mail_exists DEFAULT_RESERVATION_EXTRA_FIELDS = ('reserver_name', 'reserver_phone_number', 'reserver_address_street', 'reserver_address_zip', 'reserver_address_city', 'billing_address_street', 'billing_address_zip', 'billing_address_city', 'company', 'event_description', 'reserver_id', 'number_of_participants', 'reserver_email_address') DEFAULT_REQUIRED_RESERVATION_EXTRA_FIELDS = ('reserver_name', 'reserver_phone_number', 'reserver_address_street', 'reserver_address_zip', 'reserver_address_city', 'event_description', 'reserver_id', 'reserver_email_address') @pytest.fixture def list_url(): return reverse('reservation-list') @pytest.fixture def detail_url(reservation): return reverse('reservation-detail', kwargs={'pk': reservation.pk}) @pytest.mark.django_db @pytest.fixture(autouse=True) def day_and_period(resource_in_unit): period = Period.objects.create( start='2005-04-01', end='2115-05-01', resource_id=resource_in_unit.id, name='test_period' ) Day.objects.create(period=period, weekday=3, opens='08:00', closes='16:00') @pytest.mark.django_db @pytest.fixture def reservation_data(resource_in_unit): return { 'resource': resource_in_unit.pk, 'begin': '2115-04-04T11:00:00+02:00', 'end': '2115-04-04T12:00:00+02:00' } @pytest.fixture def reservation_data_extra(reservation_data): extra_data = reservation_data.copy() extra_data.update({ 'reserver_name': 'Test Reserver', 'reserver_phone_number': '0700555555', 
'reserver_address_street': 'Omenatie 102', 'reserver_address_zip': '00930', 'reserver_address_city': 'Helsinki', 'event_description': 'a very secret meeting', 'reserver_id': '1234567-8', 'number_of_participants': 5000, 'billing_address_street': 'Pihlajakatu', 'billing_address_zip': '00001', 'billing_address_city': 'Tampere', 'company': 'a very secret association', 'reserver_email_address': '<EMAIL>', }) return extra_data @pytest.mark.django_db @pytest.fixture def reservation(resource_in_unit, user): return Reservation.objects.create( resource=resource_in_unit, begin='2115-04-04T09:00:00+02:00', end='2115-04-04T10:00:00+02:00', user=user, ) @pytest.mark.django_db @pytest.fixture def other_resource(space_resource_type, test_unit): return Resource.objects.create( type=space_resource_type, authentication="none", name="other resource", unit=test_unit, id="otherresourceid", ) @pytest.fixture def reservations_in_all_states(resource_in_unit, user): all_states = (Reservation.CANCELLED, Reservation.CONFIRMED, Reservation.DENIED, Reservation.REQUESTED) reservations = dict() for i, state in enumerate(all_states, 4): reservations[state] = Reservation.objects.create( resource=resource_in_unit, begin='2115-04-0%sT09:00:00+02:00' % i, end='2115-04-0%sT10:00:00+02:00' % i, user=user, state=state ) return reservations @pytest.mark.django_db def test_disallowed_methods(all_user_types_api_client, list_url): """ Tests that PUT, PATCH and DELETE aren't allowed to reservation list endpoint. """ check_disallowed_methods(all_user_types_api_client, (list_url, ), ('put', 'patch', 'delete')) @pytest.mark.django_db def test_reservation_requires_authenticated_user(api_client, list_url, reservation_data): """ Tests that an unauthenticated user cannot create a reservation. 
""" response = api_client.post(list_url, data=reservation_data) assert response.status_code == 401 @pytest.mark.django_db def test_authenticated_user_can_make_reservation(api_client, list_url, reservation_data, resource_in_unit, user): """ Tests that an authenticated user can create a reservation. """ api_client.force_authenticate(user=user) response = api_client.post(list_url, data=reservation_data) assert response.status_code == 201 reservation = Reservation.objects.filter(user=user).latest('created_at') assert reservation.resource == resource_in_unit assert reservation.begin == dateparse.parse_datetime('2115-04-04T11:00:00+02:00') assert reservation.end == dateparse.parse_datetime('2115-04-04T12:00:00+02:00') @pytest.mark.django_db def test_authenticated_user_can_modify_reservation( api_client, detail_url, reservation_data, resource_in_unit, user): """ Tests that an authenticated user can modify her own reservation """ api_client.force_authenticate(user=user) response = api_client.put(detail_url, data=reservation_data) assert response.status_code == 200 reservation = Reservation.objects.get(pk=response.data['id']) assert reservation.resource == resource_in_unit assert reservation.begin == dateparse.parse_datetime('2115-04-04T11:00:00+02:00') assert reservation.end == dateparse.parse_datetime('2115-04-04T12:00:00+02:00') @pytest.mark.django_db def test_authenticated_user_can_delete_reservation(api_client, detail_url, reservation, user): """ Tests that an authenticated user can delete her own reservation """ api_client.force_authenticate(user=user) reservation_id = reservation.id response = api_client.delete(detail_url) assert response.status_code == 204 assert Reservation.objects.filter(pk=reservation_id).count() == 1 reservation.refresh_from_db() assert reservation.state == Reservation.CANCELLED @pytest.mark.django_db def test_reservation_limit_per_user(api_client, list_url, reservation, reservation_data, user): """ Tests that a user cannot exceed her active 
reservation limit for one resource. """ api_client.force_authenticate(user=user) # the user already has one reservation, making another reservation should not be possible as the active reservation # limit is one response = api_client.post(list_url, data=reservation_data, HTTP_ACCEPT_LANGUAGE='en') assert response.status_code == 400 assert_non_field_errors_contain(response, 'Maximum number of active reservations for this resource exceeded.') @pytest.mark.django_db def test_old_reservations_are_excluded(api_client, list_url, resource_in_unit, reservation_data, user): """ Tests that a reservation in the past doesn't count when checking reservation limit. """ # the user already has this reservation which is in the past. Reservation.objects.create( resource=resource_in_unit, begin=dateparse.parse_datetime('2005-04-07T09:00:00+02:00'), end=dateparse.parse_datetime('2005-04-07T10:00:00+02:00'), user=user, ) api_client.force_authenticate(user=user) # making another reservation should be possible because the other reservation is in the past. response = api_client.post(list_url, data=reservation_data, HTTP_ACCEPT_LANGUAGE='en') assert response.status_code == 201 @pytest.mark.django_db def test_staff_has_no_reservation_limit(api_client, list_url, reservation, reservation_data, user): """ Tests that the reservation limits for a resource do not apply to staff. """ user.is_staff = True user.save() api_client.force_authenticate(user=user) # the staff member already has one reservation, and should be able to make a second one regardless of the fact that # that the limit is one. response = api_client.post(list_url, data=reservation_data, HTTP_ACCEPT_LANGUAGE='en') assert response.status_code == 201 @pytest.mark.django_db def test_normal_user_cannot_make_reservation_outside_open_hours(api_client, list_url, reservation_data, user): """ Tests that a normal user cannot make reservations outside open hours. 
""" api_client.force_authenticate(user=user) # invalid day reservation_data['begin'] = '2115-06-01T09:00:00+02:00' reservation_data['end'] = '2115-06-01T10:00:00+02:00' response = api_client.post(list_url, data=reservation_data, HTTP_ACCEPT_LANGUAGE='en') assert response.status_code == 400 assert_non_field_errors_contain(response, 'You must start and end the reservation during opening hours') # valid begin time, end time after closing time reservation_data['begin'] = '2115-04-04T10:00:00+02:00' reservation_data['end'] = '2115-04-04T21:00:00+02:00' response = api_client.post(list_url, data=reservation_data, HTTP_ACCEPT_LANGUAGE='en') assert response.status_code == 400 assert_non_field_errors_contain(response, 'You must start and end the reservation during opening hours') # begin time before opens, valid end time reservation_data['begin'] = '2115-04-04T05:00:00+02:00' reservation_data['end'] = '2115-04-04T10:00:00+02:00' response = api_client.post(list_url, data=reservation_data, HTTP_ACCEPT_LANGUAGE='en') assert response.status_code == 400 assert_non_field_errors_contain(response, 'You must start and end the reservation during opening hours') @pytest.mark.django_db def test_normal_user_cannot_make_reservation_longer_than_max_period(api_client, list_url, reservation_data, user): """ Tests that a normal user cannot make reservations longer than the resource's max period. """ api_client.force_authenticate(user=user) # the reservation's length is 3h (11 -> 14) while the maximum is 2h reservation_data['end'] = '2115-04-04T14:00:00+02:00' response = api_client.post(list_url, data=reservation_data, HTTP_ACCEPT_LANGUAGE='en') assert response.status_code == 400 assert_non_field_errors_contain(response, 'The maximum reservation length is') @pytest.mark.django_db def test_staff_user_can_make_reservation_outside_open_hours(api_client, list_url, reservation_data, user): """ Tests that a staff member can make reservations outside opening hours. 
Also tests that the resource's max period doesn't limit staff. """ user.is_staff = True user.save() api_client.force_authenticate(user=user) # begin time before opening time, end time after closing time, longer than max period 2h reservation_data['begin'] = '2115-04-04T05:00:00+02:00' reservation_data['end'] = '2115-04-04T21:00:00+02:00' response = api_client.post(list_url, data=reservation_data, HTTP_ACCEPT_LANGUAGE='en') assert response.status_code == 201 @pytest.mark.django_db def test_comments_are_only_for_staff(api_client, list_url, reservation_data, user): api_client.force_authenticate(user=user) reservation_data['comments'] = 'test comment' response = api_client.post(list_url, data=reservation_data) assert response.status_code == 400 user.is_staff = True user.save() response = api_client.post(list_url, data=reservation_data) assert response.status_code == 201 response = api_client.get(response.data['url']) assert response.data['comments'] == 'test comment' user.is_staff = False user.save() response = api_client.get(response.data['url']) assert 'comments' not in response.data @pytest.mark.django_db def test_user_data_correct_and_only_for_staff(api_client, reservation, user): """ Tests that user object is returned within Reservation data and it is in the correct form. Also tests that only staff can see the user object. 
""" api_client.force_authenticate(user=user) detail_url = reverse('reservation-detail', kwargs={'pk': reservation.pk}) response = api_client.get(detail_url) assert 'user' not in response.data user.is_staff = True user.save() response = api_client.get(detail_url) user_obj = response.data['user'] assert len(user_obj) == 3 assert user_obj['display_name'] == '<NAME>' assert user_obj['email'] == '<EMAIL>' assert user_obj['id'] is not None @pytest.mark.django_db def test_reservation_can_be_modified_by_overlapping_reservation(api_client, reservation, reservation_data, user): """ Tests that a reservation can be modified with times that overlap with the original times. """ api_client.force_authenticate(user=user) detail_url = reverse('reservation-detail', kwargs={'pk': reservation.pk}) # try to extend the original reservation by 1 hour reservation_data['begin'] = '2115-04-04T09:00:00+02:00' reservation_data['end'] = '2115-04-04T11:00:00+02:00' response = api_client.put(detail_url, reservation_data) assert response.status_code == 200 reservation = Reservation.objects.get(pk=reservation.pk) assert reservation.begin == dateparse.parse_datetime('2115-04-04T09:00:00+02:00') assert reservation.end == dateparse.parse_datetime('2115-04-04T11:00:00+02:00') @pytest.mark.django_db def test_non_reservable_resource_restrictions(api_client, list_url, resource_in_unit, reservation_data, user): """ Tests that a normal user cannot make a reservation to a non reservable resource but staff can. Creating a new reservation with POST and updating an existing one with PUT are both tested. 
""" resource_in_unit.reservable = False resource_in_unit.save() api_client.force_authenticate(user=user) response = api_client.post(list_url, data=reservation_data) assert response.status_code == 403 # Create a reservation and try to change that with PUT reservation = Reservation.objects.create( resource=resource_in_unit, begin=dateparse.parse_datetime('2115-04-07T09:00:00+02:00'), end=dateparse.parse_datetime('2115-04-07T10:00:00+02:00'), user=user, ) detail_url = reverse('reservation-detail', kwargs={'pk': reservation.pk}) response = api_client.put(detail_url, reservation_data) assert response.status_code == 403 # a staff member should be allowed to create and update user.is_staff = True user.save() response = api_client.post(list_url, data=reservation_data) assert response.status_code == 201 reservation_data['begin'] = dateparse.parse_datetime('2115-04-08T09:00:00+02:00') reservation_data['end'] = dateparse.parse_datetime('2115-04-08T10:00:00+02:00') response = api_client.put(detail_url, data=reservation_data) assert response.status_code == 200 @pytest.mark.django_db def test_reservation_restrictions_by_owner(api_client, list_url, reservation, reservation_data, user, user2): """ Tests that a normal user can't modify other people's reservations while a staff member can. 
""" detail_url = reverse('reservation-detail', kwargs={'pk': reservation.pk}) api_client.force_authenticate(user=user2) response = api_client.put(detail_url, reservation_data) assert response.status_code == 403 response = api_client.delete(detail_url, reservation_data) assert response.status_code == 403 # a staff member should be allowed to perform every modifying method even that she is not the user in # the reservation user2.is_staff = True user2.save() response = api_client.put(detail_url, reservation_data) assert response.status_code == 200 response = api_client.delete(detail_url, reservation_data) assert response.status_code == 204 @pytest.mark.django_db def test_normal_users_cannot_make_reservations_for_others( api_client, list_url, reservation, reservation_data, user, user2): """ Tests that a normal user cannot make a reservation for other people. """ api_client.force_authenticate(user=user) detail_url = reverse('reservation-detail', kwargs={'pk': reservation.pk}) # set bigger max reservations limit so that it won't be a limiting factor here reservation.resource.max_reservations_per_user = 2 reservation.resource.save() # set another user for new reservations reservation_data['user'] = {'id': user2.uuid} # modify an existing reservation, and verify that user isn't changed response = api_client.put(detail_url, data=reservation_data, format='json') assert response.status_code == 200 new_reservation = Reservation.objects.get(id=response.data['id']) assert new_reservation.user == user # make a new reservation and verify that user isn't the other one reservation_data['begin'] = dateparse.parse_datetime('2115-04-04T13:00:00+02:00') reservation_data['end'] = dateparse.parse_datetime('2115-04-04T14:00:00+02:00') response = api_client.post(list_url, data=reservation_data, format='json') assert response.status_code == 201 new_reservation = Reservation.objects.get(id=response.data['id']) assert new_reservation.user == user @pytest.mark.django_db def 
test_reservation_staff_members_can_make_reservations_for_others( api_client, list_url, reservation, reservation_data, user, user2): """ Tests that a staff member can make reservations for other people without normal user restrictions. """ user.is_staff = True user.save() api_client.force_authenticate(user=user) # dealing with another user's reservation reservation.user = user2 reservation.save() reservation_data['user'] = {'id': user2.uuid} # modify an existing reservation detail_url = reverse('reservation-detail', kwargs={'pk': reservation.pk}) response = api_client.put(detail_url, data=reservation_data, format='json') assert response.status_code == 200 new_reservation = Reservation.objects.get(id=response.data['id']) assert new_reservation.user == user2 # create a new reservation, which is also too long, outside the opening hours and exceeds normal user # reservation limit. creating such a reservation for a normal user should be possible for a staff member reservation_data['begin'] = dateparse.parse_datetime('2115-04-04T13:00:00+02:00') reservation_data['end'] = dateparse.parse_datetime('2115-04-04T20:00:00+02:00') response = api_client.post(list_url, data=reservation_data, format='json') assert response.status_code == 201 new_reservation = Reservation.objects.get(id=response.data['id']) assert new_reservation.user == user2 @pytest.mark.django_db def test_reservation_user_filter(api_client, list_url, reservation, resource_in_unit, user, user2): """ Tests that reservation user and is_own filtering work correctly. 
""" reservation2 = Reservation.objects.create( resource=resource_in_unit, begin=dateparse.parse_datetime('2115-04-07T11:00:00+02:00'), end=dateparse.parse_datetime('2115-04-07T12:00:00+02:00'), user=user2, ) # even unauthenticated user should see all the reservations response = api_client.get(list_url) assert response.data['count'] == 2 # filtering by user response = api_client.get(list_url + '?user=%s' % user.uuid) assert response.data['count'] == 1 assert response.data['results'][0]['id'] == reservation.id # filtering by is_own api_client.force_authenticate(user=user) response = api_client.get(list_url + '?is_own=true') assert response.data['count'] == 1 assert response.data['results'][0]['id'] == reservation.id response = api_client.get(list_url + '?is_own=false') assert response.data['count'] == 1 assert response.data['results'][0]['id'] == reservation2.id @pytest.mark.django_db def test_reservation_time_filters(api_client, list_url, reservation, resource_in_unit, user): reservation2 = Reservation.objects.create( resource=resource_in_unit, begin=dateparse.parse_datetime('2015-04-07T11:00:00+02:00'), end=dateparse.parse_datetime('2015-04-07T12:00:00+02:00'), user=user, ) # without the filter, only the reservation in the future should be returned response = api_client.get(list_url) assert response.data['count'] == 1 assert response.data['results'][0]['id'] == reservation.id # with the 'all' filter, both reservations should be returned response = api_client.get(list_url + '?all=true') assert response.data['count'] == 2 assert {reservation.id, reservation2.id}.issubset(set(res['id'] for res in response.data['results'])) # with start or end, both reservations should be returned # filtering by start date only response = api_client.get(list_url + '?start=2065-04-06') assert response.data['count'] == 1 assert response.data['results'][0]['id'] == reservation.id # filtering by end date only response = api_client.get(list_url + '?end=2065-04-06') assert 
response.data['count'] == 1 assert response.data['results'][0]['id'] == reservation2.id # filtering by start and end times response = api_client.get(list_url + '?start=2065-04-06T11:00:00%2b02:00' + '&end=2065-04-06T12:00:00%2b02:00') assert response.data['count'] == 0 response = api_client.get(list_url + '?start=2005-04-07T11:30:00%2b02:00' + '&end=2115-04-04T09:30:00%2b02:00') assert response.data['count'] == 2 assert {reservation.id, reservation2.id}.issubset(set(res['id'] for res in response.data['results'])) @pytest.mark.parametrize("input_hours,input_mins,expected", [ (2, 30, '2 hours 30 minutes'), (1, 30, '1 hour 30 minutes'), (1, 0, '1 hour'), (0, 30, '30 minutes'), (0, 1, '1 minute'), ]) @pytest.mark.django_db def test_max_reservation_period_error_message( api_client, list_url, resource_in_unit, reservation_data, user, input_hours, input_mins, expected): """ Tests that maximum reservation period error is returned in correct humanized form. """ reservation_data['end'] = '2115-04-04T16:00:00+02:00' # too long reservation resource_in_unit.max_period=datetime.timedelta(hours=input_hours, minutes=input_mins) resource_in_unit.save() api_client.force_authenticate(user=user) response = api_client.post(list_url, data=reservation_data, HTTP_ACCEPT_LANGUAGE='en') assert response.status_code == 400 assert response.data['non_field_errors'][0] == 'The maximum reservation length is %s' % expected @pytest.mark.django_db def test_reservation_excels(staff_api_client, list_url, detail_url, reservation, user): """ Tests that reservation list and detail endpoints return .xlsx files when requested """ response = staff_api_client.get( list_url, HTTP_ACCEPT='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', HTTP_ACCEPT_LANGUAGE='en', ) assert response.status_code == 200 assert response._headers['content-disposition'] == ('Content-Disposition', 'attachment; filename=reservations.xlsx') assert len(response.content) > 0 response = staff_api_client.get( detail_url, 
HTTP_ACCEPT='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', HTTP_ACCEPT_LANGUAGE='en', ) assert response.status_code == 200 assert response._headers['content-disposition'] == ( 'Content-Disposition', 'attachment; filename=reservation-{}.xlsx'.format(reservation.pk)) assert len(response.content) > 0 @pytest.mark.parametrize('need_manual_confirmation, expected_state', [ (False, Reservation.CONFIRMED), (True, Reservation.REQUESTED) ]) @pytest.mark.django_db def test_state_on_new_reservations(user_api_client, list_url, reservation_data_extra, resource_in_unit, need_manual_confirmation, expected_state): resource_in_unit.need_manual_confirmation = need_manual_confirmation if need_manual_confirmation: resource_in_unit.reservation_metadata_set = ReservationMetadataSet.objects.get(name='default') resource_in_unit.save() response = user_api_client.post(list_url, data=reservation_data_extra) assert response.status_code == 201 reservation = Reservation.objects.latest('created_at') assert reservation.state == expected_state @pytest.mark.parametrize('state', [ 'illegal_state', '', None, ]) @pytest.mark.django_db def test_illegal_state_set(user_api_client, list_url, detail_url, reservation_data, state): reservation_data['state'] = state response = user_api_client.post(list_url, data=reservation_data, format='json') assert response.status_code == 400 assert 'state' in response.data response = user_api_client.put(detail_url, data=reservation_data, format='json') assert response.status_code == 400 assert 'state' in response.data @pytest.mark.parametrize('need_manual_confirmation', [ False, True ]) @pytest.mark.django_db def test_extra_fields_visibility(user_api_client, list_url, detail_url, reservation, resource_in_unit, need_manual_confirmation): resource_in_unit.need_manual_confirmation = need_manual_confirmation if need_manual_confirmation: resource_in_unit.reservation_metadata_set = ReservationMetadataSet.objects.get(name='default') resource_in_unit.save() 
for url in (list_url, detail_url): response = user_api_client.get(url) assert response.status_code == 200 reservation_data = response.data['results'][0] if 'results' in response.data else response.data for field_name in DEFAULT_RESERVATION_EXTRA_FIELDS: assert (field_name in reservation_data) is need_manual_confirmation @pytest.mark.django_db def test_extra_fields_required_for_paid_reservations(user_api_client, staff_api_client, staff_user, list_url, resource_in_unit, reservation_data): resource_in_unit.need_manual_confirmation = True resource_in_unit.reservation_metadata_set = ReservationMetadataSet.objects.get(name='default') resource_in_unit.save() response = user_api_client.post(list_url, data=reservation_data) assert response.status_code == 400 assert set(DEFAULT_REQUIRED_RESERVATION_EXTRA_FIELDS) == set(response.data) response = staff_api_client.post(list_url, data=reservation_data) assert response.status_code == 400 assert set(DEFAULT_REQUIRED_RESERVATION_EXTRA_FIELDS) == set(response.data) assign_perm('can_approve_reservation', staff_user, resource_in_unit.unit) response = staff_api_client.post(list_url, data=reservation_data) assert response.status_code == 400 assert set(DEFAULT_REQUIRED_RESERVATION_EXTRA_FIELDS) == set(response.data) @pytest.mark.django_db def test_staff_event_restrictions(user_api_client, staff_api_client, staff_user, list_url, resource_in_unit, reservation_data): resource_in_unit.need_manual_confirmation = True resource_in_unit.reservation_metadata_set = ReservationMetadataSet.objects.get(name='default') resource_in_unit.save() reservation_data['staff_event'] = True # normal user response = user_api_client.post(list_url, data=reservation_data) assert response.status_code == 400 assert set(DEFAULT_REQUIRED_RESERVATION_EXTRA_FIELDS) == set(response.data) # staff member response = staff_api_client.post(list_url, data=reservation_data) assert response.status_code == 400 assert set(DEFAULT_REQUIRED_RESERVATION_EXTRA_FIELDS) == 
set(response.data) # staff with permission but reserver_name and event_description missing assign_perm('can_approve_reservation', staff_user, resource_in_unit.unit) response = staff_api_client.post(list_url, data=reservation_data) assert response.status_code == 400 assert {'reserver_name', 'event_description'} == set(response.data) @pytest.mark.django_db def test_new_staff_event_gets_confirmed(user_api_client, staff_api_client, staff_user, list_url, resource_in_unit, reservation_data, reservation_data_extra): resource_in_unit.need_manual_confirmation = True resource_in_unit.reservation_metadata_set = ReservationMetadataSet.objects.get(name='default') resource_in_unit.save() reservation_data['staff_event'] = True # reservation should not be be confirmed if the user doesn't have approve permission response = staff_api_client.post(list_url, data=reservation_data_extra) assert response.status_code == 201 reservation = Reservation.objects.get(id=response.data['id']) assert reservation.state == Reservation.REQUESTED reservation.delete() assign_perm('can_approve_reservation', staff_user, resource_in_unit.unit) reservation_data['reserver_name'] = '<NAME>' reservation_data['event_description'] = 'herra huun bileet' response = staff_api_client.post(list_url, data=reservation_data) assert response.status_code == 201 reservation = Reservation.objects.get(id=response.data['id']) assert reservation.state == Reservation.CONFIRMED @pytest.mark.django_db def test_extra_fields_can_be_set_for_paid_reservations(user_api_client, list_url, reservation_data_extra, resource_in_unit): resource_in_unit.max_reservations_per_user = 2 resource_in_unit.need_manual_confirmation = True resource_in_unit.reservation_metadata_set = ReservationMetadataSet.objects.get(name='default') resource_in_unit.save() response = user_api_client.post(list_url, data=reservation_data_extra) assert response.status_code == 201 reservation = Reservation.objects.latest('created_at') assert 
reservation.reserver_address_street == 'Omenatie 102' reservation_data_extra['reserver_address_street'] = 'Karhutie 8' response = user_api_client.put('%s%s/' % (list_url, reservation.pk), data=reservation_data_extra) assert response.status_code == 200 reservation.refresh_from_db() assert reservation.reserver_address_street == 'Karhutie 8' @pytest.mark.django_db def test_extra_fields_ignored_for_non_paid_reservations(user_api_client, list_url, reservation_data_extra, resource_in_unit): response = user_api_client.post(list_url, data=reservation_data_extra) assert response.status_code == 201 reservation = Reservation.objects.latest('created_at') assert reservation.reserver_name == '' assert reservation.number_of_participants is None @pytest.mark.django_db def test_user_can_see_her_reservations_in_all_states(user_api_client, list_url, reservations_in_all_states): response = user_api_client.get(list_url) assert response.status_code == 200 assert response.data['count'] == 4 @pytest.mark.django_db def test_user_cannot_see_others_denied_or_cancelled_reservations(api_client, user2, list_url, reservations_in_all_states): api_client.force_authenticate(user=user2) response = api_client.get(list_url) assert response.status_code == 200 assert response.data['count'] == 2 assert set([Reservation.CONFIRMED, Reservation.REQUESTED]) == set(r['state'] for r in response.data['results']) @pytest.mark.django_db def test_staff_can_see_reservations_in_all_states(staff_api_client, list_url, reservations_in_all_states): response = staff_api_client.get(list_url) assert response.status_code == 200 assert response.data['count'] == 4 @pytest.mark.django_db def test_reservation_cannot_be_confirmed_without_permission(user_api_client, staff_api_client, detail_url, reservation, reservation_data): reservation.state = Reservation.REQUESTED reservation.save() reservation_data['state'] = Reservation.CONFIRMED response = user_api_client.put(detail_url, data=reservation_data) assert response.status_code 
== 400 assert 'state' in response.data response = staff_api_client.put(detail_url, data=reservation_data) assert response.status_code == 400 assert 'state' in response.data @pytest.mark.django_db def test_reservation_can_be_confirmed_with_permission(staff_api_client, staff_user, detail_url, reservation, reservation_data): reservation.state = Reservation.REQUESTED reservation.save() reservation_data['state'] = Reservation.CONFIRMED assign_perm('can_approve_reservation', staff_user, reservation.resource.unit) response = staff_api_client.put(detail_url, data=reservation_data) assert response.status_code == 200 reservation.refresh_from_db() assert reservation.state == Reservation.CONFIRMED assert reservation.approver == staff_user @pytest.mark.django_db def test_user_cannot_modify_or_cancel_manually_confirmed_reservation(user_api_client, detail_url, reservation, reservation_data_extra, resource_in_unit): resource_in_unit.need_manual_confirmation = True resource_in_unit.reservation_metadata_set = ReservationMetadataSet.objects.get(name='default') resource_in_unit.save() response = user_api_client.put(detail_url, data=reservation_data_extra) assert response.status_code == 403 response = user_api_client.delete(detail_url) assert response.status_code == 403 @pytest.mark.parametrize('username, expected_visibility', [ (None, False), # unauthenticated user ('test_user', True), # own reservation ('test_user2', False), # someone else's reservation ('test_staff_user', True) # staff ]) @pytest.mark.django_db def test_extra_fields_visibility_for_different_user_types(api_client, user, user2, staff_user, list_url, detail_url, reservation, resource_in_unit, username, expected_visibility): resource_in_unit.need_manual_confirmation = True resource_in_unit.reservation_metadata_set = ReservationMetadataSet.objects.get(name='default') resource_in_unit.save() if username: api_client.force_authenticate(user=User.objects.get(username=username)) for url in (list_url, detail_url): response = 
api_client.get(url) assert response.status_code == 200 reservation_data = response.data['results'][0] if 'results' in response.data else response.data for field_name in DEFAULT_RESERVATION_EXTRA_FIELDS: assert (field_name in reservation_data) is expected_visibility @pytest.mark.parametrize('state', [ Reservation.CANCELLED, Reservation.DENIED ]) @pytest.mark.django_db def test_denied_and_cancelled_reservations_not_active(user_api_client, reservation, reservation_data, list_url, resource_in_unit, state): reservation.state = state reservation.save() # test reservation max limit response = user_api_client.post(list_url, data=reservation_data) assert response.status_code == 201 # test overlapping reservation resource_in_unit.max_reservations_per_user = 2 resource_in_unit.save() reservation_data['begin'] = reservation.begin reservation_data['end'] = reservation.end response = user_api_client.post(list_url, data=reservation_data) assert response.status_code == 201 @pytest.mark.django_db def test_cannot_make_reservation_in_the_past(user_api_client, reservation_data, list_url): reservation_data.update( begin='2010-04-04T11:00:00+02:00', end='2010-04-04T12:00:00+02:00' ) response = user_api_client.post(list_url, data=reservation_data, HTTP_ACCEPT_LANGUAGE='en') assert response.status_code == 400 assert_non_field_errors_contain(response, 'past') @pytest.mark.django_db def test_need_manual_confirmation_filter(user_api_client, user, list_url, reservation, other_resource): other_resource.need_manual_confirmation = True other_resource.reservation_metadata_set = ReservationMetadataSet.objects.get(name='default') other_resource.save() reservation_needing_confirmation = Reservation.objects.create( resource=other_resource, begin='2115-04-05T09:00:00+02:00', end='2115-04-05T10:00:00+02:00', user=user, ) # no filter, expect both reservations response = user_api_client.get(list_url) assert response.status_code == 200 reservation_ids = set([res['id'] for res in response.data['results']]) 
assert reservation_ids == {reservation.id, reservation_needing_confirmation.id} # filter false, expect only first reservation response = user_api_client.get('%s%s' % (list_url, '?need_manual_confirmation=false')) assert response.status_code == 200 reservation_ids = set([res['id'] for res in response.data['results']]) assert reservation_ids == {reservation.id} # filter true, expect only second reservation response = user_api_client.get('%s%s' % (list_url, '?need_manual_confirmation=true')) assert response.status_code == 200 reservation_ids = set([res['id'] for res in response.data['results']]) assert reservation_ids == {reservation_needing_confirmation.id} @pytest.mark.parametrize('state_filter, expected_states', [ ('', ['requested', 'confirmed', 'denied', 'cancelled']), ('?state=requested', ['requested']), ('?state=confirmed,requested', ['confirmed', 'requested']), ('?state=confirmed, requested ,', ['confirmed', 'requested']) ]) @pytest.mark.django_db def test_state_filters(user_api_client, user, list_url, reservations_in_all_states, state_filter, expected_states): response = user_api_client.get('%s%s' % (list_url, state_filter)) assert response.status_code == 200 reservation_ids = set([res['id'] for res in response.data['results']]) assert reservation_ids == set(reservations_in_all_states[state].id for state in expected_states) @override_settings(RESPA_MAILS_ENABLED=True) @pytest.mark.django_db def test_reservation_mails(staff_api_client, staff_user, user_api_client, test_unit2, list_url, reservation_data_extra): resource = Resource.objects.get(id=reservation_data_extra['resource']) resource.need_manual_confirmation = True resource.reservation_metadata_set = ReservationMetadataSet.objects.get(name='default') resource.save() assign_perm('can_approve_reservation', staff_user, resource.unit) # create other staff user who should not receive mails because he doesn't have permission to the right unit other_official = get_user_model().objects.create( 
username='other_unit_official', first_name='Ozzy', last_name='Official', email='<EMAIL>', is_staff=True, preferred_language='en' ) assign_perm('can_approve_reservation', other_official, test_unit2) # test REQUESTED reservation_data_extra['state'] = Reservation.REQUESTED response = user_api_client.post(list_url, data=reservation_data_extra, format='json') assert response.status_code == 201 # 2 mails should be sent, one to the customer, and one to the staff user who can approve the reservation # (and no mail for the other staff user) assert len(mail.outbox) == 2 check_received_mail_exists( "You've made a preliminary reservation", reservation_data_extra['reserver_email_address'], 'made a preliminary reservation', clear_outbox=False ) check_received_mail_exists( 'Reservation requested', staff_user.email, 'A new preliminary reservation has been made' ) detail_url = '%s%s/' % (list_url, response.data['id']) # test DENIED reservation_data_extra['state'] = Reservation.DENIED response = staff_api_client.put(detail_url, data=reservation_data_extra, format='json') assert response.status_code == 200 assert len(mail.outbox) == 1 check_received_mail_exists( 'Reservation denied', reservation_data_extra['reserver_email_address'], 'has been denied.' 
) # test CONFIRMED reservation_data_extra['state'] = Reservation.CONFIRMED response = staff_api_client.put(detail_url, data=reservation_data_extra, format='json') assert response.status_code == 200 assert len(mail.outbox) == 1 check_received_mail_exists( 'Reservation confirmed', reservation_data_extra['reserver_email_address'], 'has been confirmed.', clear_outbox=False ) assert 'this resource rocks' in str(mail.outbox[0].message()) mail.outbox = [] # test CANCELLED reservation_data_extra['state'] = Reservation.CANCELLED response = staff_api_client.delete(detail_url, format='json') assert response.status_code == 204 assert len(mail.outbox) == 1 check_received_mail_exists( 'Reservation cancelled', reservation_data_extra['reserver_email_address'], 'has been cancelled.' ) @pytest.mark.django_db def test_can_approve_filter(staff_api_client, staff_user, list_url, reservation): reservation.resource.need_manual_confirmation = True reservation.resource.reservation_metadata_set = ReservationMetadataSet.objects.get(name='default') reservation.resource.save() reservation.state = Reservation.REQUESTED reservation.save() response = staff_api_client.get('%s%s' % (list_url, '?can_approve=true')) assert response.status_code == 200 assert len(response.data['results']) == 0 assign_perm('can_approve_reservation', staff_user, reservation.resource.unit) response = staff_api_client.get('%s%s' % (list_url, '?can_approve=true')) assert response.status_code == 200 assert len(response.data['results']) == 1 @pytest.mark.django_db def test_access_code_cannot_be_set_if_type_none(user_api_client, list_url, resource_in_unit, reservation_data): reservation_data['access_code'] = '023543' response = user_api_client.post(list_url, data=reservation_data, HTTP_ACCEPT_LANGUAGE='en') assert response.status_code == 400 assert 'This field cannot have a value with this resource' in response.data['access_code'] @pytest.mark.django_db def test_invalid_pin6_access_code(user_api_client, list_url, 
resource_in_unit, reservation_data): resource_in_unit.access_code_type = Resource.ACCESS_CODE_TYPE_PIN6 resource_in_unit.save() reservation_data['access_code'] = 'xxx' reservation_data['resource'] = resource_in_unit.id response = user_api_client.post(list_url, data=reservation_data, HTTP_ACCEPT_LANGUAGE='en') assert response.status_code == 400 assert 'Invalid value' in response.data['access_code'] @pytest.mark.django_db def test_pin6_access_code_is_generated_if_not_set(user_api_client, list_url, resource_in_unit, reservation_data): resource_in_unit.access_code_type = Resource.ACCESS_CODE_TYPE_PIN6 resource_in_unit.save() response = user_api_client.post(list_url, data=reservation_data) assert response.status_code == 201 new_reservation = Reservation.objects.get(id=response.data['id']) assert re.match('^[0-9]{6}$', new_reservation.access_code) @pytest.mark.django_db def test_pin6_access_code_can_be_set(user_api_client, list_url, resource_in_unit, reservation_data): resource_in_unit.access_code_type = Resource.ACCESS_CODE_TYPE_PIN6 resource_in_unit.save() reservation_data['access_code'] = '023543' reservation_data['resource'] = resource_in_unit.id response = user_api_client.post(list_url, data=reservation_data) assert response.status_code == 201 new_reservation = Reservation.objects.get(id=response.data['id']) assert new_reservation.access_code == '023543' @pytest.mark.django_db def test_pin6_access_code_cannot_be_modified(user_api_client, resource_in_unit, reservation, reservation_data): resource_in_unit.access_code_type = Resource.ACCESS_CODE_TYPE_PIN6 resource_in_unit.save() reservation.access_code = '123456' reservation.save() reservation_data['access_code'] = '654321' detail_url = reverse('reservation-detail', kwargs={'pk': reservation.pk}) response = user_api_client.put(detail_url, data=reservation_data, HTTP_ACCEPT_LANGUAGE='en') assert response.status_code == 400 assert 'This field cannot be changed' in response.data['access_code'] 
@pytest.mark.parametrize('username, has_perm, expected', [
    ('test_user', False, True),  # own reservation
    ('test_user2', False, False),  # someone else's reservation
    ('test_user2', True, True),  # someone else's reservation but having the permission
    ('test_staff_user', False, True)  # staff
])
@pytest.mark.django_db
def test_access_code_visibility(user, user2, staff_user, api_client, resource_in_unit, reservation, username,
                                has_perm, expected):
    resource_in_unit.access_code_type = Resource.ACCESS_CODE_TYPE_PIN6
    resource_in_unit.save()
    reservation.access_code = '123456'
    reservation.save()
    detail_url = reverse('reservation-detail', kwargs={'pk': reservation.pk})
    current_user = User.objects.get(username=username)
    if has_perm:
        assign_perm('can_view_reservation_access_code', current_user, resource_in_unit.unit)
    api_client.force_authenticate(current_user)

    response = api_client.get(detail_url)
    assert response.status_code == 200
    if expected:
        assert response.data['access_code'] == '123456'
    else:
        assert 'access_code' not in response.data


@override_settings(RESPA_MAILS_ENABLED=True)
@pytest.mark.django_db
def test_reservation_created_with_access_code_mail(user_api_client, user, resource_in_unit, list_url,
                                                   reservation_data):
    # The mail should not be sent if access code type is none
    response = user_api_client.post(list_url, data=reservation_data)
    assert response.status_code == 201
    assert len(mail.outbox) == 0

    reservation_data['access_code'] = '007007'
    resource_in_unit.access_code_type = Resource.ACCESS_CODE_TYPE_PIN6
    resource_in_unit.save()
    Reservation.objects.get(id=response.data['id']).delete()

    response = user_api_client.post(list_url, data=reservation_data)
    assert response.status_code == 201
    check_received_mail_exists(
        'Reservation created',
        user.email,
        'Your access code for the resource: 007007',
    )

    # Verify that modifying the reservation doesn't trigger the mail
    reservation_data['end'] = '2115-04-04T12:00:00+02:00'
    detail_url = reverse('reservation-detail', kwargs={'pk': response.data['id']})
    response = user_api_client.put(detail_url, data=reservation_data)
    assert response.status_code == 200
    assert len(mail.outbox) == 0


@freeze_time('2016-10-25')
@pytest.mark.django_db
def test_reservation_reservable_before(user_api_client, resource_in_unit, list_url, reservation_data):
    resource_in_unit.reservable_days_in_advance = 10
    resource_in_unit.save()

    # one day past the reservable window -> rejected
    reservation_data['begin'] = timezone.now().replace(hour=12, minute=0, second=0) + datetime.timedelta(days=11)
    reservation_data['end'] = timezone.now().replace(hour=13, minute=0, second=0) + datetime.timedelta(days=11)
    response = user_api_client.post(list_url, data=reservation_data)
    assert response.status_code == 400
    assert_non_field_errors_contain(response, 'The resource is reservable only before')

    # inside the reservable window -> accepted
    reservation_data['begin'] = timezone.now().replace(hour=12, minute=0, second=0) + datetime.timedelta(days=9)
    reservation_data['end'] = timezone.now().replace(hour=13, minute=0, second=0) + datetime.timedelta(days=9)
    response = user_api_client.post(list_url, data=reservation_data)
    assert response.status_code == 201


@pytest.mark.django_db
def test_reservation_metadata_set(user_api_client, reservation, list_url, reservation_data):
    detail_url = reverse('reservation-detail', kwargs={'pk': reservation.pk})
    field_1 = ReservationMetadataField.objects.get(field_name='reserver_name')
    field_2 = ReservationMetadataField.objects.get(field_name='reserver_phone_number')
    metadata_set = ReservationMetadataSet.objects.create(
        name='test_set',
    )
    metadata_set.supported_fields = [field_1, field_2]
    metadata_set.required_fields = [field_1]

    reservation.resource.reservation_metadata_set = metadata_set
    reservation.resource.save(update_fields=('reservation_metadata_set',))
    reservation_data['resource'] = reservation.resource.pk

    # required field missing -> validation error
    response = user_api_client.put(detail_url, data=reservation_data, HTTP_ACCEPT_LANGUAGE='en')
    assert response.status_code == 400
    assert 'This field is required.' in response.data['reserver_name']

    reservation_data['reserver_name'] = 'Mr. Reserver'
    reservation_data['reserver_phone_number'] = '0700-555555'
    reservation_data['reserver_address_street'] = 'ignored street 7'

    response = user_api_client.put(detail_url, data=reservation_data)
    assert response.status_code == 200

    # supported fields are saved, unsupported ones are ignored
    reservation.refresh_from_db()
    assert reservation.reserver_name == 'Mr. Reserver'
    assert reservation.reserver_phone_number == '0700-555555'
    assert reservation.reserver_address_street != 'ignored street 7'


@pytest.mark.django_db
def test_detail_endpoint_does_not_need_all_true_filter(user_api_client, user, resource_in_unit):
    reservation_in_the_past = Reservation.objects.create(
        resource=resource_in_unit,
        begin='2005-04-04T09:00:00+02:00',
        end='2005-04-04T10:00:00+02:00',
        user=user,
    )
    detail_url = reverse('reservation-detail', kwargs={'pk': reservation_in_the_past.pk})
    response = user_api_client.get(detail_url)
    assert response.status_code == 200
import pytest
import datetime
import re

from django.contrib.auth import get_user_model
from django.core.urlresolvers import reverse
from django.core import mail
from django.test.utils import override_settings
from django.utils import dateparse, timezone
from guardian.shortcuts import assign_perm
from freezegun import freeze_time

from resources.models import (Period, Day, Reservation, Resource,
                              ReservationMetadataField, ReservationMetadataSet)
from users.models import User

from .utils import check_disallowed_methods, assert_non_field_errors_contain, check_received_mail_exists

# Extra reservation fields exposed by the API for resources that need manual confirmation.
DEFAULT_RESERVATION_EXTRA_FIELDS = ('reserver_name', 'reserver_phone_number', 'reserver_address_street',
                                    'reserver_address_zip', 'reserver_address_city', 'billing_address_street',
                                    'billing_address_zip', 'billing_address_city', 'company', 'event_description',
                                    'reserver_id', 'number_of_participants', 'reserver_email_address')

# Subset of the extra fields that the default metadata set marks as required.
DEFAULT_REQUIRED_RESERVATION_EXTRA_FIELDS = ('reserver_name', 'reserver_phone_number', 'reserver_address_street',
                                             'reserver_address_zip', 'reserver_address_city', 'event_description',
                                             'reserver_id', 'reserver_email_address')


@pytest.fixture
def list_url():
    """URL of the reservation list endpoint."""
    return reverse('reservation-list')


@pytest.fixture
def detail_url(reservation):
    """URL of the detail endpoint of the `reservation` fixture."""
    return reverse('reservation-detail', kwargs={'pk': reservation.pk})


@pytest.mark.django_db
@pytest.fixture(autouse=True)
def day_and_period(resource_in_unit):
    """Give the test resource opening hours (Thursdays 08-16) for a very long period."""
    period = Period.objects.create(
        start='2005-04-01',
        end='2115-05-01',
        resource_id=resource_in_unit.id,
        name='test_period'
    )
    Day.objects.create(period=period, weekday=3, opens='08:00', closes='16:00')


@pytest.mark.django_db
@pytest.fixture
def reservation_data(resource_in_unit):
    """Minimal valid POST payload for a new reservation."""
    return {
        'resource': resource_in_unit.pk,
        'begin': '2115-04-04T11:00:00+02:00',
        'end': '2115-04-04T12:00:00+02:00'
    }


@pytest.fixture
def reservation_data_extra(reservation_data):
    """Reservation payload including all extra fields used by manually confirmed resources."""
    extra_data = reservation_data.copy()
    extra_data.update({
        'reserver_name': 'Test Reserver',
        'reserver_phone_number': '0700555555',
        'reserver_address_street': 'Omenatie 102',
        'reserver_address_zip': '00930',
        'reserver_address_city': 'Helsinki',
        'event_description': 'a very secret meeting',
        'reserver_id': '1234567-8',
        'number_of_participants': 5000,
        'billing_address_street': 'Pihlajakatu',
        'billing_address_zip': '00001',
        'billing_address_city': 'Tampere',
        'company': 'a very secret association',
        'reserver_email_address': '<EMAIL>',
    })
    return extra_data


@pytest.mark.django_db
@pytest.fixture
def reservation(resource_in_unit, user):
    """A confirmed future reservation owned by the `user` fixture."""
    return Reservation.objects.create(
        resource=resource_in_unit,
        begin='2115-04-04T09:00:00+02:00',
        end='2115-04-04T10:00:00+02:00',
        user=user,
    )


@pytest.mark.django_db
@pytest.fixture
def other_resource(space_resource_type, test_unit):
    """A second resource in the same unit, used for cross-resource filtering tests."""
    return Resource.objects.create(
        type=space_resource_type,
        authentication="none",
        name="other resource",
        unit=test_unit,
        id="otherresourceid",
    )


@pytest.fixture
def reservations_in_all_states(resource_in_unit, user):
    """One reservation per state, keyed by state, on consecutive days starting 2115-04-04."""
    all_states = (Reservation.CANCELLED, Reservation.CONFIRMED, Reservation.DENIED, Reservation.REQUESTED)
    reservations = dict()
    for i, state in enumerate(all_states, 4):
        reservations[state] = Reservation.objects.create(
            resource=resource_in_unit,
            begin='2115-04-0%sT09:00:00+02:00' % i,
            end='2115-04-0%sT10:00:00+02:00' % i,
            user=user,
            state=state
        )
    return reservations


@pytest.mark.django_db
def test_disallowed_methods(all_user_types_api_client, list_url):
    """
    Tests that PUT, PATCH and DELETE aren't allowed to reservation list endpoint.
    """
    check_disallowed_methods(all_user_types_api_client, (list_url, ), ('put', 'patch', 'delete'))


@pytest.mark.django_db
def test_reservation_requires_authenticated_user(api_client, list_url, reservation_data):
    """
    Tests that an unauthenticated user cannot create a reservation.
""" response = api_client.post(list_url, data=reservation_data) assert response.status_code == 401 @pytest.mark.django_db def test_authenticated_user_can_make_reservation(api_client, list_url, reservation_data, resource_in_unit, user): """ Tests that an authenticated user can create a reservation. """ api_client.force_authenticate(user=user) response = api_client.post(list_url, data=reservation_data) assert response.status_code == 201 reservation = Reservation.objects.filter(user=user).latest('created_at') assert reservation.resource == resource_in_unit assert reservation.begin == dateparse.parse_datetime('2115-04-04T11:00:00+02:00') assert reservation.end == dateparse.parse_datetime('2115-04-04T12:00:00+02:00') @pytest.mark.django_db def test_authenticated_user_can_modify_reservation( api_client, detail_url, reservation_data, resource_in_unit, user): """ Tests that an authenticated user can modify her own reservation """ api_client.force_authenticate(user=user) response = api_client.put(detail_url, data=reservation_data) assert response.status_code == 200 reservation = Reservation.objects.get(pk=response.data['id']) assert reservation.resource == resource_in_unit assert reservation.begin == dateparse.parse_datetime('2115-04-04T11:00:00+02:00') assert reservation.end == dateparse.parse_datetime('2115-04-04T12:00:00+02:00') @pytest.mark.django_db def test_authenticated_user_can_delete_reservation(api_client, detail_url, reservation, user): """ Tests that an authenticated user can delete her own reservation """ api_client.force_authenticate(user=user) reservation_id = reservation.id response = api_client.delete(detail_url) assert response.status_code == 204 assert Reservation.objects.filter(pk=reservation_id).count() == 1 reservation.refresh_from_db() assert reservation.state == Reservation.CANCELLED @pytest.mark.django_db def test_reservation_limit_per_user(api_client, list_url, reservation, reservation_data, user): """ Tests that a user cannot exceed her active 
    reservation limit for one resource.
    """
    api_client.force_authenticate(user=user)

    # the user already has one reservation, making another reservation should not be possible as the active reservation
    # limit is one
    response = api_client.post(list_url, data=reservation_data, HTTP_ACCEPT_LANGUAGE='en')
    assert response.status_code == 400
    assert_non_field_errors_contain(response, 'Maximum number of active reservations for this resource exceeded.')


@pytest.mark.django_db
def test_old_reservations_are_excluded(api_client, list_url, resource_in_unit, reservation_data, user):
    """
    Tests that a reservation in the past doesn't count when checking reservation limit.
    """

    # the user already has this reservation which is in the past.
    Reservation.objects.create(
        resource=resource_in_unit,
        begin=dateparse.parse_datetime('2005-04-07T09:00:00+02:00'),
        end=dateparse.parse_datetime('2005-04-07T10:00:00+02:00'),
        user=user,
    )
    api_client.force_authenticate(user=user)

    # making another reservation should be possible because the other reservation is in the past.
    response = api_client.post(list_url, data=reservation_data, HTTP_ACCEPT_LANGUAGE='en')
    assert response.status_code == 201


@pytest.mark.django_db
def test_staff_has_no_reservation_limit(api_client, list_url, reservation, reservation_data, user):
    """
    Tests that the reservation limits for a resource do not apply to staff.
    """
    user.is_staff = True
    user.save()
    api_client.force_authenticate(user=user)

    # the staff member already has one reservation, and should be able to make a second one regardless of the fact that
    # that the limit is one.
    response = api_client.post(list_url, data=reservation_data, HTTP_ACCEPT_LANGUAGE='en')
    assert response.status_code == 201


@pytest.mark.django_db
def test_normal_user_cannot_make_reservation_outside_open_hours(api_client, list_url, reservation_data, user):
    """
    Tests that a normal user cannot make reservations outside open hours.
""" api_client.force_authenticate(user=user) # invalid day reservation_data['begin'] = '2115-06-01T09:00:00+02:00' reservation_data['end'] = '2115-06-01T10:00:00+02:00' response = api_client.post(list_url, data=reservation_data, HTTP_ACCEPT_LANGUAGE='en') assert response.status_code == 400 assert_non_field_errors_contain(response, 'You must start and end the reservation during opening hours') # valid begin time, end time after closing time reservation_data['begin'] = '2115-04-04T10:00:00+02:00' reservation_data['end'] = '2115-04-04T21:00:00+02:00' response = api_client.post(list_url, data=reservation_data, HTTP_ACCEPT_LANGUAGE='en') assert response.status_code == 400 assert_non_field_errors_contain(response, 'You must start and end the reservation during opening hours') # begin time before opens, valid end time reservation_data['begin'] = '2115-04-04T05:00:00+02:00' reservation_data['end'] = '2115-04-04T10:00:00+02:00' response = api_client.post(list_url, data=reservation_data, HTTP_ACCEPT_LANGUAGE='en') assert response.status_code == 400 assert_non_field_errors_contain(response, 'You must start and end the reservation during opening hours') @pytest.mark.django_db def test_normal_user_cannot_make_reservation_longer_than_max_period(api_client, list_url, reservation_data, user): """ Tests that a normal user cannot make reservations longer than the resource's max period. """ api_client.force_authenticate(user=user) # the reservation's length is 3h (11 -> 14) while the maximum is 2h reservation_data['end'] = '2115-04-04T14:00:00+02:00' response = api_client.post(list_url, data=reservation_data, HTTP_ACCEPT_LANGUAGE='en') assert response.status_code == 400 assert_non_field_errors_contain(response, 'The maximum reservation length is') @pytest.mark.django_db def test_staff_user_can_make_reservation_outside_open_hours(api_client, list_url, reservation_data, user): """ Tests that a staff member can make reservations outside opening hours. 
    Also tests that the resource's max period doesn't limit staff.
    """
    user.is_staff = True
    user.save()
    api_client.force_authenticate(user=user)

    # begin time before opening time, end time after closing time, longer than max period 2h
    reservation_data['begin'] = '2115-04-04T05:00:00+02:00'
    reservation_data['end'] = '2115-04-04T21:00:00+02:00'
    response = api_client.post(list_url, data=reservation_data, HTTP_ACCEPT_LANGUAGE='en')
    assert response.status_code == 201


@pytest.mark.django_db
def test_comments_are_only_for_staff(api_client, list_url, reservation_data, user):
    """
    Tests that only staff may set and see a reservation's comments field.
    """
    api_client.force_authenticate(user=user)
    reservation_data['comments'] = 'test comment'

    # normal user may not set comments
    response = api_client.post(list_url, data=reservation_data)
    assert response.status_code == 400

    user.is_staff = True
    user.save()
    response = api_client.post(list_url, data=reservation_data)
    assert response.status_code == 201

    response = api_client.get(response.data['url'])
    assert response.data['comments'] == 'test comment'

    # after dropping staff status the field disappears from the response
    user.is_staff = False
    user.save()
    response = api_client.get(response.data['url'])
    assert 'comments' not in response.data


@pytest.mark.django_db
def test_user_data_correct_and_only_for_staff(api_client, reservation, user):
    """
    Tests that user object is returned within Reservation data and it is in the correct form.
    Also tests that only staff can see the user object.
""" api_client.force_authenticate(user=user) detail_url = reverse('reservation-detail', kwargs={'pk': reservation.pk}) response = api_client.get(detail_url) assert 'user' not in response.data user.is_staff = True user.save() response = api_client.get(detail_url) user_obj = response.data['user'] assert len(user_obj) == 3 assert user_obj['display_name'] == '<NAME>' assert user_obj['email'] == '<EMAIL>' assert user_obj['id'] is not None @pytest.mark.django_db def test_reservation_can_be_modified_by_overlapping_reservation(api_client, reservation, reservation_data, user): """ Tests that a reservation can be modified with times that overlap with the original times. """ api_client.force_authenticate(user=user) detail_url = reverse('reservation-detail', kwargs={'pk': reservation.pk}) # try to extend the original reservation by 1 hour reservation_data['begin'] = '2115-04-04T09:00:00+02:00' reservation_data['end'] = '2115-04-04T11:00:00+02:00' response = api_client.put(detail_url, reservation_data) assert response.status_code == 200 reservation = Reservation.objects.get(pk=reservation.pk) assert reservation.begin == dateparse.parse_datetime('2115-04-04T09:00:00+02:00') assert reservation.end == dateparse.parse_datetime('2115-04-04T11:00:00+02:00') @pytest.mark.django_db def test_non_reservable_resource_restrictions(api_client, list_url, resource_in_unit, reservation_data, user): """ Tests that a normal user cannot make a reservation to a non reservable resource but staff can. Creating a new reservation with POST and updating an existing one with PUT are both tested. 
""" resource_in_unit.reservable = False resource_in_unit.save() api_client.force_authenticate(user=user) response = api_client.post(list_url, data=reservation_data) assert response.status_code == 403 # Create a reservation and try to change that with PUT reservation = Reservation.objects.create( resource=resource_in_unit, begin=dateparse.parse_datetime('2115-04-07T09:00:00+02:00'), end=dateparse.parse_datetime('2115-04-07T10:00:00+02:00'), user=user, ) detail_url = reverse('reservation-detail', kwargs={'pk': reservation.pk}) response = api_client.put(detail_url, reservation_data) assert response.status_code == 403 # a staff member should be allowed to create and update user.is_staff = True user.save() response = api_client.post(list_url, data=reservation_data) assert response.status_code == 201 reservation_data['begin'] = dateparse.parse_datetime('2115-04-08T09:00:00+02:00') reservation_data['end'] = dateparse.parse_datetime('2115-04-08T10:00:00+02:00') response = api_client.put(detail_url, data=reservation_data) assert response.status_code == 200 @pytest.mark.django_db def test_reservation_restrictions_by_owner(api_client, list_url, reservation, reservation_data, user, user2): """ Tests that a normal user can't modify other people's reservations while a staff member can. 
""" detail_url = reverse('reservation-detail', kwargs={'pk': reservation.pk}) api_client.force_authenticate(user=user2) response = api_client.put(detail_url, reservation_data) assert response.status_code == 403 response = api_client.delete(detail_url, reservation_data) assert response.status_code == 403 # a staff member should be allowed to perform every modifying method even that she is not the user in # the reservation user2.is_staff = True user2.save() response = api_client.put(detail_url, reservation_data) assert response.status_code == 200 response = api_client.delete(detail_url, reservation_data) assert response.status_code == 204 @pytest.mark.django_db def test_normal_users_cannot_make_reservations_for_others( api_client, list_url, reservation, reservation_data, user, user2): """ Tests that a normal user cannot make a reservation for other people. """ api_client.force_authenticate(user=user) detail_url = reverse('reservation-detail', kwargs={'pk': reservation.pk}) # set bigger max reservations limit so that it won't be a limiting factor here reservation.resource.max_reservations_per_user = 2 reservation.resource.save() # set another user for new reservations reservation_data['user'] = {'id': user2.uuid} # modify an existing reservation, and verify that user isn't changed response = api_client.put(detail_url, data=reservation_data, format='json') assert response.status_code == 200 new_reservation = Reservation.objects.get(id=response.data['id']) assert new_reservation.user == user # make a new reservation and verify that user isn't the other one reservation_data['begin'] = dateparse.parse_datetime('2115-04-04T13:00:00+02:00') reservation_data['end'] = dateparse.parse_datetime('2115-04-04T14:00:00+02:00') response = api_client.post(list_url, data=reservation_data, format='json') assert response.status_code == 201 new_reservation = Reservation.objects.get(id=response.data['id']) assert new_reservation.user == user @pytest.mark.django_db def 
test_reservation_staff_members_can_make_reservations_for_others( api_client, list_url, reservation, reservation_data, user, user2): """ Tests that a staff member can make reservations for other people without normal user restrictions. """ user.is_staff = True user.save() api_client.force_authenticate(user=user) # dealing with another user's reservation reservation.user = user2 reservation.save() reservation_data['user'] = {'id': user2.uuid} # modify an existing reservation detail_url = reverse('reservation-detail', kwargs={'pk': reservation.pk}) response = api_client.put(detail_url, data=reservation_data, format='json') assert response.status_code == 200 new_reservation = Reservation.objects.get(id=response.data['id']) assert new_reservation.user == user2 # create a new reservation, which is also too long, outside the opening hours and exceeds normal user # reservation limit. creating such a reservation for a normal user should be possible for a staff member reservation_data['begin'] = dateparse.parse_datetime('2115-04-04T13:00:00+02:00') reservation_data['end'] = dateparse.parse_datetime('2115-04-04T20:00:00+02:00') response = api_client.post(list_url, data=reservation_data, format='json') assert response.status_code == 201 new_reservation = Reservation.objects.get(id=response.data['id']) assert new_reservation.user == user2 @pytest.mark.django_db def test_reservation_user_filter(api_client, list_url, reservation, resource_in_unit, user, user2): """ Tests that reservation user and is_own filtering work correctly. 
""" reservation2 = Reservation.objects.create( resource=resource_in_unit, begin=dateparse.parse_datetime('2115-04-07T11:00:00+02:00'), end=dateparse.parse_datetime('2115-04-07T12:00:00+02:00'), user=user2, ) # even unauthenticated user should see all the reservations response = api_client.get(list_url) assert response.data['count'] == 2 # filtering by user response = api_client.get(list_url + '?user=%s' % user.uuid) assert response.data['count'] == 1 assert response.data['results'][0]['id'] == reservation.id # filtering by is_own api_client.force_authenticate(user=user) response = api_client.get(list_url + '?is_own=true') assert response.data['count'] == 1 assert response.data['results'][0]['id'] == reservation.id response = api_client.get(list_url + '?is_own=false') assert response.data['count'] == 1 assert response.data['results'][0]['id'] == reservation2.id @pytest.mark.django_db def test_reservation_time_filters(api_client, list_url, reservation, resource_in_unit, user): reservation2 = Reservation.objects.create( resource=resource_in_unit, begin=dateparse.parse_datetime('2015-04-07T11:00:00+02:00'), end=dateparse.parse_datetime('2015-04-07T12:00:00+02:00'), user=user, ) # without the filter, only the reservation in the future should be returned response = api_client.get(list_url) assert response.data['count'] == 1 assert response.data['results'][0]['id'] == reservation.id # with the 'all' filter, both reservations should be returned response = api_client.get(list_url + '?all=true') assert response.data['count'] == 2 assert {reservation.id, reservation2.id}.issubset(set(res['id'] for res in response.data['results'])) # with start or end, both reservations should be returned # filtering by start date only response = api_client.get(list_url + '?start=2065-04-06') assert response.data['count'] == 1 assert response.data['results'][0]['id'] == reservation.id # filtering by end date only response = api_client.get(list_url + '?end=2065-04-06') assert 
response.data['count'] == 1 assert response.data['results'][0]['id'] == reservation2.id # filtering by start and end times response = api_client.get(list_url + '?start=2065-04-06T11:00:00%2b02:00' + '&end=2065-04-06T12:00:00%2b02:00') assert response.data['count'] == 0 response = api_client.get(list_url + '?start=2005-04-07T11:30:00%2b02:00' + '&end=2115-04-04T09:30:00%2b02:00') assert response.data['count'] == 2 assert {reservation.id, reservation2.id}.issubset(set(res['id'] for res in response.data['results'])) @pytest.mark.parametrize("input_hours,input_mins,expected", [ (2, 30, '2 hours 30 minutes'), (1, 30, '1 hour 30 minutes'), (1, 0, '1 hour'), (0, 30, '30 minutes'), (0, 1, '1 minute'), ]) @pytest.mark.django_db def test_max_reservation_period_error_message( api_client, list_url, resource_in_unit, reservation_data, user, input_hours, input_mins, expected): """ Tests that maximum reservation period error is returned in correct humanized form. """ reservation_data['end'] = '2115-04-04T16:00:00+02:00' # too long reservation resource_in_unit.max_period=datetime.timedelta(hours=input_hours, minutes=input_mins) resource_in_unit.save() api_client.force_authenticate(user=user) response = api_client.post(list_url, data=reservation_data, HTTP_ACCEPT_LANGUAGE='en') assert response.status_code == 400 assert response.data['non_field_errors'][0] == 'The maximum reservation length is %s' % expected @pytest.mark.django_db def test_reservation_excels(staff_api_client, list_url, detail_url, reservation, user): """ Tests that reservation list and detail endpoints return .xlsx files when requested """ response = staff_api_client.get( list_url, HTTP_ACCEPT='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', HTTP_ACCEPT_LANGUAGE='en', ) assert response.status_code == 200 assert response._headers['content-disposition'] == ('Content-Disposition', 'attachment; filename=reservations.xlsx') assert len(response.content) > 0 response = staff_api_client.get( detail_url, 
        HTTP_ACCEPT='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
        HTTP_ACCEPT_LANGUAGE='en',
    )
    assert response.status_code == 200
    assert response._headers['content-disposition'] == (
        'Content-Disposition', 'attachment; filename=reservation-{}.xlsx'.format(reservation.pk))
    assert len(response.content) > 0


@pytest.mark.parametrize('need_manual_confirmation, expected_state', [
    (False, Reservation.CONFIRMED),
    (True, Reservation.REQUESTED)
])
@pytest.mark.django_db
def test_state_on_new_reservations(user_api_client, list_url, reservation_data_extra, resource_in_unit,
                                   need_manual_confirmation, expected_state):
    """
    Tests that a new reservation starts CONFIRMED normally, REQUESTED when the
    resource needs manual confirmation.
    """
    resource_in_unit.need_manual_confirmation = need_manual_confirmation
    if need_manual_confirmation:
        resource_in_unit.reservation_metadata_set = ReservationMetadataSet.objects.get(name='default')
    resource_in_unit.save()

    response = user_api_client.post(list_url, data=reservation_data_extra)
    assert response.status_code == 201
    reservation = Reservation.objects.latest('created_at')
    assert reservation.state == expected_state


@pytest.mark.parametrize('state', [
    'illegal_state',
    '',
    None,
])
@pytest.mark.django_db
def test_illegal_state_set(user_api_client, list_url, detail_url, reservation_data, state):
    """
    Tests that invalid state values are rejected on both create and update.
    """
    reservation_data['state'] = state

    response = user_api_client.post(list_url, data=reservation_data, format='json')
    assert response.status_code == 400
    assert 'state' in response.data

    response = user_api_client.put(detail_url, data=reservation_data, format='json')
    assert response.status_code == 400
    assert 'state' in response.data


@pytest.mark.parametrize('need_manual_confirmation', [
    False,
    True
])
@pytest.mark.django_db
def test_extra_fields_visibility(user_api_client, list_url, detail_url, reservation, resource_in_unit,
                                 need_manual_confirmation):
    """
    Tests that the extra fields are present exactly when the resource needs
    manual confirmation.
    """
    resource_in_unit.need_manual_confirmation = need_manual_confirmation
    if need_manual_confirmation:
        resource_in_unit.reservation_metadata_set = ReservationMetadataSet.objects.get(name='default')
    resource_in_unit.save()
    for url in (list_url, detail_url):
        response = user_api_client.get(url)
        assert response.status_code == 200
        reservation_data = response.data['results'][0] if 'results' in response.data else response.data
        for field_name in DEFAULT_RESERVATION_EXTRA_FIELDS:
            assert (field_name in reservation_data) is need_manual_confirmation


@pytest.mark.django_db
def test_extra_fields_required_for_paid_reservations(user_api_client, staff_api_client, staff_user, list_url,
                                                     resource_in_unit, reservation_data):
    """
    Tests that every required extra field is enforced for manually confirmed
    resources, for normal users and staff alike.
    """
    resource_in_unit.need_manual_confirmation = True
    resource_in_unit.reservation_metadata_set = ReservationMetadataSet.objects.get(name='default')
    resource_in_unit.save()

    response = user_api_client.post(list_url, data=reservation_data)
    assert response.status_code == 400
    assert set(DEFAULT_REQUIRED_RESERVATION_EXTRA_FIELDS) == set(response.data)

    response = staff_api_client.post(list_url, data=reservation_data)
    assert response.status_code == 400
    assert set(DEFAULT_REQUIRED_RESERVATION_EXTRA_FIELDS) == set(response.data)

    # approve permission does not remove the requirement either
    assign_perm('can_approve_reservation', staff_user, resource_in_unit.unit)
    response = staff_api_client.post(list_url, data=reservation_data)
    assert response.status_code == 400
    assert set(DEFAULT_REQUIRED_RESERVATION_EXTRA_FIELDS) == set(response.data)


@pytest.mark.django_db
def test_staff_event_restrictions(user_api_client, staff_api_client, staff_user, list_url,
                                  resource_in_unit, reservation_data):
    """
    Tests that staff_event reservations relax the required fields only for
    staff who can approve reservations on the unit.
    """
    resource_in_unit.need_manual_confirmation = True
    resource_in_unit.reservation_metadata_set = ReservationMetadataSet.objects.get(name='default')
    resource_in_unit.save()
    reservation_data['staff_event'] = True

    # normal user
    response = user_api_client.post(list_url, data=reservation_data)
    assert response.status_code == 400
    assert set(DEFAULT_REQUIRED_RESERVATION_EXTRA_FIELDS) == set(response.data)

    # staff member
    response = staff_api_client.post(list_url, data=reservation_data)
    assert response.status_code == 400
    assert set(DEFAULT_REQUIRED_RESERVATION_EXTRA_FIELDS) == set(response.data)

    # staff with permission but reserver_name and event_description missing
    assign_perm('can_approve_reservation', staff_user, resource_in_unit.unit)
    response = staff_api_client.post(list_url, data=reservation_data)
    assert response.status_code == 400
    assert {'reserver_name', 'event_description'} == set(response.data)


@pytest.mark.django_db
def test_new_staff_event_gets_confirmed(user_api_client, staff_api_client, staff_user, list_url,
                                        resource_in_unit, reservation_data, reservation_data_extra):
    """
    Tests that a staff_event reservation is auto-confirmed only when the
    creating staff member has the approve permission.
    """
    resource_in_unit.need_manual_confirmation = True
    resource_in_unit.reservation_metadata_set = ReservationMetadataSet.objects.get(name='default')
    resource_in_unit.save()
    reservation_data['staff_event'] = True

    # reservation should not be be confirmed if the user doesn't have approve permission
    response = staff_api_client.post(list_url, data=reservation_data_extra)
    assert response.status_code == 201
    reservation = Reservation.objects.get(id=response.data['id'])
    assert reservation.state == Reservation.REQUESTED
    reservation.delete()

    assign_perm('can_approve_reservation', staff_user, resource_in_unit.unit)
    reservation_data['reserver_name'] = '<NAME>'
    reservation_data['event_description'] = 'herra huun bileet'
    response = staff_api_client.post(list_url, data=reservation_data)
    assert response.status_code == 201
    reservation = Reservation.objects.get(id=response.data['id'])
    assert reservation.state == Reservation.CONFIRMED


@pytest.mark.django_db
def test_extra_fields_can_be_set_for_paid_reservations(user_api_client, list_url, reservation_data_extra,
                                                       resource_in_unit):
    """
    Tests that extra fields are stored on create and update for manually
    confirmed resources.
    """
    resource_in_unit.max_reservations_per_user = 2
    resource_in_unit.need_manual_confirmation = True
    resource_in_unit.reservation_metadata_set = ReservationMetadataSet.objects.get(name='default')
    resource_in_unit.save()

    response = user_api_client.post(list_url, data=reservation_data_extra)
    assert response.status_code == 201
    reservation = Reservation.objects.latest('created_at')
    assert reservation.reserver_address_street == 'Omenatie 102'

    reservation_data_extra['reserver_address_street'] = 'Karhutie 8'
    response = user_api_client.put('%s%s/' % (list_url, reservation.pk), data=reservation_data_extra)
    assert response.status_code == 200
    reservation.refresh_from_db()
    assert reservation.reserver_address_street == 'Karhutie 8'


@pytest.mark.django_db
def test_extra_fields_ignored_for_non_paid_reservations(user_api_client, list_url, reservation_data_extra,
                                                        resource_in_unit):
    """
    Tests that extra fields in the payload are dropped when the resource does
    not need manual confirmation.
    """
    response = user_api_client.post(list_url, data=reservation_data_extra)
    assert response.status_code == 201
    reservation = Reservation.objects.latest('created_at')
    assert reservation.reserver_name == ''
    assert reservation.number_of_participants is None


@pytest.mark.django_db
def test_user_can_see_her_reservations_in_all_states(user_api_client, list_url, reservations_in_all_states):
    """The owner sees her reservations regardless of state."""
    response = user_api_client.get(list_url)
    assert response.status_code == 200
    assert response.data['count'] == 4


@pytest.mark.django_db
def test_user_cannot_see_others_denied_or_cancelled_reservations(api_client, user2, list_url,
                                                                 reservations_in_all_states):
    """Other users see only CONFIRMED and REQUESTED reservations."""
    api_client.force_authenticate(user=user2)
    response = api_client.get(list_url)
    assert response.status_code == 200
    assert response.data['count'] == 2
    assert set([Reservation.CONFIRMED, Reservation.REQUESTED]) == set(r['state'] for r in response.data['results'])


@pytest.mark.django_db
def test_staff_can_see_reservations_in_all_states(staff_api_client, list_url, reservations_in_all_states):
    """Staff members see every reservation in every state."""
    response = staff_api_client.get(list_url)
    assert response.status_code == 200
    assert response.data['count'] == 4


@pytest.mark.django_db
def test_reservation_cannot_be_confirmed_without_permission(user_api_client, staff_api_client, detail_url,
                                                            reservation, reservation_data):
    """
    Tests that confirming a REQUESTED reservation requires the approve
    permission — plain staff status is not enough.
    """
    reservation.state = Reservation.REQUESTED
    reservation.save()
    reservation_data['state'] = Reservation.CONFIRMED

    response = user_api_client.put(detail_url, data=reservation_data)
    assert response.status_code == 400
    assert 'state' in response.data

    response = staff_api_client.put(detail_url, data=reservation_data)
    assert response.status_code == 400
    assert 'state' in response.data


@pytest.mark.django_db
def test_reservation_can_be_confirmed_with_permission(staff_api_client, staff_user, detail_url,
                                                      reservation, reservation_data):
    """
    Tests that a staff member with the approve permission can confirm a
    reservation and is recorded as its approver.
    """
    reservation.state = Reservation.REQUESTED
    reservation.save()
    reservation_data['state'] = Reservation.CONFIRMED
    assign_perm('can_approve_reservation', staff_user, reservation.resource.unit)

    response = staff_api_client.put(detail_url, data=reservation_data)
    assert response.status_code == 200
    reservation.refresh_from_db()
    assert reservation.state == Reservation.CONFIRMED
    assert reservation.approver == staff_user


@pytest.mark.django_db
def test_user_cannot_modify_or_cancel_manually_confirmed_reservation(user_api_client, detail_url, reservation,
                                                                     reservation_data_extra, resource_in_unit):
    """
    Tests that normal users cannot change or cancel reservations on manually
    confirmed resources.
    """
    resource_in_unit.need_manual_confirmation = True
    resource_in_unit.reservation_metadata_set = ReservationMetadataSet.objects.get(name='default')
    resource_in_unit.save()

    response = user_api_client.put(detail_url, data=reservation_data_extra)
    assert response.status_code == 403

    response = user_api_client.delete(detail_url)
    assert response.status_code == 403


@pytest.mark.parametrize('username, expected_visibility', [
    (None, False),  # unauthenticated user
    ('test_user', True),  # own reservation
    ('test_user2', False),  # someone else's reservation
    ('test_staff_user', True)  # staff
])
@pytest.mark.django_db
def test_extra_fields_visibility_for_different_user_types(api_client, user, user2, staff_user, list_url, detail_url,
                                                          reservation, resource_in_unit, username,
                                                          expected_visibility):
    """
    Tests that the extra fields are visible only to the reservation's owner
    and to staff.
    """
    resource_in_unit.need_manual_confirmation = True
    resource_in_unit.reservation_metadata_set = ReservationMetadataSet.objects.get(name='default')
    resource_in_unit.save()
    if username:
        api_client.force_authenticate(user=User.objects.get(username=username))

    for url in (list_url, detail_url):
        response = api_client.get(url)
        assert response.status_code == 200
        reservation_data = response.data['results'][0] if 'results' in response.data else response.data
        for field_name in DEFAULT_RESERVATION_EXTRA_FIELDS:
            assert (field_name in reservation_data) is expected_visibility


@pytest.mark.parametrize('state', [
    Reservation.CANCELLED,
    Reservation.DENIED
])
@pytest.mark.django_db
def test_denied_and_cancelled_reservations_not_active(user_api_client, reservation, reservation_data, list_url,
                                                      resource_in_unit, state):
    """
    Tests that CANCELLED/DENIED reservations count neither towards the
    per-user limit nor as overlapping reservations.
    """
    reservation.state = state
    reservation.save()

    # test reservation max limit
    response = user_api_client.post(list_url, data=reservation_data)
    assert response.status_code == 201

    # test overlapping reservation
    resource_in_unit.max_reservations_per_user = 2
    resource_in_unit.save()
    reservation_data['begin'] = reservation.begin
    reservation_data['end'] = reservation.end
    response = user_api_client.post(list_url, data=reservation_data)
    assert response.status_code == 201


@pytest.mark.django_db
def test_cannot_make_reservation_in_the_past(user_api_client, reservation_data, list_url):
    """Tests that reservations with begin/end in the past are rejected."""
    reservation_data.update(
        begin='2010-04-04T11:00:00+02:00',
        end='2010-04-04T12:00:00+02:00'
    )
    response = user_api_client.post(list_url, data=reservation_data, HTTP_ACCEPT_LANGUAGE='en')
    assert response.status_code == 400
    assert_non_field_errors_contain(response, 'past')


@pytest.mark.django_db
def test_need_manual_confirmation_filter(user_api_client, user, list_url, reservation, other_resource):
    """Tests the need_manual_confirmation list filter in both directions."""
    other_resource.need_manual_confirmation = True
    other_resource.reservation_metadata_set = ReservationMetadataSet.objects.get(name='default')
    other_resource.save()
    reservation_needing_confirmation = Reservation.objects.create(
        resource=other_resource,
        begin='2115-04-05T09:00:00+02:00',
        end='2115-04-05T10:00:00+02:00',
        user=user,
    )

    # no filter, expect both reservations
    response = user_api_client.get(list_url)
    assert response.status_code == 200
    reservation_ids = set([res['id'] for res in response.data['results']])
assert reservation_ids == {reservation.id, reservation_needing_confirmation.id} # filter false, expect only first reservation response = user_api_client.get('%s%s' % (list_url, '?need_manual_confirmation=false')) assert response.status_code == 200 reservation_ids = set([res['id'] for res in response.data['results']]) assert reservation_ids == {reservation.id} # filter true, expect only second reservation response = user_api_client.get('%s%s' % (list_url, '?need_manual_confirmation=true')) assert response.status_code == 200 reservation_ids = set([res['id'] for res in response.data['results']]) assert reservation_ids == {reservation_needing_confirmation.id} @pytest.mark.parametrize('state_filter, expected_states', [ ('', ['requested', 'confirmed', 'denied', 'cancelled']), ('?state=requested', ['requested']), ('?state=confirmed,requested', ['confirmed', 'requested']), ('?state=confirmed, requested ,', ['confirmed', 'requested']) ]) @pytest.mark.django_db def test_state_filters(user_api_client, user, list_url, reservations_in_all_states, state_filter, expected_states): response = user_api_client.get('%s%s' % (list_url, state_filter)) assert response.status_code == 200 reservation_ids = set([res['id'] for res in response.data['results']]) assert reservation_ids == set(reservations_in_all_states[state].id for state in expected_states) @override_settings(RESPA_MAILS_ENABLED=True) @pytest.mark.django_db def test_reservation_mails(staff_api_client, staff_user, user_api_client, test_unit2, list_url, reservation_data_extra): resource = Resource.objects.get(id=reservation_data_extra['resource']) resource.need_manual_confirmation = True resource.reservation_metadata_set = ReservationMetadataSet.objects.get(name='default') resource.save() assign_perm('can_approve_reservation', staff_user, resource.unit) # create other staff user who should not receive mails because he doesn't have permission to the right unit other_official = get_user_model().objects.create( 
username='other_unit_official', first_name='Ozzy', last_name='Official', email='<EMAIL>', is_staff=True, preferred_language='en' ) assign_perm('can_approve_reservation', other_official, test_unit2) # test REQUESTED reservation_data_extra['state'] = Reservation.REQUESTED response = user_api_client.post(list_url, data=reservation_data_extra, format='json') assert response.status_code == 201 # 2 mails should be sent, one to the customer, and one to the staff user who can approve the reservation # (and no mail for the other staff user) assert len(mail.outbox) == 2 check_received_mail_exists( "You've made a preliminary reservation", reservation_data_extra['reserver_email_address'], 'made a preliminary reservation', clear_outbox=False ) check_received_mail_exists( 'Reservation requested', staff_user.email, 'A new preliminary reservation has been made' ) detail_url = '%s%s/' % (list_url, response.data['id']) # test DENIED reservation_data_extra['state'] = Reservation.DENIED response = staff_api_client.put(detail_url, data=reservation_data_extra, format='json') assert response.status_code == 200 assert len(mail.outbox) == 1 check_received_mail_exists( 'Reservation denied', reservation_data_extra['reserver_email_address'], 'has been denied.' 
) # test CONFIRMED reservation_data_extra['state'] = Reservation.CONFIRMED response = staff_api_client.put(detail_url, data=reservation_data_extra, format='json') assert response.status_code == 200 assert len(mail.outbox) == 1 check_received_mail_exists( 'Reservation confirmed', reservation_data_extra['reserver_email_address'], 'has been confirmed.', clear_outbox=False ) assert 'this resource rocks' in str(mail.outbox[0].message()) mail.outbox = [] # test CANCELLED reservation_data_extra['state'] = Reservation.CANCELLED response = staff_api_client.delete(detail_url, format='json') assert response.status_code == 204 assert len(mail.outbox) == 1 check_received_mail_exists( 'Reservation cancelled', reservation_data_extra['reserver_email_address'], 'has been cancelled.' ) @pytest.mark.django_db def test_can_approve_filter(staff_api_client, staff_user, list_url, reservation): reservation.resource.need_manual_confirmation = True reservation.resource.reservation_metadata_set = ReservationMetadataSet.objects.get(name='default') reservation.resource.save() reservation.state = Reservation.REQUESTED reservation.save() response = staff_api_client.get('%s%s' % (list_url, '?can_approve=true')) assert response.status_code == 200 assert len(response.data['results']) == 0 assign_perm('can_approve_reservation', staff_user, reservation.resource.unit) response = staff_api_client.get('%s%s' % (list_url, '?can_approve=true')) assert response.status_code == 200 assert len(response.data['results']) == 1 @pytest.mark.django_db def test_access_code_cannot_be_set_if_type_none(user_api_client, list_url, resource_in_unit, reservation_data): reservation_data['access_code'] = '023543' response = user_api_client.post(list_url, data=reservation_data, HTTP_ACCEPT_LANGUAGE='en') assert response.status_code == 400 assert 'This field cannot have a value with this resource' in response.data['access_code'] @pytest.mark.django_db def test_invalid_pin6_access_code(user_api_client, list_url, 
resource_in_unit, reservation_data): resource_in_unit.access_code_type = Resource.ACCESS_CODE_TYPE_PIN6 resource_in_unit.save() reservation_data['access_code'] = 'xxx' reservation_data['resource'] = resource_in_unit.id response = user_api_client.post(list_url, data=reservation_data, HTTP_ACCEPT_LANGUAGE='en') assert response.status_code == 400 assert 'Invalid value' in response.data['access_code'] @pytest.mark.django_db def test_pin6_access_code_is_generated_if_not_set(user_api_client, list_url, resource_in_unit, reservation_data): resource_in_unit.access_code_type = Resource.ACCESS_CODE_TYPE_PIN6 resource_in_unit.save() response = user_api_client.post(list_url, data=reservation_data) assert response.status_code == 201 new_reservation = Reservation.objects.get(id=response.data['id']) assert re.match('^[0-9]{6}$', new_reservation.access_code) @pytest.mark.django_db def test_pin6_access_code_can_be_set(user_api_client, list_url, resource_in_unit, reservation_data): resource_in_unit.access_code_type = Resource.ACCESS_CODE_TYPE_PIN6 resource_in_unit.save() reservation_data['access_code'] = '023543' reservation_data['resource'] = resource_in_unit.id response = user_api_client.post(list_url, data=reservation_data) assert response.status_code == 201 new_reservation = Reservation.objects.get(id=response.data['id']) assert new_reservation.access_code == '023543' @pytest.mark.django_db def test_pin6_access_code_cannot_be_modified(user_api_client, resource_in_unit, reservation, reservation_data): resource_in_unit.access_code_type = Resource.ACCESS_CODE_TYPE_PIN6 resource_in_unit.save() reservation.access_code = '123456' reservation.save() reservation_data['access_code'] = '654321' detail_url = reverse('reservation-detail', kwargs={'pk': reservation.pk}) response = user_api_client.put(detail_url, data=reservation_data, HTTP_ACCEPT_LANGUAGE='en') assert response.status_code == 400 assert 'This field cannot be changed' in response.data['access_code'] 
@pytest.mark.parametrize('username, has_perm, expected', [ ('test_user', False, True), # own reservation ('test_user2', False, False), # someone else's reservation ('test_user2', True, True), # someone else's reservation but having the permission ('test_staff_user', False, True) # staff ]) @pytest.mark.django_db def test_access_code_visibility(user, user2, staff_user, api_client, resource_in_unit, reservation, username, has_perm, expected): resource_in_unit.access_code_type = Resource.ACCESS_CODE_TYPE_PIN6 resource_in_unit.save() reservation.access_code = '123456' reservation.save() detail_url = reverse('reservation-detail', kwargs={'pk': reservation.pk}) current_user = User.objects.get(username=username) if has_perm: assign_perm('can_view_reservation_access_code', current_user, resource_in_unit.unit) api_client.force_authenticate(current_user) response = api_client.get(detail_url) assert response.status_code == 200 if expected: assert response.data['access_code'] == '123456' else: assert 'access_code' not in response.data @override_settings(RESPA_MAILS_ENABLED=True) @pytest.mark.django_db def test_reservation_created_with_access_code_mail(user_api_client, user, resource_in_unit, list_url, reservation_data): # The mail should not be sent if access code type is none response = user_api_client.post(list_url, data=reservation_data) assert response.status_code == 201 assert len(mail.outbox) == 0 reservation_data['access_code'] = '007007' resource_in_unit.access_code_type = Resource.ACCESS_CODE_TYPE_PIN6 resource_in_unit.save() Reservation.objects.get(id=response.data['id']).delete() response = user_api_client.post(list_url, data=reservation_data) assert response.status_code == 201 check_received_mail_exists( 'Reservation created', user.email, 'Your access code for the resource: 007007', ) # Verify that modifying the reservation doesn't trigger the mail reservation_data['end'] = '2115-04-04T12:00:00+02:00' detail_url = reverse('reservation-detail', kwargs={'pk': 
response.data['id']}) response = user_api_client.put(detail_url, data=reservation_data) assert response.status_code == 200 assert len(mail.outbox) == 0 @freeze_time('2016-10-25') @pytest.mark.django_db def test_reservation_reservable_before(user_api_client, resource_in_unit, list_url, reservation_data): resource_in_unit.reservable_days_in_advance = 10 resource_in_unit.save() reservation_data['begin'] = timezone.now().replace(hour=12, minute=0, second=0) + datetime.timedelta(days=11) reservation_data['end'] = timezone.now().replace(hour=13, minute=0, second=0) + datetime.timedelta(days=11) response = user_api_client.post(list_url, data=reservation_data) assert response.status_code == 400 assert_non_field_errors_contain(response, 'The resource is reservable only before') reservation_data['begin'] = timezone.now().replace(hour=12, minute=0, second=0) + datetime.timedelta(days=9) reservation_data['end'] = timezone.now().replace(hour=13, minute=0, second=0) + datetime.timedelta(days=9) response = user_api_client.post(list_url, data=reservation_data) assert response.status_code == 201 @pytest.mark.django_db def test_reservation_metadata_set(user_api_client, reservation, list_url, reservation_data): detail_url = reverse('reservation-detail', kwargs={'pk': reservation.pk}) field_1 = ReservationMetadataField.objects.get(field_name='reserver_name') field_2 = ReservationMetadataField.objects.get(field_name='reserver_phone_number') metadata_set = ReservationMetadataSet.objects.create( name='test_set', ) metadata_set.supported_fields = [field_1, field_2] metadata_set.required_fields = [field_1] reservation.resource.reservation_metadata_set = metadata_set reservation.resource.save(update_fields=('reservation_metadata_set',)) reservation_data['resource'] = reservation.resource.pk response = user_api_client.put(detail_url, data=reservation_data, HTTP_ACCEPT_LANGUAGE='en') assert response.status_code == 400 assert 'This field is required.' 
in response.data['reserver_name'] reservation_data['reserver_name'] = 'Mr. Reserver' reservation_data['reserver_phone_number'] = '0700-555555' reservation_data['reserver_address_street'] = 'ignored street 7' response = user_api_client.put(detail_url, data=reservation_data) assert response.status_code == 200 reservation.refresh_from_db() assert reservation.reserver_name == 'Mr. Reserver' assert reservation.reserver_phone_number == '0700-555555' assert reservation.reserver_address_street != 'ignored street 7' @pytest.mark.django_db def test_detail_endpoint_does_not_need_all_true_filter(user_api_client, user, resource_in_unit): reservation_in_the_past = Reservation.objects.create( resource=resource_in_unit, begin='2005-04-04T09:00:00+02:00', end='2005-04-04T10:00:00+02:00', user=user, ) detail_url = reverse('reservation-detail', kwargs={'pk': reservation_in_the_past.pk}) response = user_api_client.get(detail_url) assert response.status_code == 200
en
0.947105
Tests that PUT, PATCH and DELETE aren't allowed to reservation list endpoint. Tests that an unauthenticated user cannot create a reservation. Tests that an authenticated user can create a reservation. Tests that an authenticated user can modify her own reservation Tests that an authenticated user can delete her own reservation Tests that a user cannot exceed her active reservation limit for one resource. # the user already has one reservation, making another reservation should not be possible as the active reservation # limit is one Tests that a reservation in the past doesn't count when checking reservation limit. # the user already has this reservation which is in the past. # making another reservation should be possible because the other reservation is in the past. Tests that the reservation limits for a resource do not apply to staff. # the staff member already has one reservation, and should be able to make a second one regardless of the fact that # that the limit is one. Tests that a normal user cannot make reservations outside open hours. # invalid day # valid begin time, end time after closing time # begin time before opens, valid end time Tests that a normal user cannot make reservations longer than the resource's max period. # the reservation's length is 3h (11 -> 14) while the maximum is 2h Tests that a staff member can make reservations outside opening hours. Also tests that the resource's max period doesn't limit staff. # begin time before opening time, end time after closing time, longer than max period 2h Tests that user object is returned within Reservation data and it is in the correct form. Also tests that only staff can see the user object. Tests that a reservation can be modified with times that overlap with the original times. # try to extend the original reservation by 1 hour Tests that a normal user cannot make a reservation to a non reservable resource but staff can. 
Creating a new reservation with POST and updating an existing one with PUT are both tested. # Create a reservation and try to change that with PUT # a staff member should be allowed to create and update Tests that a normal user can't modify other people's reservations while a staff member can. # a staff member should be allowed to perform every modifying method even that she is not the user in # the reservation Tests that a normal user cannot make a reservation for other people. # set bigger max reservations limit so that it won't be a limiting factor here # set another user for new reservations # modify an existing reservation, and verify that user isn't changed # make a new reservation and verify that user isn't the other one Tests that a staff member can make reservations for other people without normal user restrictions. # dealing with another user's reservation # modify an existing reservation # create a new reservation, which is also too long, outside the opening hours and exceeds normal user # reservation limit. creating such a reservation for a normal user should be possible for a staff member Tests that reservation user and is_own filtering work correctly. # even unauthenticated user should see all the reservations # filtering by user # filtering by is_own # without the filter, only the reservation in the future should be returned # with the 'all' filter, both reservations should be returned # with start or end, both reservations should be returned # filtering by start date only # filtering by end date only # filtering by start and end times Tests that maximum reservation period error is returned in correct humanized form. 
# too long reservation Tests that reservation list and detail endpoints return .xlsx files when requested # normal user # staff member # staff with permission but reserver_name and event_description missing # reservation should not be be confirmed if the user doesn't have approve permission # unauthenticated user # own reservation # someone else's reservation # staff # test reservation max limit # test overlapping reservation # no filter, expect both reservations # filter false, expect only first reservation # filter true, expect only second reservation # create other staff user who should not receive mails because he doesn't have permission to the right unit # test REQUESTED # 2 mails should be sent, one to the customer, and one to the staff user who can approve the reservation # (and no mail for the other staff user) # test DENIED # test CONFIRMED # test CANCELLED # own reservation # someone else's reservation # someone else's reservation but having the permission # staff # The mail should not be sent if access code type is none # Verify that modifying the reservation doesn't trigger the mail
1.900383
2
phs_adventure.py
ehmatthes/phs_adventure
2
6624639
<filename>phs_adventure.py import os import sys import engine # Overall approach: show a message, then show choices, # then respond to choices while 1: # Start the adventure. os.system('clear') message = "You are standing at the entrance to the new Pacific High School." choices = {} choices['1'] = "Enter." choices['2'] = "Turn around and jump into the water." print(message) choice = engine.get_choice(choices) # Respond to first choice. if choice == '1': os.system('clear') phs_values = ['honesty', 'perseverance', 'humor', 'acceptance', 'respect'] key = '' while key.lower() not in phs_values: os.system('clear') print("You must know one of the five secrets in order to enter.") key = input("Please tell me one of the secrets: ") # You know the secret! os.system('clear') print("You have entered!") print("You notice that there is new paint on the walls.") print("Would you like to: ") print("\n[1] Open the door to Hillary's classroom.") print("[2] Go sit by Phil's desk.") print("[3] Go into Eric's class to do some Python.") choice = input("\nWhat is your choice? ") if choice == '1': os.system('clear') key = '' while key.lower() != 'british': os.system('clear') print("\nNot so fast!") key = input("What accent are you likely to hear in a moment?") # You know the secret! os.system('clear') print("A British-sounding woman has just shouted at you.") elif choice == '2': # phil's desk os.system('clear') print("You are standing in front of Phil's desk.") print("Phil is talking on the phone about giraffes in the sky.") print("Would you like to: ") print("\n[1]Keep listening for a while.") print("[2]Interrupt Phil's conversation.") print("[3]Quietly sneak a Hames pass off of Phil's desk.") choice = input("\nWhat is your choice?") elif choice == '3': # eric's room pass else: print("I don't understand.") elif choice == '2': os.system('clear') print("You drowned! 
Just kidding.") print("You are now riding a dolphin.") print("Would you like to: ") print("\n[1] Head towards the magical underwater kingdom of Zubida.") print("\n[2] Head towards a magical land far, far away from here.") choice = input("\nWhat is your choice? ") else: print("I don't understand your choice.")
<filename>phs_adventure.py import os import sys import engine # Overall approach: show a message, then show choices, # then respond to choices while 1: # Start the adventure. os.system('clear') message = "You are standing at the entrance to the new Pacific High School." choices = {} choices['1'] = "Enter." choices['2'] = "Turn around and jump into the water." print(message) choice = engine.get_choice(choices) # Respond to first choice. if choice == '1': os.system('clear') phs_values = ['honesty', 'perseverance', 'humor', 'acceptance', 'respect'] key = '' while key.lower() not in phs_values: os.system('clear') print("You must know one of the five secrets in order to enter.") key = input("Please tell me one of the secrets: ") # You know the secret! os.system('clear') print("You have entered!") print("You notice that there is new paint on the walls.") print("Would you like to: ") print("\n[1] Open the door to Hillary's classroom.") print("[2] Go sit by Phil's desk.") print("[3] Go into Eric's class to do some Python.") choice = input("\nWhat is your choice? ") if choice == '1': os.system('clear') key = '' while key.lower() != 'british': os.system('clear') print("\nNot so fast!") key = input("What accent are you likely to hear in a moment?") # You know the secret! os.system('clear') print("A British-sounding woman has just shouted at you.") elif choice == '2': # phil's desk os.system('clear') print("You are standing in front of Phil's desk.") print("Phil is talking on the phone about giraffes in the sky.") print("Would you like to: ") print("\n[1]Keep listening for a while.") print("[2]Interrupt Phil's conversation.") print("[3]Quietly sneak a Hames pass off of Phil's desk.") choice = input("\nWhat is your choice?") elif choice == '3': # eric's room pass else: print("I don't understand.") elif choice == '2': os.system('clear') print("You drowned! 
Just kidding.") print("You are now riding a dolphin.") print("Would you like to: ") print("\n[1] Head towards the magical underwater kingdom of Zubida.") print("\n[2] Head towards a magical land far, far away from here.") choice = input("\nWhat is your choice? ") else: print("I don't understand your choice.")
en
0.837711
# Overall approach: show a message, then show choices, # then respond to choices # Start the adventure. # Respond to first choice. # You know the secret! # You know the secret! # phil's desk # eric's room
3.937984
4
python/ccxt/yobit.py
yufengwei/xex-dev
0
6624640
# -*- coding: utf-8 -*- # PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN: # https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code from ccxt.liqui import liqui from ccxt.base.errors import ExchangeError from ccxt.base.errors import AuthenticationError from ccxt.base.errors import InsufficientFunds class yobit (liqui): def describe(self): return self.deep_extend(super(yobit, self).describe(), { 'id': 'yobit', 'name': 'YoBit', 'countries': ['RU'], 'rateLimit': 3000, # responses are cached every 2 seconds 'version': '3', 'has': { 'createDepositAddress': True, 'fetchDepositAddress': True, 'fetchDeposits': False, 'fetchWithdrawals': False, 'fetchTransactions': False, 'fetchTickers': False, 'CORS': False, 'withdraw': True, }, 'urls': { 'logo': 'https://user-images.githubusercontent.com/1294454/27766910-cdcbfdae-5eea-11e7-9859-03fea873272d.jpg', 'api': { 'public': 'https://yobit.net/api', 'private': 'https://yobit.net/tapi', }, 'www': 'https://www.yobit.net', 'doc': 'https://www.yobit.net/en/api/', 'fees': 'https://www.yobit.net/en/fees/', }, 'api': { 'public': { 'get': [ 'depth/{pair}', 'info', 'ticker/{pair}', 'trades/{pair}', ], }, 'private': { 'post': [ 'ActiveOrders', 'CancelOrder', 'GetDepositAddress', 'getInfo', 'OrderInfo', 'Trade', 'TradeHistory', 'WithdrawCoinsToAddress', ], }, }, 'fees': { 'trading': { 'maker': 0.002, 'taker': 0.002, }, 'funding': { 'withdraw': {}, }, }, 'commonCurrencies': { 'AIR': 'AirCoin', 'ANI': 'ANICoin', 'ANT': 'AntsCoin', # what is self, a coin for ants? 
'ATMCHA': 'ATM', 'ASN': 'Ascension', 'AST': 'Astral', 'ATM': 'Autumncoin', 'BCC': 'BCH', 'BCS': 'BitcoinStake', 'BLN': 'Bulleon', 'BOT': 'BOTcoin', 'BON': 'BONES', 'BPC': 'BitcoinPremium', 'BTS': 'Bitshares2', 'CAT': 'BitClave', 'CMT': 'CometCoin', 'COV': 'Coven Coin', 'COVX': 'COV', 'CPC': 'Capricoin', 'CS': 'CryptoSpots', 'DCT': 'Discount', 'DGD': 'DarkGoldCoin', 'DIRT': 'DIRTY', 'DROP': 'FaucetCoin', 'EKO': 'EkoCoin', 'ENTER': 'ENTRC', 'EPC': 'ExperienceCoin', 'ERT': 'Eristica Token', 'ESC': 'EdwardSnowden', 'EUROPE': 'EUROP', 'EXT': 'LifeExtension', 'FUNK': 'FUNKCoin', 'GCC': 'GlobalCryptocurrency', 'GEN': 'Genstake', 'GENE': 'Genesiscoin', 'GOLD': 'GoldMint', 'GOT': 'Giotto Coin', 'HTML5': 'HTML', 'HYPERX': 'HYPER', 'ICN': 'iCoin', 'INSANE': 'INSN', 'JNT': 'JointCoin', 'JPC': 'JupiterCoin', 'KNC': 'KingN Coin', 'LBTCX': 'LiteBitcoin', 'LIZI': 'LiZi', 'LOC': 'LocoCoin', 'LOCX': 'LOC', 'LUNYR': 'LUN', 'LUN': 'LunarCoin', # they just change the ticker if it is already taken 'MDT': 'Midnight', 'NAV': 'NavajoCoin', 'NBT': 'NiceBytes', 'OMG': 'OMGame', 'PAC': '$PAC', 'PLAY': 'PlayCoin', 'PIVX': 'Darknet', 'PRS': 'PRE', 'PUTIN': 'PUT', 'STK': 'StakeCoin', 'SUB': 'Subscriptio', 'PAY': 'EPAY', 'PLC': 'Platin Coin', 'RCN': 'RCoin', 'REP': 'Republicoin', 'RUR': 'RUB', 'XIN': 'XINCoin', }, 'options': { 'fetchOrdersRequiresSymbol': True, 'fetchTickersMaxLength': 512, }, 'exceptions': { 'broad': { 'Total transaction amount': ExchangeError, # {"success": 0, "error": "Total transaction amount is less than minimal total: 0.00010000"} 'Insufficient funds': InsufficientFunds, 'invalid key': AuthenticationError, }, }, }) def parse_order_status(self, status): statuses = { '0': 'open', '1': 'closed', '2': 'canceled', '3': 'open', # or partially-filled and closed? 
https://github.com/ccxt/ccxt/issues/1594 } if status in statuses: return statuses[status] return status def fetch_balance(self, params={}): self.load_markets() response = self.privatePostGetInfo() balances = response['return'] result = {'info': balances} sides = {'free': 'funds', 'total': 'funds_incl_orders'} keys = list(sides.keys()) for i in range(0, len(keys)): key = keys[i] side = sides[key] if side in balances: currencies = list(balances[side].keys()) for j in range(0, len(currencies)): lowercase = currencies[j] uppercase = lowercase.upper() currency = self.common_currency_code(uppercase) account = None if currency in result: account = result[currency] else: account = self.account() account[key] = balances[side][lowercase] if (account['total'] is not None) and(account['free'] is not None): account['used'] = account['total'] - account['free'] result[currency] = account return self.parse_balance(result) def create_deposit_address(self, code, params={}): response = self.fetch_deposit_address(code, self.extend({ 'need_new': 1, }, params)) address = self.safe_string(response, 'address') self.check_address(address) return { 'currency': code, 'address': address, 'tag': None, 'info': response['info'], } def fetch_deposit_address(self, code, params={}): self.load_markets() currency = self.currency(code) request = { 'coinName': currency['id'], 'need_new': 0, } response = self.privatePostGetDepositAddress(self.extend(request, params)) address = self.safe_string(response['return'], 'address') self.check_address(address) return { 'currency': code, 'address': address, 'tag': None, 'info': response, } def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}): self.load_markets() market = None # some derived classes use camelcase notation for request fields request = { # 'from': 123456789, # trade ID, from which the display starts numerical 0(test result: liqui ignores self field) # 'count': 1000, # the number of trades for display numerical, default = 1000 # 
'from_id': trade ID, from which the display starts numerical 0 # 'end_id': trade ID on which the display ends numerical ∞ # 'order': 'ASC', # sorting, default = DESC(test result: liqui ignores self field, most recent trade always goes last) # 'since': 1234567890, # UTC start time, default = 0(test result: liqui ignores self field) # 'end': 1234567890, # UTC end time, default = ∞(test result: liqui ignores self field) # 'pair': 'eth_btc', # default = all markets } if symbol is not None: market = self.market(symbol) request['pair'] = market['id'] if limit is not None: request['count'] = int(limit) if since is not None: request['since'] = int(since / 1000) method = self.options['fetchMyTradesMethod'] response = getattr(self, method)(self.extend(request, params)) trades = self.safe_value(response, 'return', {}) ids = list(trades.keys()) result = [] for i in range(0, len(ids)): id = ids[i] trade = self.parse_trade(self.extend(trades[id], { 'trade_id': id, }), market) result.append(trade) return self.filter_by_symbol_since_limit(result, symbol, since, limit) def withdraw(self, code, amount, address, tag=None, params={}): self.check_address(address) self.load_markets() currency = self.currency(code) response = self.privatePostWithdrawCoinsToAddress(self.extend({ 'coinName': currency['id'], 'amount': amount, 'address': address, }, params)) return { 'info': response, 'id': None, }
# -*- coding: utf-8 -*- # PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN: # https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code from ccxt.liqui import liqui from ccxt.base.errors import ExchangeError from ccxt.base.errors import AuthenticationError from ccxt.base.errors import InsufficientFunds class yobit (liqui): def describe(self): return self.deep_extend(super(yobit, self).describe(), { 'id': 'yobit', 'name': 'YoBit', 'countries': ['RU'], 'rateLimit': 3000, # responses are cached every 2 seconds 'version': '3', 'has': { 'createDepositAddress': True, 'fetchDepositAddress': True, 'fetchDeposits': False, 'fetchWithdrawals': False, 'fetchTransactions': False, 'fetchTickers': False, 'CORS': False, 'withdraw': True, }, 'urls': { 'logo': 'https://user-images.githubusercontent.com/1294454/27766910-cdcbfdae-5eea-11e7-9859-03fea873272d.jpg', 'api': { 'public': 'https://yobit.net/api', 'private': 'https://yobit.net/tapi', }, 'www': 'https://www.yobit.net', 'doc': 'https://www.yobit.net/en/api/', 'fees': 'https://www.yobit.net/en/fees/', }, 'api': { 'public': { 'get': [ 'depth/{pair}', 'info', 'ticker/{pair}', 'trades/{pair}', ], }, 'private': { 'post': [ 'ActiveOrders', 'CancelOrder', 'GetDepositAddress', 'getInfo', 'OrderInfo', 'Trade', 'TradeHistory', 'WithdrawCoinsToAddress', ], }, }, 'fees': { 'trading': { 'maker': 0.002, 'taker': 0.002, }, 'funding': { 'withdraw': {}, }, }, 'commonCurrencies': { 'AIR': 'AirCoin', 'ANI': 'ANICoin', 'ANT': 'AntsCoin', # what is self, a coin for ants? 
'ATMCHA': 'ATM', 'ASN': 'Ascension', 'AST': 'Astral', 'ATM': 'Autumncoin', 'BCC': 'BCH', 'BCS': 'BitcoinStake', 'BLN': 'Bulleon', 'BOT': 'BOTcoin', 'BON': 'BONES', 'BPC': 'BitcoinPremium', 'BTS': 'Bitshares2', 'CAT': 'BitClave', 'CMT': 'CometCoin', 'COV': 'Coven Coin', 'COVX': 'COV', 'CPC': 'Capricoin', 'CS': 'CryptoSpots', 'DCT': 'Discount', 'DGD': 'DarkGoldCoin', 'DIRT': 'DIRTY', 'DROP': 'FaucetCoin', 'EKO': 'EkoCoin', 'ENTER': 'ENTRC', 'EPC': 'ExperienceCoin', 'ERT': 'Eristica Token', 'ESC': 'EdwardSnowden', 'EUROPE': 'EUROP', 'EXT': 'LifeExtension', 'FUNK': 'FUNKCoin', 'GCC': 'GlobalCryptocurrency', 'GEN': 'Genstake', 'GENE': 'Genesiscoin', 'GOLD': 'GoldMint', 'GOT': 'Giotto Coin', 'HTML5': 'HTML', 'HYPERX': 'HYPER', 'ICN': 'iCoin', 'INSANE': 'INSN', 'JNT': 'JointCoin', 'JPC': 'JupiterCoin', 'KNC': 'KingN Coin', 'LBTCX': 'LiteBitcoin', 'LIZI': 'LiZi', 'LOC': 'LocoCoin', 'LOCX': 'LOC', 'LUNYR': 'LUN', 'LUN': 'LunarCoin', # they just change the ticker if it is already taken 'MDT': 'Midnight', 'NAV': 'NavajoCoin', 'NBT': 'NiceBytes', 'OMG': 'OMGame', 'PAC': '$PAC', 'PLAY': 'PlayCoin', 'PIVX': 'Darknet', 'PRS': 'PRE', 'PUTIN': 'PUT', 'STK': 'StakeCoin', 'SUB': 'Subscriptio', 'PAY': 'EPAY', 'PLC': 'Platin Coin', 'RCN': 'RCoin', 'REP': 'Republicoin', 'RUR': 'RUB', 'XIN': 'XINCoin', }, 'options': { 'fetchOrdersRequiresSymbol': True, 'fetchTickersMaxLength': 512, }, 'exceptions': { 'broad': { 'Total transaction amount': ExchangeError, # {"success": 0, "error": "Total transaction amount is less than minimal total: 0.00010000"} 'Insufficient funds': InsufficientFunds, 'invalid key': AuthenticationError, }, }, }) def parse_order_status(self, status): statuses = { '0': 'open', '1': 'closed', '2': 'canceled', '3': 'open', # or partially-filled and closed? 
https://github.com/ccxt/ccxt/issues/1594 } if status in statuses: return statuses[status] return status def fetch_balance(self, params={}): self.load_markets() response = self.privatePostGetInfo() balances = response['return'] result = {'info': balances} sides = {'free': 'funds', 'total': 'funds_incl_orders'} keys = list(sides.keys()) for i in range(0, len(keys)): key = keys[i] side = sides[key] if side in balances: currencies = list(balances[side].keys()) for j in range(0, len(currencies)): lowercase = currencies[j] uppercase = lowercase.upper() currency = self.common_currency_code(uppercase) account = None if currency in result: account = result[currency] else: account = self.account() account[key] = balances[side][lowercase] if (account['total'] is not None) and(account['free'] is not None): account['used'] = account['total'] - account['free'] result[currency] = account return self.parse_balance(result) def create_deposit_address(self, code, params={}): response = self.fetch_deposit_address(code, self.extend({ 'need_new': 1, }, params)) address = self.safe_string(response, 'address') self.check_address(address) return { 'currency': code, 'address': address, 'tag': None, 'info': response['info'], } def fetch_deposit_address(self, code, params={}): self.load_markets() currency = self.currency(code) request = { 'coinName': currency['id'], 'need_new': 0, } response = self.privatePostGetDepositAddress(self.extend(request, params)) address = self.safe_string(response['return'], 'address') self.check_address(address) return { 'currency': code, 'address': address, 'tag': None, 'info': response, } def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}): self.load_markets() market = None # some derived classes use camelcase notation for request fields request = { # 'from': 123456789, # trade ID, from which the display starts numerical 0(test result: liqui ignores self field) # 'count': 1000, # the number of trades for display numerical, default = 1000 # 
'from_id': trade ID, from which the display starts numerical 0 # 'end_id': trade ID on which the display ends numerical ∞ # 'order': 'ASC', # sorting, default = DESC(test result: liqui ignores self field, most recent trade always goes last) # 'since': 1234567890, # UTC start time, default = 0(test result: liqui ignores self field) # 'end': 1234567890, # UTC end time, default = ∞(test result: liqui ignores self field) # 'pair': 'eth_btc', # default = all markets } if symbol is not None: market = self.market(symbol) request['pair'] = market['id'] if limit is not None: request['count'] = int(limit) if since is not None: request['since'] = int(since / 1000) method = self.options['fetchMyTradesMethod'] response = getattr(self, method)(self.extend(request, params)) trades = self.safe_value(response, 'return', {}) ids = list(trades.keys()) result = [] for i in range(0, len(ids)): id = ids[i] trade = self.parse_trade(self.extend(trades[id], { 'trade_id': id, }), market) result.append(trade) return self.filter_by_symbol_since_limit(result, symbol, since, limit) def withdraw(self, code, amount, address, tag=None, params={}): self.check_address(address) self.load_markets() currency = self.currency(code) response = self.privatePostWithdrawCoinsToAddress(self.extend({ 'coinName': currency['id'], 'amount': amount, 'address': address, }, params)) return { 'info': response, 'id': None, }
en
0.676731
# -*- coding: utf-8 -*- # PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN: # https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code # responses are cached every 2 seconds # what is self, a coin for ants? # they just change the ticker if it is already taken # {"success": 0, "error": "Total transaction amount is less than minimal total: 0.00010000"} # or partially-filled and closed? https://github.com/ccxt/ccxt/issues/1594 # some derived classes use camelcase notation for request fields # 'from': 123456789, # trade ID, from which the display starts numerical 0(test result: liqui ignores self field) # 'count': 1000, # the number of trades for display numerical, default = 1000 # 'from_id': trade ID, from which the display starts numerical 0 # 'end_id': trade ID on which the display ends numerical ∞ # 'order': 'ASC', # sorting, default = DESC(test result: liqui ignores self field, most recent trade always goes last) # 'since': 1234567890, # UTC start time, default = 0(test result: liqui ignores self field) # 'end': 1234567890, # UTC end time, default = ∞(test result: liqui ignores self field) # 'pair': 'eth_btc', # default = all markets
1.655913
2
key_word_num/json_handle.py
Carrie2001/Chinese-poems-generation-based-on-pictures
1
6624641
# coding UTF-8 # 该程序用于把https://github.com/chinese-poetry/chinese-poetry上的古诗json数据转化成txt并从中筛选五言诗句 import json import os # 5言诗 length = 5 length += 1 length *= 2 tfilename = '唐诗' files = os.listdir(tfilename) all_list = [] # 把这些诗句写入list for name in files: filename = tfilename + '/' filename += name with open(filename, 'r', encoding='utf-8') as f: data_json = json.load(f) for i in range(len(data_json)): tmp_list = [] title = [] title.append(data_json[i]['title']) author = [] author.append(data_json[i]['author']) tmp_list.append(title) tmp_list.append(author) tmp_list.append(data_json[i]['paragraphs']) # print(tmp_list[2][0]) # print(len(tmp_list[2][0])) # print(len(tmp_list[2])) if len(tmp_list[2]) != 2 or len(tmp_list[2][0]) != length: continue all_list.append(tmp_list) # 把list存入文档 with open('五言绝句唐诗', 'w', encoding='utf-8') as file: for i in range(len(all_list)): file.write(all_list[i][0][0]) file.write('::') file.write(all_list[i][1][0]) file.write('::') file.write(all_list[i][2][0]) file.write(all_list[i][2][1]) file.write('\n')
# coding UTF-8 # 该程序用于把https://github.com/chinese-poetry/chinese-poetry上的古诗json数据转化成txt并从中筛选五言诗句 import json import os # 5言诗 length = 5 length += 1 length *= 2 tfilename = '唐诗' files = os.listdir(tfilename) all_list = [] # 把这些诗句写入list for name in files: filename = tfilename + '/' filename += name with open(filename, 'r', encoding='utf-8') as f: data_json = json.load(f) for i in range(len(data_json)): tmp_list = [] title = [] title.append(data_json[i]['title']) author = [] author.append(data_json[i]['author']) tmp_list.append(title) tmp_list.append(author) tmp_list.append(data_json[i]['paragraphs']) # print(tmp_list[2][0]) # print(len(tmp_list[2][0])) # print(len(tmp_list[2])) if len(tmp_list[2]) != 2 or len(tmp_list[2][0]) != length: continue all_list.append(tmp_list) # 把list存入文档 with open('五言绝句唐诗', 'w', encoding='utf-8') as file: for i in range(len(all_list)): file.write(all_list[i][0][0]) file.write('::') file.write(all_list[i][1][0]) file.write('::') file.write(all_list[i][2][0]) file.write(all_list[i][2][1]) file.write('\n')
zh
0.319235
# coding UTF-8 # 该程序用于把https://github.com/chinese-poetry/chinese-poetry上的古诗json数据转化成txt并从中筛选五言诗句 # 5言诗 # 把这些诗句写入list # print(tmp_list[2][0]) # print(len(tmp_list[2][0])) # print(len(tmp_list[2])) # 把list存入文档
2.979715
3
client/build/build_rocketbox_materials.py
AustralianDisabilityLimited/MultiversePlatform
33
6624642
# # The Multiverse Platform is made available under the MIT License. # # Copyright (c) 2012 The Multiverse Foundation # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation # files (the "Software"), to deal in the Software without restriction, # including without limitation the rights to use, copy, modify, # merge, publish, distribute, sublicense, and/or sell copies # of the Software, and to permit persons to whom the Software # is furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES # OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE # OR OTHER DEALINGS IN THE SOFTWARE. 
# # #!/usr/bin/python model_info = \ [[ "business01_m_highpoly", [ "body" ], [ "business01_m_60" ] ], [ "business02_m_highpoly", [ "body" ], [ "business02_m_35" ] ], [ "business03_m_highpoly", [ "body" ], [ "business03_m_35" ] ], [ "business04_m_highpoly", [ "body" ], [ "business04_m_35" ] ], [ "business05_m_highpoly", [ "body" ], [ "business05_m_25" ] ], [ "business06_m_highpoly", [ "body" ], [ "business06_m_25" ] ], [ "business07_m_highpoly", [ "body" ], [ "business07_m_25" ] ], [ "child01_m_highpoly", [ "body" ], [ "child01_m" ] ], [ "child02_m_highpoly", [ "body" ], [ "child02_m" ] ], [ "nude01_m_highpoly", [ "body", "hair_transparent" ], [ "nude01_m_25", "nude01_m_25_hair" ] ], [ "nude02_m_highpoly", [ "body" ], [ "nude_02_m_25" ] ], [ "nude03_m_highpoly", [ "body" ], [ "nude_03_m_25" ] ], [ "nude04_m_highpoly", [ "body" ], [ "nude04_m_25" ] ], [ "nude08_m_highpoly", [ "body" ], [ "nude08_m_25" ] ], [ "nude09_m_highpoly", [ "body" ], [ "nude09_m_25" ] ], [ "nude10_m_highpoly", [ "body" ], [ "nude_10_m_25" ] ], [ "soccerplayer01_m_highpoly", [ "body" ], [ "soccerplayer01_m" ] ], [ "sportive01_m_highpoly", [ "body" ], [ "sportive01_m_20" ] ], [ "sportive03_m_highpoly", [ "body" ], [ "sportive03_m_25" ] ], [ "sportive04_m_highpoly", [ "body" ], [ "sportive04_m_25" ] ], [ "sportive05_m_highpoly", [ "body" ], [ "sportive05_m_30" ] ], [ "sportive07_m_highpoly", [ "body" ], [ "sportive07_m_25" ] ], [ "sportive08_m_highpoly", [ "body" ], [ "sportive08_m_25" ] ], [ "sportive09_m_highpoly", [ "body" ], [ "sportive09_m_25" ] ], [ "sportive10_m_highpoly", [ "body" ], [ "sportive10_m_20" ] ], [ "sportive11_m_highpoly", [ "body" ], [ "sportive11_m_30" ] ], [ "business01_f_highpoly", [ "body", "hair_transparent" ], [ "business01_f_30", "business01_f_30_hair" ] ], [ "business02_f_highpoly", [ "body" ], [ "business01_f_50" ] ], [ "business03_f_highpoly", [ "body" ], [ "business03_f_25" ] ], [ "nude01_f_highpoly", [ "body" ], [ "nude01_f_20" ] ] ] material_script = """ material 
%s.%s { // The concept here is that we make one ambient pass which just // provides a bit of lightening, then we run the multiple light // passes (additive), then we run the texture pass (multiplicative) technique { // Base ambient pass pass { fog_override true // Really basic vertex program // NB we don't use fixed function here because GL does not like // mixing fixed function and vertex programs, depth fighting can // be an issue vertex_program_ref Examples/AmbientOneVS { // Models with a skeleton will have already had their vertices // transformed into world space by the software skinning. // Pass in the viewproj_matrix instead of worldviewproj_matrix param_named_auto WorldViewProj viewproj_matrix param_named_auto ambient ambient_light_colour } } // Now do the lighting pass // NB we don't do decal texture here because this is repeated per light pass { fog_override true // do this for each light iteration once_per_light scene_blend add // Vertex program reference vertex_program_ref Examples/DiffuseBumpVS { param_named_auto WorldViewProj viewproj_matrix param_named_auto WorldMatrix world_matrix param_named_auto LightPosition light_position_object_space 0 param_named_auto EyePosition camera_position_object_space } fragment_program_ref Examples/DiffuseBumpPS_20 { param_named_auto LMd light_diffuse_colour 0 param_named_auto LMs light_specular_colour 0 param_named shininess float 10 param_named NormalMap int 0 param_named GlossMap int 1 } // Normal map texture_unit { texture %s_normal.DDS colour_op replace } // Gloss map texture_unit { tex_coord_set 1 texture %s_spec.DDS } } // Decal pass pass { fog_override true lighting off // Really basic vertex program // NB we don't use fixed function here because GL does not like // mixing fixed function and vertex programs, depth fighting can // be an issue vertex_program_ref Examples/AmbientOneVS { param_named_auto WorldViewProj viewproj_matrix param_named ambient float4 1 1 1 1 } scene_blend dest_colour zero texture_unit { 
texture %s.DDS } } } // fallback method for machines without the ps_2_0 support technique { // Base ambient pass pass { fog_override true // base colours, not needed for rendering, but as information // to lighting pass categorisation routine ambient 1 1 1 diffuse 0 0 0 specular 0 0 0 0 texture_unit { texture %s.DDS } } } } """ def write_material(f, model_name, material_suffix, texture_prefix): script = material_script % (model_name, material_suffix, texture_prefix, texture_prefix, texture_prefix, texture_prefix) f.write(script) def write_material_script(model_name, texture_prefix_map): f = open(model_name + ".material", 'w') for material_suffix in texture_prefix_map.keys(): write_material(f, model_name, material_suffix, texture_prefix_map[material_suffix]) f.close() for model_entry in model_info: model_name = model_entry[0] model_materials = model_entry[1] model_textures = model_entry[2] texture_prefix_map = {} for i in range(len(model_materials)): texture_prefix_map[model_materials[i]] = model_textures[i] write_material_script(model_name, texture_prefix_map)
# # The Multiverse Platform is made available under the MIT License. # # Copyright (c) 2012 The Multiverse Foundation # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation # files (the "Software"), to deal in the Software without restriction, # including without limitation the rights to use, copy, modify, # merge, publish, distribute, sublicense, and/or sell copies # of the Software, and to permit persons to whom the Software # is furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES # OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE # OR OTHER DEALINGS IN THE SOFTWARE. 
# # #!/usr/bin/python model_info = \ [[ "business01_m_highpoly", [ "body" ], [ "business01_m_60" ] ], [ "business02_m_highpoly", [ "body" ], [ "business02_m_35" ] ], [ "business03_m_highpoly", [ "body" ], [ "business03_m_35" ] ], [ "business04_m_highpoly", [ "body" ], [ "business04_m_35" ] ], [ "business05_m_highpoly", [ "body" ], [ "business05_m_25" ] ], [ "business06_m_highpoly", [ "body" ], [ "business06_m_25" ] ], [ "business07_m_highpoly", [ "body" ], [ "business07_m_25" ] ], [ "child01_m_highpoly", [ "body" ], [ "child01_m" ] ], [ "child02_m_highpoly", [ "body" ], [ "child02_m" ] ], [ "nude01_m_highpoly", [ "body", "hair_transparent" ], [ "nude01_m_25", "nude01_m_25_hair" ] ], [ "nude02_m_highpoly", [ "body" ], [ "nude_02_m_25" ] ], [ "nude03_m_highpoly", [ "body" ], [ "nude_03_m_25" ] ], [ "nude04_m_highpoly", [ "body" ], [ "nude04_m_25" ] ], [ "nude08_m_highpoly", [ "body" ], [ "nude08_m_25" ] ], [ "nude09_m_highpoly", [ "body" ], [ "nude09_m_25" ] ], [ "nude10_m_highpoly", [ "body" ], [ "nude_10_m_25" ] ], [ "soccerplayer01_m_highpoly", [ "body" ], [ "soccerplayer01_m" ] ], [ "sportive01_m_highpoly", [ "body" ], [ "sportive01_m_20" ] ], [ "sportive03_m_highpoly", [ "body" ], [ "sportive03_m_25" ] ], [ "sportive04_m_highpoly", [ "body" ], [ "sportive04_m_25" ] ], [ "sportive05_m_highpoly", [ "body" ], [ "sportive05_m_30" ] ], [ "sportive07_m_highpoly", [ "body" ], [ "sportive07_m_25" ] ], [ "sportive08_m_highpoly", [ "body" ], [ "sportive08_m_25" ] ], [ "sportive09_m_highpoly", [ "body" ], [ "sportive09_m_25" ] ], [ "sportive10_m_highpoly", [ "body" ], [ "sportive10_m_20" ] ], [ "sportive11_m_highpoly", [ "body" ], [ "sportive11_m_30" ] ], [ "business01_f_highpoly", [ "body", "hair_transparent" ], [ "business01_f_30", "business01_f_30_hair" ] ], [ "business02_f_highpoly", [ "body" ], [ "business01_f_50" ] ], [ "business03_f_highpoly", [ "body" ], [ "business03_f_25" ] ], [ "nude01_f_highpoly", [ "body" ], [ "nude01_f_20" ] ] ] material_script = """ material 
%s.%s { // The concept here is that we make one ambient pass which just // provides a bit of lightening, then we run the multiple light // passes (additive), then we run the texture pass (multiplicative) technique { // Base ambient pass pass { fog_override true // Really basic vertex program // NB we don't use fixed function here because GL does not like // mixing fixed function and vertex programs, depth fighting can // be an issue vertex_program_ref Examples/AmbientOneVS { // Models with a skeleton will have already had their vertices // transformed into world space by the software skinning. // Pass in the viewproj_matrix instead of worldviewproj_matrix param_named_auto WorldViewProj viewproj_matrix param_named_auto ambient ambient_light_colour } } // Now do the lighting pass // NB we don't do decal texture here because this is repeated per light pass { fog_override true // do this for each light iteration once_per_light scene_blend add // Vertex program reference vertex_program_ref Examples/DiffuseBumpVS { param_named_auto WorldViewProj viewproj_matrix param_named_auto WorldMatrix world_matrix param_named_auto LightPosition light_position_object_space 0 param_named_auto EyePosition camera_position_object_space } fragment_program_ref Examples/DiffuseBumpPS_20 { param_named_auto LMd light_diffuse_colour 0 param_named_auto LMs light_specular_colour 0 param_named shininess float 10 param_named NormalMap int 0 param_named GlossMap int 1 } // Normal map texture_unit { texture %s_normal.DDS colour_op replace } // Gloss map texture_unit { tex_coord_set 1 texture %s_spec.DDS } } // Decal pass pass { fog_override true lighting off // Really basic vertex program // NB we don't use fixed function here because GL does not like // mixing fixed function and vertex programs, depth fighting can // be an issue vertex_program_ref Examples/AmbientOneVS { param_named_auto WorldViewProj viewproj_matrix param_named ambient float4 1 1 1 1 } scene_blend dest_colour zero texture_unit { 
texture %s.DDS } } } // fallback method for machines without the ps_2_0 support technique { // Base ambient pass pass { fog_override true // base colours, not needed for rendering, but as information // to lighting pass categorisation routine ambient 1 1 1 diffuse 0 0 0 specular 0 0 0 0 texture_unit { texture %s.DDS } } } } """ def write_material(f, model_name, material_suffix, texture_prefix): script = material_script % (model_name, material_suffix, texture_prefix, texture_prefix, texture_prefix, texture_prefix) f.write(script) def write_material_script(model_name, texture_prefix_map): f = open(model_name + ".material", 'w') for material_suffix in texture_prefix_map.keys(): write_material(f, model_name, material_suffix, texture_prefix_map[material_suffix]) f.close() for model_entry in model_info: model_name = model_entry[0] model_materials = model_entry[1] model_textures = model_entry[2] texture_prefix_map = {} for i in range(len(model_materials)): texture_prefix_map[model_materials[i]] = model_textures[i] write_material_script(model_name, texture_prefix_map)
en
0.628072
# # The Multiverse Platform is made available under the MIT License. # # Copyright (c) 2012 The Multiverse Foundation # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation # files (the "Software"), to deal in the Software without restriction, # including without limitation the rights to use, copy, modify, # merge, publish, distribute, sublicense, and/or sell copies # of the Software, and to permit persons to whom the Software # is furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES # OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE # OR OTHER DEALINGS IN THE SOFTWARE. # # #!/usr/bin/python material %s.%s { // The concept here is that we make one ambient pass which just // provides a bit of lightening, then we run the multiple light // passes (additive), then we run the texture pass (multiplicative) technique { // Base ambient pass pass { fog_override true // Really basic vertex program // NB we don't use fixed function here because GL does not like // mixing fixed function and vertex programs, depth fighting can // be an issue vertex_program_ref Examples/AmbientOneVS { // Models with a skeleton will have already had their vertices // transformed into world space by the software skinning. 
// Pass in the viewproj_matrix instead of worldviewproj_matrix param_named_auto WorldViewProj viewproj_matrix param_named_auto ambient ambient_light_colour } } // Now do the lighting pass // NB we don't do decal texture here because this is repeated per light pass { fog_override true // do this for each light iteration once_per_light scene_blend add // Vertex program reference vertex_program_ref Examples/DiffuseBumpVS { param_named_auto WorldViewProj viewproj_matrix param_named_auto WorldMatrix world_matrix param_named_auto LightPosition light_position_object_space 0 param_named_auto EyePosition camera_position_object_space } fragment_program_ref Examples/DiffuseBumpPS_20 { param_named_auto LMd light_diffuse_colour 0 param_named_auto LMs light_specular_colour 0 param_named shininess float 10 param_named NormalMap int 0 param_named GlossMap int 1 } // Normal map texture_unit { texture %s_normal.DDS colour_op replace } // Gloss map texture_unit { tex_coord_set 1 texture %s_spec.DDS } } // Decal pass pass { fog_override true lighting off // Really basic vertex program // NB we don't use fixed function here because GL does not like // mixing fixed function and vertex programs, depth fighting can // be an issue vertex_program_ref Examples/AmbientOneVS { param_named_auto WorldViewProj viewproj_matrix param_named ambient float4 1 1 1 1 } scene_blend dest_colour zero texture_unit { texture %s.DDS } } } // fallback method for machines without the ps_2_0 support technique { // Base ambient pass pass { fog_override true // base colours, not needed for rendering, but as information // to lighting pass categorisation routine ambient 1 1 1 diffuse 0 0 0 specular 0 0 0 0 texture_unit { texture %s.DDS } } } }
1.455451
1
electrum/lntransport.py
git-sgmoore/electrum
1
6624643
# Copyright (C) 2018 <NAME> (waxwing) # Copyright (C) 2018 The Electrum developers # Distributed under the MIT software license, see the accompanying # file LICENCE or http://www.opensource.org/licenses/mit-license.php # Derived from https://gist.github.com/AdamISZ/046d05c156aaeb56cc897f85eecb3eb8 import hashlib import asyncio from asyncio import StreamReader, StreamWriter from typing import Optional from .crypto import sha256, hmac_oneshot, chacha20_poly1305_encrypt, chacha20_poly1305_decrypt from .lnutil import (get_ecdh, privkey_to_pubkey, LightningPeerConnectionClosed, HandshakeFailed, LNPeerAddr) from . import ecc from .util import bh2u, MySocksProxy class HandshakeState(object): prologue = b"lightning" protocol_name = b"Noise_XK_secp256k1_ChaChaPoly_SHA256" handshake_version = b"\x00" def __init__(self, responder_pub): self.responder_pub = responder_pub self.h = sha256(self.protocol_name) self.ck = self.h self.update(self.prologue) self.update(self.responder_pub) def update(self, data): self.h = sha256(self.h + data) return self.h def get_nonce_bytes(n): """BOLT 8 requires the nonce to be 12 bytes, 4 bytes leading zeroes and 8 bytes little endian encoded 64 bit integer. """ return b"\x00"*4 + n.to_bytes(8, 'little') def aead_encrypt(key: bytes, nonce: int, associated_data: bytes, data: bytes) -> bytes: nonce_bytes = get_nonce_bytes(nonce) return chacha20_poly1305_encrypt(key=key, nonce=nonce_bytes, associated_data=associated_data, data=data) def aead_decrypt(key: bytes, nonce: int, associated_data: bytes, data: bytes) -> bytes: nonce_bytes = get_nonce_bytes(nonce) return chacha20_poly1305_decrypt(key=key, nonce=nonce_bytes, associated_data=associated_data, data=data) def get_bolt8_hkdf(salt, ikm): """RFC5869 HKDF instantiated in the specific form used in Lightning BOLT 8: Extract and expand to 64 bytes using HMAC-SHA256, with info field set to a zero length string as per BOLT8 Return as two 32 byte fields. 
""" #Extract prk = hmac_oneshot(salt, msg=ikm, digest=hashlib.sha256) assert len(prk) == 32 #Expand info = b"" T0 = b"" T1 = hmac_oneshot(prk, T0 + info + b"\x01", digest=hashlib.sha256) T2 = hmac_oneshot(prk, T1 + info + b"\x02", digest=hashlib.sha256) assert len(T1 + T2) == 64 return T1, T2 def act1_initiator_message(hs, epriv, epub): ss = get_ecdh(epriv, hs.responder_pub) ck2, temp_k1 = get_bolt8_hkdf(hs.ck, ss) hs.ck = ck2 c = aead_encrypt(temp_k1, 0, hs.update(epub), b"") #for next step if we do it hs.update(c) msg = hs.handshake_version + epub + c assert len(msg) == 50 return msg, temp_k1 def create_ephemeral_key() -> (bytes, bytes): privkey = ecc.ECPrivkey.generate_random_key() return privkey.get_secret_bytes(), privkey.get_public_key_bytes() class LNTransportBase: reader: StreamReader writer: StreamWriter privkey: bytes def name(self) -> str: raise NotImplementedError() def send_bytes(self, msg: bytes) -> None: l = len(msg).to_bytes(2, 'big') lc = aead_encrypt(self.sk, self.sn(), b'', l) c = aead_encrypt(self.sk, self.sn(), b'', msg) assert len(lc) == 18 assert len(c) == len(msg) + 16 self.writer.write(lc+c) async def read_messages(self): buffer = bytearray() while True: rn_l, rk_l = self.rn() rn_m, rk_m = self.rn() while True: if len(buffer) >= 18: lc = bytes(buffer[:18]) l = aead_decrypt(rk_l, rn_l, b'', lc) length = int.from_bytes(l, 'big') offset = 18 + length + 16 if len(buffer) >= offset: c = bytes(buffer[18:offset]) del buffer[:offset] # much faster than: buffer=buffer[offset:] msg = aead_decrypt(rk_m, rn_m, b'', c) yield msg break try: s = await self.reader.read(2**10) except Exception: s = None if not s: raise LightningPeerConnectionClosed() buffer += s def rn(self): o = self._rn, self.rk self._rn += 1 if self._rn == 1000: self.r_ck, self.rk = get_bolt8_hkdf(self.r_ck, self.rk) self._rn = 0 return o def sn(self): o = self._sn self._sn += 1 if self._sn == 1000: self.s_ck, self.sk = get_bolt8_hkdf(self.s_ck, self.sk) self._sn = 0 return o def 
init_counters(self, ck): # init counters self._sn = 0 self._rn = 0 self.r_ck = ck self.s_ck = ck def close(self): self.writer.close() def remote_pubkey(self) -> Optional[bytes]: raise NotImplementedError() class LNResponderTransport(LNTransportBase): """Transport initiated by remote party.""" def __init__(self, privkey: bytes, reader: StreamReader, writer: StreamWriter): LNTransportBase.__init__(self) self.reader = reader self.writer = writer self.privkey = privkey self._pubkey = None # remote pubkey def name(self): pubkey = self.remote_pubkey() pubkey_hex = pubkey.hex() if pubkey else pubkey return f"{pubkey_hex}(in)" async def handshake(self, **kwargs): hs = HandshakeState(privkey_to_pubkey(self.privkey)) act1 = b'' while len(act1) < 50: buf = await self.reader.read(50 - len(act1)) if not buf: raise HandshakeFailed('responder disconnected') act1 += buf if len(act1) != 50: raise HandshakeFailed('responder: short act 1 read, length is ' + str(len(act1))) if bytes([act1[0]]) != HandshakeState.handshake_version: raise HandshakeFailed('responder: bad handshake version in act 1') c = act1[-16:] re = act1[1:34] h = hs.update(re) ss = get_ecdh(self.privkey, re) ck, temp_k1 = get_bolt8_hkdf(sha256(HandshakeState.protocol_name), ss) _p = aead_decrypt(temp_k1, 0, h, c) hs.update(c) # act 2 if 'epriv' not in kwargs: epriv, epub = create_ephemeral_key() else: epriv = kwargs['epriv'] epub = ecc.ECPrivkey(epriv).get_public_key_bytes() hs.ck = ck hs.responder_pub = re msg, temp_k2 = act1_initiator_message(hs, epriv, epub) self.writer.write(msg) # act 3 act3 = b'' while len(act3) < 66: buf = await self.reader.read(66 - len(act3)) if not buf: raise HandshakeFailed('responder disconnected') act3 += buf if len(act3) != 66: raise HandshakeFailed('responder: short act 3 read, length is ' + str(len(act3))) if bytes([act3[0]]) != HandshakeState.handshake_version: raise HandshakeFailed('responder: bad handshake version in act 3') c = act3[1:50] t = act3[-16:] rs = aead_decrypt(temp_k2, 
1, hs.h, c) ss = get_ecdh(epriv, rs) ck, temp_k3 = get_bolt8_hkdf(hs.ck, ss) _p = aead_decrypt(temp_k3, 0, hs.update(c), t) self.rk, self.sk = get_bolt8_hkdf(ck, b'') self.init_counters(ck) self._pubkey = rs return rs def remote_pubkey(self) -> Optional[bytes]: return self._pubkey class LNTransport(LNTransportBase): """Transport initiated by local party.""" def __init__(self, privkey: bytes, peer_addr: LNPeerAddr, *, proxy: Optional[dict]): LNTransportBase.__init__(self) assert type(privkey) is bytes and len(privkey) == 32 self.privkey = privkey self.peer_addr = peer_addr self.proxy = MySocksProxy.from_proxy_dict(proxy) def name(self): return self.peer_addr.net_addr_str() async def handshake(self): if not self.proxy: self.reader, self.writer = await asyncio.open_connection(self.peer_addr.host, self.peer_addr.port) else: self.reader, self.writer = await self.proxy.open_connection(self.peer_addr.host, self.peer_addr.port) hs = HandshakeState(self.peer_addr.pubkey) # Get a new ephemeral key epriv, epub = create_ephemeral_key() msg, _temp_k1 = act1_initiator_message(hs, epriv, epub) # act 1 self.writer.write(msg) rspns = await self.reader.read(2**10) if len(rspns) != 50: raise HandshakeFailed(f"Lightning handshake act 1 response has bad length, " f"are you sure this is the right pubkey? 
{self.peer_addr}") hver, alice_epub, tag = rspns[0], rspns[1:34], rspns[34:] if bytes([hver]) != hs.handshake_version: raise HandshakeFailed("unexpected handshake version: {}".format(hver)) # act 2 hs.update(alice_epub) ss = get_ecdh(epriv, alice_epub) ck, temp_k2 = get_bolt8_hkdf(hs.ck, ss) hs.ck = ck p = aead_decrypt(temp_k2, 0, hs.h, tag) hs.update(tag) # act 3 my_pubkey = privkey_to_pubkey(self.privkey) c = aead_encrypt(temp_k2, 1, hs.h, my_pubkey) hs.update(c) ss = get_ecdh(self.privkey[:32], alice_epub) ck, temp_k3 = get_bolt8_hkdf(hs.ck, ss) hs.ck = ck t = aead_encrypt(temp_k3, 0, hs.h, b'') msg = hs.handshake_version + c + t self.writer.write(msg) self.sk, self.rk = get_bolt8_hkdf(hs.ck, b'') self.init_counters(ck) def remote_pubkey(self) -> Optional[bytes]: return self.peer_addr.pubkey
# Copyright (C) 2018 <NAME> (waxwing) # Copyright (C) 2018 The Electrum developers # Distributed under the MIT software license, see the accompanying # file LICENCE or http://www.opensource.org/licenses/mit-license.php # Derived from https://gist.github.com/AdamISZ/046d05c156aaeb56cc897f85eecb3eb8 import hashlib import asyncio from asyncio import StreamReader, StreamWriter from typing import Optional from .crypto import sha256, hmac_oneshot, chacha20_poly1305_encrypt, chacha20_poly1305_decrypt from .lnutil import (get_ecdh, privkey_to_pubkey, LightningPeerConnectionClosed, HandshakeFailed, LNPeerAddr) from . import ecc from .util import bh2u, MySocksProxy class HandshakeState(object): prologue = b"lightning" protocol_name = b"Noise_XK_secp256k1_ChaChaPoly_SHA256" handshake_version = b"\x00" def __init__(self, responder_pub): self.responder_pub = responder_pub self.h = sha256(self.protocol_name) self.ck = self.h self.update(self.prologue) self.update(self.responder_pub) def update(self, data): self.h = sha256(self.h + data) return self.h def get_nonce_bytes(n): """BOLT 8 requires the nonce to be 12 bytes, 4 bytes leading zeroes and 8 bytes little endian encoded 64 bit integer. """ return b"\x00"*4 + n.to_bytes(8, 'little') def aead_encrypt(key: bytes, nonce: int, associated_data: bytes, data: bytes) -> bytes: nonce_bytes = get_nonce_bytes(nonce) return chacha20_poly1305_encrypt(key=key, nonce=nonce_bytes, associated_data=associated_data, data=data) def aead_decrypt(key: bytes, nonce: int, associated_data: bytes, data: bytes) -> bytes: nonce_bytes = get_nonce_bytes(nonce) return chacha20_poly1305_decrypt(key=key, nonce=nonce_bytes, associated_data=associated_data, data=data) def get_bolt8_hkdf(salt, ikm): """RFC5869 HKDF instantiated in the specific form used in Lightning BOLT 8: Extract and expand to 64 bytes using HMAC-SHA256, with info field set to a zero length string as per BOLT8 Return as two 32 byte fields. 
""" #Extract prk = hmac_oneshot(salt, msg=ikm, digest=hashlib.sha256) assert len(prk) == 32 #Expand info = b"" T0 = b"" T1 = hmac_oneshot(prk, T0 + info + b"\x01", digest=hashlib.sha256) T2 = hmac_oneshot(prk, T1 + info + b"\x02", digest=hashlib.sha256) assert len(T1 + T2) == 64 return T1, T2 def act1_initiator_message(hs, epriv, epub): ss = get_ecdh(epriv, hs.responder_pub) ck2, temp_k1 = get_bolt8_hkdf(hs.ck, ss) hs.ck = ck2 c = aead_encrypt(temp_k1, 0, hs.update(epub), b"") #for next step if we do it hs.update(c) msg = hs.handshake_version + epub + c assert len(msg) == 50 return msg, temp_k1 def create_ephemeral_key() -> (bytes, bytes): privkey = ecc.ECPrivkey.generate_random_key() return privkey.get_secret_bytes(), privkey.get_public_key_bytes() class LNTransportBase: reader: StreamReader writer: StreamWriter privkey: bytes def name(self) -> str: raise NotImplementedError() def send_bytes(self, msg: bytes) -> None: l = len(msg).to_bytes(2, 'big') lc = aead_encrypt(self.sk, self.sn(), b'', l) c = aead_encrypt(self.sk, self.sn(), b'', msg) assert len(lc) == 18 assert len(c) == len(msg) + 16 self.writer.write(lc+c) async def read_messages(self): buffer = bytearray() while True: rn_l, rk_l = self.rn() rn_m, rk_m = self.rn() while True: if len(buffer) >= 18: lc = bytes(buffer[:18]) l = aead_decrypt(rk_l, rn_l, b'', lc) length = int.from_bytes(l, 'big') offset = 18 + length + 16 if len(buffer) >= offset: c = bytes(buffer[18:offset]) del buffer[:offset] # much faster than: buffer=buffer[offset:] msg = aead_decrypt(rk_m, rn_m, b'', c) yield msg break try: s = await self.reader.read(2**10) except Exception: s = None if not s: raise LightningPeerConnectionClosed() buffer += s def rn(self): o = self._rn, self.rk self._rn += 1 if self._rn == 1000: self.r_ck, self.rk = get_bolt8_hkdf(self.r_ck, self.rk) self._rn = 0 return o def sn(self): o = self._sn self._sn += 1 if self._sn == 1000: self.s_ck, self.sk = get_bolt8_hkdf(self.s_ck, self.sk) self._sn = 0 return o def 
init_counters(self, ck): # init counters self._sn = 0 self._rn = 0 self.r_ck = ck self.s_ck = ck def close(self): self.writer.close() def remote_pubkey(self) -> Optional[bytes]: raise NotImplementedError() class LNResponderTransport(LNTransportBase): """Transport initiated by remote party.""" def __init__(self, privkey: bytes, reader: StreamReader, writer: StreamWriter): LNTransportBase.__init__(self) self.reader = reader self.writer = writer self.privkey = privkey self._pubkey = None # remote pubkey def name(self): pubkey = self.remote_pubkey() pubkey_hex = pubkey.hex() if pubkey else pubkey return f"{pubkey_hex}(in)" async def handshake(self, **kwargs): hs = HandshakeState(privkey_to_pubkey(self.privkey)) act1 = b'' while len(act1) < 50: buf = await self.reader.read(50 - len(act1)) if not buf: raise HandshakeFailed('responder disconnected') act1 += buf if len(act1) != 50: raise HandshakeFailed('responder: short act 1 read, length is ' + str(len(act1))) if bytes([act1[0]]) != HandshakeState.handshake_version: raise HandshakeFailed('responder: bad handshake version in act 1') c = act1[-16:] re = act1[1:34] h = hs.update(re) ss = get_ecdh(self.privkey, re) ck, temp_k1 = get_bolt8_hkdf(sha256(HandshakeState.protocol_name), ss) _p = aead_decrypt(temp_k1, 0, h, c) hs.update(c) # act 2 if 'epriv' not in kwargs: epriv, epub = create_ephemeral_key() else: epriv = kwargs['epriv'] epub = ecc.ECPrivkey(epriv).get_public_key_bytes() hs.ck = ck hs.responder_pub = re msg, temp_k2 = act1_initiator_message(hs, epriv, epub) self.writer.write(msg) # act 3 act3 = b'' while len(act3) < 66: buf = await self.reader.read(66 - len(act3)) if not buf: raise HandshakeFailed('responder disconnected') act3 += buf if len(act3) != 66: raise HandshakeFailed('responder: short act 3 read, length is ' + str(len(act3))) if bytes([act3[0]]) != HandshakeState.handshake_version: raise HandshakeFailed('responder: bad handshake version in act 3') c = act3[1:50] t = act3[-16:] rs = aead_decrypt(temp_k2, 
1, hs.h, c) ss = get_ecdh(epriv, rs) ck, temp_k3 = get_bolt8_hkdf(hs.ck, ss) _p = aead_decrypt(temp_k3, 0, hs.update(c), t) self.rk, self.sk = get_bolt8_hkdf(ck, b'') self.init_counters(ck) self._pubkey = rs return rs def remote_pubkey(self) -> Optional[bytes]: return self._pubkey class LNTransport(LNTransportBase): """Transport initiated by local party.""" def __init__(self, privkey: bytes, peer_addr: LNPeerAddr, *, proxy: Optional[dict]): LNTransportBase.__init__(self) assert type(privkey) is bytes and len(privkey) == 32 self.privkey = privkey self.peer_addr = peer_addr self.proxy = MySocksProxy.from_proxy_dict(proxy) def name(self): return self.peer_addr.net_addr_str() async def handshake(self): if not self.proxy: self.reader, self.writer = await asyncio.open_connection(self.peer_addr.host, self.peer_addr.port) else: self.reader, self.writer = await self.proxy.open_connection(self.peer_addr.host, self.peer_addr.port) hs = HandshakeState(self.peer_addr.pubkey) # Get a new ephemeral key epriv, epub = create_ephemeral_key() msg, _temp_k1 = act1_initiator_message(hs, epriv, epub) # act 1 self.writer.write(msg) rspns = await self.reader.read(2**10) if len(rspns) != 50: raise HandshakeFailed(f"Lightning handshake act 1 response has bad length, " f"are you sure this is the right pubkey? 
{self.peer_addr}") hver, alice_epub, tag = rspns[0], rspns[1:34], rspns[34:] if bytes([hver]) != hs.handshake_version: raise HandshakeFailed("unexpected handshake version: {}".format(hver)) # act 2 hs.update(alice_epub) ss = get_ecdh(epriv, alice_epub) ck, temp_k2 = get_bolt8_hkdf(hs.ck, ss) hs.ck = ck p = aead_decrypt(temp_k2, 0, hs.h, tag) hs.update(tag) # act 3 my_pubkey = privkey_to_pubkey(self.privkey) c = aead_encrypt(temp_k2, 1, hs.h, my_pubkey) hs.update(c) ss = get_ecdh(self.privkey[:32], alice_epub) ck, temp_k3 = get_bolt8_hkdf(hs.ck, ss) hs.ck = ck t = aead_encrypt(temp_k3, 0, hs.h, b'') msg = hs.handshake_version + c + t self.writer.write(msg) self.sk, self.rk = get_bolt8_hkdf(hs.ck, b'') self.init_counters(ck) def remote_pubkey(self) -> Optional[bytes]: return self.peer_addr.pubkey
en
0.82565
# Copyright (C) 2018 <NAME> (waxwing) # Copyright (C) 2018 The Electrum developers # Distributed under the MIT software license, see the accompanying # file LICENCE or http://www.opensource.org/licenses/mit-license.php # Derived from https://gist.github.com/AdamISZ/046d05c156aaeb56cc897f85eecb3eb8 BOLT 8 requires the nonce to be 12 bytes, 4 bytes leading zeroes and 8 bytes little endian encoded 64 bit integer. RFC5869 HKDF instantiated in the specific form used in Lightning BOLT 8: Extract and expand to 64 bytes using HMAC-SHA256, with info field set to a zero length string as per BOLT8 Return as two 32 byte fields. #Extract #Expand #for next step if we do it # much faster than: buffer=buffer[offset:] # init counters Transport initiated by remote party. # remote pubkey # act 2 # act 3 Transport initiated by local party. # Get a new ephemeral key # act 1 # act 2 # act 3
1.896893
2
Web/Python/vtkmodules/web/render_window_serializer.py
CapLek/VTK
0
6624644
<reponame>CapLek/VTK<gh_stars>0 from __future__ import absolute_import, division, print_function import json, time, re from vtk.vtkFiltersGeometry import vtkCompositeDataGeometryFilter from vtk.vtkCommonCore import vtkTypeUInt32Array from vtk.vtkWebCore import vtkWebApplication from vtk.numpy_interface import algorithms from vtk.numpy_interface import dataset_adapter as dsa import numpy as np from vtk.web import hashDataArray, getJSArrayType from vtk.web import buffer, base64Encode # ----------------------------------------------------------------------------- # Convenience class for caching data arrays, storing computed sha sums, keeping # track of valid actors, etc... # ----------------------------------------------------------------------------- class SynchronizationContext(): def __init__(self, debug=False): self.dataArrayCache = {} self.lastDependenciesMapping = {} self.ingoreLastDependencies = False self.debugSerializers = debug self.debugAll = debug def setIgnoreLastDependencies(self, force): self.ingoreLastDependencies = force def cacheDataArray(self, pMd5, data): self.dataArrayCache[pMd5] = data def getCachedDataArray(self, pMd5, binary = False): cacheObj = self.dataArrayCache[pMd5] array = cacheObj['array'] cacheTime = cacheObj['mTime'] if cacheTime != array.GetMTime(): if context.debugAll: print(' ***** ERROR: you asked for an old cache key! 
***** ') if array.GetDataType() == 12: # IdType need to be converted to Uint32 arraySize = array.GetNumberOfTuples() * array.GetNumberOfComponents() newArray = vtkTypeUInt32Array() newArray.SetNumberOfTuples(arraySize) for i in range(arraySize): newArray.SetValue(i, -1 if array.GetValue(i) < 0 else array.GetValue(i)) pBuffer = buffer(newArray) else: pBuffer = buffer(array) if binary: # Convert the vtkUnsignedCharArray into a bytes object, required by Autobahn websockets return pBuffer.tobytes() return base64Encode(pBuffer) def checkForArraysToRelease(self, timeWindow = 20): cutOffTime = time.time() - timeWindow shasToDelete = [] for sha in self.dataArrayCache: record = self.dataArrayCache[sha] array = record['array'] count = array.GetReferenceCount() if count == 1 and record['ts'] < cutOffTime: shasToDelete.append(sha) for sha in shasToDelete: del self.dataArrayCache[sha] def getLastDependencyList(self, idstr): lastDeps = [] if idstr in self.lastDependenciesMapping and not self.ingoreLastDependencies: lastDeps = self.lastDependenciesMapping[idstr] return lastDeps def setNewDependencyList(self, idstr, depList): self.lastDependenciesMapping[idstr] = depList def buildDependencyCallList(self, idstr, newList, addMethod, removeMethod): oldList = self.getLastDependencyList(idstr) calls = [] calls += [ [addMethod, [ wrapId(x) ]] for x in newList if x not in oldList ] calls += [ [removeMethod, [ wrapId(x) ]] for x in oldList if x not in newList ] self.setNewDependencyList(idstr, newList) return calls # ----------------------------------------------------------------------------- # Global variables # ----------------------------------------------------------------------------- SERIALIZERS = {} context = None # ----------------------------------------------------------------------------- # Global API # ----------------------------------------------------------------------------- def registerInstanceSerializer(name, method): global SERIALIZERS SERIALIZERS[name] = method # 
----------------------------------------------------------------------------- def serializeInstance(parent, instance, instanceId, context, depth): instanceType = instance.GetClassName() serializer = SERIALIZERS[instanceType] if instanceType in SERIALIZERS else None if serializer: return serializer(parent, instance, instanceId, context, depth) if context.debugSerializers: print('%s!!!No serializer for %s with id %s' % (pad(depth), instanceType, instanceId)) return None # ----------------------------------------------------------------------------- def initializeSerializers(): # Actors/viewProps registerInstanceSerializer('vtkOpenGLActor', genericActorSerializer) registerInstanceSerializer('vtkPVLODActor', genericActorSerializer) # Mappers registerInstanceSerializer('vtkOpenGLPolyDataMapper', genericMapperSerializer) registerInstanceSerializer('vtkCompositePolyDataMapper2', genericMapperSerializer) # LookupTables/TransferFunctions registerInstanceSerializer('vtkLookupTable', lookupTableSerializer) registerInstanceSerializer('vtkPVDiscretizableColorTransferFunction', colorTransferFunctionSerializer) # Property registerInstanceSerializer('vtkOpenGLProperty', propertySerializer) # Datasets registerInstanceSerializer('vtkPolyData', polydataSerializer) registerInstanceSerializer('vtkMultiBlockDataSet', mergeToPolydataSerializer) # RenderWindows registerInstanceSerializer('vtkCocoaRenderWindow', renderWindowSerializer) registerInstanceSerializer('vtkXOpenGLRenderWindow', renderWindowSerializer) registerInstanceSerializer('vtkWin32OpenGLRenderWindow', renderWindowSerializer) registerInstanceSerializer('vtkEGLRenderWindow', renderWindowSerializer) registerInstanceSerializer('vtkOpenVRRenderWindow', renderWindowSerializer) registerInstanceSerializer('vtkGenericOpenGLRenderWindow', renderWindowSerializer) registerInstanceSerializer('vtkOSOpenGLRenderWindow', renderWindowSerializer) registerInstanceSerializer('vtkOpenGLRenderWindow', renderWindowSerializer) 
registerInstanceSerializer('vtkIOSRenderWindow', renderWindowSerializer) registerInstanceSerializer('vtkExternalOpenGLRenderWindow', renderWindowSerializer) # Renderers registerInstanceSerializer('vtkOpenGLRenderer', rendererSerializer) # Cameras registerInstanceSerializer('vtkOpenGLCamera', cameraSerializer) # Lights registerInstanceSerializer('vtkPVLight', lightSerializer) registerInstanceSerializer('vtkOpenGLLight', lightSerializer) # ----------------------------------------------------------------------------- # Helper functions # ----------------------------------------------------------------------------- def pad(depth): padding = '' for d in range(depth): padding += ' ' return padding # ----------------------------------------------------------------------------- def wrapId(idStr): return 'instance:${%s}' % idStr # ----------------------------------------------------------------------------- def getReferenceId(ref): return vtkWebApplication.GetObjectId(ref) # ----------------------------------------------------------------------------- dataArrayShaMapping = {} def digest(array): objId = getReferenceId(array) record = None if objId in dataArrayShaMapping: record = dataArrayShaMapping[objId] if record and record['mtime'] == array.GetMTime(): return record['sha'] record = { 'sha': hashDataArray(array), 'mtime': array.GetMTime() } dataArrayShaMapping[objId] = record return record['sha'] # ----------------------------------------------------------------------------- def getRangeInfo(array, component): r = array.GetRange(component) compRange = {} compRange['min'] = r[0] compRange['max'] = r[1] componentName = array.GetComponentName(component) compRange['component'] = component if not componentName else componentName return compRange # ----------------------------------------------------------------------------- def getArrayDescription(array, context, dataset): if not array: return None if array.GetNumberOfComponents() == 9: adapter = dsa.WrapDataObject(dataset) 
name = array.GetName() npArray = adapter.GetPointData().GetArray(name) eigenvalues = algorithms.eigenvalue(npArray) merge = np.column_stack((npArray, eigenvalues[:, np.newaxis, :])) n = npArray.shape[0] array.SetNumberOfComponents(12) array.SetNumberOfTuples(n) for i in range(n): array.SetTypedTuple(i, merge[i].ravel()) pMd5 = digest(array) context.cacheDataArray(pMd5, { 'array': array, 'mTime': array.GetMTime(), 'ts': time.time() }) root = {} root['hash'] = pMd5 root['vtkClass'] = 'vtkDataArray' root['name'] = array.GetName() root['dataType'] = getJSArrayType(array) root['numberOfComponents'] = array.GetNumberOfComponents() root['size'] = array.GetNumberOfComponents() * array.GetNumberOfTuples() root['ranges'] = [] if root['numberOfComponents'] > 1: for i in range(root['numberOfComponents']): root['ranges'].append(getRangeInfo(array, i)) root['ranges'].append(getRangeInfo(array, -1)) else: root['ranges'].append(getRangeInfo(array, 0)) return root # ----------------------------------------------------------------------------- def extractRequiredFields(extractedFields, mapper, dataset, context, requestedFields=['Normals', 'TCoords']): # FIXME should evolve and support funky mapper which leverage many arrays if mapper.IsA('vtkMapper'): scalarVisibility = mapper.GetScalarVisibility() arrayAccessMode = mapper.GetArrayAccessMode() colorMode = mapper.GetColorMode() scalarMode = mapper.GetScalarMode() pd = dataset.GetPointData() for index in range(pd.GetNumberOfArrays()): colorArrayName = pd.GetArrayName(index) if scalarVisibility and scalarMode == 3: arrayMeta = getArrayDescription(pd.GetArray(colorArrayName), context, dataset) if arrayMeta: arrayMeta['location'] = 'pointData' extractedFields.append(arrayMeta) if scalarVisibility and scalarMode == 4: arrayMeta = getArrayDescription(pd.GetArray(colorArrayName), context, dataset) if arrayMeta: arrayMeta['location'] = 'cellData' extractedFields.append(arrayMeta) # Normal handling if 'Normals' in requestedFields: normals = 
dataset.GetPointData().GetNormals() if normals: arrayMeta = getArrayDescription(normals, context, dataset) if arrayMeta: arrayMeta['location'] = 'pointData' arrayMeta['registration'] = 'setNormals' extractedFields.append(arrayMeta) # TCoord handling if 'TCoords' in requestedFields: tcoords = dataset.GetPointData().GetTCoords() if tcoords: arrayMeta = getArrayDescription(tcoords, context, dataset) if arrayMeta: arrayMeta['location'] = 'pointData' arrayMeta['registration'] = 'setTCoords' extractedFields.append(arrayMeta) # ----------------------------------------------------------------------------- # Concrete instance serializers # ----------------------------------------------------------------------------- def genericActorSerializer(parent, actor, actorId, context, depth): # This kind of actor has two "children" of interest, a property and a mapper actorVisibility = actor.GetVisibility() mapperInstance = None propertyInstance = None calls = [] dependencies = [] if actorVisibility: mapper = None if not hasattr(actor, 'GetMapper'): if context.debugAll: print('This actor does not have a GetMapper method') else: mapper = actor.GetMapper() if mapper: mapperId = getReferenceId(mapper) mapperInstance = serializeInstance(actor, mapper, mapperId, context, depth + 1) if mapperInstance: dependencies.append(mapperInstance) calls.append(['setMapper', [ wrapId(mapperId) ]]) prop = None if hasattr(actor, 'GetProperty'): prop = actor.GetProperty() else: if context.debugAll: print('This actor does not have a GetProperty method') if prop: propId = getReferenceId(prop) propertyInstance = serializeInstance(actor, prop, propId, context, depth + 1) if propertyInstance: dependencies.append(propertyInstance) calls.append(['setProperty', [ wrapId(propId) ]]) if actorVisibility == 0 or (mapperInstance and propertyInstance): return { 'parent': getReferenceId(parent), 'id': actorId, 'type': actor.GetClassName(), 'properties': { # vtkProp 'visibility': actorVisibility, 'pickable': 
actor.GetPickable(), 'dragable': actor.GetDragable(), 'useBounds': actor.GetUseBounds(), # vtkProp3D 'origin': actor.GetOrigin(), 'position': actor.GetPosition(), 'scale': actor.GetScale(), # vtkActor 'forceOpaque': actor.GetForceOpaque(), 'forceTranslucent': actor.GetForceTranslucent() }, 'calls': calls, 'dependencies': dependencies } return None # ----------------------------------------------------------------------------- def genericMapperSerializer(parent, mapper, mapperId, context, depth): # This kind of mapper requires us to get 2 items: input data and lookup table dataObject = None dataObjectInstance = None lookupTableInstance = None calls = [] dependencies = [] if hasattr(mapper, 'GetInputDataObject'): dataObject = mapper.GetInputDataObject(0, 0) else: if context.debugAll: print('This mapper does not have GetInputDataObject method') if dataObject: dataObjectId = '%s-dataset' % mapperId dataObjectInstance = serializeInstance(mapper, dataObject, dataObjectId, context, depth + 1) if dataObjectInstance: dependencies.append(dataObjectInstance) calls.append(['setInputData', [ wrapId(dataObjectId) ]]) lookupTable = None if hasattr(mapper, 'GetLookupTable'): lookupTable = mapper.GetLookupTable() else: if context.debugAll: print('This mapper does not have GetLookupTable method') if lookupTable: lookupTableId = getReferenceId(lookupTable) lookupTableInstance = serializeInstance(mapper, lookupTable, lookupTableId, context, depth + 1) if lookupTableInstance: dependencies.append(lookupTableInstance) calls.append(['setLookupTable', [ wrapId(lookupTableId) ]]) if dataObjectInstance and lookupTableInstance: colorArrayName = mapper.GetArrayName() if mapper.GetArrayAccessMode() == 1 else mapper.GetArrayId() return { 'parent': getReferenceId(parent), 'id': mapperId, 'type': mapper.GetClassName(), 'properties': { 'scalarRange': mapper.GetScalarRange(), 'useLookupTableScalarRange': True if mapper.GetUseLookupTableScalarRange() else False, 'scalarVisibility': 
mapper.GetScalarVisibility(), 'colorByArrayName': colorArrayName, 'colorMode': mapper.GetColorMode(), 'scalarMode': mapper.GetScalarMode(), 'interpolateScalarsBeforeMapping': True if mapper.GetInterpolateScalarsBeforeMapping() else False }, 'calls': calls, 'dependencies': dependencies } return None # ----------------------------------------------------------------------------- def lookupTableSerializer(parent, lookupTable, lookupTableId, context, depth): # No children in this case, so no additions to bindings and return empty list # But we do need to add instance lookupTableRange = lookupTable.GetRange() lookupTableHueRange = [0.5, 0] if hasattr(lookupTable, 'GetHueRange'): try: lookupTable.GetHueRange(lookupTableHueRange) except Exception as inst: pass lutSatRange = lookupTable.GetSaturationRange() lutAlphaRange = lookupTable.GetAlphaRange() return { 'parent': getReferenceId(parent), 'id': lookupTableId, 'type': lookupTable.GetClassName(), 'properties': { 'numberOfColors': lookupTable.GetNumberOfColors(), 'valueRange': lookupTableRange, 'hueRange': lookupTableHueRange, # 'alphaRange': lutAlphaRange, # Causes weird rendering artifacts on client 'saturationRange': lutSatRange, 'nanColor': lookupTable.GetNanColor(), 'belowRangeColor': lookupTable.GetBelowRangeColor(), 'aboveRangeColor': lookupTable.GetAboveRangeColor(), 'useAboveRangeColor': True if lookupTable.GetUseAboveRangeColor() else False, 'useBelowRangeColor': True if lookupTable.GetUseBelowRangeColor() else False, 'alpha': lookupTable.GetAlpha(), 'vectorSize': lookupTable.GetVectorSize(), 'vectorComponent': lookupTable.GetVectorComponent(), 'vectorMode': lookupTable.GetVectorMode(), 'indexedLookup': lookupTable.GetIndexedLookup() } } # ----------------------------------------------------------------------------- def propertySerializer(parent, propObj, propObjId, context, depth): representation = propObj.GetRepresentation() if hasattr(propObj, 'GetRepresentation') else 2 colorToUse = propObj.GetDiffuseColor() 
if hasattr(propObj, 'GetDiffuseColor') else [1, 1, 1] if representation == 1 and hasattr(propObj, 'GetColor'): colorToUse = propObj.GetColor() return { 'parent': getReferenceId(parent), 'id': propObjId, 'type': propObj.GetClassName(), 'properties': { 'representation': representation, 'diffuseColor': colorToUse, 'color': propObj.GetColor(), 'ambientColor': propObj.GetAmbientColor(), 'specularColor': propObj.GetSpecularColor(), 'edgeColor': propObj.GetEdgeColor(), 'ambient': propObj.GetAmbient(), 'diffuse': propObj.GetDiffuse(), 'specular': propObj.GetSpecular(), 'specularPower': propObj.GetSpecularPower(), 'opacity': propObj.GetOpacity(), 'interpolation': propObj.GetInterpolation(), 'edgeVisibility': True if propObj.GetEdgeVisibility() else False, 'backfaceCulling': True if propObj.GetBackfaceCulling() else False, 'frontfaceCulling': True if propObj.GetFrontfaceCulling() else False, 'pointSize': propObj.GetPointSize(), 'lineWidth': propObj.GetLineWidth(), 'lighting': propObj.GetLighting() } } # ----------------------------------------------------------------------------- def polydataSerializer(parent, dataset, datasetId, context, depth): datasetType = dataset.GetClassName() if dataset and dataset.GetPoints(): properties = {} # Points points = getArrayDescription(dataset.GetPoints().GetData(), context, dataset) points['vtkClass'] = 'vtkPoints' properties['points'] = points ## Verts if dataset.GetVerts() and dataset.GetVerts().GetData().GetNumberOfTuples() > 0: _verts = getArrayDescription(dataset.GetVerts().GetData(), context, dataset) properties['verts'] = _verts properties['verts']['vtkClass'] = 'vtkCellArray' ## Lines if dataset.GetLines() and dataset.GetLines().GetData().GetNumberOfTuples() > 0: _lines = getArrayDescription(dataset.GetLines().GetData(), context, dataset) properties['lines'] = _lines properties['lines']['vtkClass'] = 'vtkCellArray' ## Polys if dataset.GetPolys() and dataset.GetPolys().GetData().GetNumberOfTuples() > 0: _polys = 
getArrayDescription(dataset.GetPolys().GetData(), context, dataset) properties['polys'] = _polys properties['polys']['vtkClass'] = 'vtkCellArray' ## Strips if dataset.GetStrips() and dataset.GetStrips().GetData().GetNumberOfTuples() > 0: _strips = getArrayDescription(dataset.GetStrips().GetData(), context, dataset) properties['strips'] = _strips properties['strips']['vtkClass'] = 'vtkCellArray' ## Fields properties['fields'] = [] extractRequiredFields(properties['fields'], parent, dataset, context) return { 'parent': getReferenceId(parent), 'id': datasetId, 'type': datasetType, 'properties': properties } if context.debugAll: print('This dataset has no points!') return None # ----------------------------------------------------------------------------- def mergeToPolydataSerializer(parent, dataObject, dataObjectId, context, depth): dataset = None if dataObject.IsA('vtkCompositeDataSet'): dataMTime = dataObject.GetMTime() gf = vtkCompositeDataGeometryFilter() gf.SetInputData(dataObject) gf.Update() tempDS = gf.GetOutput() dataset = tempDS else: dataset = mapper.GetInput() return polydataSerializer(parent, dataset, dataObjectId, context, depth) # ----------------------------------------------------------------------------- def colorTransferFunctionSerializer(parent, instance, objId, context, depth): nodes = [] for i in range(instance.GetSize()): # x, r, g, b, midpoint, sharpness node = [0, 0, 0, 0, 0, 0] instance.GetNodeValue(i, node) nodes.append(node) return { 'parent': getReferenceId(parent), 'id': objId, 'type': instance.GetClassName(), 'properties': { 'clamping': True if instance.GetClamping() else False, 'colorSpace': instance.GetColorSpace(), 'hSVWrap': True if instance.GetHSVWrap() else False, # 'nanColor': instance.GetNanColor(), # Breaks client # 'belowRangeColor': instance.GetBelowRangeColor(), # Breaks client # 'aboveRangeColor': instance.GetAboveRangeColor(), # Breaks client # 'useAboveRangeColor': True if instance.GetUseAboveRangeColor() else False, # 
'useBelowRangeColor': True if instance.GetUseBelowRangeColor() else False, 'allowDuplicateScalars': True if instance.GetAllowDuplicateScalars() else False, 'alpha': instance.GetAlpha(), 'vectorComponent': instance.GetVectorComponent(), 'vectorSize': instance.GetVectorSize(), 'vectorMode': instance.GetVectorMode(), 'indexedLookup': instance.GetIndexedLookup(), 'nodes': nodes } } # ----------------------------------------------------------------------------- def rendererSerializer(parent, instance, objId, context, depth): dependencies = [] viewPropIds = [] lightsIds = [] calls = [] # Camera camera = instance.GetActiveCamera() cameraId = getReferenceId(camera) cameraBindings = [] cameraInstance = serializeInstance(instance, camera, cameraId, context, depth + 1) if cameraInstance: dependencies.append(cameraInstance) calls.append(['setActiveCamera', [ wrapId(cameraId) ]]) # View prop as representation containers viewPropCollection = instance.GetViewProps() for rpIdx in range(viewPropCollection.GetNumberOfItems()): viewProp = viewPropCollection.GetItemAsObject(rpIdx) viewPropId = getReferenceId(viewProp) viewPropInstance = serializeInstance(instance, viewProp, viewPropId, context, depth + 1) if viewPropInstance: dependencies.append(viewPropInstance) viewPropIds.append(viewPropId) calls += context.buildDependencyCallList('%s-props' % objId, viewPropIds, 'addViewProp', 'removeViewProp') # Lights lightCollection = instance.GetLights() for lightIdx in range(lightCollection.GetNumberOfItems()): light = lightCollection.GetItemAsObject(lightIdx) lightId = getReferenceId(light) lightInstance = serializeInstance(instance, light, lightId, context, depth + 1) if lightInstance: dependencies.append(lightInstance) lightsIds.append(lightId) calls += context.buildDependencyCallList('%s-lights' % objId, lightsIds, 'addLight', 'removeLight') if len(dependencies) > 1: return { 'parent': getReferenceId(parent), 'id': objId, 'type': instance.GetClassName(), 'properties': { 'background': 
instance.GetBackground(), 'background2': instance.GetBackground2(), 'viewport': instance.GetViewport(), ### These commented properties do not yet have real setters in vtk.js # 'gradientBackground': instance.GetGradientBackground(), # 'aspect': instance.GetAspect(), # 'pixelAspect': instance.GetPixelAspect(), # 'ambient': instance.GetAmbient(), 'twoSidedLighting': instance.GetTwoSidedLighting(), 'lightFollowCamera': instance.GetLightFollowCamera(), 'layer': instance.GetLayer(), 'preserveColorBuffer': instance.GetPreserveColorBuffer(), 'preserveDepthBuffer': instance.GetPreserveDepthBuffer(), 'nearClippingPlaneTolerance': instance.GetNearClippingPlaneTolerance(), 'clippingRangeExpansion': instance.GetClippingRangeExpansion(), 'useShadows': instance.GetUseShadows(), 'useDepthPeeling': instance.GetUseDepthPeeling(), 'occlusionRatio': instance.GetOcclusionRatio(), 'maximumNumberOfPeels': instance.GetMaximumNumberOfPeels() }, 'dependencies': dependencies, 'calls': calls } return None # ----------------------------------------------------------------------------- def cameraSerializer(parent, instance, objId, context, depth): return { 'parent': getReferenceId(parent), 'id': objId, 'type': instance.GetClassName(), 'properties': { 'focalPoint': instance.GetFocalPoint(), 'position': instance.GetPosition(), 'viewUp': instance.GetViewUp(), } } # ----------------------------------------------------------------------------- def lightTypeToString(value): """ #define VTK_LIGHT_TYPE_HEADLIGHT 1 #define VTK_LIGHT_TYPE_CAMERA_LIGHT 2 #define VTK_LIGHT_TYPE_SCENE_LIGHT 3 'HeadLight'; 'SceneLight'; 'CameraLight' """ if value == 1: return 'HeadLight' elif value == 2: return 'CameraLight' return 'SceneLight' def lightSerializer(parent, instance, objId, context, depth): return { 'parent': getReferenceId(parent), 'id': objId, 'type': instance.GetClassName(), 'properties': { # 'specularColor': instance.GetSpecularColor(), # 'ambientColor': instance.GetAmbientColor(), 'switch': 
instance.GetSwitch(), 'intensity': instance.GetIntensity(), 'color': instance.GetDiffuseColor(), 'position': instance.GetPosition(), 'focalPoint': instance.GetFocalPoint(), 'positional': instance.GetPositional(), 'exponent': instance.GetExponent(), 'coneAngle': instance.GetConeAngle(), 'attenuationValues': instance.GetAttenuationValues(), 'lightType': lightTypeToString(instance.GetLightType()), 'shadowAttenuation': instance.GetShadowAttenuation() } } # ----------------------------------------------------------------------------- def renderWindowSerializer(parent, instance, objId, context, depth): dependencies = [] rendererIds = [] rendererCollection = instance.GetRenderers() for rIdx in range(rendererCollection.GetNumberOfItems()): # Grab the next vtkRenderer renderer = rendererCollection.GetItemAsObject(rIdx) rendererId = getReferenceId(renderer) rendererInstance = serializeInstance(instance, renderer, rendererId, context, depth + 1) if rendererInstance: dependencies.append(rendererInstance) rendererIds.append(rendererId) calls = context.buildDependencyCallList(objId, rendererIds, 'addRenderer', 'removeRenderer') return { 'parent': getReferenceId(parent), 'id': objId, 'type': instance.GetClassName(), 'properties': { 'numberOfLayers': instance.GetNumberOfLayers() }, 'dependencies': dependencies, 'calls': calls }
from __future__ import absolute_import, division, print_function import json, time, re from vtk.vtkFiltersGeometry import vtkCompositeDataGeometryFilter from vtk.vtkCommonCore import vtkTypeUInt32Array from vtk.vtkWebCore import vtkWebApplication from vtk.numpy_interface import algorithms from vtk.numpy_interface import dataset_adapter as dsa import numpy as np from vtk.web import hashDataArray, getJSArrayType from vtk.web import buffer, base64Encode # ----------------------------------------------------------------------------- # Convenience class for caching data arrays, storing computed sha sums, keeping # track of valid actors, etc... # ----------------------------------------------------------------------------- class SynchronizationContext(): def __init__(self, debug=False): self.dataArrayCache = {} self.lastDependenciesMapping = {} self.ingoreLastDependencies = False self.debugSerializers = debug self.debugAll = debug def setIgnoreLastDependencies(self, force): self.ingoreLastDependencies = force def cacheDataArray(self, pMd5, data): self.dataArrayCache[pMd5] = data def getCachedDataArray(self, pMd5, binary = False): cacheObj = self.dataArrayCache[pMd5] array = cacheObj['array'] cacheTime = cacheObj['mTime'] if cacheTime != array.GetMTime(): if context.debugAll: print(' ***** ERROR: you asked for an old cache key! 
***** ') if array.GetDataType() == 12: # IdType need to be converted to Uint32 arraySize = array.GetNumberOfTuples() * array.GetNumberOfComponents() newArray = vtkTypeUInt32Array() newArray.SetNumberOfTuples(arraySize) for i in range(arraySize): newArray.SetValue(i, -1 if array.GetValue(i) < 0 else array.GetValue(i)) pBuffer = buffer(newArray) else: pBuffer = buffer(array) if binary: # Convert the vtkUnsignedCharArray into a bytes object, required by Autobahn websockets return pBuffer.tobytes() return base64Encode(pBuffer) def checkForArraysToRelease(self, timeWindow = 20): cutOffTime = time.time() - timeWindow shasToDelete = [] for sha in self.dataArrayCache: record = self.dataArrayCache[sha] array = record['array'] count = array.GetReferenceCount() if count == 1 and record['ts'] < cutOffTime: shasToDelete.append(sha) for sha in shasToDelete: del self.dataArrayCache[sha] def getLastDependencyList(self, idstr): lastDeps = [] if idstr in self.lastDependenciesMapping and not self.ingoreLastDependencies: lastDeps = self.lastDependenciesMapping[idstr] return lastDeps def setNewDependencyList(self, idstr, depList): self.lastDependenciesMapping[idstr] = depList def buildDependencyCallList(self, idstr, newList, addMethod, removeMethod): oldList = self.getLastDependencyList(idstr) calls = [] calls += [ [addMethod, [ wrapId(x) ]] for x in newList if x not in oldList ] calls += [ [removeMethod, [ wrapId(x) ]] for x in oldList if x not in newList ] self.setNewDependencyList(idstr, newList) return calls # ----------------------------------------------------------------------------- # Global variables # ----------------------------------------------------------------------------- SERIALIZERS = {} context = None # ----------------------------------------------------------------------------- # Global API # ----------------------------------------------------------------------------- def registerInstanceSerializer(name, method): global SERIALIZERS SERIALIZERS[name] = method # 
----------------------------------------------------------------------------- def serializeInstance(parent, instance, instanceId, context, depth): instanceType = instance.GetClassName() serializer = SERIALIZERS[instanceType] if instanceType in SERIALIZERS else None if serializer: return serializer(parent, instance, instanceId, context, depth) if context.debugSerializers: print('%s!!!No serializer for %s with id %s' % (pad(depth), instanceType, instanceId)) return None # ----------------------------------------------------------------------------- def initializeSerializers(): # Actors/viewProps registerInstanceSerializer('vtkOpenGLActor', genericActorSerializer) registerInstanceSerializer('vtkPVLODActor', genericActorSerializer) # Mappers registerInstanceSerializer('vtkOpenGLPolyDataMapper', genericMapperSerializer) registerInstanceSerializer('vtkCompositePolyDataMapper2', genericMapperSerializer) # LookupTables/TransferFunctions registerInstanceSerializer('vtkLookupTable', lookupTableSerializer) registerInstanceSerializer('vtkPVDiscretizableColorTransferFunction', colorTransferFunctionSerializer) # Property registerInstanceSerializer('vtkOpenGLProperty', propertySerializer) # Datasets registerInstanceSerializer('vtkPolyData', polydataSerializer) registerInstanceSerializer('vtkMultiBlockDataSet', mergeToPolydataSerializer) # RenderWindows registerInstanceSerializer('vtkCocoaRenderWindow', renderWindowSerializer) registerInstanceSerializer('vtkXOpenGLRenderWindow', renderWindowSerializer) registerInstanceSerializer('vtkWin32OpenGLRenderWindow', renderWindowSerializer) registerInstanceSerializer('vtkEGLRenderWindow', renderWindowSerializer) registerInstanceSerializer('vtkOpenVRRenderWindow', renderWindowSerializer) registerInstanceSerializer('vtkGenericOpenGLRenderWindow', renderWindowSerializer) registerInstanceSerializer('vtkOSOpenGLRenderWindow', renderWindowSerializer) registerInstanceSerializer('vtkOpenGLRenderWindow', renderWindowSerializer) 
registerInstanceSerializer('vtkIOSRenderWindow', renderWindowSerializer) registerInstanceSerializer('vtkExternalOpenGLRenderWindow', renderWindowSerializer) # Renderers registerInstanceSerializer('vtkOpenGLRenderer', rendererSerializer) # Cameras registerInstanceSerializer('vtkOpenGLCamera', cameraSerializer) # Lights registerInstanceSerializer('vtkPVLight', lightSerializer) registerInstanceSerializer('vtkOpenGLLight', lightSerializer) # ----------------------------------------------------------------------------- # Helper functions # ----------------------------------------------------------------------------- def pad(depth): padding = '' for d in range(depth): padding += ' ' return padding # ----------------------------------------------------------------------------- def wrapId(idStr): return 'instance:${%s}' % idStr # ----------------------------------------------------------------------------- def getReferenceId(ref): return vtkWebApplication.GetObjectId(ref) # ----------------------------------------------------------------------------- dataArrayShaMapping = {} def digest(array): objId = getReferenceId(array) record = None if objId in dataArrayShaMapping: record = dataArrayShaMapping[objId] if record and record['mtime'] == array.GetMTime(): return record['sha'] record = { 'sha': hashDataArray(array), 'mtime': array.GetMTime() } dataArrayShaMapping[objId] = record return record['sha'] # ----------------------------------------------------------------------------- def getRangeInfo(array, component): r = array.GetRange(component) compRange = {} compRange['min'] = r[0] compRange['max'] = r[1] componentName = array.GetComponentName(component) compRange['component'] = component if not componentName else componentName return compRange # ----------------------------------------------------------------------------- def getArrayDescription(array, context, dataset): if not array: return None if array.GetNumberOfComponents() == 9: adapter = dsa.WrapDataObject(dataset) 
name = array.GetName() npArray = adapter.GetPointData().GetArray(name) eigenvalues = algorithms.eigenvalue(npArray) merge = np.column_stack((npArray, eigenvalues[:, np.newaxis, :])) n = npArray.shape[0] array.SetNumberOfComponents(12) array.SetNumberOfTuples(n) for i in range(n): array.SetTypedTuple(i, merge[i].ravel()) pMd5 = digest(array) context.cacheDataArray(pMd5, { 'array': array, 'mTime': array.GetMTime(), 'ts': time.time() }) root = {} root['hash'] = pMd5 root['vtkClass'] = 'vtkDataArray' root['name'] = array.GetName() root['dataType'] = getJSArrayType(array) root['numberOfComponents'] = array.GetNumberOfComponents() root['size'] = array.GetNumberOfComponents() * array.GetNumberOfTuples() root['ranges'] = [] if root['numberOfComponents'] > 1: for i in range(root['numberOfComponents']): root['ranges'].append(getRangeInfo(array, i)) root['ranges'].append(getRangeInfo(array, -1)) else: root['ranges'].append(getRangeInfo(array, 0)) return root # ----------------------------------------------------------------------------- def extractRequiredFields(extractedFields, mapper, dataset, context, requestedFields=['Normals', 'TCoords']): # FIXME should evolve and support funky mapper which leverage many arrays if mapper.IsA('vtkMapper'): scalarVisibility = mapper.GetScalarVisibility() arrayAccessMode = mapper.GetArrayAccessMode() colorMode = mapper.GetColorMode() scalarMode = mapper.GetScalarMode() pd = dataset.GetPointData() for index in range(pd.GetNumberOfArrays()): colorArrayName = pd.GetArrayName(index) if scalarVisibility and scalarMode == 3: arrayMeta = getArrayDescription(pd.GetArray(colorArrayName), context, dataset) if arrayMeta: arrayMeta['location'] = 'pointData' extractedFields.append(arrayMeta) if scalarVisibility and scalarMode == 4: arrayMeta = getArrayDescription(pd.GetArray(colorArrayName), context, dataset) if arrayMeta: arrayMeta['location'] = 'cellData' extractedFields.append(arrayMeta) # Normal handling if 'Normals' in requestedFields: normals = 
dataset.GetPointData().GetNormals() if normals: arrayMeta = getArrayDescription(normals, context, dataset) if arrayMeta: arrayMeta['location'] = 'pointData' arrayMeta['registration'] = 'setNormals' extractedFields.append(arrayMeta) # TCoord handling if 'TCoords' in requestedFields: tcoords = dataset.GetPointData().GetTCoords() if tcoords: arrayMeta = getArrayDescription(tcoords, context, dataset) if arrayMeta: arrayMeta['location'] = 'pointData' arrayMeta['registration'] = 'setTCoords' extractedFields.append(arrayMeta) # ----------------------------------------------------------------------------- # Concrete instance serializers # ----------------------------------------------------------------------------- def genericActorSerializer(parent, actor, actorId, context, depth): # This kind of actor has two "children" of interest, a property and a mapper actorVisibility = actor.GetVisibility() mapperInstance = None propertyInstance = None calls = [] dependencies = [] if actorVisibility: mapper = None if not hasattr(actor, 'GetMapper'): if context.debugAll: print('This actor does not have a GetMapper method') else: mapper = actor.GetMapper() if mapper: mapperId = getReferenceId(mapper) mapperInstance = serializeInstance(actor, mapper, mapperId, context, depth + 1) if mapperInstance: dependencies.append(mapperInstance) calls.append(['setMapper', [ wrapId(mapperId) ]]) prop = None if hasattr(actor, 'GetProperty'): prop = actor.GetProperty() else: if context.debugAll: print('This actor does not have a GetProperty method') if prop: propId = getReferenceId(prop) propertyInstance = serializeInstance(actor, prop, propId, context, depth + 1) if propertyInstance: dependencies.append(propertyInstance) calls.append(['setProperty', [ wrapId(propId) ]]) if actorVisibility == 0 or (mapperInstance and propertyInstance): return { 'parent': getReferenceId(parent), 'id': actorId, 'type': actor.GetClassName(), 'properties': { # vtkProp 'visibility': actorVisibility, 'pickable': 
actor.GetPickable(), 'dragable': actor.GetDragable(), 'useBounds': actor.GetUseBounds(), # vtkProp3D 'origin': actor.GetOrigin(), 'position': actor.GetPosition(), 'scale': actor.GetScale(), # vtkActor 'forceOpaque': actor.GetForceOpaque(), 'forceTranslucent': actor.GetForceTranslucent() }, 'calls': calls, 'dependencies': dependencies } return None # ----------------------------------------------------------------------------- def genericMapperSerializer(parent, mapper, mapperId, context, depth): # This kind of mapper requires us to get 2 items: input data and lookup table dataObject = None dataObjectInstance = None lookupTableInstance = None calls = [] dependencies = [] if hasattr(mapper, 'GetInputDataObject'): dataObject = mapper.GetInputDataObject(0, 0) else: if context.debugAll: print('This mapper does not have GetInputDataObject method') if dataObject: dataObjectId = '%s-dataset' % mapperId dataObjectInstance = serializeInstance(mapper, dataObject, dataObjectId, context, depth + 1) if dataObjectInstance: dependencies.append(dataObjectInstance) calls.append(['setInputData', [ wrapId(dataObjectId) ]]) lookupTable = None if hasattr(mapper, 'GetLookupTable'): lookupTable = mapper.GetLookupTable() else: if context.debugAll: print('This mapper does not have GetLookupTable method') if lookupTable: lookupTableId = getReferenceId(lookupTable) lookupTableInstance = serializeInstance(mapper, lookupTable, lookupTableId, context, depth + 1) if lookupTableInstance: dependencies.append(lookupTableInstance) calls.append(['setLookupTable', [ wrapId(lookupTableId) ]]) if dataObjectInstance and lookupTableInstance: colorArrayName = mapper.GetArrayName() if mapper.GetArrayAccessMode() == 1 else mapper.GetArrayId() return { 'parent': getReferenceId(parent), 'id': mapperId, 'type': mapper.GetClassName(), 'properties': { 'scalarRange': mapper.GetScalarRange(), 'useLookupTableScalarRange': True if mapper.GetUseLookupTableScalarRange() else False, 'scalarVisibility': 
mapper.GetScalarVisibility(), 'colorByArrayName': colorArrayName, 'colorMode': mapper.GetColorMode(), 'scalarMode': mapper.GetScalarMode(), 'interpolateScalarsBeforeMapping': True if mapper.GetInterpolateScalarsBeforeMapping() else False }, 'calls': calls, 'dependencies': dependencies } return None # ----------------------------------------------------------------------------- def lookupTableSerializer(parent, lookupTable, lookupTableId, context, depth): # No children in this case, so no additions to bindings and return empty list # But we do need to add instance lookupTableRange = lookupTable.GetRange() lookupTableHueRange = [0.5, 0] if hasattr(lookupTable, 'GetHueRange'): try: lookupTable.GetHueRange(lookupTableHueRange) except Exception as inst: pass lutSatRange = lookupTable.GetSaturationRange() lutAlphaRange = lookupTable.GetAlphaRange() return { 'parent': getReferenceId(parent), 'id': lookupTableId, 'type': lookupTable.GetClassName(), 'properties': { 'numberOfColors': lookupTable.GetNumberOfColors(), 'valueRange': lookupTableRange, 'hueRange': lookupTableHueRange, # 'alphaRange': lutAlphaRange, # Causes weird rendering artifacts on client 'saturationRange': lutSatRange, 'nanColor': lookupTable.GetNanColor(), 'belowRangeColor': lookupTable.GetBelowRangeColor(), 'aboveRangeColor': lookupTable.GetAboveRangeColor(), 'useAboveRangeColor': True if lookupTable.GetUseAboveRangeColor() else False, 'useBelowRangeColor': True if lookupTable.GetUseBelowRangeColor() else False, 'alpha': lookupTable.GetAlpha(), 'vectorSize': lookupTable.GetVectorSize(), 'vectorComponent': lookupTable.GetVectorComponent(), 'vectorMode': lookupTable.GetVectorMode(), 'indexedLookup': lookupTable.GetIndexedLookup() } } # ----------------------------------------------------------------------------- def propertySerializer(parent, propObj, propObjId, context, depth): representation = propObj.GetRepresentation() if hasattr(propObj, 'GetRepresentation') else 2 colorToUse = propObj.GetDiffuseColor() 
if hasattr(propObj, 'GetDiffuseColor') else [1, 1, 1] if representation == 1 and hasattr(propObj, 'GetColor'): colorToUse = propObj.GetColor() return { 'parent': getReferenceId(parent), 'id': propObjId, 'type': propObj.GetClassName(), 'properties': { 'representation': representation, 'diffuseColor': colorToUse, 'color': propObj.GetColor(), 'ambientColor': propObj.GetAmbientColor(), 'specularColor': propObj.GetSpecularColor(), 'edgeColor': propObj.GetEdgeColor(), 'ambient': propObj.GetAmbient(), 'diffuse': propObj.GetDiffuse(), 'specular': propObj.GetSpecular(), 'specularPower': propObj.GetSpecularPower(), 'opacity': propObj.GetOpacity(), 'interpolation': propObj.GetInterpolation(), 'edgeVisibility': True if propObj.GetEdgeVisibility() else False, 'backfaceCulling': True if propObj.GetBackfaceCulling() else False, 'frontfaceCulling': True if propObj.GetFrontfaceCulling() else False, 'pointSize': propObj.GetPointSize(), 'lineWidth': propObj.GetLineWidth(), 'lighting': propObj.GetLighting() } } # ----------------------------------------------------------------------------- def polydataSerializer(parent, dataset, datasetId, context, depth): datasetType = dataset.GetClassName() if dataset and dataset.GetPoints(): properties = {} # Points points = getArrayDescription(dataset.GetPoints().GetData(), context, dataset) points['vtkClass'] = 'vtkPoints' properties['points'] = points ## Verts if dataset.GetVerts() and dataset.GetVerts().GetData().GetNumberOfTuples() > 0: _verts = getArrayDescription(dataset.GetVerts().GetData(), context, dataset) properties['verts'] = _verts properties['verts']['vtkClass'] = 'vtkCellArray' ## Lines if dataset.GetLines() and dataset.GetLines().GetData().GetNumberOfTuples() > 0: _lines = getArrayDescription(dataset.GetLines().GetData(), context, dataset) properties['lines'] = _lines properties['lines']['vtkClass'] = 'vtkCellArray' ## Polys if dataset.GetPolys() and dataset.GetPolys().GetData().GetNumberOfTuples() > 0: _polys = 
getArrayDescription(dataset.GetPolys().GetData(), context, dataset) properties['polys'] = _polys properties['polys']['vtkClass'] = 'vtkCellArray' ## Strips if dataset.GetStrips() and dataset.GetStrips().GetData().GetNumberOfTuples() > 0: _strips = getArrayDescription(dataset.GetStrips().GetData(), context, dataset) properties['strips'] = _strips properties['strips']['vtkClass'] = 'vtkCellArray' ## Fields properties['fields'] = [] extractRequiredFields(properties['fields'], parent, dataset, context) return { 'parent': getReferenceId(parent), 'id': datasetId, 'type': datasetType, 'properties': properties } if context.debugAll: print('This dataset has no points!') return None # ----------------------------------------------------------------------------- def mergeToPolydataSerializer(parent, dataObject, dataObjectId, context, depth): dataset = None if dataObject.IsA('vtkCompositeDataSet'): dataMTime = dataObject.GetMTime() gf = vtkCompositeDataGeometryFilter() gf.SetInputData(dataObject) gf.Update() tempDS = gf.GetOutput() dataset = tempDS else: dataset = mapper.GetInput() return polydataSerializer(parent, dataset, dataObjectId, context, depth) # ----------------------------------------------------------------------------- def colorTransferFunctionSerializer(parent, instance, objId, context, depth): nodes = [] for i in range(instance.GetSize()): # x, r, g, b, midpoint, sharpness node = [0, 0, 0, 0, 0, 0] instance.GetNodeValue(i, node) nodes.append(node) return { 'parent': getReferenceId(parent), 'id': objId, 'type': instance.GetClassName(), 'properties': { 'clamping': True if instance.GetClamping() else False, 'colorSpace': instance.GetColorSpace(), 'hSVWrap': True if instance.GetHSVWrap() else False, # 'nanColor': instance.GetNanColor(), # Breaks client # 'belowRangeColor': instance.GetBelowRangeColor(), # Breaks client # 'aboveRangeColor': instance.GetAboveRangeColor(), # Breaks client # 'useAboveRangeColor': True if instance.GetUseAboveRangeColor() else False, # 
'useBelowRangeColor': True if instance.GetUseBelowRangeColor() else False, 'allowDuplicateScalars': True if instance.GetAllowDuplicateScalars() else False, 'alpha': instance.GetAlpha(), 'vectorComponent': instance.GetVectorComponent(), 'vectorSize': instance.GetVectorSize(), 'vectorMode': instance.GetVectorMode(), 'indexedLookup': instance.GetIndexedLookup(), 'nodes': nodes } } # ----------------------------------------------------------------------------- def rendererSerializer(parent, instance, objId, context, depth): dependencies = [] viewPropIds = [] lightsIds = [] calls = [] # Camera camera = instance.GetActiveCamera() cameraId = getReferenceId(camera) cameraBindings = [] cameraInstance = serializeInstance(instance, camera, cameraId, context, depth + 1) if cameraInstance: dependencies.append(cameraInstance) calls.append(['setActiveCamera', [ wrapId(cameraId) ]]) # View prop as representation containers viewPropCollection = instance.GetViewProps() for rpIdx in range(viewPropCollection.GetNumberOfItems()): viewProp = viewPropCollection.GetItemAsObject(rpIdx) viewPropId = getReferenceId(viewProp) viewPropInstance = serializeInstance(instance, viewProp, viewPropId, context, depth + 1) if viewPropInstance: dependencies.append(viewPropInstance) viewPropIds.append(viewPropId) calls += context.buildDependencyCallList('%s-props' % objId, viewPropIds, 'addViewProp', 'removeViewProp') # Lights lightCollection = instance.GetLights() for lightIdx in range(lightCollection.GetNumberOfItems()): light = lightCollection.GetItemAsObject(lightIdx) lightId = getReferenceId(light) lightInstance = serializeInstance(instance, light, lightId, context, depth + 1) if lightInstance: dependencies.append(lightInstance) lightsIds.append(lightId) calls += context.buildDependencyCallList('%s-lights' % objId, lightsIds, 'addLight', 'removeLight') if len(dependencies) > 1: return { 'parent': getReferenceId(parent), 'id': objId, 'type': instance.GetClassName(), 'properties': { 'background': 
instance.GetBackground(), 'background2': instance.GetBackground2(), 'viewport': instance.GetViewport(), ### These commented properties do not yet have real setters in vtk.js # 'gradientBackground': instance.GetGradientBackground(), # 'aspect': instance.GetAspect(), # 'pixelAspect': instance.GetPixelAspect(), # 'ambient': instance.GetAmbient(), 'twoSidedLighting': instance.GetTwoSidedLighting(), 'lightFollowCamera': instance.GetLightFollowCamera(), 'layer': instance.GetLayer(), 'preserveColorBuffer': instance.GetPreserveColorBuffer(), 'preserveDepthBuffer': instance.GetPreserveDepthBuffer(), 'nearClippingPlaneTolerance': instance.GetNearClippingPlaneTolerance(), 'clippingRangeExpansion': instance.GetClippingRangeExpansion(), 'useShadows': instance.GetUseShadows(), 'useDepthPeeling': instance.GetUseDepthPeeling(), 'occlusionRatio': instance.GetOcclusionRatio(), 'maximumNumberOfPeels': instance.GetMaximumNumberOfPeels() }, 'dependencies': dependencies, 'calls': calls } return None # ----------------------------------------------------------------------------- def cameraSerializer(parent, instance, objId, context, depth): return { 'parent': getReferenceId(parent), 'id': objId, 'type': instance.GetClassName(), 'properties': { 'focalPoint': instance.GetFocalPoint(), 'position': instance.GetPosition(), 'viewUp': instance.GetViewUp(), } } # ----------------------------------------------------------------------------- def lightTypeToString(value): """ #define VTK_LIGHT_TYPE_HEADLIGHT 1 #define VTK_LIGHT_TYPE_CAMERA_LIGHT 2 #define VTK_LIGHT_TYPE_SCENE_LIGHT 3 'HeadLight'; 'SceneLight'; 'CameraLight' """ if value == 1: return 'HeadLight' elif value == 2: return 'CameraLight' return 'SceneLight' def lightSerializer(parent, instance, objId, context, depth): return { 'parent': getReferenceId(parent), 'id': objId, 'type': instance.GetClassName(), 'properties': { # 'specularColor': instance.GetSpecularColor(), # 'ambientColor': instance.GetAmbientColor(), 'switch': 
instance.GetSwitch(), 'intensity': instance.GetIntensity(), 'color': instance.GetDiffuseColor(), 'position': instance.GetPosition(), 'focalPoint': instance.GetFocalPoint(), 'positional': instance.GetPositional(), 'exponent': instance.GetExponent(), 'coneAngle': instance.GetConeAngle(), 'attenuationValues': instance.GetAttenuationValues(), 'lightType': lightTypeToString(instance.GetLightType()), 'shadowAttenuation': instance.GetShadowAttenuation() } } # ----------------------------------------------------------------------------- def renderWindowSerializer(parent, instance, objId, context, depth): dependencies = [] rendererIds = [] rendererCollection = instance.GetRenderers() for rIdx in range(rendererCollection.GetNumberOfItems()): # Grab the next vtkRenderer renderer = rendererCollection.GetItemAsObject(rIdx) rendererId = getReferenceId(renderer) rendererInstance = serializeInstance(instance, renderer, rendererId, context, depth + 1) if rendererInstance: dependencies.append(rendererInstance) rendererIds.append(rendererId) calls = context.buildDependencyCallList(objId, rendererIds, 'addRenderer', 'removeRenderer') return { 'parent': getReferenceId(parent), 'id': objId, 'type': instance.GetClassName(), 'properties': { 'numberOfLayers': instance.GetNumberOfLayers() }, 'dependencies': dependencies, 'calls': calls }
en
0.28282
# ----------------------------------------------------------------------------- # Convenience class for caching data arrays, storing computed sha sums, keeping # track of valid actors, etc... # ----------------------------------------------------------------------------- # IdType need to be converted to Uint32 # Convert the vtkUnsignedCharArray into a bytes object, required by Autobahn websockets # ----------------------------------------------------------------------------- # Global variables # ----------------------------------------------------------------------------- # ----------------------------------------------------------------------------- # Global API # ----------------------------------------------------------------------------- # ----------------------------------------------------------------------------- # ----------------------------------------------------------------------------- # Actors/viewProps # Mappers # LookupTables/TransferFunctions # Property # Datasets # RenderWindows # Renderers # Cameras # Lights # ----------------------------------------------------------------------------- # Helper functions # ----------------------------------------------------------------------------- # ----------------------------------------------------------------------------- # ----------------------------------------------------------------------------- # ----------------------------------------------------------------------------- # ----------------------------------------------------------------------------- # ----------------------------------------------------------------------------- # ----------------------------------------------------------------------------- # FIXME should evolve and support funky mapper which leverage many arrays # Normal handling # TCoord handling # ----------------------------------------------------------------------------- # Concrete instance serializers # 
----------------------------------------------------------------------------- # This kind of actor has two "children" of interest, a property and a mapper # vtkProp # vtkProp3D # vtkActor # ----------------------------------------------------------------------------- # This kind of mapper requires us to get 2 items: input data and lookup table # ----------------------------------------------------------------------------- # No children in this case, so no additions to bindings and return empty list # But we do need to add instance # 'alphaRange': lutAlphaRange, # Causes weird rendering artifacts on client # ----------------------------------------------------------------------------- # ----------------------------------------------------------------------------- # Points ## Verts ## Lines ## Polys ## Strips ## Fields # ----------------------------------------------------------------------------- # ----------------------------------------------------------------------------- # x, r, g, b, midpoint, sharpness # 'nanColor': instance.GetNanColor(), # Breaks client # 'belowRangeColor': instance.GetBelowRangeColor(), # Breaks client # 'aboveRangeColor': instance.GetAboveRangeColor(), # Breaks client # 'useAboveRangeColor': True if instance.GetUseAboveRangeColor() else False, # 'useBelowRangeColor': True if instance.GetUseBelowRangeColor() else False, # ----------------------------------------------------------------------------- # Camera # View prop as representation containers # Lights ### These commented properties do not yet have real setters in vtk.js # 'gradientBackground': instance.GetGradientBackground(), # 'aspect': instance.GetAspect(), # 'pixelAspect': instance.GetPixelAspect(), # 'ambient': instance.GetAmbient(), # ----------------------------------------------------------------------------- # ----------------------------------------------------------------------------- #define VTK_LIGHT_TYPE_HEADLIGHT 1 #define VTK_LIGHT_TYPE_CAMERA_LIGHT 2 #define 
VTK_LIGHT_TYPE_SCENE_LIGHT 3 'HeadLight'; 'SceneLight'; 'CameraLight' # 'specularColor': instance.GetSpecularColor(), # 'ambientColor': instance.GetAmbientColor(), # ----------------------------------------------------------------------------- # Grab the next vtkRenderer
1.847409
2
examples/to_replace/sessions/tx_sessions.py
devdave/txWeb
0
6624645
import time import webbrowser from txweb import Site as Site from txweb import expose from twisted.web.static import File from twisted.internet import reactor, defer from twisted.web.server import NOT_DONE_YET class Root(object): landing_page = File("./landing_page.html") home_page = File("./home.html") register_page = File("./register.html") @expose def index(self, request): # from dbgp.client import brk; brk("192.168.1.2", 9090) return "index" @expose def on_register(self, request): # from dbgp.client import brk; brk("192.168.1.2", 9090) if not request.args.get('name', False): return "Missing name" def run(): reactor.listenTCP(8080, Site(Root())) reactor.callLater(3, webbrowser.open, "http://127.0.0.1:8080/home" ) reactor.run() if __name__ == '__main__': run()
import time import webbrowser from txweb import Site as Site from txweb import expose from twisted.web.static import File from twisted.internet import reactor, defer from twisted.web.server import NOT_DONE_YET class Root(object): landing_page = File("./landing_page.html") home_page = File("./home.html") register_page = File("./register.html") @expose def index(self, request): # from dbgp.client import brk; brk("192.168.1.2", 9090) return "index" @expose def on_register(self, request): # from dbgp.client import brk; brk("192.168.1.2", 9090) if not request.args.get('name', False): return "Missing name" def run(): reactor.listenTCP(8080, Site(Root())) reactor.callLater(3, webbrowser.open, "http://127.0.0.1:8080/home" ) reactor.run() if __name__ == '__main__': run()
en
0.229299
# from dbgp.client import brk; brk("192.168.1.2", 9090) # from dbgp.client import brk; brk("192.168.1.2", 9090)
2.498728
2
fantasyProjectHome/fantasyApp/views.py
jaredtewodros/cfbFantasyApp
0
6624646
<gh_stars>0 from django.shortcuts import render # Create your views here. def index(request): return render(request, 'fantasyApp/index.html', {}) def login(request): return render(request, 'fantasyApp/login.html', {})
from django.shortcuts import render # Create your views here. def index(request): return render(request, 'fantasyApp/index.html', {}) def login(request): return render(request, 'fantasyApp/login.html', {})
en
0.968116
# Create your views here.
1.754787
2
setup.py
Zagrebelin/py-moneyed
0
6624647
#!/usr/bin/env python from setuptools import setup from setuptools.command.test import test as TestCommand import sys class Tox(TestCommand): def finalize_options(self): TestCommand.finalize_options(self) self.test_args = [] self.test_suite = True def run_tests(self): # import here, cause outside the eggs aren't loaded import tox errno = tox.cmdline(self.test_args) sys.exit(errno) setup( name='py-moneyed', packages=['moneyed'], version='0.9.1-dev', description='Provides Currency and Money classes for use in your Python code.', author='Kai', author_email='<EMAIL>', url='http://github.com/limist/py-moneyed', download_url='', keywords="money currency class abstraction", license='BSD', install_requires=[], classifiers=[ 'Programming Language :: Python', "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Development Status :: 3 - Alpha', 'Environment :: Other Environment', 'Intended Audience :: Developers', 'Topic :: Office/Business :: Financial', 'Topic :: Software Development :: Libraries :: Python Modules'], long_description=open('README.rst', 'r').read(), extras_require={ 'tests': [ 'pytest>=2.3.0', 'tox>=1.6.0' ]}, tests_require=['tox>=1.6.0', 'pytest>=2.3.0'], cmdclass={'test': Tox}, include_package_data=True, )
#!/usr/bin/env python from setuptools import setup from setuptools.command.test import test as TestCommand import sys class Tox(TestCommand): def finalize_options(self): TestCommand.finalize_options(self) self.test_args = [] self.test_suite = True def run_tests(self): # import here, cause outside the eggs aren't loaded import tox errno = tox.cmdline(self.test_args) sys.exit(errno) setup( name='py-moneyed', packages=['moneyed'], version='0.9.1-dev', description='Provides Currency and Money classes for use in your Python code.', author='Kai', author_email='<EMAIL>', url='http://github.com/limist/py-moneyed', download_url='', keywords="money currency class abstraction", license='BSD', install_requires=[], classifiers=[ 'Programming Language :: Python', "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Development Status :: 3 - Alpha', 'Environment :: Other Environment', 'Intended Audience :: Developers', 'Topic :: Office/Business :: Financial', 'Topic :: Software Development :: Libraries :: Python Modules'], long_description=open('README.rst', 'r').read(), extras_require={ 'tests': [ 'pytest>=2.3.0', 'tox>=1.6.0' ]}, tests_require=['tox>=1.6.0', 'pytest>=2.3.0'], cmdclass={'test': Tox}, include_package_data=True, )
en
0.756797
#!/usr/bin/env python # import here, cause outside the eggs aren't loaded
2.020488
2
built-in/PyTorch/Official/cv/image_object_detection/Faster_Mask_RCNN_for_PyTorch_Dynamic_Shape/detectron2/evaluation/cityscapes_evaluation.py
Ascend/modelzoo
12
6624648
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved # Copyright 2020 Huawei Technologies Co., Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import glob import logging import numpy as np import os import tempfile from collections import OrderedDict import torch from fvcore.common.file_io import PathManager from PIL import Image from detectron2.data import MetadataCatalog from detectron2.utils import comm from .evaluator import DatasetEvaluator class CityscapesEvaluator(DatasetEvaluator): """ Base class for evaluation using cityscapes API. """ def __init__(self, dataset_name): """ Args: dataset_name (str): the name of the dataset. It must have the following metadata associated with it: "thing_classes", "gt_dir". """ self._metadata = MetadataCatalog.get(dataset_name) self._cpu_device = torch.device("cpu") self._logger = logging.getLogger(__name__) def reset(self): self._working_dir = tempfile.TemporaryDirectory(prefix="cityscapes_eval_") self._temp_dir = self._working_dir.name # All workers will write to the same results directory # TODO this does not work in distributed training self._temp_dir = comm.all_gather(self._temp_dir)[0] if self._temp_dir != self._working_dir.name: self._working_dir.cleanup() self._logger.info( "Writing cityscapes results to temporary directory {} ...".format(self._temp_dir) ) class CityscapesInstanceEvaluator(CityscapesEvaluator): """ Evaluate instance segmentation results on cityscapes dataset using cityscapes API. 
Note: * It does not work in multi-machine distributed training. * It contains a synchronization, therefore has to be used on all ranks. * Only the main process runs evaluation. """ def process(self, inputs, outputs): from cityscapesscripts.helpers.labels import name2label for input, output in zip(inputs, outputs): file_name = input["file_name"] basename = os.path.splitext(os.path.basename(file_name))[0] pred_txt = os.path.join(self._temp_dir, basename + "_pred.txt") output = output["instances"].to(self._cpu_device) num_instances = len(output) with open(pred_txt, "w") as fout: for i in range(num_instances): pred_class = output.pred_classes[i] classes = self._metadata.thing_classes[pred_class] class_id = name2label[classes].id score = output.scores[i] mask = output.pred_masks[i].numpy().astype("uint8") png_filename = os.path.join( self._temp_dir, basename + "_{}_{}.png".format(i, classes) ) Image.fromarray(mask * 255).save(png_filename) fout.write("{} {} {}\n".format(os.path.basename(png_filename), class_id, score)) def evaluate(self): """ Returns: dict: has a key "segm", whose value is a dict of "AP" and "AP50". 
""" comm.synchronize() if comm.get_rank() > 0: return import cityscapesscripts.evaluation.evalInstanceLevelSemanticLabeling as cityscapes_eval self._logger.info("Evaluating results under {} ...".format(self._temp_dir)) # set some global states in cityscapes evaluation API, before evaluating cityscapes_eval.args.predictionPath = os.path.abspath(self._temp_dir) cityscapes_eval.args.predictionWalk = None cityscapes_eval.args.JSONOutput = False cityscapes_eval.args.colorized = False cityscapes_eval.args.gtInstancesFile = os.path.join(self._temp_dir, "gtInstances.json") # These lines are adopted from # https://github.com/mcordts/cityscapesScripts/blob/master/cityscapesscripts/evaluation/evalInstanceLevelSemanticLabeling.py # noqa gt_dir = PathManager.get_local_path(self._metadata.gt_dir) groundTruthImgList = glob.glob(os.path.join(gt_dir, "*", "*_gtFine_instanceIds.png")) assert len( groundTruthImgList ), "Cannot find any ground truth images to use for evaluation. Searched for: {}".format( cityscapes_eval.args.groundTruthSearch ) predictionImgList = [] for gt in groundTruthImgList: predictionImgList.append(cityscapes_eval.getPrediction(gt, cityscapes_eval.args)) results = cityscapes_eval.evaluateImgLists( predictionImgList, groundTruthImgList, cityscapes_eval.args )["averages"] ret = OrderedDict() ret["segm"] = {"AP": results["allAp"] * 100, "AP50": results["allAp50%"] * 100} self._working_dir.cleanup() return ret class CityscapesSemSegEvaluator(CityscapesEvaluator): """ Evaluate semantic segmentation results on cityscapes dataset using cityscapes API. Note: * It does not work in multi-machine distributed training. * It contains a synchronization, therefore has to be used on all ranks. * Only the main process runs evaluation. 
""" def process(self, inputs, outputs): from cityscapesscripts.helpers.labels import trainId2label for input, output in zip(inputs, outputs): file_name = input["file_name"] basename = os.path.splitext(os.path.basename(file_name))[0] pred_filename = os.path.join(self._temp_dir, basename + "_pred.png") output = output["sem_seg"].argmax(dim=0).to(self._cpu_device).numpy() pred = 255 * np.ones(output.shape, dtype=np.uint8) for train_id, label in trainId2label.items(): if label.ignoreInEval: continue pred[output == train_id] = label.id Image.fromarray(pred).save(pred_filename) def evaluate(self): comm.synchronize() if comm.get_rank() > 0: return # Load the Cityscapes eval script *after* setting the required env var, # since the script reads CITYSCAPES_DATASET into global variables at load time. import cityscapesscripts.evaluation.evalPixelLevelSemanticLabeling as cityscapes_eval self._logger.info("Evaluating results under {} ...".format(self._temp_dir)) # set some global states in cityscapes evaluation API, before evaluating cityscapes_eval.args.predictionPath = os.path.abspath(self._temp_dir) cityscapes_eval.args.predictionWalk = None cityscapes_eval.args.JSONOutput = False cityscapes_eval.args.colorized = False # These lines are adopted from # https://github.com/mcordts/cityscapesScripts/blob/master/cityscapesscripts/evaluation/evalPixelLevelSemanticLabeling.py # noqa gt_dir = PathManager.get_local_path(self._metadata.gt_dir) groundTruthImgList = glob.glob(os.path.join(gt_dir, "*", "*_gtFine_labelIds.png")) assert len( groundTruthImgList ), "Cannot find any ground truth images to use for evaluation. 
Searched for: {}".format( cityscapes_eval.args.groundTruthSearch ) predictionImgList = [] for gt in groundTruthImgList: predictionImgList.append(cityscapes_eval.getPrediction(cityscapes_eval.args, gt)) results = cityscapes_eval.evaluateImgLists( predictionImgList, groundTruthImgList, cityscapes_eval.args ) ret = OrderedDict() ret["sem_seg"] = { "IoU": 100.0 * results["averageScoreClasses"], "iIoU": 100.0 * results["averageScoreInstClasses"], "IoU_sup": 100.0 * results["averageScoreCategories"], "iIoU_sup": 100.0 * results["averageScoreInstCategories"], } self._working_dir.cleanup() return ret
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved # Copyright 2020 Huawei Technologies Co., Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import glob import logging import numpy as np import os import tempfile from collections import OrderedDict import torch from fvcore.common.file_io import PathManager from PIL import Image from detectron2.data import MetadataCatalog from detectron2.utils import comm from .evaluator import DatasetEvaluator class CityscapesEvaluator(DatasetEvaluator): """ Base class for evaluation using cityscapes API. """ def __init__(self, dataset_name): """ Args: dataset_name (str): the name of the dataset. It must have the following metadata associated with it: "thing_classes", "gt_dir". """ self._metadata = MetadataCatalog.get(dataset_name) self._cpu_device = torch.device("cpu") self._logger = logging.getLogger(__name__) def reset(self): self._working_dir = tempfile.TemporaryDirectory(prefix="cityscapes_eval_") self._temp_dir = self._working_dir.name # All workers will write to the same results directory # TODO this does not work in distributed training self._temp_dir = comm.all_gather(self._temp_dir)[0] if self._temp_dir != self._working_dir.name: self._working_dir.cleanup() self._logger.info( "Writing cityscapes results to temporary directory {} ...".format(self._temp_dir) ) class CityscapesInstanceEvaluator(CityscapesEvaluator): """ Evaluate instance segmentation results on cityscapes dataset using cityscapes API. 
Note: * It does not work in multi-machine distributed training. * It contains a synchronization, therefore has to be used on all ranks. * Only the main process runs evaluation. """ def process(self, inputs, outputs): from cityscapesscripts.helpers.labels import name2label for input, output in zip(inputs, outputs): file_name = input["file_name"] basename = os.path.splitext(os.path.basename(file_name))[0] pred_txt = os.path.join(self._temp_dir, basename + "_pred.txt") output = output["instances"].to(self._cpu_device) num_instances = len(output) with open(pred_txt, "w") as fout: for i in range(num_instances): pred_class = output.pred_classes[i] classes = self._metadata.thing_classes[pred_class] class_id = name2label[classes].id score = output.scores[i] mask = output.pred_masks[i].numpy().astype("uint8") png_filename = os.path.join( self._temp_dir, basename + "_{}_{}.png".format(i, classes) ) Image.fromarray(mask * 255).save(png_filename) fout.write("{} {} {}\n".format(os.path.basename(png_filename), class_id, score)) def evaluate(self): """ Returns: dict: has a key "segm", whose value is a dict of "AP" and "AP50". 
""" comm.synchronize() if comm.get_rank() > 0: return import cityscapesscripts.evaluation.evalInstanceLevelSemanticLabeling as cityscapes_eval self._logger.info("Evaluating results under {} ...".format(self._temp_dir)) # set some global states in cityscapes evaluation API, before evaluating cityscapes_eval.args.predictionPath = os.path.abspath(self._temp_dir) cityscapes_eval.args.predictionWalk = None cityscapes_eval.args.JSONOutput = False cityscapes_eval.args.colorized = False cityscapes_eval.args.gtInstancesFile = os.path.join(self._temp_dir, "gtInstances.json") # These lines are adopted from # https://github.com/mcordts/cityscapesScripts/blob/master/cityscapesscripts/evaluation/evalInstanceLevelSemanticLabeling.py # noqa gt_dir = PathManager.get_local_path(self._metadata.gt_dir) groundTruthImgList = glob.glob(os.path.join(gt_dir, "*", "*_gtFine_instanceIds.png")) assert len( groundTruthImgList ), "Cannot find any ground truth images to use for evaluation. Searched for: {}".format( cityscapes_eval.args.groundTruthSearch ) predictionImgList = [] for gt in groundTruthImgList: predictionImgList.append(cityscapes_eval.getPrediction(gt, cityscapes_eval.args)) results = cityscapes_eval.evaluateImgLists( predictionImgList, groundTruthImgList, cityscapes_eval.args )["averages"] ret = OrderedDict() ret["segm"] = {"AP": results["allAp"] * 100, "AP50": results["allAp50%"] * 100} self._working_dir.cleanup() return ret class CityscapesSemSegEvaluator(CityscapesEvaluator): """ Evaluate semantic segmentation results on cityscapes dataset using cityscapes API. Note: * It does not work in multi-machine distributed training. * It contains a synchronization, therefore has to be used on all ranks. * Only the main process runs evaluation. 
""" def process(self, inputs, outputs): from cityscapesscripts.helpers.labels import trainId2label for input, output in zip(inputs, outputs): file_name = input["file_name"] basename = os.path.splitext(os.path.basename(file_name))[0] pred_filename = os.path.join(self._temp_dir, basename + "_pred.png") output = output["sem_seg"].argmax(dim=0).to(self._cpu_device).numpy() pred = 255 * np.ones(output.shape, dtype=np.uint8) for train_id, label in trainId2label.items(): if label.ignoreInEval: continue pred[output == train_id] = label.id Image.fromarray(pred).save(pred_filename) def evaluate(self): comm.synchronize() if comm.get_rank() > 0: return # Load the Cityscapes eval script *after* setting the required env var, # since the script reads CITYSCAPES_DATASET into global variables at load time. import cityscapesscripts.evaluation.evalPixelLevelSemanticLabeling as cityscapes_eval self._logger.info("Evaluating results under {} ...".format(self._temp_dir)) # set some global states in cityscapes evaluation API, before evaluating cityscapes_eval.args.predictionPath = os.path.abspath(self._temp_dir) cityscapes_eval.args.predictionWalk = None cityscapes_eval.args.JSONOutput = False cityscapes_eval.args.colorized = False # These lines are adopted from # https://github.com/mcordts/cityscapesScripts/blob/master/cityscapesscripts/evaluation/evalPixelLevelSemanticLabeling.py # noqa gt_dir = PathManager.get_local_path(self._metadata.gt_dir) groundTruthImgList = glob.glob(os.path.join(gt_dir, "*", "*_gtFine_labelIds.png")) assert len( groundTruthImgList ), "Cannot find any ground truth images to use for evaluation. 
Searched for: {}".format( cityscapes_eval.args.groundTruthSearch ) predictionImgList = [] for gt in groundTruthImgList: predictionImgList.append(cityscapes_eval.getPrediction(cityscapes_eval.args, gt)) results = cityscapes_eval.evaluateImgLists( predictionImgList, groundTruthImgList, cityscapes_eval.args ) ret = OrderedDict() ret["sem_seg"] = { "IoU": 100.0 * results["averageScoreClasses"], "iIoU": 100.0 * results["averageScoreInstClasses"], "IoU_sup": 100.0 * results["averageScoreCategories"], "iIoU_sup": 100.0 * results["averageScoreInstCategories"], } self._working_dir.cleanup() return ret
en
0.83532
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved # Copyright 2020 Huawei Technologies Co., Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. Base class for evaluation using cityscapes API. Args: dataset_name (str): the name of the dataset. It must have the following metadata associated with it: "thing_classes", "gt_dir". # All workers will write to the same results directory # TODO this does not work in distributed training Evaluate instance segmentation results on cityscapes dataset using cityscapes API. Note: * It does not work in multi-machine distributed training. * It contains a synchronization, therefore has to be used on all ranks. * Only the main process runs evaluation. Returns: dict: has a key "segm", whose value is a dict of "AP" and "AP50". # set some global states in cityscapes evaluation API, before evaluating # These lines are adopted from # https://github.com/mcordts/cityscapesScripts/blob/master/cityscapesscripts/evaluation/evalInstanceLevelSemanticLabeling.py # noqa Evaluate semantic segmentation results on cityscapes dataset using cityscapes API. Note: * It does not work in multi-machine distributed training. * It contains a synchronization, therefore has to be used on all ranks. * Only the main process runs evaluation. # Load the Cityscapes eval script *after* setting the required env var, # since the script reads CITYSCAPES_DATASET into global variables at load time. 
# set some global states in cityscapes evaluation API, before evaluating # These lines are adopted from # https://github.com/mcordts/cityscapesScripts/blob/master/cityscapesscripts/evaluation/evalPixelLevelSemanticLabeling.py # noqa
1.615029
2
servermanager/api/blogapi.py
clpolcurs/TungBlog
0
6624649
from haystack.query import SearchQuerySet from blog.models import Article, Category class BlogApi: def __init__(self): self.searchqueryset = SearchQuerySet() self.searchqueryset.auto_query('') self.__max_takecount__ = 8 def search_articles(self, query): sqs = self.searchqueryset.auto_query(query) sqs = sqs.load_all() return sqs[:self.__max_takecount__] def get_category_lists(self): return Category.objects.all() def get_category_articles(self, categoryname): articles = Article.objects.filter(category__name=categoryname) if articles: return articles[:self.__max_takecount__] return None def get_recent_articles(self): return Article.objects.all()[:self.__max_takecount__]
from haystack.query import SearchQuerySet from blog.models import Article, Category class BlogApi: def __init__(self): self.searchqueryset = SearchQuerySet() self.searchqueryset.auto_query('') self.__max_takecount__ = 8 def search_articles(self, query): sqs = self.searchqueryset.auto_query(query) sqs = sqs.load_all() return sqs[:self.__max_takecount__] def get_category_lists(self): return Category.objects.all() def get_category_articles(self, categoryname): articles = Article.objects.filter(category__name=categoryname) if articles: return articles[:self.__max_takecount__] return None def get_recent_articles(self): return Article.objects.all()[:self.__max_takecount__]
none
1
2.493907
2
microsoft_screenshot.py
Vignesh-Desmond/screenshot-discord
4
6624650
<reponame>Vignesh-Desmond/screenshot-discord #!/usr/bin/env python # -*- coding: utf-8 -*- import discord from discord.ext import commands, tasks import asyncio from PIL import Image, ImageEnhance, ImageFilter, ImageGrab from io import BytesIO import keyboard from datetime import datetime import pytz import numpy as np import os from dotenv import load_dotenv from azure.cognitiveservices.vision.computervision import ComputerVisionClient from msrest.authentication import CognitiveServicesCredentials load_dotenv() client = discord.Client() TOKEN = os.environ["TOKEN"] CHANNEL_ID = int(os.environ["ID"]) SUBSCRIPTION_KEY = os.environ["KEY"] ENDPOINT = os.environ["ENDPOINT"] screenshot_count = 0 computervision_client = ComputerVisionClient( ENDPOINT, CognitiveServicesCredentials(SUBSCRIPTION_KEY) ) def take_screenshot(): left = 513 top = 349 right = 1617 bottom = 1079 ss_image = ImageGrab.grab().convert("RGB") im = ss_image.crop((left, top, right, bottom)) na = np.array(im) lavenderY, lavenderX = np.where(np.all(na == [222, 242, 248], axis=2)) top, bottom = lavenderY[0], lavenderY[-1] left, right = lavenderX[0], lavenderX[-1] ROI = na[top:bottom, left:right] cropped_image = Image.fromarray(ROI) return cropped_image def vision_api(image): ocr_result_local = computervision_client.recognize_printed_text_in_stream(image) OCR_result = "" for region in ocr_result_local.regions: final = "" for line in region.lines: s = "" for word in line.words: if word.text != "O": s += word.text + " " if s != "": s += "\n" final += s OCR_result += final return OCR_result @client.event async def on_ready(): print("Logged in as") print(client.user.name) print(client.user.id) print("------") channel = client.get_channel(CHANNEL_ID) await channel.send("Vanakkam") await asyncio.sleep(1) await channel.send("https://media.giphy.com/media/O8YQGdQapcRvW/giphy.gif") screenshot_takeNsend.start() @tasks.loop(seconds=0.2) async def screenshot_takeNsend(): channel = client.get_channel(CHANNEL_ID) if 
keyboard.is_pressed("ctrl + b"): global screenshot_count screenshot_count += 1 with BytesIO() as image_binary: screenshot_image = take_screenshot() screenshot_image.save(image_binary, "PNG") image_binary.seek(0) ocr_text = vision_api(image_binary) image_binary.seek(0) file = discord.File(fp=image_binary, filename="image.png") embed = discord.Embed( title=f"Screenshot {screenshot_count}", colour=discord.Colour(0xD8C538), description=ocr_text, timestamp=datetime.now(pytz.timezone("Asia/Kolkata")), ) embed.set_image(url="attachment://image.png") await channel.send(file=file, embed=embed) await asyncio.sleep(1) await channel.send("** **") print("Screenshot sent") await asyncio.sleep(1) if __name__ == "__main__": client.run(TOKEN)
#!/usr/bin/env python # -*- coding: utf-8 -*- import discord from discord.ext import commands, tasks import asyncio from PIL import Image, ImageEnhance, ImageFilter, ImageGrab from io import BytesIO import keyboard from datetime import datetime import pytz import numpy as np import os from dotenv import load_dotenv from azure.cognitiveservices.vision.computervision import ComputerVisionClient from msrest.authentication import CognitiveServicesCredentials load_dotenv() client = discord.Client() TOKEN = os.environ["TOKEN"] CHANNEL_ID = int(os.environ["ID"]) SUBSCRIPTION_KEY = os.environ["KEY"] ENDPOINT = os.environ["ENDPOINT"] screenshot_count = 0 computervision_client = ComputerVisionClient( ENDPOINT, CognitiveServicesCredentials(SUBSCRIPTION_KEY) ) def take_screenshot(): left = 513 top = 349 right = 1617 bottom = 1079 ss_image = ImageGrab.grab().convert("RGB") im = ss_image.crop((left, top, right, bottom)) na = np.array(im) lavenderY, lavenderX = np.where(np.all(na == [222, 242, 248], axis=2)) top, bottom = lavenderY[0], lavenderY[-1] left, right = lavenderX[0], lavenderX[-1] ROI = na[top:bottom, left:right] cropped_image = Image.fromarray(ROI) return cropped_image def vision_api(image): ocr_result_local = computervision_client.recognize_printed_text_in_stream(image) OCR_result = "" for region in ocr_result_local.regions: final = "" for line in region.lines: s = "" for word in line.words: if word.text != "O": s += word.text + " " if s != "": s += "\n" final += s OCR_result += final return OCR_result @client.event async def on_ready(): print("Logged in as") print(client.user.name) print(client.user.id) print("------") channel = client.get_channel(CHANNEL_ID) await channel.send("Vanakkam") await asyncio.sleep(1) await channel.send("https://media.giphy.com/media/O8YQGdQapcRvW/giphy.gif") screenshot_takeNsend.start() @tasks.loop(seconds=0.2) async def screenshot_takeNsend(): channel = client.get_channel(CHANNEL_ID) if keyboard.is_pressed("ctrl + b"): global 
screenshot_count screenshot_count += 1 with BytesIO() as image_binary: screenshot_image = take_screenshot() screenshot_image.save(image_binary, "PNG") image_binary.seek(0) ocr_text = vision_api(image_binary) image_binary.seek(0) file = discord.File(fp=image_binary, filename="image.png") embed = discord.Embed( title=f"Screenshot {screenshot_count}", colour=discord.Colour(0xD8C538), description=ocr_text, timestamp=datetime.now(pytz.timezone("Asia/Kolkata")), ) embed.set_image(url="attachment://image.png") await channel.send(file=file, embed=embed) await asyncio.sleep(1) await channel.send("** **") print("Screenshot sent") await asyncio.sleep(1) if __name__ == "__main__": client.run(TOKEN)
en
0.352855
#!/usr/bin/env python # -*- coding: utf-8 -*-
2.5101
3