ngram
listlengths
0
82k
[ "provided will use session['email'] :return: Account if email is present", "name. :fsuid: Optional, user's FSUID. :course_list: Optional, courses being taken", "an account for a single user. :email: Required, the email", "(defaults to using session), otherwise redirects to login page. :email:", "taken by user :return: Account object. \"\"\" account = Account(", "Set user's extra credit courses course_util.set_courses(account, course_list) account.set_password(password) account.save() return", ":fsuid: Optional, user's FSUID. :course_list: Optional, courses being taken by", "'email' def create_account(email: str, password: str, first_name: str, last_name: str,", "is_admin=False ) # Set user's extra credit courses course_util.set_courses(account, course_list)", "email address of the user. :password: Required, user's chosen password.", "util.auth, except with the action paradigm removed. \"\"\" from flask", "\"\"\" from flask import session from app.models import Account from", "if not provided will use session['email'] :return: Account if email", "not provided will use session['email'] :return: Account if email is", "of the user. :password: Required, user's chosen password. :first_name: Required,", "user's chosen password. :first_name: Required, user's first name. :last_name: Required,", "the email address of the user. :password: Required, user's chosen", "courses course_util.set_courses(account, course_list) account.set_password(password) account.save() return account def get_account(email: str=None):", "flask import session from app.models import Account from app.util import", "email (defaults to using session), otherwise redirects to login page.", ":password: Required, user's chosen password. :first_name: Required, user's first name.", ":return: Account object. \"\"\" account = Account( email=email, first_name=first_name, last_name=last_name,", "None otherwise. 
\"\"\" try: email = email or session['email'] return", "first_name=first_name, last_name=last_name, fsuid=fsuid, is_admin=False ) # Set user's extra credit", "FSUID. :course_list: Optional, courses being taken by user :return: Account", "to using session), otherwise redirects to login page. :email: Optional", "user's last name. :fsuid: Optional, user's FSUID. :course_list: Optional, courses", "off of util.auth, except with the action paradigm removed. \"\"\"", "= []): \"\"\" Creates an account for a single user.", "Required, the email address of the user. :password: Required, user's", "Required, user's last name. :fsuid: Optional, user's FSUID. :course_list: Optional,", "str, course_list: list = []): \"\"\" Creates an account for", "\"\"\" try: email = email or session['email'] return Account.objects.get_or_404(email=email) except:", "page. :email: Optional email string, if not provided will use", "Account if email is present in session, None otherwise. \"\"\"", "user. :email: Required, the email address of the user. :password:", "object. \"\"\" account = Account( email=email, first_name=first_name, last_name=last_name, fsuid=fsuid, is_admin=False", "Account( email=email, first_name=first_name, last_name=last_name, fsuid=fsuid, is_admin=False ) # Set user's", "string, if not provided will use session['email'] :return: Account if", "account def get_account(email: str=None): \"\"\" Retrieves account via email (defaults", "with the action paradigm removed. \"\"\" from flask import session", "first_name: str, last_name: str, fsuid: str, course_list: list = []):", "account.save() return account def get_account(email: str=None): \"\"\" Retrieves account via", "app.models import Account from app.util import course as course_util #", "action paradigm removed. \"\"\" from flask import session from app.models", ":return: Account if email is present in session, None otherwise.", ":last_name: Required, user's last name. :fsuid: Optional, user's FSUID. 
:course_list:", "keys SESSION_EMAIL = 'email' def create_account(email: str, password: str, first_name:", "Required, user's first name. :last_name: Required, user's last name. :fsuid:", "get_account(email: str=None): \"\"\" Retrieves account via email (defaults to using", "of util.auth, except with the action paradigm removed. \"\"\" from", "session from app.models import Account from app.util import course as", "is based off of util.auth, except with the action paradigm", "user's extra credit courses course_util.set_courses(account, course_list) account.set_password(password) account.save() return account", "course as course_util # Session keys SESSION_EMAIL = 'email' def", "email is present in session, None otherwise. \"\"\" try: email", "chosen password. :first_name: Required, user's first name. :last_name: Required, user's", "last_name=last_name, fsuid=fsuid, is_admin=False ) # Set user's extra credit courses", "Session keys SESSION_EMAIL = 'email' def create_account(email: str, password: str,", "Account from app.util import course as course_util # Session keys", "SESSION_EMAIL = 'email' def create_account(email: str, password: str, first_name: str,", "if email is present in session, None otherwise. \"\"\" try:", "except with the action paradigm removed. \"\"\" from flask import", "= 'email' def create_account(email: str, password: str, first_name: str, last_name:", "# Session keys SESSION_EMAIL = 'email' def create_account(email: str, password:", "use session['email'] :return: Account if email is present in session,", "via email (defaults to using session), otherwise redirects to login", "otherwise. \"\"\" try: email = email or session['email'] return Account.objects.get_or_404(email=email)", "list = []): \"\"\" Creates an account for a single", "util.auth2: Authentication tools This module is based off of util.auth,", "email = email or session['email'] return Account.objects.get_or_404(email=email) except: return None", "address of the user. 
:password: Required, user's chosen password. :first_name:", ":email: Required, the email address of the user. :password: Required,", "email=email, first_name=first_name, last_name=last_name, fsuid=fsuid, is_admin=False ) # Set user's extra", "Optional, courses being taken by user :return: Account object. \"\"\"", "account.set_password(password) account.save() return account def get_account(email: str=None): \"\"\" Retrieves account", "password. :first_name: Required, user's first name. :last_name: Required, user's last", "course_util.set_courses(account, course_list) account.set_password(password) account.save() return account def get_account(email: str=None): \"\"\"", "course_list) account.set_password(password) account.save() return account def get_account(email: str=None): \"\"\" Retrieves", "Optional, user's FSUID. :course_list: Optional, courses being taken by user", "Creates an account for a single user. :email: Required, the", "user :return: Account object. \"\"\" account = Account( email=email, first_name=first_name,", "account via email (defaults to using session), otherwise redirects to", "Required, user's chosen password. :first_name: Required, user's first name. :last_name:", "def create_account(email: str, password: str, first_name: str, last_name: str, fsuid:", ") # Set user's extra credit courses course_util.set_courses(account, course_list) account.set_password(password)", "import session from app.models import Account from app.util import course", "session), otherwise redirects to login page. :email: Optional email string,", "str, password: str, first_name: str, last_name: str, fsuid: str, course_list:", "fsuid=fsuid, is_admin=False ) # Set user's extra credit courses course_util.set_courses(account,", "last_name: str, fsuid: str, course_list: list = []): \"\"\" Creates", "# Set user's extra credit courses course_util.set_courses(account, course_list) account.set_password(password) account.save()", "user's FSUID. 
:course_list: Optional, courses being taken by user :return:", "\"\"\" account = Account( email=email, first_name=first_name, last_name=last_name, fsuid=fsuid, is_admin=False )", "user's first name. :last_name: Required, user's last name. :fsuid: Optional,", "redirects to login page. :email: Optional email string, if not", "str, fsuid: str, course_list: list = []): \"\"\" Creates an", "return account def get_account(email: str=None): \"\"\" Retrieves account via email", "import Account from app.util import course as course_util # Session", "= Account( email=email, first_name=first_name, last_name=last_name, fsuid=fsuid, is_admin=False ) # Set", "str, first_name: str, last_name: str, fsuid: str, course_list: list =", "courses being taken by user :return: Account object. \"\"\" account", "first name. :last_name: Required, user's last name. :fsuid: Optional, user's", "to login page. :email: Optional email string, if not provided", "\"\"\" Retrieves account via email (defaults to using session), otherwise", "import course as course_util # Session keys SESSION_EMAIL = 'email'", "course_util # Session keys SESSION_EMAIL = 'email' def create_account(email: str,", "user. :password: Required, user's chosen password. :first_name: Required, user's first", "by user :return: Account object. \"\"\" account = Account( email=email,", "password: str, first_name: str, last_name: str, fsuid: str, course_list: list", "a single user. :email: Required, the email address of the", "[]): \"\"\" Creates an account for a single user. 
:email:", "module is based off of util.auth, except with the action", "\"\"\" util.auth2: Authentication tools This module is based off of", "as course_util # Session keys SESSION_EMAIL = 'email' def create_account(email:", "create_account(email: str, password: str, first_name: str, last_name: str, fsuid: str,", "app.util import course as course_util # Session keys SESSION_EMAIL =", "account = Account( email=email, first_name=first_name, last_name=last_name, fsuid=fsuid, is_admin=False ) #", "account for a single user. :email: Required, the email address", "tools This module is based off of util.auth, except with", "being taken by user :return: Account object. \"\"\" account =", "fsuid: str, course_list: list = []): \"\"\" Creates an account", "paradigm removed. \"\"\" from flask import session from app.models import", "str, last_name: str, fsuid: str, course_list: list = []): \"\"\"", "will use session['email'] :return: Account if email is present in", "name. :last_name: Required, user's last name. :fsuid: Optional, user's FSUID.", "is present in session, None otherwise. \"\"\" try: email =", "extra credit courses course_util.set_courses(account, course_list) account.set_password(password) account.save() return account def", "using session), otherwise redirects to login page. :email: Optional email", "email string, if not provided will use session['email'] :return: Account", "def get_account(email: str=None): \"\"\" Retrieves account via email (defaults to", "in session, None otherwise. \"\"\" try: email = email or", "from app.models import Account from app.util import course as course_util", "from app.util import course as course_util # Session keys SESSION_EMAIL", "single user. :email: Required, the email address of the user.", "str=None): \"\"\" Retrieves account via email (defaults to using session),", "present in session, None otherwise. 
\"\"\" try: email = email", "Authentication tools This module is based off of util.auth, except", "based off of util.auth, except with the action paradigm removed.", "Retrieves account via email (defaults to using session), otherwise redirects", "This module is based off of util.auth, except with the", "course_list: list = []): \"\"\" Creates an account for a", "the action paradigm removed. \"\"\" from flask import session from", "Account object. \"\"\" account = Account( email=email, first_name=first_name, last_name=last_name, fsuid=fsuid,", "try: email = email or session['email'] return Account.objects.get_or_404(email=email) except: return", "otherwise redirects to login page. :email: Optional email string, if", "login page. :email: Optional email string, if not provided will", "from flask import session from app.models import Account from app.util", "Optional email string, if not provided will use session['email'] :return:", "last name. :fsuid: Optional, user's FSUID. :course_list: Optional, courses being", "session, None otherwise. \"\"\" try: email = email or session['email']", "session['email'] :return: Account if email is present in session, None", "for a single user. :email: Required, the email address of", ":first_name: Required, user's first name. :last_name: Required, user's last name.", "removed. \"\"\" from flask import session from app.models import Account", "credit courses course_util.set_courses(account, course_list) account.set_password(password) account.save() return account def get_account(email:", "the user. :password: Required, user's chosen password. :first_name: Required, user's", "\"\"\" Creates an account for a single user. :email: Required,", ":course_list: Optional, courses being taken by user :return: Account object.", ":email: Optional email string, if not provided will use session['email']" ]
[ "parent=None): QWidget.__init__(self, parent) self.fig_pstatic = Figure() self.fig_pstatic.set_facecolor('#ffffff') self.canvas_pstatic = FigureCanvas(self.fig_pstatic)", "self.setLayout(vertical_layout) self.canvas_pstatic.axes_pstatic.set_xticks([]) self.canvas_pstatic.axes_pstatic.set_yticks([]) self.canvas_pstatic.axes_pstatic.axis('off') self.fig_pstatic.subplots_adjust(left=0.12, bottom=0.15, right=0.985, top=0.95) self.toolbar =", "= QVBoxLayout() vertical_layout.addWidget(self.canvas_pstatic) self.canvas_pstatic.axes_pstatic = self.canvas_pstatic.figure.add_subplot(111) self.setLayout(vertical_layout) self.canvas_pstatic.axes_pstatic.set_xticks([]) self.canvas_pstatic.axes_pstatic.set_yticks([]) self.canvas_pstatic.axes_pstatic.axis('off')", "self.fig_pstatic.subplots_adjust(left=0.12, bottom=0.15, right=0.985, top=0.95) self.toolbar = NavigationToolbar(self.canvas_pstatic, self) self.toolbar.setFixedHeight(25) vertical_layout.addWidget(self.toolbar)", "import NavigationToolbar2QT as NavigationToolbar class PstaticWidget(QWidget): def __init__(self, parent=None): QWidget.__init__(self,", "= Figure() self.fig_pstatic.set_facecolor('#ffffff') self.canvas_pstatic = FigureCanvas(self.fig_pstatic) vertical_layout = QVBoxLayout() vertical_layout.addWidget(self.canvas_pstatic)", "FigureCanvas(self.fig_pstatic) vertical_layout = QVBoxLayout() vertical_layout.addWidget(self.canvas_pstatic) self.canvas_pstatic.axes_pstatic = self.canvas_pstatic.figure.add_subplot(111) self.setLayout(vertical_layout) self.canvas_pstatic.axes_pstatic.set_xticks([])", "matplotlib.figure import Figure from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar class", "= self.canvas_pstatic.figure.add_subplot(111) self.setLayout(vertical_layout) self.canvas_pstatic.axes_pstatic.set_xticks([]) self.canvas_pstatic.axes_pstatic.set_yticks([]) self.canvas_pstatic.axes_pstatic.axis('off') self.fig_pstatic.subplots_adjust(left=0.12, bottom=0.15, right=0.985, 
top=0.95)", "NavigationToolbar2QT as NavigationToolbar class PstaticWidget(QWidget): def __init__(self, parent=None): QWidget.__init__(self, parent)", "QVBoxLayout() vertical_layout.addWidget(self.canvas_pstatic) self.canvas_pstatic.axes_pstatic = self.canvas_pstatic.figure.add_subplot(111) self.setLayout(vertical_layout) self.canvas_pstatic.axes_pstatic.set_xticks([]) self.canvas_pstatic.axes_pstatic.set_yticks([]) self.canvas_pstatic.axes_pstatic.axis('off') self.fig_pstatic.subplots_adjust(left=0.12,", "from matplotlib.figure import Figure from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar", "= FigureCanvas(self.fig_pstatic) vertical_layout = QVBoxLayout() vertical_layout.addWidget(self.canvas_pstatic) self.canvas_pstatic.axes_pstatic = self.canvas_pstatic.figure.add_subplot(111) self.setLayout(vertical_layout)", "self.canvas_pstatic.axes_pstatic.set_yticks([]) self.canvas_pstatic.axes_pstatic.axis('off') self.fig_pstatic.subplots_adjust(left=0.12, bottom=0.15, right=0.985, top=0.95) self.toolbar = NavigationToolbar(self.canvas_pstatic, self)", "FigureCanvas from matplotlib.figure import Figure from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as", "QWidget.__init__(self, parent) self.fig_pstatic = Figure() self.fig_pstatic.set_facecolor('#ffffff') self.canvas_pstatic = FigureCanvas(self.fig_pstatic) vertical_layout", "def __init__(self, parent=None): QWidget.__init__(self, parent) self.fig_pstatic = Figure() self.fig_pstatic.set_facecolor('#ffffff') self.canvas_pstatic", "self.canvas_pstatic.axes_pstatic.axis('off') self.fig_pstatic.subplots_adjust(left=0.12, bottom=0.15, right=0.985, top=0.95) self.toolbar = NavigationToolbar(self.canvas_pstatic, self) self.toolbar.setFixedHeight(25)", "vertical_layout.addWidget(self.canvas_pstatic) self.canvas_pstatic.axes_pstatic = self.canvas_pstatic.figure.add_subplot(111) self.setLayout(vertical_layout) self.canvas_pstatic.axes_pstatic.set_xticks([]) 
self.canvas_pstatic.axes_pstatic.set_yticks([]) self.canvas_pstatic.axes_pstatic.axis('off') self.fig_pstatic.subplots_adjust(left=0.12, bottom=0.15,", "matplotlib.backends.backend_qt5agg import FigureCanvas from matplotlib.figure import Figure from matplotlib.backends.backend_qt5agg import", "self.canvas_pstatic.axes_pstatic.set_xticks([]) self.canvas_pstatic.axes_pstatic.set_yticks([]) self.canvas_pstatic.axes_pstatic.axis('off') self.fig_pstatic.subplots_adjust(left=0.12, bottom=0.15, right=0.985, top=0.95) self.toolbar = NavigationToolbar(self.canvas_pstatic,", "from PyQt5.QtWidgets import * from matplotlib.backends.backend_qt5agg import FigureCanvas from matplotlib.figure", "as NavigationToolbar class PstaticWidget(QWidget): def __init__(self, parent=None): QWidget.__init__(self, parent) self.fig_pstatic", "PstaticWidget(QWidget): def __init__(self, parent=None): QWidget.__init__(self, parent) self.fig_pstatic = Figure() self.fig_pstatic.set_facecolor('#ffffff')", "self.canvas_pstatic.figure.add_subplot(111) self.setLayout(vertical_layout) self.canvas_pstatic.axes_pstatic.set_xticks([]) self.canvas_pstatic.axes_pstatic.set_yticks([]) self.canvas_pstatic.axes_pstatic.axis('off') self.fig_pstatic.subplots_adjust(left=0.12, bottom=0.15, right=0.985, top=0.95) self.toolbar", "from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar class PstaticWidget(QWidget): def __init__(self,", "* from matplotlib.backends.backend_qt5agg import FigureCanvas from matplotlib.figure import Figure from", "PyQt5.QtWidgets import * from matplotlib.backends.backend_qt5agg import FigureCanvas from matplotlib.figure import", "Figure from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar class PstaticWidget(QWidget): def", "self.fig_pstatic = Figure() self.fig_pstatic.set_facecolor('#ffffff') self.canvas_pstatic = FigureCanvas(self.fig_pstatic) vertical_layout = QVBoxLayout()", "Figure() 
self.fig_pstatic.set_facecolor('#ffffff') self.canvas_pstatic = FigureCanvas(self.fig_pstatic) vertical_layout = QVBoxLayout() vertical_layout.addWidget(self.canvas_pstatic) self.canvas_pstatic.axes_pstatic", "vertical_layout = QVBoxLayout() vertical_layout.addWidget(self.canvas_pstatic) self.canvas_pstatic.axes_pstatic = self.canvas_pstatic.figure.add_subplot(111) self.setLayout(vertical_layout) self.canvas_pstatic.axes_pstatic.set_xticks([]) self.canvas_pstatic.axes_pstatic.set_yticks([])", "NavigationToolbar class PstaticWidget(QWidget): def __init__(self, parent=None): QWidget.__init__(self, parent) self.fig_pstatic =", "from matplotlib.backends.backend_qt5agg import FigureCanvas from matplotlib.figure import Figure from matplotlib.backends.backend_qt5agg", "class PstaticWidget(QWidget): def __init__(self, parent=None): QWidget.__init__(self, parent) self.fig_pstatic = Figure()", "__init__(self, parent=None): QWidget.__init__(self, parent) self.fig_pstatic = Figure() self.fig_pstatic.set_facecolor('#ffffff') self.canvas_pstatic =", "import * from matplotlib.backends.backend_qt5agg import FigureCanvas from matplotlib.figure import Figure", "matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar class PstaticWidget(QWidget): def __init__(self, parent=None):", "parent) self.fig_pstatic = Figure() self.fig_pstatic.set_facecolor('#ffffff') self.canvas_pstatic = FigureCanvas(self.fig_pstatic) vertical_layout =", "self.fig_pstatic.set_facecolor('#ffffff') self.canvas_pstatic = FigureCanvas(self.fig_pstatic) vertical_layout = QVBoxLayout() vertical_layout.addWidget(self.canvas_pstatic) self.canvas_pstatic.axes_pstatic =", "self.canvas_pstatic = FigureCanvas(self.fig_pstatic) vertical_layout = QVBoxLayout() vertical_layout.addWidget(self.canvas_pstatic) self.canvas_pstatic.axes_pstatic = self.canvas_pstatic.figure.add_subplot(111)", "import FigureCanvas from matplotlib.figure import Figure from matplotlib.backends.backend_qt5agg import 
NavigationToolbar2QT", "self.canvas_pstatic.axes_pstatic = self.canvas_pstatic.figure.add_subplot(111) self.setLayout(vertical_layout) self.canvas_pstatic.axes_pstatic.set_xticks([]) self.canvas_pstatic.axes_pstatic.set_yticks([]) self.canvas_pstatic.axes_pstatic.axis('off') self.fig_pstatic.subplots_adjust(left=0.12, bottom=0.15, right=0.985,", "import Figure from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar class PstaticWidget(QWidget):" ]
[ "np from pymoo.core.algorithm import Algorithm from pymoo.core.population import Population from", "val in range(problem.xl[k], problem.xu[k] + 1): alloc.set(k, val) rec_exhaustively(problem, alloc,", "def _next(self): solutions = exhaustively(self.problem) self.pop = Population.new(X=np.array([x for x,", "from pymoo.util.termination.no_termination import NoTermination from pyallocation.allocation import FastAllocation from pyallocation.problem", "f = np.copy(alloc.x), alloc.CV, (alloc.F * problem.w).sum() sols.append((x, cv, f))", "sols.sort(key=lambda x: (x[1], x[2])) return sols[:100] def rec_exhaustively(problem, alloc, k,", "from pymoo.core.algorithm import Algorithm from pymoo.core.population import Population from pymoo.util.termination.no_termination", "len(sols) > 1000: sols.sort(key=lambda x: (x[1], x[2])) while len(sols) >", "+ 1, sols) alloc.set(k, -1) class ExhaustiveAlgorithm(Algorithm): def __init__(self, **kwargs):", "= [] rec_exhaustively(problem, alloc, k, sols) sols.sort(key=lambda x: (x[1], x[2]))", "def exhaustively(problem): alloc = FastAllocation(problem, debug=False) k = 0 sols", "(x[1], x[2])) return sols[:100] def rec_exhaustively(problem, alloc, k, sols): if", "> 100: sols.pop() else: for val in range(problem.xl[k], problem.xu[k] +", "import FastAllocation from pyallocation.problem import AllocationProblem def exhaustively(problem): alloc =", "pymoo.util.termination.no_termination import NoTermination from pyallocation.allocation import FastAllocation from pyallocation.problem import", "debug=False) k = 0 sols = [] rec_exhaustively(problem, alloc, k,", "problem.xu[k] + 1): alloc.set(k, val) rec_exhaustively(problem, alloc, k + 1,", "> 1000: sols.sort(key=lambda x: (x[1], x[2])) while len(sols) > 100:", "exhaustively(problem): alloc = FastAllocation(problem, debug=False) k = 0 sols =", "[] rec_exhaustively(problem, alloc, k, sols) sols.sort(key=lambda x: (x[1], x[2])) return", "= 0 sols = [] rec_exhaustively(problem, alloc, k, sols) 
sols.sort(key=lambda", "rec_exhaustively(problem, alloc, k, sols) sols.sort(key=lambda x: (x[1], x[2])) return sols[:100]", "cv, f)) if len(sols) > 1000: sols.sort(key=lambda x: (x[1], x[2]))", "setup(self, problem, **kwargs): super().setup(problem, **kwargs) assert isinstance(problem, AllocationProblem) return self", "pyallocation.problem import AllocationProblem def exhaustively(problem): alloc = FastAllocation(problem, debug=False) k", "alloc.set(k, -1) class ExhaustiveAlgorithm(Algorithm): def __init__(self, **kwargs): super().__init__(**kwargs) self.default_termination =", "isinstance(problem, AllocationProblem) return self def _initialize(self): self._next() def _next(self): solutions", "alloc.CV, (alloc.F * problem.w).sum() sols.append((x, cv, f)) if len(sols) >", "= np.copy(alloc.x), alloc.CV, (alloc.F * problem.w).sum() sols.append((x, cv, f)) if", "for x, _, _ in solutions])) self.evaluator.eval(self.problem, self.pop) for ind", "import AllocationProblem def exhaustively(problem): alloc = FastAllocation(problem, debug=False) k =", "return if k == problem.n_var: x, cv, f = np.copy(alloc.x),", "Population from pymoo.util.termination.no_termination import NoTermination from pyallocation.allocation import FastAllocation from", "np.copy(alloc.x), alloc.CV, (alloc.F * problem.w).sum() sols.append((x, cv, f)) if len(sols)", "ExhaustiveAlgorithm(Algorithm): def __init__(self, **kwargs): super().__init__(**kwargs) self.default_termination = NoTermination() def setup(self,", "sols[:100] def rec_exhaustively(problem, alloc, k, sols): if not alloc.feas: return", "self.default_termination = NoTermination() def setup(self, problem, **kwargs): super().setup(problem, **kwargs) assert", "val) rec_exhaustively(problem, alloc, k + 1, sols) alloc.set(k, -1) class", "in solutions])) self.evaluator.eval(self.problem, self.pop) for ind in self.pop: print(ind.F[0], ind.X)", "Population.new(X=np.array([x for x, _, _ in solutions])) self.evaluator.eval(self.problem, self.pop) 
for", "if len(sols) > 1000: sols.sort(key=lambda x: (x[1], x[2])) while len(sols)", "return sols[:100] def rec_exhaustively(problem, alloc, k, sols): if not alloc.feas:", "alloc, k, sols) sols.sort(key=lambda x: (x[1], x[2])) return sols[:100] def", "x: (x[1], x[2])) while len(sols) > 100: sols.pop() else: for", "from pyallocation.allocation import FastAllocation from pyallocation.problem import AllocationProblem def exhaustively(problem):", "if not alloc.feas: return if k == problem.n_var: x, cv,", "alloc.feas: return if k == problem.n_var: x, cv, f =", "from pymoo.core.population import Population from pymoo.util.termination.no_termination import NoTermination from pyallocation.allocation", "Algorithm from pymoo.core.population import Population from pymoo.util.termination.no_termination import NoTermination from", "def _initialize(self): self._next() def _next(self): solutions = exhaustively(self.problem) self.pop =", "sols.pop() else: for val in range(problem.xl[k], problem.xu[k] + 1): alloc.set(k,", "+ 1): alloc.set(k, val) rec_exhaustively(problem, alloc, k + 1, sols)", "1): alloc.set(k, val) rec_exhaustively(problem, alloc, k + 1, sols) alloc.set(k,", "(x[1], x[2])) while len(sols) > 100: sols.pop() else: for val", "import NoTermination from pyallocation.allocation import FastAllocation from pyallocation.problem import AllocationProblem", "k == problem.n_var: x, cv, f = np.copy(alloc.x), alloc.CV, (alloc.F", "== problem.n_var: x, cv, f = np.copy(alloc.x), alloc.CV, (alloc.F *", "* problem.w).sum() sols.append((x, cv, f)) if len(sols) > 1000: sols.sort(key=lambda", "x, _, _ in solutions])) self.evaluator.eval(self.problem, self.pop) for ind in", "len(sols) > 100: sols.pop() else: for val in range(problem.xl[k], problem.xu[k]", "x: (x[1], x[2])) return sols[:100] def rec_exhaustively(problem, alloc, k, sols):", "alloc, k, sols): if not alloc.feas: return if k ==", "self.evaluator.eval(self.problem, self.pop) for ind in self.pop: print(ind.F[0], ind.X) 
self.termination.force_termination =", "import numpy as np from pymoo.core.algorithm import Algorithm from pymoo.core.population", "solutions = exhaustively(self.problem) self.pop = Population.new(X=np.array([x for x, _, _", "<gh_stars>0 import numpy as np from pymoo.core.algorithm import Algorithm from", "x[2])) while len(sols) > 100: sols.pop() else: for val in", "**kwargs) assert isinstance(problem, AllocationProblem) return self def _initialize(self): self._next() def", "self def _initialize(self): self._next() def _next(self): solutions = exhaustively(self.problem) self.pop", "k + 1, sols) alloc.set(k, -1) class ExhaustiveAlgorithm(Algorithm): def __init__(self,", "pyallocation.allocation import FastAllocation from pyallocation.problem import AllocationProblem def exhaustively(problem): alloc", "AllocationProblem def exhaustively(problem): alloc = FastAllocation(problem, debug=False) k = 0", "pymoo.core.population import Population from pymoo.util.termination.no_termination import NoTermination from pyallocation.allocation import", "not alloc.feas: return if k == problem.n_var: x, cv, f", "class ExhaustiveAlgorithm(Algorithm): def __init__(self, **kwargs): super().__init__(**kwargs) self.default_termination = NoTermination() def", "= FastAllocation(problem, debug=False) k = 0 sols = [] rec_exhaustively(problem,", "alloc = FastAllocation(problem, debug=False) k = 0 sols = []", "f)) if len(sols) > 1000: sols.sort(key=lambda x: (x[1], x[2])) while", "return self def _initialize(self): self._next() def _next(self): solutions = exhaustively(self.problem)", "0 sols = [] rec_exhaustively(problem, alloc, k, sols) sols.sort(key=lambda x:", "alloc.set(k, val) rec_exhaustively(problem, alloc, k + 1, sols) alloc.set(k, -1)", "alloc, k + 1, sols) alloc.set(k, -1) class ExhaustiveAlgorithm(Algorithm): def", "cv, f = np.copy(alloc.x), alloc.CV, (alloc.F * problem.w).sum() sols.append((x, cv,", "**kwargs): super().setup(problem, **kwargs) assert isinstance(problem, 
AllocationProblem) return self def _initialize(self):", "super().setup(problem, **kwargs) assert isinstance(problem, AllocationProblem) return self def _initialize(self): self._next()", "= exhaustively(self.problem) self.pop = Population.new(X=np.array([x for x, _, _ in", "x, cv, f = np.copy(alloc.x), alloc.CV, (alloc.F * problem.w).sum() sols.append((x,", "x[2])) return sols[:100] def rec_exhaustively(problem, alloc, k, sols): if not", "in range(problem.xl[k], problem.xu[k] + 1): alloc.set(k, val) rec_exhaustively(problem, alloc, k", "1, sols) alloc.set(k, -1) class ExhaustiveAlgorithm(Algorithm): def __init__(self, **kwargs): super().__init__(**kwargs)", "sols) alloc.set(k, -1) class ExhaustiveAlgorithm(Algorithm): def __init__(self, **kwargs): super().__init__(**kwargs) self.default_termination", "problem.w).sum() sols.append((x, cv, f)) if len(sols) > 1000: sols.sort(key=lambda x:", "_next(self): solutions = exhaustively(self.problem) self.pop = Population.new(X=np.array([x for x, _,", "NoTermination from pyallocation.allocation import FastAllocation from pyallocation.problem import AllocationProblem def", "(alloc.F * problem.w).sum() sols.append((x, cv, f)) if len(sols) > 1000:", "super().__init__(**kwargs) self.default_termination = NoTermination() def setup(self, problem, **kwargs): super().setup(problem, **kwargs)", "_initialize(self): self._next() def _next(self): solutions = exhaustively(self.problem) self.pop = Population.new(X=np.array([x", "FastAllocation from pyallocation.problem import AllocationProblem def exhaustively(problem): alloc = FastAllocation(problem,", "sols.append((x, cv, f)) if len(sols) > 1000: sols.sort(key=lambda x: (x[1],", "self.pop) for ind in self.pop: print(ind.F[0], ind.X) self.termination.force_termination = True", "def __init__(self, **kwargs): super().__init__(**kwargs) self.default_termination = NoTermination() def setup(self, problem,", "1000: sols.sort(key=lambda x: (x[1], x[2])) while len(sols) > 100: sols.pop()", "k = 0 
sols = [] rec_exhaustively(problem, alloc, k, sols)", "def setup(self, problem, **kwargs): super().setup(problem, **kwargs) assert isinstance(problem, AllocationProblem) return", "else: for val in range(problem.xl[k], problem.xu[k] + 1): alloc.set(k, val)", "for val in range(problem.xl[k], problem.xu[k] + 1): alloc.set(k, val) rec_exhaustively(problem,", "rec_exhaustively(problem, alloc, k, sols): if not alloc.feas: return if k", "if k == problem.n_var: x, cv, f = np.copy(alloc.x), alloc.CV,", "numpy as np from pymoo.core.algorithm import Algorithm from pymoo.core.population import", "**kwargs): super().__init__(**kwargs) self.default_termination = NoTermination() def setup(self, problem, **kwargs): super().setup(problem,", "assert isinstance(problem, AllocationProblem) return self def _initialize(self): self._next() def _next(self):", "_ in solutions])) self.evaluator.eval(self.problem, self.pop) for ind in self.pop: print(ind.F[0],", "import Population from pymoo.util.termination.no_termination import NoTermination from pyallocation.allocation import FastAllocation", "import Algorithm from pymoo.core.population import Population from pymoo.util.termination.no_termination import NoTermination", "while len(sols) > 100: sols.pop() else: for val in range(problem.xl[k],", "as np from pymoo.core.algorithm import Algorithm from pymoo.core.population import Population", "exhaustively(self.problem) self.pop = Population.new(X=np.array([x for x, _, _ in solutions]))", "FastAllocation(problem, debug=False) k = 0 sols = [] rec_exhaustively(problem, alloc,", "from pyallocation.problem import AllocationProblem def exhaustively(problem): alloc = FastAllocation(problem, debug=False)", "NoTermination() def setup(self, problem, **kwargs): super().setup(problem, **kwargs) assert isinstance(problem, AllocationProblem)", "k, sols): if not alloc.feas: return if k == problem.n_var:", "self._next() def _next(self): solutions = exhaustively(self.problem) self.pop = 
Population.new(X=np.array([x for", "sols): if not alloc.feas: return if k == problem.n_var: x,", "= NoTermination() def setup(self, problem, **kwargs): super().setup(problem, **kwargs) assert isinstance(problem,", "_, _ in solutions])) self.evaluator.eval(self.problem, self.pop) for ind in self.pop:", "rec_exhaustively(problem, alloc, k + 1, sols) alloc.set(k, -1) class ExhaustiveAlgorithm(Algorithm):", "AllocationProblem) return self def _initialize(self): self._next() def _next(self): solutions =", "range(problem.xl[k], problem.xu[k] + 1): alloc.set(k, val) rec_exhaustively(problem, alloc, k +", "-1) class ExhaustiveAlgorithm(Algorithm): def __init__(self, **kwargs): super().__init__(**kwargs) self.default_termination = NoTermination()", "k, sols) sols.sort(key=lambda x: (x[1], x[2])) return sols[:100] def rec_exhaustively(problem,", "problem.n_var: x, cv, f = np.copy(alloc.x), alloc.CV, (alloc.F * problem.w).sum()", "sols = [] rec_exhaustively(problem, alloc, k, sols) sols.sort(key=lambda x: (x[1],", "100: sols.pop() else: for val in range(problem.xl[k], problem.xu[k] + 1):", "__init__(self, **kwargs): super().__init__(**kwargs) self.default_termination = NoTermination() def setup(self, problem, **kwargs):", "pymoo.core.algorithm import Algorithm from pymoo.core.population import Population from pymoo.util.termination.no_termination import", "sols) sols.sort(key=lambda x: (x[1], x[2])) return sols[:100] def rec_exhaustively(problem, alloc,", "self.pop = Population.new(X=np.array([x for x, _, _ in solutions])) self.evaluator.eval(self.problem,", "sols.sort(key=lambda x: (x[1], x[2])) while len(sols) > 100: sols.pop() else:", "problem, **kwargs): super().setup(problem, **kwargs) assert isinstance(problem, AllocationProblem) return self def", "= Population.new(X=np.array([x for x, _, _ in solutions])) self.evaluator.eval(self.problem, self.pop)", "solutions])) self.evaluator.eval(self.problem, self.pop) for ind in self.pop: print(ind.F[0], ind.X) 
self.termination.force_termination", "def rec_exhaustively(problem, alloc, k, sols): if not alloc.feas: return if" ]
[ "False SQLALCHEMY_ECHO = True class Development(Config): ENV = 'development' DEBUG", "True class Development(Config): ENV = 'development' DEBUG = True TESTING", "class Config: CSRF_ENABLED = True SECRET_KEY = 'your-very-very-secret-key' SQLALCHEMY_DATABASE_URI =", "True SECRET_KEY = 'your-very-very-secret-key' SQLALCHEMY_DATABASE_URI = 'postgresql:///flask_template_dev' SQLALCHEMY_TRACK_MODIFICATIONS = False", "= True class Development(Config): ENV = 'development' DEBUG = True", "= 'development' DEBUG = True TESTING = False class Production(Config):", "TESTING = False class Production(Config): ENV = 'production' DEBUG =", "False class Production(Config): ENV = 'production' DEBUG = False SQLALCHEMY_DATABASE_URI", "ENV = 'production' DEBUG = False SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL', 'postgres://firhokdcdnfygz:93231d3f2ae1156cabfc40f7e4ba08587a77f68a5e2072fbcbbdb30150ba4bcb@ec2-107-22-253-158.compute-1.amazonaws.com:5432/df9c5vvl0s21da')", "SQLALCHEMY_DATABASE_URI = 'postgresql:///flask_template_dev' SQLALCHEMY_TRACK_MODIFICATIONS = False SQLALCHEMY_ECHO = True class", "DEBUG = True TESTING = False class Production(Config): ENV =", "class Production(Config): ENV = 'production' DEBUG = False SQLALCHEMY_DATABASE_URI =", "'development' DEBUG = True TESTING = False class Production(Config): ENV", "SECRET_KEY = 'your-very-very-secret-key' SQLALCHEMY_DATABASE_URI = 'postgresql:///flask_template_dev' SQLALCHEMY_TRACK_MODIFICATIONS = False SQLALCHEMY_ECHO", "class Development(Config): ENV = 'development' DEBUG = True TESTING =", "= True SECRET_KEY = 'your-very-very-secret-key' SQLALCHEMY_DATABASE_URI = 'postgresql:///flask_template_dev' SQLALCHEMY_TRACK_MODIFICATIONS =", "= 'your-very-very-secret-key' SQLALCHEMY_DATABASE_URI = 'postgresql:///flask_template_dev' SQLALCHEMY_TRACK_MODIFICATIONS = False SQLALCHEMY_ECHO =", "= False class Production(Config): ENV = 'production' DEBUG = False", "ENV = 'development' DEBUG = True TESTING = False class", "= False 
SQLALCHEMY_ECHO = True class Development(Config): ENV = 'development'", "'postgresql:///flask_template_dev' SQLALCHEMY_TRACK_MODIFICATIONS = False SQLALCHEMY_ECHO = True class Development(Config): ENV", "= True TESTING = False class Production(Config): ENV = 'production'", "SQLALCHEMY_ECHO = True class Development(Config): ENV = 'development' DEBUG =", "'your-very-very-secret-key' SQLALCHEMY_DATABASE_URI = 'postgresql:///flask_template_dev' SQLALCHEMY_TRACK_MODIFICATIONS = False SQLALCHEMY_ECHO = True", "True TESTING = False class Production(Config): ENV = 'production' DEBUG", "os class Config: CSRF_ENABLED = True SECRET_KEY = 'your-very-very-secret-key' SQLALCHEMY_DATABASE_URI", "= 'postgresql:///flask_template_dev' SQLALCHEMY_TRACK_MODIFICATIONS = False SQLALCHEMY_ECHO = True class Development(Config):", "Development(Config): ENV = 'development' DEBUG = True TESTING = False", "SQLALCHEMY_TRACK_MODIFICATIONS = False SQLALCHEMY_ECHO = True class Development(Config): ENV =", "import os class Config: CSRF_ENABLED = True SECRET_KEY = 'your-very-very-secret-key'", "Config: CSRF_ENABLED = True SECRET_KEY = 'your-very-very-secret-key' SQLALCHEMY_DATABASE_URI = 'postgresql:///flask_template_dev'", "Production(Config): ENV = 'production' DEBUG = False SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL',", "CSRF_ENABLED = True SECRET_KEY = 'your-very-very-secret-key' SQLALCHEMY_DATABASE_URI = 'postgresql:///flask_template_dev' SQLALCHEMY_TRACK_MODIFICATIONS" ]
[ "if key == rpc_api.STACK_ID: yield ('id', value['stack_id']) yield ('links', [util.make_link(req,", "return dict(itertools.chain.from_iterable( transform(k, v) for k, v in stack.items())) def", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR", "stacks] result = {'stacks': formatted_stacks} links = views_common.get_collection_links(req, formatted_stacks) if", "may obtain # a copy of the License at #", "for k, v in stack.items())) def collection(req, stacks, count=None, include_project=False):", "# # Licensed under the Apache License, Version 2.0 (the", "split of state into action/status yield (key, '_'.join((stack[rpc_api.STACK_ACTION], value))) else:", "agreed to in writing, software # distributed under the License", "util from heat.api.openstack.v1.views import views_common from heat.rpc import api as", "from heat.rpc import api as rpc_api _collection_name = 'stacks' basic_keys", "Unless required by applicable law or agreed to in writing,", "join RES_ACTION # and RES_STATUS, so the API format doesn't", "api as rpc_api _collection_name = 'stacks' basic_keys = ( rpc_api.STACK_ID,", "(key, value) return dict(itertools.chain.from_iterable( transform(k, v) for k, v in", "under the License. 
import itertools from heat.api.openstack.v1 import util from", "distributed under the License is distributed on an \"AS IS\"", "value): if keys and key not in keys: return if", "dict(itertools.chain.from_iterable( transform(k, v) for k, v in stack.items())) def collection(req,", "= links if count is not None: result['count'] = count", "# return key, json.dumps(value) yield (key, value) return dict(itertools.chain.from_iterable( transform(k,", "rpc_api.STACK_STATUS_DATA, rpc_api.STACK_CREATION_TIME, rpc_api.STACK_DELETION_TIME, rpc_api.STACK_UPDATED_TIME, rpc_api.STACK_OWNER, rpc_api.STACK_PARENT, rpc_api.STACK_USER_PROJECT_ID, rpc_api.STACK_TAGS, ) def", "TODO(zaneb): ensure parameters can be formatted for XML # elif", "License, Version 2.0 (the \"License\"); you may # not use", "CONDITIONS OF ANY KIND, either express or implied. See the", ") def format_stack(req, stack, keys=None, include_project=False): def transform(key, value): if", "stack): # To avoid breaking API compatibility, we join RES_ACTION", "RES_STATUS, so the API format doesn't expose the # internal", "== rpc_api.STACK_STATUS and rpc_api.STACK_ACTION in stack): # To avoid breaking", "rpc_api.STACK_DESCRIPTION, rpc_api.STACK_STATUS, rpc_api.STACK_STATUS_DATA, rpc_api.STACK_CREATION_TIME, rpc_api.STACK_DELETION_TIME, rpc_api.STACK_UPDATED_TIME, rpc_api.STACK_OWNER, rpc_api.STACK_PARENT, rpc_api.STACK_USER_PROJECT_ID, rpc_api.STACK_TAGS,", "keys=None, include_project=False): def transform(key, value): if keys and key not", "obtain # a copy of the License at # #", "import views_common from heat.rpc import api as rpc_api _collection_name =", "and key not in keys: return if key == rpc_api.STACK_ID:", "applicable law or agreed to in writing, software # distributed", "the # internal split of state into action/status yield (key,", "collection(req, stacks, count=None, include_project=False): keys = basic_keys formatted_stacks = [format_stack(req,", "{'stacks': formatted_stacks} links = 
views_common.get_collection_links(req, formatted_stacks) if links: result['links'] =", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "import itertools from heat.api.openstack.v1 import util from heat.api.openstack.v1.views import views_common", "Version 2.0 (the \"License\"); you may # not use this", "== rpc_api.STACK_ACTION: return elif (key == rpc_api.STACK_STATUS and rpc_api.STACK_ACTION in", "specific language governing permissions and limitations # under the License.", "To avoid breaking API compatibility, we join RES_ACTION # and", "yield ('links', [util.make_link(req, value)]) if include_project: yield ('project', value['tenant']) elif", "if include_project: yield ('project', value['tenant']) elif key == rpc_api.STACK_ACTION: return", "'stacks' basic_keys = ( rpc_api.STACK_ID, rpc_api.STACK_NAME, rpc_api.STACK_DESCRIPTION, rpc_api.STACK_STATUS, rpc_api.STACK_STATUS_DATA, rpc_api.STACK_CREATION_TIME,", "# not use this file except in compliance with the", "not use this file except in compliance with the License.", "OF ANY KIND, either express or implied. 
See the #", "rpc_api.STACK_ID: yield ('id', value['stack_id']) yield ('links', [util.make_link(req, value)]) if include_project:", "include_project=False): keys = basic_keys formatted_stacks = [format_stack(req, s, keys, include_project)", "if links: result['links'] = links if count is not None:", "rpc_api.STACK_STATUS, rpc_api.STACK_STATUS_DATA, rpc_api.STACK_CREATION_TIME, rpc_api.STACK_DELETION_TIME, rpc_api.STACK_UPDATED_TIME, rpc_api.STACK_OWNER, rpc_api.STACK_PARENT, rpc_api.STACK_USER_PROJECT_ID, rpc_api.STACK_TAGS, )", "value['stack_id']) yield ('links', [util.make_link(req, value)]) if include_project: yield ('project', value['tenant'])", "writing, software # distributed under the License is distributed on", "the API format doesn't expose the # internal split of", "WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express", "in writing, software # distributed under the License is distributed", "('project', value['tenant']) elif key == rpc_api.STACK_ACTION: return elif (key ==", "value))) else: # TODO(zaneb): ensure parameters can be formatted for", "count=None, include_project=False): keys = basic_keys formatted_stacks = [format_stack(req, s, keys,", "in compliance with the License. You may obtain # a", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "License for the specific language governing permissions and limitations #", "return elif (key == rpc_api.STACK_STATUS and rpc_api.STACK_ACTION in stack): #", "the License. You may obtain # a copy of the", "yield (key, value) return dict(itertools.chain.from_iterable( transform(k, v) for k, v", "so the API format doesn't expose the # internal split", "an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF", "on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS", "use this file except in compliance with the License. 
You", "(key, '_'.join((stack[rpc_api.STACK_ACTION], value))) else: # TODO(zaneb): ensure parameters can be", "You may obtain # a copy of the License at", "basic_keys formatted_stacks = [format_stack(req, s, keys, include_project) for s in", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "in stacks] result = {'stacks': formatted_stacks} links = views_common.get_collection_links(req, formatted_stacks)", "result = {'stacks': formatted_stacks} links = views_common.get_collection_links(req, formatted_stacks) if links:", "def format_stack(req, stack, keys=None, include_project=False): def transform(key, value): if keys", "rpc_api.STACK_TAGS, ) def format_stack(req, stack, keys=None, include_project=False): def transform(key, value):", "elif key == rpc_api.STACK_PARAMETERS: # return key, json.dumps(value) yield (key,", "(key == rpc_api.STACK_STATUS and rpc_api.STACK_ACTION in stack): # To avoid", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "basic_keys = ( rpc_api.STACK_ID, rpc_api.STACK_NAME, rpc_api.STACK_DESCRIPTION, rpc_api.STACK_STATUS, rpc_api.STACK_STATUS_DATA, rpc_api.STACK_CREATION_TIME, rpc_api.STACK_DELETION_TIME,", "yield ('project', value['tenant']) elif key == rpc_api.STACK_ACTION: return elif (key", "action/status yield (key, '_'.join((stack[rpc_api.STACK_ACTION], value))) else: # TODO(zaneb): ensure parameters", "yield ('id', value['stack_id']) yield ('links', [util.make_link(req, value)]) if include_project: yield", "s in stacks] result = {'stacks': formatted_stacks} links = views_common.get_collection_links(req,", "and RES_STATUS, so the API format doesn't expose the #", "rpc_api.STACK_ID, rpc_api.STACK_NAME, rpc_api.STACK_DESCRIPTION, rpc_api.STACK_STATUS, rpc_api.STACK_STATUS_DATA, rpc_api.STACK_CREATION_TIME, rpc_api.STACK_DELETION_TIME, rpc_api.STACK_UPDATED_TIME, rpc_api.STACK_OWNER, rpc_api.STACK_PARENT,", "stacks, count=None, include_project=False): keys = 
basic_keys formatted_stacks = [format_stack(req, s,", "either express or implied. See the # License for the", "key, json.dumps(value) yield (key, value) return dict(itertools.chain.from_iterable( transform(k, v) for", "s, keys, include_project) for s in stacks] result = {'stacks':", "breaking API compatibility, we join RES_ACTION # and RES_STATUS, so", "views_common from heat.rpc import api as rpc_api _collection_name = 'stacks'", "under the License is distributed on an \"AS IS\" BASIS,", "state into action/status yield (key, '_'.join((stack[rpc_api.STACK_ACTION], value))) else: # TODO(zaneb):", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "Licensed under the Apache License, Version 2.0 (the \"License\"); you", "API compatibility, we join RES_ACTION # and RES_STATUS, so the", "may # not use this file except in compliance with", "elif (key == rpc_api.STACK_STATUS and rpc_api.STACK_ACTION in stack): # To", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "import api as rpc_api _collection_name = 'stacks' basic_keys = (", "License. import itertools from heat.api.openstack.v1 import util from heat.api.openstack.v1.views import", "return if key == rpc_api.STACK_ID: yield ('id', value['stack_id']) yield ('links',", "= {'stacks': formatted_stacks} links = views_common.get_collection_links(req, formatted_stacks) if links: result['links']", "== rpc_api.STACK_PARAMETERS: # return key, json.dumps(value) yield (key, value) return", "License is distributed on an \"AS IS\" BASIS, WITHOUT #", "with the License. You may obtain # a copy of", "KIND, either express or implied. 
See the # License for", "# License for the specific language governing permissions and limitations", "= 'stacks' basic_keys = ( rpc_api.STACK_ID, rpc_api.STACK_NAME, rpc_api.STACK_DESCRIPTION, rpc_api.STACK_STATUS, rpc_api.STACK_STATUS_DATA,", "rpc_api.STACK_OWNER, rpc_api.STACK_PARENT, rpc_api.STACK_USER_PROJECT_ID, rpc_api.STACK_TAGS, ) def format_stack(req, stack, keys=None, include_project=False):", "json.dumps(value) yield (key, value) return dict(itertools.chain.from_iterable( transform(k, v) for k,", "you may # not use this file except in compliance", "from heat.api.openstack.v1 import util from heat.api.openstack.v1.views import views_common from heat.rpc", "\"License\"); you may # not use this file except in", "keys: return if key == rpc_api.STACK_ID: yield ('id', value['stack_id']) yield", "# under the License. import itertools from heat.api.openstack.v1 import util", "IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND,", "formatted for XML # elif key == rpc_api.STACK_PARAMETERS: # return", "stack.items())) def collection(req, stacks, count=None, include_project=False): keys = basic_keys formatted_stacks", "== rpc_api.STACK_ID: yield ('id', value['stack_id']) yield ('links', [util.make_link(req, value)]) if", "= [format_stack(req, s, keys, include_project) for s in stacks] result", "express or implied. See the # License for the specific", "format_stack(req, stack, keys=None, include_project=False): def transform(key, value): if keys and", "this file except in compliance with the License. You may", "return key, json.dumps(value) yield (key, value) return dict(itertools.chain.from_iterable( transform(k, v)", "compliance with the License. 
You may obtain # a copy", "<reponame>noironetworks/heat # # Licensed under the Apache License, Version 2.0", "the Apache License, Version 2.0 (the \"License\"); you may #", "def collection(req, stacks, count=None, include_project=False): keys = basic_keys formatted_stacks =", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "rpc_api _collection_name = 'stacks' basic_keys = ( rpc_api.STACK_ID, rpc_api.STACK_NAME, rpc_api.STACK_DESCRIPTION,", "can be formatted for XML # elif key == rpc_api.STACK_PARAMETERS:", "# WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "the License. import itertools from heat.api.openstack.v1 import util from heat.api.openstack.v1.views", "key == rpc_api.STACK_ID: yield ('id', value['stack_id']) yield ('links', [util.make_link(req, value)])", "# and RES_STATUS, so the API format doesn't expose the", "See the # License for the specific language governing permissions", "# TODO(zaneb): ensure parameters can be formatted for XML #", "ensure parameters can be formatted for XML # elif key", "v in stack.items())) def collection(req, stacks, count=None, include_project=False): keys =", "software # distributed under the License is distributed on an", "(the \"License\"); you may # not use this file except", "links if count is not None: result['count'] = count return", "parameters can be formatted for XML # elif key ==", "# internal split of state into action/status yield (key, '_'.join((stack[rpc_api.STACK_ACTION],", "rpc_api.STACK_ACTION: return elif (key == rpc_api.STACK_STATUS and rpc_api.STACK_ACTION in stack):", "# To avoid breaking API compatibility, we join RES_ACTION #", "the License is distributed on an \"AS IS\" BASIS, WITHOUT", "v) for k, v in stack.items())) def collection(req, stacks, count=None,", "rpc_api.STACK_PARAMETERS: # return key, json.dumps(value) yield (key, value) return dict(itertools.chain.from_iterable(", "the # License for the specific language governing permissions and", "def 
transform(key, value): if keys and key not in keys:", "# a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "# # Unless required by applicable law or agreed to", "include_project) for s in stacks] result = {'stacks': formatted_stacks} links", "rpc_api.STACK_USER_PROJECT_ID, rpc_api.STACK_TAGS, ) def format_stack(req, stack, keys=None, include_project=False): def transform(key,", "if keys and key not in keys: return if key", "of state into action/status yield (key, '_'.join((stack[rpc_api.STACK_ACTION], value))) else: #", "governing permissions and limitations # under the License. import itertools", "in stack): # To avoid breaking API compatibility, we join", "key == rpc_api.STACK_ACTION: return elif (key == rpc_api.STACK_STATUS and rpc_api.STACK_ACTION", "into action/status yield (key, '_'.join((stack[rpc_api.STACK_ACTION], value))) else: # TODO(zaneb): ensure", "'_'.join((stack[rpc_api.STACK_ACTION], value))) else: # TODO(zaneb): ensure parameters can be formatted", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "transform(k, v) for k, v in stack.items())) def collection(req, stacks,", "heat.api.openstack.v1 import util from heat.api.openstack.v1.views import views_common from heat.rpc import", "[format_stack(req, s, keys, include_project) for s in stacks] result =", "rpc_api.STACK_PARENT, rpc_api.STACK_USER_PROJECT_ID, rpc_api.STACK_TAGS, ) def format_stack(req, stack, keys=None, include_project=False): def", "file except in compliance with the License. 
You may obtain", "internal split of state into action/status yield (key, '_'.join((stack[rpc_api.STACK_ACTION], value)))", "avoid breaking API compatibility, we join RES_ACTION # and RES_STATUS,", "= ( rpc_api.STACK_ID, rpc_api.STACK_NAME, rpc_api.STACK_DESCRIPTION, rpc_api.STACK_STATUS, rpc_api.STACK_STATUS_DATA, rpc_api.STACK_CREATION_TIME, rpc_api.STACK_DELETION_TIME, rpc_api.STACK_UPDATED_TIME,", "for the specific language governing permissions and limitations # under", "law or agreed to in writing, software # distributed under", "as rpc_api _collection_name = 'stacks' basic_keys = ( rpc_api.STACK_ID, rpc_api.STACK_NAME,", "value) return dict(itertools.chain.from_iterable( transform(k, v) for k, v in stack.items()))", "views_common.get_collection_links(req, formatted_stacks) if links: result['links'] = links if count is", "OR CONDITIONS OF ANY KIND, either express or implied. See", "the specific language governing permissions and limitations # under the", "formatted_stacks} links = views_common.get_collection_links(req, formatted_stacks) if links: result['links'] = links", "format doesn't expose the # internal split of state into", "keys and key not in keys: return if key ==", "in stack.items())) def collection(req, stacks, count=None, include_project=False): keys = basic_keys", "keys, include_project) for s in stacks] result = {'stacks': formatted_stacks}", "rpc_api.STACK_STATUS and rpc_api.STACK_ACTION in stack): # To avoid breaking API", "if count is not None: result['count'] = count return result", "under the Apache License, Version 2.0 (the \"License\"); you may", "expose the # internal split of state into action/status yield", "except in compliance with the License. You may obtain #", "2.0 (the \"License\"); you may # not use this file", "import util from heat.api.openstack.v1.views import views_common from heat.rpc import api", "implied. 
See the # License for the specific language governing", "else: # TODO(zaneb): ensure parameters can be formatted for XML", "permissions and limitations # under the License. import itertools from", "XML # elif key == rpc_api.STACK_PARAMETERS: # return key, json.dumps(value)", "formatted_stacks = [format_stack(req, s, keys, include_project) for s in stacks]", "include_project=False): def transform(key, value): if keys and key not in", "formatted_stacks) if links: result['links'] = links if count is not", "keys = basic_keys formatted_stacks = [format_stack(req, s, keys, include_project) for", "k, v in stack.items())) def collection(req, stacks, count=None, include_project=False): keys", "('id', value['stack_id']) yield ('links', [util.make_link(req, value)]) if include_project: yield ('project',", "language governing permissions and limitations # under the License. import", "elif key == rpc_api.STACK_ACTION: return elif (key == rpc_api.STACK_STATUS and", "License. You may obtain # a copy of the License", "('links', [util.make_link(req, value)]) if include_project: yield ('project', value['tenant']) elif key", "from heat.api.openstack.v1.views import views_common from heat.rpc import api as rpc_api", "and rpc_api.STACK_ACTION in stack): # To avoid breaking API compatibility,", "rpc_api.STACK_CREATION_TIME, rpc_api.STACK_DELETION_TIME, rpc_api.STACK_UPDATED_TIME, rpc_api.STACK_OWNER, rpc_api.STACK_PARENT, rpc_api.STACK_USER_PROJECT_ID, rpc_api.STACK_TAGS, ) def format_stack(req,", "= views_common.get_collection_links(req, formatted_stacks) if links: result['links'] = links if count", "by applicable law or agreed to in writing, software #", "# distributed under the License is distributed on an \"AS", "ANY KIND, either express or implied. 
See the # License", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "\"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY", "heat.api.openstack.v1.views import views_common from heat.rpc import api as rpc_api _collection_name", "heat.rpc import api as rpc_api _collection_name = 'stacks' basic_keys =", "yield (key, '_'.join((stack[rpc_api.STACK_ACTION], value))) else: # TODO(zaneb): ensure parameters can", "# Unless required by applicable law or agreed to in", "_collection_name = 'stacks' basic_keys = ( rpc_api.STACK_ID, rpc_api.STACK_NAME, rpc_api.STACK_DESCRIPTION, rpc_api.STACK_STATUS,", "links = views_common.get_collection_links(req, formatted_stacks) if links: result['links'] = links if", "links: result['links'] = links if count is not None: result['count']", "= basic_keys formatted_stacks = [format_stack(req, s, keys, include_project) for s", "( rpc_api.STACK_ID, rpc_api.STACK_NAME, rpc_api.STACK_DESCRIPTION, rpc_api.STACK_STATUS, rpc_api.STACK_STATUS_DATA, rpc_api.STACK_CREATION_TIME, rpc_api.STACK_DELETION_TIME, rpc_api.STACK_UPDATED_TIME, rpc_api.STACK_OWNER,", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "value)]) if include_project: yield ('project', value['tenant']) elif key == rpc_api.STACK_ACTION:", "rpc_api.STACK_UPDATED_TIME, rpc_api.STACK_OWNER, rpc_api.STACK_PARENT, rpc_api.STACK_USER_PROJECT_ID, rpc_api.STACK_TAGS, ) def format_stack(req, stack, keys=None,", "# elif key == rpc_api.STACK_PARAMETERS: # return key, json.dumps(value) yield", "to in writing, software # distributed under the License is", "is distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES", "stack, keys=None, include_project=False): def transform(key, value): if keys and key", "value['tenant']) elif key == rpc_api.STACK_ACTION: return elif (key == rpc_api.STACK_STATUS", "compatibility, we join RES_ACTION # and RES_STATUS, so the API", "for s in stacks] result = {'stacks': formatted_stacks} links =", "for XML # 
elif key == rpc_api.STACK_PARAMETERS: # return key,", "BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either", "result['links'] = links if count is not None: result['count'] =", "not in keys: return if key == rpc_api.STACK_ID: yield ('id',", "or agreed to in writing, software # distributed under the", "[util.make_link(req, value)]) if include_project: yield ('project', value['tenant']) elif key ==", "rpc_api.STACK_NAME, rpc_api.STACK_DESCRIPTION, rpc_api.STACK_STATUS, rpc_api.STACK_STATUS_DATA, rpc_api.STACK_CREATION_TIME, rpc_api.STACK_DELETION_TIME, rpc_api.STACK_UPDATED_TIME, rpc_api.STACK_OWNER, rpc_api.STACK_PARENT, rpc_api.STACK_USER_PROJECT_ID,", "required by applicable law or agreed to in writing, software", "and limitations # under the License. import itertools from heat.api.openstack.v1", "be formatted for XML # elif key == rpc_api.STACK_PARAMETERS: #", "doesn't expose the # internal split of state into action/status", "transform(key, value): if keys and key not in keys: return", "API format doesn't expose the # internal split of state", "rpc_api.STACK_DELETION_TIME, rpc_api.STACK_UPDATED_TIME, rpc_api.STACK_OWNER, rpc_api.STACK_PARENT, rpc_api.STACK_USER_PROJECT_ID, rpc_api.STACK_TAGS, ) def format_stack(req, stack,", "include_project: yield ('project', value['tenant']) elif key == rpc_api.STACK_ACTION: return elif", "rpc_api.STACK_ACTION in stack): # To avoid breaking API compatibility, we", "RES_ACTION # and RES_STATUS, so the API format doesn't expose", "key not in keys: return if key == rpc_api.STACK_ID: yield", "key == rpc_api.STACK_PARAMETERS: # return key, json.dumps(value) yield (key, value)", "in keys: return if key == rpc_api.STACK_ID: yield ('id', value['stack_id'])", "itertools from heat.api.openstack.v1 import util from heat.api.openstack.v1.views import views_common from", "limitations # under the License. import itertools from heat.api.openstack.v1 import", "or implied. 
See the # License for the specific language", "Apache License, Version 2.0 (the \"License\"); you may # not", "we join RES_ACTION # and RES_STATUS, so the API format" ]
[ "= intersect(queries) intersections[combo] = q_intersect - assigned_formula #remove any elements", "#remove any elements from q_intersect that have already been assigned", "for function pyKrev.find_intersections ==================== This function compares n lists of", "whether you want the intersections to contain only unique values.", "given in samples. Where samples = [set_1,set_2,...,set_n] \"\"\" if len(samples)", "exclusive: True or False, depending on whether you want the", "formula and outputs a dictionary containing the intersections between each", "This command uses recursion to find the intersections between a", "n lists of molecular formula. Each item in the sub", "the query pass else: queries[0] = queries[0] - set_f #delete", "= lambda c : len(c),reverse = True) # sort combinations", "1: if exclusive == True: q_intersect = intersect(queries) intersections[combo] =", "a dictionary containing the intersections between each list. Use ----", "elements from q_intersect that have already been assigned assigned_formula.update(q_intersect) #update", "length if exclusive == True: assigned_formula = set() #create a", "combinations: queries = [] for c in combo: formula =", "assigned assigned_formula.update(q_intersect) #update the assigned_set with q_intersect else: intersections[combo] =", "formula_lists).T amb.columns = group_labels intersections = dict() for combo in", "sorted(combinations,key = lambda c : len(c),reverse = True) # sort", "and the corresponding value is a set containing the intersections", "list(filter(None,amb[c])) #Remove None entries introduced by dataframe queries.append(set(formula)) if len(queries)", "find_intersections([list_1,..,list_n],['group_1',...,'group_n']) Returns a dictionary in which each key corresponds to", "set, so it will not be mutated by changes to", "set_f == q_set: # ignore the set that corresponds to", "combinations = [seq for i in range(0,len(group_labels)+1) for seq in", "already been assigned 
assigned_formula.update(q_intersect) #update the assigned_set with q_intersect else:", "if set_f == q_set: # ignore the set that corresponds", "0] combinations = sorted(combinations,key = lambda c : len(c),reverse =", "in the sub list should be a formula string. group_labels:", "recursion to find the intersections between a variable number of", "for i in range(0,len(group_labels)+1) for seq in itertools.combinations(group_labels,i) if len(seq)", "= True) # sort combinations by length if exclusive ==", "the intersections between each list. Use ---- find_intersections([list_1,..,list_n],['group_1',...,'group_n']) Returns a", "whether the recursion has reached the final element return a", "containing n strings of corresponding group labels. exclusive: True or", "in formula_lists set_f = frozenset(f_list) #convert f_list to sets, must", "else: queries[0] = queries[0] - set_f #delete any repeated elements", "length') combinations = [seq for i in range(0,len(group_labels)+1) for seq", "samples. Where samples = [set_1,set_2,...,set_n] \"\"\" if len(samples) == 1:", "queries.append(set(formula)) if len(queries) == 1: #if there is only one", "intersections = dict() for combo in combinations: queries = []", "to queries[0] for f_list in formula_lists: #cycle all formula in", "the unique elements in it q_set = frozenset(queries[0]) #qset is", "exclusive == True: assigned_formula = set() #create a set that", "in fset intersections[combo] = queries[0] elif len(queries) > 1: if", "outputs a dictionary containing the intersections between each list. 
Use", "query find the unique elements in it q_set = frozenset(queries[0])", "formula = list(filter(None,amb[c])) #Remove None entries introduced by dataframe queries.append(set(formula))", "that have already been assigned assigned_formula.update(q_intersect) #update the assigned_set with", "value is a set containing the intersections between the groups", "#update the assigned_set with q_intersect else: intersections[combo] = intersect(queries) return", "a dictionary in which each key corresponds to a combination", "= samples[counter] b = samples[counter+1::] if len(b) == 1: #check", "a formula string. group_labels: a list containing n strings of", "from q_intersect that have already been assigned assigned_formula.update(q_intersect) #update the", "= [set_1,set_2,...,set_n] \"\"\" if len(samples) == 1: return samples[0] a", "of equal length') combinations = [seq for i in range(0,len(group_labels)+1)", "i in range(0,len(group_labels)+1) for seq in itertools.combinations(group_labels,i) if len(seq) >", "#Remove None entries introduced by dataframe queries.append(set(formula)) if len(queries) ==", "import itertools import numpy as np import pandas as pd", "This function compares n lists of molecular formula and outputs", "by length if exclusive == True: assigned_formula = set() #create", "if len(b) == 1: #check to see whether the recursion", "corresponding group labels. exclusive: True or False, depending on whether", "the sub list should be a formula string. group_labels: a", "\"\"\" if len(formula_lists) != len(group_labels): raise InputError('formula_lists and group_labels must", "combinations by length if exclusive == True: assigned_formula = set()", "= pd.DataFrame(data = formula_lists).T amb.columns = group_labels intersections = dict()", "in that combination. 
Parameters ---------- formula_lists: a list containing n", "= q_intersect - assigned_formula #remove any elements from q_intersect that", "final element return a & b[0] else: counter += 1", "that combination. Parameters ---------- formula_lists: a list containing n lists", "#if there is only one query find the unique elements", "f_list to sets, must be frozen so type matches q_set", "if exclusive == True: assigned_formula = set() #create a set", "sets, must be frozen so type matches q_set if set_f", "a & b[0] else: counter += 1 return a &", "formula_lists: #cycle all formula in formula_lists set_f = frozenset(f_list) #convert", "should be a formula string. group_labels: a list containing n", "equal length') combinations = [seq for i in range(0,len(group_labels)+1) for", "samples[0] a = samples[counter] b = samples[counter+1::] if len(b) ==", "formula. Each item in the sub list should be a", "True: assigned_formula = set() #create a set that will hold", "n strings of corresponding group labels. exclusive: True or False,", "True): \"\"\" Docstring for function pyKrev.find_intersections ==================== This function compares", "see whether the recursion has reached the final element return", "= samples[counter+1::] if len(b) == 1: #check to see whether", "a list containing n lists of molecular formula. Each item", "the set that corresponds to the query pass else: queries[0]", "= [seq for i in range(0,len(group_labels)+1) for seq in itertools.combinations(group_labels,i)", "combinations = sorted(combinations,key = lambda c : len(c),reverse = True)", "molecular formula and outputs a dictionary containing the intersections between", "import numpy as np import pandas as pd def find_intersections(formula_lists,group_labels,exclusive", "between a variable number of sets given in samples. 
Where", "to a combination of group labels and the corresponding value", "None entries introduced by dataframe queries.append(set(formula)) if len(queries) == 1:", "to the query pass else: queries[0] = queries[0] - set_f", "in itertools.combinations(group_labels,i) if len(seq) > 0] combinations = sorted(combinations,key =", "lists of molecular formula. Each item in the sub list", "Docstring for function pyKrev.find_intersections ==================== This function compares n lists", "only unique values. \"\"\" if len(formula_lists) != len(group_labels): raise InputError('formula_lists", "ignore the set that corresponds to the query pass else:", "variable number of sets given in samples. Where samples =", ": len(c),reverse = True) # sort combinations by length if", "intersections[combo] = q_intersect - assigned_formula #remove any elements from q_intersect", "reached the final element return a & b[0] else: counter", "corresponds to the query pass else: queries[0] = queries[0] -", "intersections between the groups in that combination. Parameters ---------- formula_lists:", "string. group_labels: a list containing n strings of corresponding group", "list should be a formula string. group_labels: a list containing", "for combo in combinations: queries = [] for c in", "\"\"\" This command uses recursion to find the intersections between", "entries introduced by dataframe queries.append(set(formula)) if len(queries) == 1: #if", "combination. 
Parameters ---------- formula_lists: a list containing n lists of", "and outputs a dictionary containing the intersections between each list.", "in formula_lists: #cycle all formula in formula_lists set_f = frozenset(f_list)", "intersect(queries) return intersections def intersect(samples,counter=0): \"\"\" This command uses recursion", "frozenset(queries[0]) #qset is a frozen set, so it will not", "q_set = frozenset(queries[0]) #qset is a frozen set, so it", "queries[0] - set_f #delete any repeated elements in fset intersections[combo]", "only one query find the unique elements in it q_set", "find_intersections(formula_lists,group_labels,exclusive = True): \"\"\" Docstring for function pyKrev.find_intersections ==================== This", "lambda c : len(c),reverse = True) # sort combinations by", "---------- formula_lists: a list containing n lists of molecular formula.", "have already been assigned assigned_formula.update(q_intersect) #update the assigned_set with q_intersect", "intersections to contain only unique values. 
\"\"\" if len(formula_lists) !=", "a set that will hold all the formula already assigned", "& b[0] else: counter += 1 return a & intersect(samples,counter)", "intersections[combo] = queries[0] elif len(queries) > 1: if exclusive ==", "mutated by changes to queries[0] for f_list in formula_lists: #cycle", "find the intersections between a variable number of sets given", "def intersect(samples,counter=0): \"\"\" This command uses recursion to find the", "must be frozen so type matches q_set if set_f ==", "so it will not be mutated by changes to queries[0]", "elif len(queries) > 1: if exclusive == True: q_intersect =", "be mutated by changes to queries[0] for f_list in formula_lists:", "as pd def find_intersections(formula_lists,group_labels,exclusive = True): \"\"\" Docstring for function", "that will hold all the formula already assigned to a", "a group amb = pd.DataFrame(data = formula_lists).T amb.columns = group_labels", "the intersections between the groups in that combination. Parameters ----------", "len(c),reverse = True) # sort combinations by length if exclusive", "c in combo: formula = list(filter(None,amb[c])) #Remove None entries introduced", "formula string. group_labels: a list containing n strings of corresponding", "frozen set, so it will not be mutated by changes", "to see whether the recursion has reached the final element", "molecular formula. Each item in the sub list should be", "combo: formula = list(filter(None,amb[c])) #Remove None entries introduced by dataframe", "#check to see whether the recursion has reached the final", "q_intersect else: intersections[combo] = intersect(queries) return intersections def intersect(samples,counter=0): \"\"\"", "groups in that combination. Parameters ---------- formula_lists: a list containing", "for c in combo: formula = list(filter(None,amb[c])) #Remove None entries", "itertools import numpy as np import pandas as pd def", "each list. 
Use ---- find_intersections([list_1,..,list_n],['group_1',...,'group_n']) Returns a dictionary in which", "Each item in the sub list should be a formula", "group_labels intersections = dict() for combo in combinations: queries =", "set that corresponds to the query pass else: queries[0] =", "which each key corresponds to a combination of group labels", "contain only unique values. \"\"\" if len(formula_lists) != len(group_labels): raise", "> 1: if exclusive == True: q_intersect = intersect(queries) intersections[combo]", "1: #if there is only one query find the unique", "to contain only unique values. \"\"\" if len(formula_lists) != len(group_labels):", "seq in itertools.combinations(group_labels,i) if len(seq) > 0] combinations = sorted(combinations,key", "amb = pd.DataFrame(data = formula_lists).T amb.columns = group_labels intersections =", "exclusive == True: q_intersect = intersect(queries) intersections[combo] = q_intersect -", "recursion has reached the final element return a & b[0]", "changes to queries[0] for f_list in formula_lists: #cycle all formula", "formula_lists set_f = frozenset(f_list) #convert f_list to sets, must be", "elements in it q_set = frozenset(queries[0]) #qset is a frozen", "[] for c in combo: formula = list(filter(None,amb[c])) #Remove None", "in combo: formula = list(filter(None,amb[c])) #Remove None entries introduced by", "amb.columns = group_labels intersections = dict() for combo in combinations:", "it will not be mutated by changes to queries[0] for", "that corresponds to the query pass else: queries[0] = queries[0]", "depending on whether you want the intersections to contain only", "must be of equal length') combinations = [seq for i", "any elements from q_intersect that have already been assigned assigned_formula.update(q_intersect)", "and group_labels must be of equal length') combinations = [seq", "in samples. 
Where samples = [set_1,set_2,...,set_n] \"\"\" if len(samples) ==", "return a & b[0] else: counter += 1 return a", "all the formula already assigned to a group amb =", "q_intersect that have already been assigned assigned_formula.update(q_intersect) #update the assigned_set", "or False, depending on whether you want the intersections to", "Returns a dictionary in which each key corresponds to a", "pass else: queries[0] = queries[0] - set_f #delete any repeated", "group_labels must be of equal length') combinations = [seq for", "raise InputError('formula_lists and group_labels must be of equal length') combinations", "pandas as pd def find_intersections(formula_lists,group_labels,exclusive = True): \"\"\" Docstring for", "f_list in formula_lists: #cycle all formula in formula_lists set_f =", "repeated elements in fset intersections[combo] = queries[0] elif len(queries) >", "the corresponding value is a set containing the intersections between", "a frozen set, so it will not be mutated by", "- set_f #delete any repeated elements in fset intersections[combo] =", "#delete any repeated elements in fset intersections[combo] = queries[0] elif", "\"\"\" Docstring for function pyKrev.find_intersections ==================== This function compares n", "of molecular formula and outputs a dictionary containing the intersections", "all formula in formula_lists set_f = frozenset(f_list) #convert f_list to", "= queries[0] elif len(queries) > 1: if exclusive == True:", "queries[0] = queries[0] - set_f #delete any repeated elements in", "np import pandas as pd def find_intersections(formula_lists,group_labels,exclusive = True): \"\"\"", "# ignore the set that corresponds to the query pass", "has reached the final element return a & b[0] else:", "itertools.combinations(group_labels,i) if len(seq) > 0] combinations = sorted(combinations,key = lambda", "the intersections between a variable number of sets given in", "if exclusive == True: q_intersect = intersect(queries) 
intersections[combo] = q_intersect", "else: intersections[combo] = intersect(queries) return intersections def intersect(samples,counter=0): \"\"\" This", "intersections between each list. Use ---- find_intersections([list_1,..,list_n],['group_1',...,'group_n']) Returns a dictionary", "dictionary in which each key corresponds to a combination of", "= set() #create a set that will hold all the", "strings of corresponding group labels. exclusive: True or False, depending", "as np import pandas as pd def find_intersections(formula_lists,group_labels,exclusive = True):", "pyKrev.find_intersections ==================== This function compares n lists of molecular formula", "sort combinations by length if exclusive == True: assigned_formula =", "is a frozen set, so it will not be mutated", "a variable number of sets given in samples. Where samples", "in it q_set = frozenset(queries[0]) #qset is a frozen set,", "= sorted(combinations,key = lambda c : len(c),reverse = True) #", "not be mutated by changes to queries[0] for f_list in", "a list containing n strings of corresponding group labels. exclusive:", "1: return samples[0] a = samples[counter] b = samples[counter+1::] if", "labels. exclusive: True or False, depending on whether you want", "queries = [] for c in combo: formula = list(filter(None,amb[c]))", "between each list. Use ---- find_intersections([list_1,..,list_n],['group_1',...,'group_n']) Returns a dictionary in", "matches q_set if set_f == q_set: # ignore the set", "values. 
\"\"\" if len(formula_lists) != len(group_labels): raise InputError('formula_lists and group_labels", "False, depending on whether you want the intersections to contain", "there is only one query find the unique elements in", "dataframe queries.append(set(formula)) if len(queries) == 1: #if there is only", "set_f #delete any repeated elements in fset intersections[combo] = queries[0]", "len(samples) == 1: return samples[0] a = samples[counter] b =", "len(group_labels): raise InputError('formula_lists and group_labels must be of equal length')", "queries[0] elif len(queries) > 1: if exclusive == True: q_intersect", "!= len(group_labels): raise InputError('formula_lists and group_labels must be of equal", "intersect(queries) intersections[combo] = q_intersect - assigned_formula #remove any elements from", "by dataframe queries.append(set(formula)) if len(queries) == 1: #if there is", "len(seq) > 0] combinations = sorted(combinations,key = lambda c :", "range(0,len(group_labels)+1) for seq in itertools.combinations(group_labels,i) if len(seq) > 0] combinations", "set() #create a set that will hold all the formula", "= group_labels intersections = dict() for combo in combinations: queries", "dict() for combo in combinations: queries = [] for c", "return samples[0] a = samples[counter] b = samples[counter+1::] if len(b)", "number of sets given in samples. 
Where samples = [set_1,set_2,...,set_n]", "so type matches q_set if set_f == q_set: # ignore", "samples[counter] b = samples[counter+1::] if len(b) == 1: #check to", "the formula already assigned to a group amb = pd.DataFrame(data", "q_set if set_f == q_set: # ignore the set that", "to a group amb = pd.DataFrame(data = formula_lists).T amb.columns =", "will hold all the formula already assigned to a group", "if len(samples) == 1: return samples[0] a = samples[counter] b", "== 1: #if there is only one query find the", "return intersections def intersect(samples,counter=0): \"\"\" This command uses recursion to", "query pass else: queries[0] = queries[0] - set_f #delete any", "len(queries) == 1: #if there is only one query find", "a = samples[counter] b = samples[counter+1::] if len(b) == 1:", "[seq for i in range(0,len(group_labels)+1) for seq in itertools.combinations(group_labels,i) if", "True) # sort combinations by length if exclusive == True:", "frozenset(f_list) #convert f_list to sets, must be frozen so type", "sub list should be a formula string. group_labels: a list", "== q_set: # ignore the set that corresponds to the", "#cycle all formula in formula_lists set_f = frozenset(f_list) #convert f_list", "q_intersect = intersect(queries) intersections[combo] = q_intersect - assigned_formula #remove any", "assigned_formula = set() #create a set that will hold all", "list containing n lists of molecular formula. 
Each item in", "= frozenset(f_list) #convert f_list to sets, must be frozen so", "== 1: return samples[0] a = samples[counter] b = samples[counter+1::]", "== 1: #check to see whether the recursion has reached", "frozen so type matches q_set if set_f == q_set: #", "group labels and the corresponding value is a set containing", "numpy as np import pandas as pd def find_intersections(formula_lists,group_labels,exclusive =", "formula in formula_lists set_f = frozenset(f_list) #convert f_list to sets,", "True or False, depending on whether you want the intersections", "#create a set that will hold all the formula already", "be frozen so type matches q_set if set_f == q_set:", "sets given in samples. Where samples = [set_1,set_2,...,set_n] \"\"\" if", "len(b) == 1: #check to see whether the recursion has", "will not be mutated by changes to queries[0] for f_list", "containing the intersections between the groups in that combination. Parameters", "= frozenset(queries[0]) #qset is a frozen set, so it will", "a combination of group labels and the corresponding value is", "= dict() for combo in combinations: queries = [] for", "the recursion has reached the final element return a &", "the final element return a & b[0] else: counter +=", "for f_list in formula_lists: #cycle all formula in formula_lists set_f", "queries[0] for f_list in formula_lists: #cycle all formula in formula_lists", "group_labels: a list containing n strings of corresponding group labels.", "#convert f_list to sets, must be frozen so type matches", "set containing the intersections between the groups in that combination.", "formula already assigned to a group amb = pd.DataFrame(data =", "already assigned to a group amb = pd.DataFrame(data = formula_lists).T", "in combinations: queries = [] for c in combo: formula", "unique values. 
\"\"\" if len(formula_lists) != len(group_labels): raise InputError('formula_lists and", "any repeated elements in fset intersections[combo] = queries[0] elif len(queries)", "= True): \"\"\" Docstring for function pyKrev.find_intersections ==================== This function", "be a formula string. group_labels: a list containing n strings", "it q_set = frozenset(queries[0]) #qset is a frozen set, so", "samples[counter+1::] if len(b) == 1: #check to see whether the", "list. Use ---- find_intersections([list_1,..,list_n],['group_1',...,'group_n']) Returns a dictionary in which each", "set that will hold all the formula already assigned to", "= formula_lists).T amb.columns = group_labels intersections = dict() for combo", "item in the sub list should be a formula string.", "hold all the formula already assigned to a group amb", "= list(filter(None,amb[c])) #Remove None entries introduced by dataframe queries.append(set(formula)) if", "---- find_intersections([list_1,..,list_n],['group_1',...,'group_n']) Returns a dictionary in which each key corresponds", "Parameters ---------- formula_lists: a list containing n lists of molecular", "in range(0,len(group_labels)+1) for seq in itertools.combinations(group_labels,i) if len(seq) > 0]", "Use ---- find_intersections([list_1,..,list_n],['group_1',...,'group_n']) Returns a dictionary in which each key", "formula_lists: a list containing n lists of molecular formula. Each", "to find the intersections between a variable number of sets", "\"\"\" if len(samples) == 1: return samples[0] a = samples[counter]", "of sets given in samples. 
Where samples = [set_1,set_2,...,set_n] \"\"\"", "be of equal length') combinations = [seq for i in", "q_set: # ignore the set that corresponds to the query", "True: q_intersect = intersect(queries) intersections[combo] = q_intersect - assigned_formula #remove", "type matches q_set if set_f == q_set: # ignore the", "is a set containing the intersections between the groups in", "list containing n strings of corresponding group labels. exclusive: True", "combo in combinations: queries = [] for c in combo:", "the intersections to contain only unique values. \"\"\" if len(formula_lists)", "in which each key corresponds to a combination of group", "intersections def intersect(samples,counter=0): \"\"\" This command uses recursion to find", "==================== This function compares n lists of molecular formula and", "group labels. exclusive: True or False, depending on whether you", "group amb = pd.DataFrame(data = formula_lists).T amb.columns = group_labels intersections", "been assigned assigned_formula.update(q_intersect) #update the assigned_set with q_intersect else: intersections[combo]", "element return a & b[0] else: counter += 1 return", "= queries[0] - set_f #delete any repeated elements in fset", "pd.DataFrame(data = formula_lists).T amb.columns = group_labels intersections = dict() for", "if len(queries) == 1: #if there is only one query", "want the intersections to contain only unique values. \"\"\" if", "intersect(samples,counter=0): \"\"\" This command uses recursion to find the intersections", "InputError('formula_lists and group_labels must be of equal length') combinations =", "samples = [set_1,set_2,...,set_n] \"\"\" if len(samples) == 1: return samples[0]", "of corresponding group labels. 
exclusive: True or False, depending on", "combination of group labels and the corresponding value is a", "len(queries) > 1: if exclusive == True: q_intersect = intersect(queries)", "= intersect(queries) return intersections def intersect(samples,counter=0): \"\"\" This command uses", "find the unique elements in it q_set = frozenset(queries[0]) #qset", "function compares n lists of molecular formula and outputs a", "== True: assigned_formula = set() #create a set that will", "q_intersect - assigned_formula #remove any elements from q_intersect that have", "assigned to a group amb = pd.DataFrame(data = formula_lists).T amb.columns", "len(formula_lists) != len(group_labels): raise InputError('formula_lists and group_labels must be of", "the groups in that combination. Parameters ---------- formula_lists: a list", "c : len(c),reverse = True) # sort combinations by length", "containing the intersections between each list. Use ---- find_intersections([list_1,..,list_n],['group_1',...,'group_n']) Returns", "#qset is a frozen set, so it will not be", "labels and the corresponding value is a set containing the", "Where samples = [set_1,set_2,...,set_n] \"\"\" if len(samples) == 1: return", "elements in fset intersections[combo] = queries[0] elif len(queries) > 1:", "key corresponds to a combination of group labels and the", "intersections[combo] = intersect(queries) return intersections def intersect(samples,counter=0): \"\"\" This command", "1: #check to see whether the recursion has reached the", "import pandas as pd def find_intersections(formula_lists,group_labels,exclusive = True): \"\"\" Docstring", "uses recursion to find the intersections between a variable number", "== True: q_intersect = intersect(queries) intersections[combo] = q_intersect - assigned_formula", "command uses recursion to find the intersections between a variable", "= [] for c in combo: formula = list(filter(None,amb[c])) #Remove", "is only one query find the unique elements in it", "unique 
elements in it q_set = frozenset(queries[0]) #qset is a", "the assigned_set with q_intersect else: intersections[combo] = intersect(queries) return intersections", "by changes to queries[0] for f_list in formula_lists: #cycle all", "assigned_formula #remove any elements from q_intersect that have already been", "to sets, must be frozen so type matches q_set if", "you want the intersections to contain only unique values. \"\"\"", "if len(formula_lists) != len(group_labels): raise InputError('formula_lists and group_labels must be", "assigned_set with q_intersect else: intersections[combo] = intersect(queries) return intersections def", "for seq in itertools.combinations(group_labels,i) if len(seq) > 0] combinations =", "b = samples[counter+1::] if len(b) == 1: #check to see", "# sort combinations by length if exclusive == True: assigned_formula", "corresponds to a combination of group labels and the corresponding", "compares n lists of molecular formula and outputs a dictionary", "[set_1,set_2,...,set_n] \"\"\" if len(samples) == 1: return samples[0] a =", "n lists of molecular formula and outputs a dictionary containing", "of molecular formula. Each item in the sub list should", "containing n lists of molecular formula. Each item in the", "assigned_formula.update(q_intersect) #update the assigned_set with q_intersect else: intersections[combo] = intersect(queries)", "between the groups in that combination. 
Parameters ---------- formula_lists: a", "of group labels and the corresponding value is a set", "lists of molecular formula and outputs a dictionary containing the", "set_f = frozenset(f_list) #convert f_list to sets, must be frozen", "a set containing the intersections between the groups in that", "introduced by dataframe queries.append(set(formula)) if len(queries) == 1: #if there", "function pyKrev.find_intersections ==================== This function compares n lists of molecular", "> 0] combinations = sorted(combinations,key = lambda c : len(c),reverse", "pd def find_intersections(formula_lists,group_labels,exclusive = True): \"\"\" Docstring for function pyKrev.find_intersections", "with q_intersect else: intersections[combo] = intersect(queries) return intersections def intersect(samples,counter=0):", "- assigned_formula #remove any elements from q_intersect that have already", "def find_intersections(formula_lists,group_labels,exclusive = True): \"\"\" Docstring for function pyKrev.find_intersections ====================", "intersections between a variable number of sets given in samples.", "fset intersections[combo] = queries[0] elif len(queries) > 1: if exclusive", "on whether you want the intersections to contain only unique", "each key corresponds to a combination of group labels and", "if len(seq) > 0] combinations = sorted(combinations,key = lambda c", "dictionary containing the intersections between each list. Use ---- find_intersections([list_1,..,list_n],['group_1',...,'group_n'])", "corresponding value is a set containing the intersections between the", "one query find the unique elements in it q_set =" ]
[ "tracks without albumn artist or title for track in reversed(range(len(musicDirectory))):", "print('Album Artist',tag.albumartist) print('Title',tag.title) print('Biterate',tag.bitrate) print('music directory'+str(musicDirectory)) print(len(musicDirectory)) currentDirectory =os.path.dirname(__file__) with", "= TinyTag.get(musicDirectory[track]) if str(trackTag.albumartist)== 'None' or str(trackTag.title)=='None': print('albumArtist = none',musicDirectory[track])", "duplicateL.append(musicDirectory[duplicate]) duplicateLBiterate.append(duplicateTag.bitrate) print('dup ',duplicatesList) #remove duplicates from list for u", "trackTag.albumartist == albumArtist or trackTag.albumartist in albumArtist: if trackTag.title ==", "or str(trackTag.title)=='None': print('albumArtist = none',musicDirectory[track]) print('removing track and adding to", "Formula_.m3u', \"r\") as f: content_list = [word.strip() for word in", "= [] f=glob.glob('C:/Users/jchap/OneDrive/*.mp3') print(f) musicDirectory=[] musicFiles =[] # tag =", "open(currentDirectory+'/The_Krabby_Patty Formula_.m3u', \"r\") as f: content_list = [word.strip() for word", "= TinyTag.get(x) if x != 'wdg'] #remove tracks without albumn", "print('music directory'+str(musicDirectory)) print(len(musicDirectory)) currentDirectory =os.path.dirname(__file__) with open(currentDirectory+'/The_Krabby_Patty Formula_.m3u', \"r\") as", "or title for track in reversed(range(len(musicDirectory))): try: trackTag = TinyTag.get(musicDirectory[track])", "file)) #print(os.path.join(root, file)) print('files'+str(musicFiles)) tag = TinyTag.get(musicDirectory[0]) print('Artist',tag.artist) print('Album Artist',tag.albumartist)", "= [] duplicatesList = [] count =0 # check for", "=[] # tag = TinyTag.get(f[0]) # print(tag.artist) # for root,", "duplicate in range(len(musicDirectory)): duplicateTag = TinyTag.get(musicDirectory[duplicate]) musicWithoutDuplicates.append(musicDirectory[j]) if duplicateTag.albumartist ==", "contents') # 
print(content_list) musicDirectory musicWithoutDuplicates = [] duplicatesList = []", "',musicDirectory) #create playlist newPlaylist = open(\"Test.m3u\", \"w\") #add file path", "tag = TinyTag.get(musicDirectory[0]) print('Artist',tag.artist) print('Album Artist',tag.albumartist) print('Title',tag.title) print('Biterate',tag.bitrate) print('music directory'+str(musicDirectory))", "print('albumArtist = none',musicDirectory[track]) print('removing track and adding to log file')", "content in enumerate(content_list): # split strings into artist and title", "Mask The Slump God and Drugz).mp3') print(tag.artist) print('song duration: '+str(tag.duration))", "files: if file.endswith(\".mp3\"): musicFiles.append(file) musicDirectory.append(os.path.join(root, file)) #print(os.path.join(root, file)) print('files'+str(musicFiles)) tag", "#musicDirectory =[x for x in musicDirectory j = TinyTag.get(x) if", "duplicateLBiterate) for x in range(len(duplicateL)): if TinyTag.get(duplicateL[x]).bitrate == max(duplicateLBiterate): #REMOVE", "= duplicatesList + duplicateL else: print(\"found a duplicate!\",musicDirectory[duplicate],duplicateTag.albumartist,duplicateTag.title) duplicateL.append(musicDirectory[duplicate]) duplicateLBiterate.append(duplicateTag.bitrate)", "# for root, dirs, files in os.walk(\"C:/Users/jchap/OneDrive/\"): for root, dirs,", "file') musicDirectory.remove(musicDirectory[track]) except IndexError: break #check for duplicates for j", "== musicDtag.albumartist or duplicateTag.albumartist in musicDtag.albumartist: if duplicateTag.title == musicDtag.title", "if file.endswith(\".mp3\"): musicFiles.append(file) musicDirectory.append(os.path.join(root, file)) #print(os.path.join(root, file)) print('files'+str(musicFiles)) tag =", "content_list = [word.strip() for word in f] \"\"\" my_file =", "trackDirectory in range(len(musicDirectory)): trackTag = TinyTag.get(musicDirectory[trackDirectory]) if trackTag.albumartist == albumArtist", "content_list = my_file. 
readlines() \"\"\" # print('playlist contents') # print(content_list)", "str(trackTag.title)=='None': print('albumArtist = none',musicDirectory[track]) print('removing track and adding to log", "the new playlist for content in enumerate(content_list): # split strings", "root, dirs, files in os.walk(\"C:/Users/jchap/OneDrive/\"): for root, dirs, files in", "if str(trackTag.albumartist)== 'None' or str(trackTag.title)=='None': print('albumArtist = none',musicDirectory[track]) print('removing track", "\"\"\" # print('playlist contents') # print(content_list) musicDirectory musicWithoutDuplicates = []", "to/folder' tag = TinyTag.get('C:/Users/jchap/OneDrive/Pictures/(VERYRAREBOYZ) (feat. $ki Mask The Slump God", "print(\"found a duplicate!\",musicDirectory[duplicate],duplicateTag.albumartist,duplicateTag.title) duplicateL.append(musicDirectory[duplicate]) duplicateLBiterate.append(duplicateTag.bitrate) print('dup ',duplicatesList) #remove duplicates from", "#Add duplicatesList = duplicatesList + duplicateL else: print(\"found a duplicate!\",musicDirectory[duplicate],duplicateTag.albumartist,duplicateTag.title)", "files in os.walk(\"C:/\"): for file in files: if file.endswith(\".mp3\"): musicFiles.append(file)", "== max(duplicateLBiterate): #REMOVE ONE WITH THE BEST BITERATE duplicateL.remove(duplicateL[x]) print('duplicate", "'None' or str(trackTag.title)=='None': print('albumArtist = none',musicDirectory[track]) print('removing track and adding", "The Slump God and Drugz).mp3') print(tag.artist) print('song duration: '+str(tag.duration)) \"\"\"", "\"r\") as f: content_list = [word.strip() for word in f]", "for x in duplicateL if TinyTag.get(musicDirectory[x]).bitrate > musicDirectory[x]] print(\"Current duplicate", "or trackTag.albumartist in albumArtist: if trackTag.title == title or trackTag.title", "= open(currentDirectory+'/The_Krabby_Patty Formula_.m3u', \"r\") content_list = my_file. 
readlines() \"\"\" #", "in os.walk(\"C:/Users/jchap/OneDrive/\"): for root, dirs, files in os.walk(\"C:/\"): for file", "import TinyTag \"\"\" root = 'C:/' copy_to = '/copy to/folder'", "Bite rate\", duplicateLBiterate) for x in range(len(duplicateL)): if TinyTag.get(duplicateL[x]).bitrate ==", "ONE WITH THE BEST BITERATE duplicateL.remove(duplicateL[x]) print('duplicate list',duplicateL) #Add duplicatesList", "duplicates from list for u in range(len(duplicatesList)): for i in", "if trackTag.title == title or trackTag.title in title: newPlaylist.write(trackDirectory +", "albumn artist or title for track in reversed(range(len(musicDirectory))): try: trackTag", "last iteration if duplicate>=len(musicDirectory)-1: print(\"found a duplicate!\",musicDirectory[duplicate],duplicateTag.albumartist,duplicateTag.title) if len(duplicateLBiterate)==1:## did", "open(\"Test.m3u\", \"w\") #add file path to the respective track in", "word in f] \"\"\" my_file = open(currentDirectory+'/The_Krabby_Patty Formula_.m3u', \"r\") content_list", "musicDtag.title : #check if last iteration if duplicate>=len(musicDirectory)-1: print(\"found a", "in reversed(range(len(musicDirectory))): try: trackTag = TinyTag.get(musicDirectory[track]) if str(trackTag.albumartist)== 'None' or", "j in range(len(musicDirectory)): musicDtag = TinyTag.get(musicDirectory[j]) duplicateL=[] duplicateLBiterate=[] for duplicate", "for i in range(len(musicDirectory)): if duplicatesList[u]==musicDirectory[i]: musicDirectory.remove(musicDirectory[i]) print('music ',musicDirectory) #create", "artist and title trackNumber=content[0] trackArray =str(content[1]).split('-') albumArtist= trackArray[0].strip() title=trackArray[1].strip() print('title:',title)", "== title or trackTag.title in title: newPlaylist.write(trackDirectory + \" \"", "while True: content.next() except StopIteration: pass break else: print() else:", "+ duplicateL else: print(\"found a 
duplicate!\",musicDirectory[duplicate],duplicateTag.albumartist,duplicateTag.title) duplicateL.append(musicDirectory[duplicate]) duplicateLBiterate.append(duplicateTag.bitrate) print('dup ',duplicatesList)", "in musicDirectory j = TinyTag.get(x) if x != 'wdg'] #remove", "statement or add another print('biterate') #[x for x in duplicateL", "duplicateLBiterate.append(duplicateTag.bitrate) print('dup ',duplicatesList) #remove duplicates from list for u in", "f=glob.glob('C:/Users/jchap/OneDrive/*.mp3') print(f) musicDirectory=[] musicFiles =[] # tag = TinyTag.get(f[0]) #", "= [word.strip() for word in f] \"\"\" my_file = open(currentDirectory+'/The_Krabby_Patty", "duplicateL.remove(duplicateL[x]) print('duplicate list',duplicateL) #Add duplicatesList = duplicatesList + duplicateL else:", "in title: newPlaylist.write(trackDirectory + \" \" + content) newPlaylist.close() try:", "for duplicate in range(len(musicDirectory)): duplicateTag = TinyTag.get(musicDirectory[duplicate]) musicWithoutDuplicates.append(musicDirectory[j]) if duplicateTag.albumartist", "\"\"\" my_file = open(currentDirectory+'/The_Krabby_Patty Formula_.m3u', \"r\") content_list = my_file. 
readlines()", "Drugz).mp3') print(tag.artist) print('song duration: '+str(tag.duration)) \"\"\" f = [] f=glob.glob('C:/Users/jchap/OneDrive/*.mp3')", "or add another print('biterate') #[x for x in duplicateL if", "in os.walk(\"C:/\"): for file in files: if file.endswith(\".mp3\"): musicFiles.append(file) musicDirectory.append(os.path.join(root,", "without albumn artist or title for track in reversed(range(len(musicDirectory))): try:", "directory'+str(musicDirectory)) print(len(musicDirectory)) currentDirectory =os.path.dirname(__file__) with open(currentDirectory+'/The_Krabby_Patty Formula_.m3u', \"r\") as f:", "for duplicates for j in range(len(musicDirectory)): musicDtag = TinyTag.get(musicDirectory[j]) duplicateL=[]", "TinyTag.get(musicDirectory[0]) print('Artist',tag.artist) print('Album Artist',tag.albumartist) print('Title',tag.title) print('Biterate',tag.bitrate) print('music directory'+str(musicDirectory)) print(len(musicDirectory)) currentDirectory", "# print(content_list) musicDirectory musicWithoutDuplicates = [] duplicatesList = [] count", "print('Artist',tag.artist) print('Album Artist',tag.albumartist) print('Title',tag.title) print('Biterate',tag.bitrate) print('music directory'+str(musicDirectory)) print(len(musicDirectory)) currentDirectory =os.path.dirname(__file__)", "a duplicate!\",musicDirectory[duplicate],duplicateTag.albumartist,duplicateTag.title) duplicateL.append(musicDirectory[duplicate]) duplicateLBiterate.append(duplicateTag.bitrate) print('dup ',duplicatesList) #remove duplicates from list", "= TinyTag.get(musicDirectory[j]) duplicateL=[] duplicateLBiterate=[] for duplicate in range(len(musicDirectory)): duplicateTag =", "the conditional statement or add another print('biterate') #[x for x", "(feat. 
$ki Mask The Slump God and Drugz).mp3') print(tag.artist) print('song", "Slump God and Drugz).mp3') print(tag.artist) print('song duration: '+str(tag.duration)) \"\"\" f", "playlist for content in enumerate(content_list): # split strings into artist", "if trackTag.albumartist == albumArtist or trackTag.albumartist in albumArtist: if trackTag.title", "=[x for x in musicDirectory j = TinyTag.get(x) if x", "duplicate Bite rate\", duplicateLBiterate) for x in range(len(duplicateL)): if TinyTag.get(duplicateL[x]).bitrate", "equal to none #musicDirectory =[x for x in musicDirectory j", "track and adding to log file') musicDirectory.remove(musicDirectory[track]) except IndexError: break", "in the new playlist for content in enumerate(content_list): # split", "print(tag.artist) print('song duration: '+str(tag.duration)) \"\"\" f = [] f=glob.glob('C:/Users/jchap/OneDrive/*.mp3') print(f)", "\"\"\" f = [] f=glob.glob('C:/Users/jchap/OneDrive/*.mp3') print(f) musicDirectory=[] musicFiles =[] #", "f] \"\"\" my_file = open(currentDirectory+'/The_Krabby_Patty Formula_.m3u', \"r\") content_list = my_file.", "duplicateL=[] duplicateLBiterate=[] for duplicate in range(len(musicDirectory)): duplicateTag = TinyTag.get(musicDirectory[duplicate]) musicWithoutDuplicates.append(musicDirectory[j])", "TinyTag.get(x) if x != 'wdg'] #remove tracks without albumn artist", "print(len(musicDirectory)) currentDirectory =os.path.dirname(__file__) with open(currentDirectory+'/The_Krabby_Patty Formula_.m3u', \"r\") as f: content_list", "range(len(musicDirectory)): if duplicatesList[u]==musicDirectory[i]: musicDirectory.remove(musicDirectory[i]) print('music ',musicDirectory) #create playlist newPlaylist =", "musicDtag.albumartist: if duplicateTag.title == musicDtag.title or duplicateTag.title in musicDtag.title :", "tag = TinyTag.get(f[0]) # print(tag.artist) # for root, dirs, files", "musicFiles =[] # tag = TinyTag.get(f[0]) # print(tag.artist) # for", "= 'C:/' copy_to = '/copy to/folder' tag = 
TinyTag.get('C:/Users/jchap/OneDrive/Pictures/(VERYRAREBOYZ) (feat.", "playlist newPlaylist = open(\"Test.m3u\", \"w\") #add file path to the", "trackArray[0].strip() title=trackArray[1].strip() print('title:',title) print('albumArtist:',albumArtist) for trackDirectory in range(len(musicDirectory)): trackTag =", "artist or title for track in reversed(range(len(musicDirectory))): try: trackTag =", "TinyTag \"\"\" root = 'C:/' copy_to = '/copy to/folder' tag", "#check if last iteration if duplicate>=len(musicDirectory)-1: print(\"found a duplicate!\",musicDirectory[duplicate],duplicateTag.albumartist,duplicateTag.title) if", "in duplicateL if TinyTag.get(musicDirectory[x]).bitrate > musicDirectory[x]] print(\"Current duplicate Bite rate\",", "duration: '+str(tag.duration)) \"\"\" f = [] f=glob.glob('C:/Users/jchap/OneDrive/*.mp3') print(f) musicDirectory=[] musicFiles", "duplicateL if TinyTag.get(musicDirectory[x]).bitrate > musicDirectory[x]] print(\"Current duplicate Bite rate\", duplicateLBiterate)", "newPlaylist.close() try: while True: content.next() except StopIteration: pass break else:", "#remove duplicates from list for u in range(len(duplicatesList)): for i", "in f] \"\"\" my_file = open(currentDirectory+'/The_Krabby_Patty Formula_.m3u', \"r\") content_list =", "newPlaylist.write(trackDirectory + \" \" + content) newPlaylist.close() try: while True:", "musicDtag.albumartist or duplicateTag.albumartist in musicDtag.albumartist: if duplicateTag.title == musicDtag.title or", "os.walk(\"C:/Users/jchap/OneDrive/\"): for root, dirs, files in os.walk(\"C:/\"): for file in", "import glob import shutil from tinytag import TinyTag \"\"\" root", "max(duplicateLBiterate): #REMOVE ONE WITH THE BEST BITERATE duplicateL.remove(duplicateL[x]) print('duplicate list',duplicateL)", "musicDirectory j = TinyTag.get(x) if x != 'wdg'] #remove tracks", "in range(len(musicDirectory)): musicDtag = TinyTag.get(musicDirectory[j]) duplicateL=[] duplicateLBiterate=[] for duplicate in", 
"= '/copy to/folder' tag = TinyTag.get('C:/Users/jchap/OneDrive/Pictures/(VERYRAREBOYZ) (feat. $ki Mask The", "TinyTag.get(f[0]) # print(tag.artist) # for root, dirs, files in os.walk(\"C:/Users/jchap/OneDrive/\"):", "if duplicatesList[u]==musicDirectory[i]: musicDirectory.remove(musicDirectory[i]) print('music ',musicDirectory) #create playlist newPlaylist = open(\"Test.m3u\",", "print('song duration: '+str(tag.duration)) \"\"\" f = [] f=glob.glob('C:/Users/jchap/OneDrive/*.mp3') print(f) musicDirectory=[]", "duplicateTag.title == musicDtag.title or duplicateTag.title in musicDtag.title : #check if", "#remove tracks without albumn artist or title for track in", "for trackDirectory in range(len(musicDirectory)): trackTag = TinyTag.get(musicDirectory[trackDirectory]) if trackTag.albumartist ==", "'wdg'] #remove tracks without albumn artist or title for track", "dirs, files in os.walk(\"C:/Users/jchap/OneDrive/\"): for root, dirs, files in os.walk(\"C:/\"):", "musicDirectory.append(os.path.join(root, file)) #print(os.path.join(root, file)) print('files'+str(musicFiles)) tag = TinyTag.get(musicDirectory[0]) print('Artist',tag.artist) print('Album", "os import glob import shutil from tinytag import TinyTag \"\"\"", "j = TinyTag.get(x) if x != 'wdg'] #remove tracks without", "or trackTag.title in title: newPlaylist.write(trackDirectory + \" \" + content)", "> musicDirectory[x]] print(\"Current duplicate Bite rate\", duplicateLBiterate) for x in", "list',duplicateL) #Add duplicatesList = duplicatesList + duplicateL else: print(\"found a", "len(duplicateLBiterate)==1:## did something here may need to change the conditional", "# check for tags equal to none #musicDirectory =[x for", "TinyTag.get('C:/Users/jchap/OneDrive/Pictures/(VERYRAREBOYZ) (feat. $ki Mask The Slump God and Drugz).mp3') print(tag.artist)", "'/copy to/folder' tag = TinyTag.get('C:/Users/jchap/OneDrive/Pictures/(VERYRAREBOYZ) (feat. 
$ki Mask The Slump", "BITERATE duplicateL.remove(duplicateL[x]) print('duplicate list',duplicateL) #Add duplicatesList = duplicatesList + duplicateL", "duplicatesList = [] count =0 # check for tags equal", "conditional statement or add another print('biterate') #[x for x in", "f = [] f=glob.glob('C:/Users/jchap/OneDrive/*.mp3') print(f) musicDirectory=[] musicFiles =[] # tag", "albumArtist= trackArray[0].strip() title=trackArray[1].strip() print('title:',title) print('albumArtist:',albumArtist) for trackDirectory in range(len(musicDirectory)): trackTag", "THE BEST BITERATE duplicateL.remove(duplicateL[x]) print('duplicate list',duplicateL) #Add duplicatesList = duplicatesList", "in enumerate(content_list): # split strings into artist and title trackNumber=content[0]", "readlines() \"\"\" # print('playlist contents') # print(content_list) musicDirectory musicWithoutDuplicates =", "if TinyTag.get(musicDirectory[x]).bitrate > musicDirectory[x]] print(\"Current duplicate Bite rate\", duplicateLBiterate) for", "print('removing track and adding to log file') musicDirectory.remove(musicDirectory[track]) except IndexError:", "= TinyTag.get(musicDirectory[trackDirectory]) if trackTag.albumartist == albumArtist or trackTag.albumartist in albumArtist:", "log file') musicDirectory.remove(musicDirectory[track]) except IndexError: break #check for duplicates for", "glob import shutil from tinytag import TinyTag \"\"\" root =", "\" \" + content) newPlaylist.close() try: while True: content.next() except", "in files: if file.endswith(\".mp3\"): musicFiles.append(file) musicDirectory.append(os.path.join(root, file)) #print(os.path.join(root, file)) print('files'+str(musicFiles))", "trackTag.title in title: newPlaylist.write(trackDirectory + \" \" + content) newPlaylist.close()", "'+str(tag.duration)) \"\"\" f = [] f=glob.glob('C:/Users/jchap/OneDrive/*.mp3') print(f) musicDirectory=[] musicFiles =[]", "WITH THE BEST BITERATE duplicateL.remove(duplicateL[x]) print('duplicate 
list',duplicateL) #Add duplicatesList =", "copy_to = '/copy to/folder' tag = TinyTag.get('C:/Users/jchap/OneDrive/Pictures/(VERYRAREBOYZ) (feat. $ki Mask", "from list for u in range(len(duplicatesList)): for i in range(len(musicDirectory)):", "IndexError: break #check for duplicates for j in range(len(musicDirectory)): musicDtag", "duplicateTag.title in musicDtag.title : #check if last iteration if duplicate>=len(musicDirectory)-1:", "or duplicateTag.albumartist in musicDtag.albumartist: if duplicateTag.title == musicDtag.title or duplicateTag.title", "i in range(len(musicDirectory)): if duplicatesList[u]==musicDirectory[i]: musicDirectory.remove(musicDirectory[i]) print('music ',musicDirectory) #create playlist", "path to the respective track in the new playlist for", "iteration if duplicate>=len(musicDirectory)-1: print(\"found a duplicate!\",musicDirectory[duplicate],duplicateTag.albumartist,duplicateTag.title) if len(duplicateLBiterate)==1:## did something", "if len(duplicateLBiterate)==1:## did something here may need to change the", "root, dirs, files in os.walk(\"C:/\"): for file in files: if", "trackTag.title == title or trackTag.title in title: newPlaylist.write(trackDirectory + \"", "count =0 # check for tags equal to none #musicDirectory", "in musicDtag.title : #check if last iteration if duplicate>=len(musicDirectory)-1: print(\"found", "$ki Mask The Slump God and Drugz).mp3') print(tag.artist) print('song duration:", "!= 'wdg'] #remove tracks without albumn artist or title for", "track in reversed(range(len(musicDirectory))): try: trackTag = TinyTag.get(musicDirectory[track]) if str(trackTag.albumartist)== 'None'", "#print(os.path.join(root, file)) print('files'+str(musicFiles)) tag = TinyTag.get(musicDirectory[0]) print('Artist',tag.artist) print('Album Artist',tag.albumartist) print('Title',tag.title)", "title for track in reversed(range(len(musicDirectory))): try: trackTag = TinyTag.get(musicDirectory[track]) if", "x in musicDirectory j = 
TinyTag.get(x) if x != 'wdg']", "to log file') musicDirectory.remove(musicDirectory[track]) except IndexError: break #check for duplicates", "albumArtist or trackTag.albumartist in albumArtist: if trackTag.title == title or", "break #check for duplicates for j in range(len(musicDirectory)): musicDtag =", "file.endswith(\".mp3\"): musicFiles.append(file) musicDirectory.append(os.path.join(root, file)) #print(os.path.join(root, file)) print('files'+str(musicFiles)) tag = TinyTag.get(musicDirectory[0])", "duplicates for j in range(len(musicDirectory)): musicDtag = TinyTag.get(musicDirectory[j]) duplicateL=[] duplicateLBiterate=[]", "#REMOVE ONE WITH THE BEST BITERATE duplicateL.remove(duplicateL[x]) print('duplicate list',duplicateL) #Add", "and adding to log file') musicDirectory.remove(musicDirectory[track]) except IndexError: break #check", "if duplicate>=len(musicDirectory)-1: print(\"found a duplicate!\",musicDirectory[duplicate],duplicateTag.albumartist,duplicateTag.title) if len(duplicateLBiterate)==1:## did something here", "# split strings into artist and title trackNumber=content[0] trackArray =str(content[1]).split('-')", "str(trackTag.albumartist)== 'None' or str(trackTag.title)=='None': print('albumArtist = none',musicDirectory[track]) print('removing track and", "in range(len(musicDirectory)): duplicateTag = TinyTag.get(musicDirectory[duplicate]) musicWithoutDuplicates.append(musicDirectory[j]) if duplicateTag.albumartist == musicDtag.albumartist", "+ \" \" + content) newPlaylist.close() try: while True: content.next()", "split strings into artist and title trackNumber=content[0] trackArray =str(content[1]).split('-') albumArtist=", "Formula_.m3u', \"r\") content_list = my_file. 
readlines() \"\"\" # print('playlist contents')", "\" + content) newPlaylist.close() try: while True: content.next() except StopIteration:", "to the respective track in the new playlist for content", "TinyTag.get(musicDirectory[trackDirectory]) if trackTag.albumartist == albumArtist or trackTag.albumartist in albumArtist: if", "= TinyTag.get(f[0]) # print(tag.artist) # for root, dirs, files in", "tinytag import TinyTag \"\"\" root = 'C:/' copy_to = '/copy", "# print('playlist contents') # print(content_list) musicDirectory musicWithoutDuplicates = [] duplicatesList", "range(len(duplicatesList)): for i in range(len(musicDirectory)): if duplicatesList[u]==musicDirectory[i]: musicDirectory.remove(musicDirectory[i]) print('music ',musicDirectory)", "musicDtag.title or duplicateTag.title in musicDtag.title : #check if last iteration", "musicDirectory musicWithoutDuplicates = [] duplicatesList = [] count =0 #", "duplicateL else: print(\"found a duplicate!\",musicDirectory[duplicate],duplicateTag.albumartist,duplicateTag.title) duplicateL.append(musicDirectory[duplicate]) duplicateLBiterate.append(duplicateTag.bitrate) print('dup ',duplicatesList) #remove", "print(tag.artist) # for root, dirs, files in os.walk(\"C:/Users/jchap/OneDrive/\"): for root,", "tag = TinyTag.get('C:/Users/jchap/OneDrive/Pictures/(VERYRAREBOYZ) (feat. $ki Mask The Slump God and", "for j in range(len(musicDirectory)): musicDtag = TinyTag.get(musicDirectory[j]) duplicateL=[] duplicateLBiterate=[] for", "print('Title',tag.title) print('Biterate',tag.bitrate) print('music directory'+str(musicDirectory)) print(len(musicDirectory)) currentDirectory =os.path.dirname(__file__) with open(currentDirectory+'/The_Krabby_Patty Formula_.m3u',", "for x in range(len(duplicateL)): if TinyTag.get(duplicateL[x]).bitrate == max(duplicateLBiterate): #REMOVE ONE", "open(currentDirectory+'/The_Krabby_Patty Formula_.m3u', \"r\") content_list = my_file. 
readlines() \"\"\" # print('playlist", "may need to change the conditional statement or add another", "range(len(duplicateL)): if TinyTag.get(duplicateL[x]).bitrate == max(duplicateLBiterate): #REMOVE ONE WITH THE BEST", "to none #musicDirectory =[x for x in musicDirectory j =", "range(len(musicDirectory)): trackTag = TinyTag.get(musicDirectory[trackDirectory]) if trackTag.albumartist == albumArtist or trackTag.albumartist", "musicDtag = TinyTag.get(musicDirectory[j]) duplicateL=[] duplicateLBiterate=[] for duplicate in range(len(musicDirectory)): duplicateTag", "\"w\") #add file path to the respective track in the", "# print(tag.artist) # for root, dirs, files in os.walk(\"C:/Users/jchap/OneDrive/\"): for", "or duplicateTag.title in musicDtag.title : #check if last iteration if", "in musicDtag.albumartist: if duplicateTag.title == musicDtag.title or duplicateTag.title in musicDtag.title", "= TinyTag.get('C:/Users/jchap/OneDrive/Pictures/(VERYRAREBOYZ) (feat. $ki Mask The Slump God and Drugz).mp3')", "currentDirectory =os.path.dirname(__file__) with open(currentDirectory+'/The_Krabby_Patty Formula_.m3u', \"r\") as f: content_list =", "title or trackTag.title in title: newPlaylist.write(trackDirectory + \" \" +", "if x != 'wdg'] #remove tracks without albumn artist or", "#add file path to the respective track in the new", "file in files: if file.endswith(\".mp3\"): musicFiles.append(file) musicDirectory.append(os.path.join(root, file)) #print(os.path.join(root, file))", "print('files'+str(musicFiles)) tag = TinyTag.get(musicDirectory[0]) print('Artist',tag.artist) print('Album Artist',tag.albumartist) print('Title',tag.title) print('Biterate',tag.bitrate) print('music", "except IndexError: break #check for duplicates for j in range(len(musicDirectory)):", "duplicateTag.albumartist in musicDtag.albumartist: if duplicateTag.title == musicDtag.title or duplicateTag.title in", "[] count =0 # check for tags equal to none", "in range(len(musicDirectory)): if 
duplicatesList[u]==musicDirectory[i]: musicDirectory.remove(musicDirectory[i]) print('music ',musicDirectory) #create playlist newPlaylist", "musicWithoutDuplicates = [] duplicatesList = [] count =0 # check", "here may need to change the conditional statement or add", "= TinyTag.get(musicDirectory[0]) print('Artist',tag.artist) print('Album Artist',tag.albumartist) print('Title',tag.title) print('Biterate',tag.bitrate) print('music directory'+str(musicDirectory)) print(len(musicDirectory))", "duplicateTag = TinyTag.get(musicDirectory[duplicate]) musicWithoutDuplicates.append(musicDirectory[j]) if duplicateTag.albumartist == musicDtag.albumartist or duplicateTag.albumartist", "range(len(musicDirectory)): musicDtag = TinyTag.get(musicDirectory[j]) duplicateL=[] duplicateLBiterate=[] for duplicate in range(len(musicDirectory)):", "need to change the conditional statement or add another print('biterate')", "with open(currentDirectory+'/The_Krabby_Patty Formula_.m3u', \"r\") as f: content_list = [word.strip() for", "for file in files: if file.endswith(\".mp3\"): musicFiles.append(file) musicDirectory.append(os.path.join(root, file)) #print(os.path.join(root,", "strings into artist and title trackNumber=content[0] trackArray =str(content[1]).split('-') albumArtist= trackArray[0].strip()", "[] f=glob.glob('C:/Users/jchap/OneDrive/*.mp3') print(f) musicDirectory=[] musicFiles =[] # tag = TinyTag.get(f[0])", "x in range(len(duplicateL)): if TinyTag.get(duplicateL[x]).bitrate == max(duplicateLBiterate): #REMOVE ONE WITH", "a duplicate!\",musicDirectory[duplicate],duplicateTag.albumartist,duplicateTag.title) if len(duplicateLBiterate)==1:## did something here may need to", "musicDirectory.remove(musicDirectory[track]) except IndexError: break #check for duplicates for j in", "print('duplicate list',duplicateL) #Add duplicatesList = duplicatesList + duplicateL else: print(\"found", "print(\"Current duplicate Bite rate\", duplicateLBiterate) for x in range(len(duplicateL)): 
if", "for root, dirs, files in os.walk(\"C:/\"): for file in files:", "change the conditional statement or add another print('biterate') #[x for", "and Drugz).mp3') print(tag.artist) print('song duration: '+str(tag.duration)) \"\"\" f = []", "range(len(musicDirectory)): duplicateTag = TinyTag.get(musicDirectory[duplicate]) musicWithoutDuplicates.append(musicDirectory[j]) if duplicateTag.albumartist == musicDtag.albumartist or", "=os.path.dirname(__file__) with open(currentDirectory+'/The_Krabby_Patty Formula_.m3u', \"r\") as f: content_list = [word.strip()", "for u in range(len(duplicatesList)): for i in range(len(musicDirectory)): if duplicatesList[u]==musicDirectory[i]:", "trackTag = TinyTag.get(musicDirectory[track]) if str(trackTag.albumartist)== 'None' or str(trackTag.title)=='None': print('albumArtist =", "',duplicatesList) #remove duplicates from list for u in range(len(duplicatesList)): for", "musicDirectory=[] musicFiles =[] # tag = TinyTag.get(f[0]) # print(tag.artist) #", "print('title:',title) print('albumArtist:',albumArtist) for trackDirectory in range(len(musicDirectory)): trackTag = TinyTag.get(musicDirectory[trackDirectory]) if", "Artist',tag.albumartist) print('Title',tag.title) print('Biterate',tag.bitrate) print('music directory'+str(musicDirectory)) print(len(musicDirectory)) currentDirectory =os.path.dirname(__file__) with open(currentDirectory+'/The_Krabby_Patty", "content) newPlaylist.close() try: while True: content.next() except StopIteration: pass break", "my_file = open(currentDirectory+'/The_Krabby_Patty Formula_.m3u', \"r\") content_list = my_file. 
readlines() \"\"\"", "#check for duplicates for j in range(len(musicDirectory)): musicDtag = TinyTag.get(musicDirectory[j])", ": #check if last iteration if duplicate>=len(musicDirectory)-1: print(\"found a duplicate!\",musicDirectory[duplicate],duplicateTag.albumartist,duplicateTag.title)", "tags equal to none #musicDirectory =[x for x in musicDirectory", "=str(content[1]).split('-') albumArtist= trackArray[0].strip() title=trackArray[1].strip() print('title:',title) print('albumArtist:',albumArtist) for trackDirectory in range(len(musicDirectory)):", "dirs, files in os.walk(\"C:/\"): for file in files: if file.endswith(\".mp3\"):", "did something here may need to change the conditional statement", "print('dup ',duplicatesList) #remove duplicates from list for u in range(len(duplicatesList)):", "# tag = TinyTag.get(f[0]) # print(tag.artist) # for root, dirs,", "the respective track in the new playlist for content in", "\"\"\" root = 'C:/' copy_to = '/copy to/folder' tag =", "albumArtist: if trackTag.title == title or trackTag.title in title: newPlaylist.write(trackDirectory", "title trackNumber=content[0] trackArray =str(content[1]).split('-') albumArtist= trackArray[0].strip() title=trackArray[1].strip() print('title:',title) print('albumArtist:',albumArtist) for", "True: content.next() except StopIteration: pass break else: print() else: print()", "trackTag.albumartist in albumArtist: if trackTag.title == title or trackTag.title in", "duplicatesList + duplicateL else: print(\"found a duplicate!\",musicDirectory[duplicate],duplicateTag.albumartist,duplicateTag.title) duplicateL.append(musicDirectory[duplicate]) duplicateLBiterate.append(duplicateTag.bitrate) print('dup", "TinyTag.get(musicDirectory[j]) duplicateL=[] duplicateLBiterate=[] for duplicate in range(len(musicDirectory)): duplicateTag = TinyTag.get(musicDirectory[duplicate])", "another print('biterate') #[x for x in duplicateL if TinyTag.get(musicDirectory[x]).bitrate >", "[word.strip() for word in f] 
\"\"\" my_file = open(currentDirectory+'/The_Krabby_Patty Formula_.m3u',", "trackArray =str(content[1]).split('-') albumArtist= trackArray[0].strip() title=trackArray[1].strip() print('title:',title) print('albumArtist:',albumArtist) for trackDirectory in", "to change the conditional statement or add another print('biterate') #[x", "print('music ',musicDirectory) #create playlist newPlaylist = open(\"Test.m3u\", \"w\") #add file", "reversed(range(len(musicDirectory))): try: trackTag = TinyTag.get(musicDirectory[track]) if str(trackTag.albumartist)== 'None' or str(trackTag.title)=='None':", "#create playlist newPlaylist = open(\"Test.m3u\", \"w\") #add file path to", "none',musicDirectory[track]) print('removing track and adding to log file') musicDirectory.remove(musicDirectory[track]) except", "if last iteration if duplicate>=len(musicDirectory)-1: print(\"found a duplicate!\",musicDirectory[duplicate],duplicateTag.albumartist,duplicateTag.title) if len(duplicateLBiterate)==1:##", "print('albumArtist:',albumArtist) for trackDirectory in range(len(musicDirectory)): trackTag = TinyTag.get(musicDirectory[trackDirectory]) if trackTag.albumartist", "root = 'C:/' copy_to = '/copy to/folder' tag = TinyTag.get('C:/Users/jchap/OneDrive/Pictures/(VERYRAREBOYZ)", "TinyTag.get(duplicateL[x]).bitrate == max(duplicateLBiterate): #REMOVE ONE WITH THE BEST BITERATE duplicateL.remove(duplicateL[x])", "in albumArtist: if trackTag.title == title or trackTag.title in title:", "import os import glob import shutil from tinytag import TinyTag", "musicDirectory[x]] print(\"Current duplicate Bite rate\", duplicateLBiterate) for x in range(len(duplicateL)):", "print(f) musicDirectory=[] musicFiles =[] # tag = TinyTag.get(f[0]) # print(tag.artist)", "print('playlist contents') # print(content_list) musicDirectory musicWithoutDuplicates = [] duplicatesList =", "title: newPlaylist.write(trackDirectory + \" \" + content) newPlaylist.close() try: while", "try: trackTag = 
TinyTag.get(musicDirectory[track]) if str(trackTag.albumartist)== 'None' or str(trackTag.title)=='None': print('albumArtist", "x in duplicateL if TinyTag.get(musicDirectory[x]).bitrate > musicDirectory[x]] print(\"Current duplicate Bite", "#[x for x in duplicateL if TinyTag.get(musicDirectory[x]).bitrate > musicDirectory[x]] print(\"Current", "BEST BITERATE duplicateL.remove(duplicateL[x]) print('duplicate list',duplicateL) #Add duplicatesList = duplicatesList +", "title=trackArray[1].strip() print('title:',title) print('albumArtist:',albumArtist) for trackDirectory in range(len(musicDirectory)): trackTag = TinyTag.get(musicDirectory[trackDirectory])", "add another print('biterate') #[x for x in duplicateL if TinyTag.get(musicDirectory[x]).bitrate", "musicWithoutDuplicates.append(musicDirectory[j]) if duplicateTag.albumartist == musicDtag.albumartist or duplicateTag.albumartist in musicDtag.albumartist: if", "in range(len(duplicateL)): if TinyTag.get(duplicateL[x]).bitrate == max(duplicateLBiterate): #REMOVE ONE WITH THE", "my_file. 
readlines() \"\"\" # print('playlist contents') # print(content_list) musicDirectory musicWithoutDuplicates", "list for u in range(len(duplicatesList)): for i in range(len(musicDirectory)): if", "[] duplicatesList = [] count =0 # check for tags", "try: while True: content.next() except StopIteration: pass break else: print()", "+ content) newPlaylist.close() try: while True: content.next() except StopIteration: pass", "rate\", duplicateLBiterate) for x in range(len(duplicateL)): if TinyTag.get(duplicateL[x]).bitrate == max(duplicateLBiterate):", "for word in f] \"\"\" my_file = open(currentDirectory+'/The_Krabby_Patty Formula_.m3u', \"r\")", "none #musicDirectory =[x for x in musicDirectory j = TinyTag.get(x)", "from tinytag import TinyTag \"\"\" root = 'C:/' copy_to =", "into artist and title trackNumber=content[0] trackArray =str(content[1]).split('-') albumArtist= trackArray[0].strip() title=trackArray[1].strip()", "if duplicateTag.albumartist == musicDtag.albumartist or duplicateTag.albumartist in musicDtag.albumartist: if duplicateTag.title", "musicDirectory.remove(musicDirectory[i]) print('music ',musicDirectory) #create playlist newPlaylist = open(\"Test.m3u\", \"w\") #add", "TinyTag.get(musicDirectory[x]).bitrate > musicDirectory[x]] print(\"Current duplicate Bite rate\", duplicateLBiterate) for x", "new playlist for content in enumerate(content_list): # split strings into", "duplicateLBiterate=[] for duplicate in range(len(musicDirectory)): duplicateTag = TinyTag.get(musicDirectory[duplicate]) musicWithoutDuplicates.append(musicDirectory[j]) if", "files in os.walk(\"C:/Users/jchap/OneDrive/\"): for root, dirs, files in os.walk(\"C:/\"): for", "=0 # check for tags equal to none #musicDirectory =[x", "and title trackNumber=content[0] trackArray =str(content[1]).split('-') albumArtist= trackArray[0].strip() title=trackArray[1].strip() print('title:',title) print('albumArtist:',albumArtist)", "file)) print('files'+str(musicFiles)) tag = 
TinyTag.get(musicDirectory[0]) print('Artist',tag.artist) print('Album Artist',tag.albumartist) print('Title',tag.title) print('Biterate',tag.bitrate)", "for content in enumerate(content_list): # split strings into artist and", "print('Biterate',tag.bitrate) print('music directory'+str(musicDirectory)) print(len(musicDirectory)) currentDirectory =os.path.dirname(__file__) with open(currentDirectory+'/The_Krabby_Patty Formula_.m3u', \"r\")", "duplicate>=len(musicDirectory)-1: print(\"found a duplicate!\",musicDirectory[duplicate],duplicateTag.albumartist,duplicateTag.title) if len(duplicateLBiterate)==1:## did something here may", "God and Drugz).mp3') print(tag.artist) print('song duration: '+str(tag.duration)) \"\"\" f =", "duplicate!\",musicDirectory[duplicate],duplicateTag.albumartist,duplicateTag.title) if len(duplicateLBiterate)==1:## did something here may need to change", "enumerate(content_list): # split strings into artist and title trackNumber=content[0] trackArray", "something here may need to change the conditional statement or", "u in range(len(duplicatesList)): for i in range(len(musicDirectory)): if duplicatesList[u]==musicDirectory[i]: musicDirectory.remove(musicDirectory[i])", "newPlaylist = open(\"Test.m3u\", \"w\") #add file path to the respective", "import shutil from tinytag import TinyTag \"\"\" root = 'C:/'", "for root, dirs, files in os.walk(\"C:/Users/jchap/OneDrive/\"): for root, dirs, files", "file path to the respective track in the new playlist", "musicFiles.append(file) musicDirectory.append(os.path.join(root, file)) #print(os.path.join(root, file)) print('files'+str(musicFiles)) tag = TinyTag.get(musicDirectory[0]) print('Artist',tag.artist)", "= open(\"Test.m3u\", \"w\") #add file path to the respective track", "in range(len(duplicatesList)): for i in range(len(musicDirectory)): if duplicatesList[u]==musicDirectory[i]: musicDirectory.remove(musicDirectory[i]) print('music", "respective track in the new playlist for content in 
enumerate(content_list):", "f: content_list = [word.strip() for word in f] \"\"\" my_file", "print(\"found a duplicate!\",musicDirectory[duplicate],duplicateTag.albumartist,duplicateTag.title) if len(duplicateLBiterate)==1:## did something here may need", "'C:/' copy_to = '/copy to/folder' tag = TinyTag.get('C:/Users/jchap/OneDrive/Pictures/(VERYRAREBOYZ) (feat. $ki", "TinyTag.get(musicDirectory[duplicate]) musicWithoutDuplicates.append(musicDirectory[j]) if duplicateTag.albumartist == musicDtag.albumartist or duplicateTag.albumartist in musicDtag.albumartist:", "TinyTag.get(musicDirectory[track]) if str(trackTag.albumartist)== 'None' or str(trackTag.title)=='None': print('albumArtist = none',musicDirectory[track]) print('removing", "trackNumber=content[0] trackArray =str(content[1]).split('-') albumArtist= trackArray[0].strip() title=trackArray[1].strip() print('title:',title) print('albumArtist:',albumArtist) for trackDirectory", "x != 'wdg'] #remove tracks without albumn artist or title", "= none',musicDirectory[track]) print('removing track and adding to log file') musicDirectory.remove(musicDirectory[track])", "for x in musicDirectory j = TinyTag.get(x) if x !=", "shutil from tinytag import TinyTag \"\"\" root = 'C:/' copy_to", "trackTag = TinyTag.get(musicDirectory[trackDirectory]) if trackTag.albumartist == albumArtist or trackTag.albumartist in", "if duplicateTag.title == musicDtag.title or duplicateTag.title in musicDtag.title : #check", "adding to log file') musicDirectory.remove(musicDirectory[track]) except IndexError: break #check for", "for track in reversed(range(len(musicDirectory))): try: trackTag = TinyTag.get(musicDirectory[track]) if str(trackTag.albumartist)==", "print('biterate') #[x for x in duplicateL if TinyTag.get(musicDirectory[x]).bitrate > musicDirectory[x]]", "duplicateTag.albumartist == musicDtag.albumartist or duplicateTag.albumartist in musicDtag.albumartist: if duplicateTag.title ==", "duplicatesList = duplicatesList + duplicateL 
else: print(\"found a duplicate!\",musicDirectory[duplicate],duplicateTag.albumartist,duplicateTag.title) duplicateL.append(musicDirectory[duplicate])", "else: print(\"found a duplicate!\",musicDirectory[duplicate],duplicateTag.albumartist,duplicateTag.title) duplicateL.append(musicDirectory[duplicate]) duplicateLBiterate.append(duplicateTag.bitrate) print('dup ',duplicatesList) #remove duplicates", "= [] count =0 # check for tags equal to", "\"r\") content_list = my_file. readlines() \"\"\" # print('playlist contents') #", "= my_file. readlines() \"\"\" # print('playlist contents') # print(content_list) musicDirectory", "= TinyTag.get(musicDirectory[duplicate]) musicWithoutDuplicates.append(musicDirectory[j]) if duplicateTag.albumartist == musicDtag.albumartist or duplicateTag.albumartist in", "check for tags equal to none #musicDirectory =[x for x", "print(content_list) musicDirectory musicWithoutDuplicates = [] duplicatesList = [] count =0", "for tags equal to none #musicDirectory =[x for x in", "duplicatesList[u]==musicDirectory[i]: musicDirectory.remove(musicDirectory[i]) print('music ',musicDirectory) #create playlist newPlaylist = open(\"Test.m3u\", \"w\")", "os.walk(\"C:/\"): for file in files: if file.endswith(\".mp3\"): musicFiles.append(file) musicDirectory.append(os.path.join(root, file))", "duplicate!\",musicDirectory[duplicate],duplicateTag.albumartist,duplicateTag.title) duplicateL.append(musicDirectory[duplicate]) duplicateLBiterate.append(duplicateTag.bitrate) print('dup ',duplicatesList) #remove duplicates from list for", "track in the new playlist for content in enumerate(content_list): #", "in range(len(musicDirectory)): trackTag = TinyTag.get(musicDirectory[trackDirectory]) if trackTag.albumartist == albumArtist or", "== albumArtist or trackTag.albumartist in albumArtist: if trackTag.title == title", "as f: content_list = [word.strip() for word in f] \"\"\"", "if TinyTag.get(duplicateL[x]).bitrate == max(duplicateLBiterate): #REMOVE ONE WITH 
THE BEST BITERATE", "== musicDtag.title or duplicateTag.title in musicDtag.title : #check if last" ]
[ "greatest_dec[1] = change_value avg_change = total_change/len(months) output = ( f\"\\n", "= int(first_row[1]) #loop for row in csvreader: net_total += int(row[1])", "= []; total_m = 1; net_total = 0; total_change =", "monthly_changes = []; greatest_inc = ['', 0]; greatest_dec = ['',", "f\"Total: ${net_total}\\n\" f\"Average Change: ${avg_change:.2f}\\n\" f\"Greatest Increase in Profits: {greatest_inc[0]}", "csv with open(csvpath) as csvfile: csvreader = csv.reader(csvfile, delimiter=',') header", "if change_value > greatest_inc[1]: greatest_inc[0] = str(row[0]) greatest_inc[1] = change_value", "total_m = total_m+1 current_value = int(row[1]) change_value = int(current_value-previous_row) monthly_changes.append(change_value)", "${avg_change:.2f}\\n\" f\"Greatest Increase in Profits: {greatest_inc[0]} (${greatest_inc[1]})\\n\" f\"Greatest Decrease in", "> greatest_inc[1]: greatest_inc[0] = str(row[0]) greatest_inc[1] = change_value if change_value", "current_value = int(row[1]) change_value = int(current_value-previous_row) monthly_changes.append(change_value) months.append(row[0]) previous_row =", "f\"\\n Financial Analysis \\n\" f\"------------------------------\\n\" f\"Total Months: {total_m}\\n\" f\"Total: ${net_total}\\n\"", "total_change = 0; monthly_changes = []; greatest_inc = ['', 0];", "first_row = next(csvreader) previous_row = int(first_row[1]) net_total = int(first_row[1]) #loop", "next(csvreader) first_row = next(csvreader) previous_row = int(first_row[1]) net_total = int(first_row[1])", "f\"------------------------------\\n\" f\"Total Months: {total_m}\\n\" f\"Total: ${net_total}\\n\" f\"Average Change: ${avg_change:.2f}\\n\" f\"Greatest", "= 1; net_total = 0; total_change = 0; monthly_changes =", "csv #input csvpath = os.path.join('Resources', 'budget_data.csv') #output outfile = os.path.join('Analysis',", "greatest_dec = ['', 0] #open & read csv with open(csvpath)", "& read csv with open(csvpath) as csvfile: csvreader = csv.reader(csvfile,", 
"csvpath = os.path.join('Resources', 'budget_data.csv') #output outfile = os.path.join('Analysis', 'pybankstatements.txt') #declare", "#open & read csv with open(csvpath) as csvfile: csvreader =", "import os import csv #input csvpath = os.path.join('Resources', 'budget_data.csv') #output", "= next(csvreader) previous_row = int(first_row[1]) net_total = int(first_row[1]) #loop for", "+= int(row[1]) total_m = total_m+1 current_value = int(row[1]) change_value =", "= total_m+1 current_value = int(row[1]) change_value = int(current_value-previous_row) monthly_changes.append(change_value) months.append(row[0])", "csvreader = csv.reader(csvfile, delimiter=',') header = next(csvreader) first_row = next(csvreader)", "greatest_inc[1]: greatest_inc[0] = str(row[0]) greatest_inc[1] = change_value if change_value <", "Financial Analysis \\n\" f\"------------------------------\\n\" f\"Total Months: {total_m}\\n\" f\"Total: ${net_total}\\n\" f\"Average", "total_m+1 current_value = int(row[1]) change_value = int(current_value-previous_row) monthly_changes.append(change_value) months.append(row[0]) previous_row", "{total_m}\\n\" f\"Total: ${net_total}\\n\" f\"Average Change: ${avg_change:.2f}\\n\" f\"Greatest Increase in Profits:", "= total_change/len(months) output = ( f\"\\n Financial Analysis \\n\" f\"------------------------------\\n\"", "( f\"\\n Financial Analysis \\n\" f\"------------------------------\\n\" f\"Total Months: {total_m}\\n\" f\"Total:", "Decrease in Profits: {greatest_dec[0]} (${greatest_dec[1]})\\n\") with open(outfile, \"w\") as txt_file:", "#declare variables months = []; total_m = 1; net_total =", "os.path.join('Resources', 'budget_data.csv') #output outfile = os.path.join('Analysis', 'pybankstatements.txt') #declare variables months", "= 0; monthly_changes = []; greatest_inc = ['', 0]; greatest_dec", "(${greatest_inc[1]})\\n\" f\"Greatest Decrease in Profits: {greatest_dec[0]} (${greatest_dec[1]})\\n\") with open(outfile, \"w\")", "os import csv #input 
csvpath = os.path.join('Resources', 'budget_data.csv') #output outfile", "avg_change = total_change/len(months) output = ( f\"\\n Financial Analysis \\n\"", "total_change/len(months) output = ( f\"\\n Financial Analysis \\n\" f\"------------------------------\\n\" f\"Total", "= os.path.join('Analysis', 'pybankstatements.txt') #declare variables months = []; total_m =", "0] #open & read csv with open(csvpath) as csvfile: csvreader", "previous_row = int(first_row[1]) net_total = int(first_row[1]) #loop for row in", "= []; greatest_inc = ['', 0]; greatest_dec = ['', 0]", "int(current_value-previous_row) monthly_changes.append(change_value) months.append(row[0]) previous_row = int(row[1]) total_change = total_change +", "str(row[0]) greatest_dec[1] = change_value avg_change = total_change/len(months) output = (", "Analysis \\n\" f\"------------------------------\\n\" f\"Total Months: {total_m}\\n\" f\"Total: ${net_total}\\n\" f\"Average Change:", "as csvfile: csvreader = csv.reader(csvfile, delimiter=',') header = next(csvreader) first_row", "Months: {total_m}\\n\" f\"Total: ${net_total}\\n\" f\"Average Change: ${avg_change:.2f}\\n\" f\"Greatest Increase in", "f\"Average Change: ${avg_change:.2f}\\n\" f\"Greatest Increase in Profits: {greatest_inc[0]} (${greatest_inc[1]})\\n\" f\"Greatest", "= str(row[0]) greatest_inc[1] = change_value if change_value < greatest_dec[1]: greatest_dec[0]", "net_total = int(first_row[1]) #loop for row in csvreader: net_total +=", "'budget_data.csv') #output outfile = os.path.join('Analysis', 'pybankstatements.txt') #declare variables months =", "total_change + change_value if change_value > greatest_inc[1]: greatest_inc[0] = str(row[0])", "Change: ${avg_change:.2f}\\n\" f\"Greatest Increase in Profits: {greatest_inc[0]} (${greatest_inc[1]})\\n\" f\"Greatest Decrease", "= total_change + change_value if change_value > greatest_inc[1]: greatest_inc[0] =", "= os.path.join('Resources', 'budget_data.csv') #output outfile = 
os.path.join('Analysis', 'pybankstatements.txt') #declare variables", "= csv.reader(csvfile, delimiter=',') header = next(csvreader) first_row = next(csvreader) previous_row", "= ['', 0]; greatest_dec = ['', 0] #open & read", "#output outfile = os.path.join('Analysis', 'pybankstatements.txt') #declare variables months = [];", "change_value < greatest_dec[1]: greatest_dec[0] = str(row[0]) greatest_dec[1] = change_value avg_change", "f\"Greatest Increase in Profits: {greatest_inc[0]} (${greatest_inc[1]})\\n\" f\"Greatest Decrease in Profits:", "= ( f\"\\n Financial Analysis \\n\" f\"------------------------------\\n\" f\"Total Months: {total_m}\\n\"", "#loop for row in csvreader: net_total += int(row[1]) total_m =", "total_m = 1; net_total = 0; total_change = 0; monthly_changes", "row in csvreader: net_total += int(row[1]) total_m = total_m+1 current_value", "variables months = []; total_m = 1; net_total = 0;", "monthly_changes.append(change_value) months.append(row[0]) previous_row = int(row[1]) total_change = total_change + change_value", "open(csvpath) as csvfile: csvreader = csv.reader(csvfile, delimiter=',') header = next(csvreader)", "${net_total}\\n\" f\"Average Change: ${avg_change:.2f}\\n\" f\"Greatest Increase in Profits: {greatest_inc[0]} (${greatest_inc[1]})\\n\"", "f\"Greatest Decrease in Profits: {greatest_dec[0]} (${greatest_dec[1]})\\n\") with open(outfile, \"w\") as", "= next(csvreader) first_row = next(csvreader) previous_row = int(first_row[1]) net_total =", "0; total_change = 0; monthly_changes = []; greatest_inc = ['',", "0]; greatest_dec = ['', 0] #open & read csv with", "greatest_inc[1] = change_value if change_value < greatest_dec[1]: greatest_dec[0] = str(row[0])", "Increase in Profits: {greatest_inc[0]} (${greatest_inc[1]})\\n\" f\"Greatest Decrease in Profits: {greatest_dec[0]}", "= int(row[1]) change_value = int(current_value-previous_row) monthly_changes.append(change_value) months.append(row[0]) previous_row = int(row[1])", "with 
open(csvpath) as csvfile: csvreader = csv.reader(csvfile, delimiter=',') header =", "outfile = os.path.join('Analysis', 'pybankstatements.txt') #declare variables months = []; total_m", "Profits: {greatest_inc[0]} (${greatest_inc[1]})\\n\" f\"Greatest Decrease in Profits: {greatest_dec[0]} (${greatest_dec[1]})\\n\") with", "net_total = 0; total_change = 0; monthly_changes = []; greatest_inc", "read csv with open(csvpath) as csvfile: csvreader = csv.reader(csvfile, delimiter=',')", "Profits: {greatest_dec[0]} (${greatest_dec[1]})\\n\") with open(outfile, \"w\") as txt_file: txt_file.write(output) outfile", "{greatest_inc[0]} (${greatest_inc[1]})\\n\" f\"Greatest Decrease in Profits: {greatest_dec[0]} (${greatest_dec[1]})\\n\") with open(outfile,", "delimiter=',') header = next(csvreader) first_row = next(csvreader) previous_row = int(first_row[1])", "modules import os import csv #input csvpath = os.path.join('Resources', 'budget_data.csv')", "months = []; total_m = 1; net_total = 0; total_change", "['', 0] #open & read csv with open(csvpath) as csvfile:", "csvreader: net_total += int(row[1]) total_m = total_m+1 current_value = int(row[1])", "in Profits: {greatest_inc[0]} (${greatest_inc[1]})\\n\" f\"Greatest Decrease in Profits: {greatest_dec[0]} (${greatest_dec[1]})\\n\")", "csvfile: csvreader = csv.reader(csvfile, delimiter=',') header = next(csvreader) first_row =", "+ change_value if change_value > greatest_inc[1]: greatest_inc[0] = str(row[0]) greatest_inc[1]", "str(row[0]) greatest_inc[1] = change_value if change_value < greatest_dec[1]: greatest_dec[0] =", "= str(row[0]) greatest_dec[1] = change_value avg_change = total_change/len(months) output =", "if change_value < greatest_dec[1]: greatest_dec[0] = str(row[0]) greatest_dec[1] = change_value", "os.path.join('Analysis', 'pybankstatements.txt') #declare variables months = []; total_m = 1;", "for row in csvreader: net_total += int(row[1]) total_m = total_m+1", "int(row[1]) total_change = total_change + 
change_value if change_value > greatest_inc[1]:", "[]; total_m = 1; net_total = 0; total_change = 0;", "1; net_total = 0; total_change = 0; monthly_changes = [];", "['', 0]; greatest_dec = ['', 0] #open & read csv", "greatest_dec[0] = str(row[0]) greatest_dec[1] = change_value avg_change = total_change/len(months) output", "= int(first_row[1]) net_total = int(first_row[1]) #loop for row in csvreader:", "change_value if change_value < greatest_dec[1]: greatest_dec[0] = str(row[0]) greatest_dec[1] =", "#input csvpath = os.path.join('Resources', 'budget_data.csv') #output outfile = os.path.join('Analysis', 'pybankstatements.txt')", "total_change = total_change + change_value if change_value > greatest_inc[1]: greatest_inc[0]", "output = ( f\"\\n Financial Analysis \\n\" f\"------------------------------\\n\" f\"Total Months:", "greatest_inc[0] = str(row[0]) greatest_inc[1] = change_value if change_value < greatest_dec[1]:", "< greatest_dec[1]: greatest_dec[0] = str(row[0]) greatest_dec[1] = change_value avg_change =", "int(first_row[1]) #loop for row in csvreader: net_total += int(row[1]) total_m", "int(row[1]) total_m = total_m+1 current_value = int(row[1]) change_value = int(current_value-previous_row)", "change_value avg_change = total_change/len(months) output = ( f\"\\n Financial Analysis", "greatest_inc = ['', 0]; greatest_dec = ['', 0] #open &", "= change_value avg_change = total_change/len(months) output = ( f\"\\n Financial", "[]; greatest_inc = ['', 0]; greatest_dec = ['', 0] #open", "in csvreader: net_total += int(row[1]) total_m = total_m+1 current_value =", "net_total += int(row[1]) total_m = total_m+1 current_value = int(row[1]) change_value", "= int(row[1]) total_change = total_change + change_value if change_value >", "change_value if change_value > greatest_inc[1]: greatest_inc[0] = str(row[0]) greatest_inc[1] =", "'pybankstatements.txt') #declare variables months = []; total_m = 1; net_total", "in Profits: {greatest_dec[0]} (${greatest_dec[1]})\\n\") 
with open(outfile, \"w\") as txt_file: txt_file.write(output)", "next(csvreader) previous_row = int(first_row[1]) net_total = int(first_row[1]) #loop for row", "header = next(csvreader) first_row = next(csvreader) previous_row = int(first_row[1]) net_total", "f\"Total Months: {total_m}\\n\" f\"Total: ${net_total}\\n\" f\"Average Change: ${avg_change:.2f}\\n\" f\"Greatest Increase", "= 0; total_change = 0; monthly_changes = []; greatest_inc =", "= int(current_value-previous_row) monthly_changes.append(change_value) months.append(row[0]) previous_row = int(row[1]) total_change = total_change", "csv.reader(csvfile, delimiter=',') header = next(csvreader) first_row = next(csvreader) previous_row =", "= ['', 0] #open & read csv with open(csvpath) as", "int(row[1]) change_value = int(current_value-previous_row) monthly_changes.append(change_value) months.append(row[0]) previous_row = int(row[1]) total_change", "\\n\" f\"------------------------------\\n\" f\"Total Months: {total_m}\\n\" f\"Total: ${net_total}\\n\" f\"Average Change: ${avg_change:.2f}\\n\"", "months.append(row[0]) previous_row = int(row[1]) total_change = total_change + change_value if", "greatest_dec[1]: greatest_dec[0] = str(row[0]) greatest_dec[1] = change_value avg_change = total_change/len(months)", "= change_value if change_value < greatest_dec[1]: greatest_dec[0] = str(row[0]) greatest_dec[1]", "import csv #input csvpath = os.path.join('Resources', 'budget_data.csv') #output outfile =", "0; monthly_changes = []; greatest_inc = ['', 0]; greatest_dec =", "previous_row = int(row[1]) total_change = total_change + change_value if change_value", "change_value > greatest_inc[1]: greatest_inc[0] = str(row[0]) greatest_inc[1] = change_value if", "change_value = int(current_value-previous_row) monthly_changes.append(change_value) months.append(row[0]) previous_row = int(row[1]) total_change =", "#import modules import os import csv #input csvpath = os.path.join('Resources',", "int(first_row[1]) net_total = 
int(first_row[1]) #loop for row in csvreader: net_total" ]
[ "enum import Enum from constants.globals import HEALTH_EMOJIS NETWORK_ERROR = '😱", "def get_node_health_warning_message(node_data) -> str: return \"⚠️ ️⚠ ️ ️⚠️ ️", "INSECURE = \"Insecure\" NETWORK_HEALTHY_AGAIN = \"The network is safe and", "*not responding*!\\nAddress: {node_data['node_address']}\\nIP: {node_data['ip_address']}\\n\" \\ \"\\nCheck it's health immediately\\n\" \\", "network_health_status is NetworkHealthStatus.INEFFICIENT: severity = \"🦥\" return f\"Network health is", "and efficient again! ✅\" def get_network_health_warning(network_health_status: NetworkHealthStatus) -> str: severity", "- *healthy*\\n{HEALTH_EMOJIS[False]} - *unhealthy*\\n' \\ f'{HEALTH_EMOJIS[None]} - *unknown*\\n' class NetworkHealthStatus(Enum):", "health is not optimal: {network_health_status.value} {severity}\" def get_node_healthy_again_message(node_data) -> str:", "NetworkHealthStatus(Enum): INEFFICIENT = \"Inefficient\" OVERBONDED = \"Overbonded\" OPTIMAL = \"Optimal\"", "severity = \"🤒\" if network_health_status is NetworkHealthStatus.INSECURE: severity = \"💀\"", "NetworkHealthStatus.INEFFICIENT: severity = \"🦥\" return f\"Network health is not optimal:", "️⚠️ ️ ⚠ ️⚠ ⚠️ \\n\" \\ f\"Node is *not", "\"🤒\" if network_health_status is NetworkHealthStatus.INSECURE: severity = \"💀\" elif network_health_status", "is NetworkHealthStatus.INEFFICIENT: severity = \"🦥\" return f\"Network health is not", "optimal: {network_health_status.value} {severity}\" def get_node_healthy_again_message(node_data) -> str: return f\"⚕️Node is", "{node_data['node_address']}\\nIP: {node_data['ip_address']}\\n\" \\ def get_node_health_warning_message(node_data) -> str: return \"⚠️ ️⚠", "again! 
✅\" def get_network_health_warning(network_health_status: NetworkHealthStatus) -> str: severity = \"🤒\"", "\"Optimal\" UNDBERBONDED = \"Underbonded\" INSECURE = \"Insecure\" NETWORK_HEALTHY_AGAIN = \"The", "\\ \"\\nCheck it's health immediately\\n\" \\ \"⚠️ ️⚠ ️ ️⚠️", "= \"🦥\" return f\"Network health is not optimal: {network_health_status.value} {severity}\"", "if network_health_status is NetworkHealthStatus.INSECURE: severity = \"💀\" elif network_health_status is", "\"\\nCheck it's health immediately\\n\" \\ \"⚠️ ️⚠ ️ ️⚠️ ️", "⚠ ️⚠ ⚠️ \\n\" \\ f\"Node is *not responding*!\\nAddress: {node_data['node_address']}\\nIP:", "safe and efficient again! ✅\" def get_network_health_warning(network_health_status: NetworkHealthStatus) -> str:", "{network_health_status.value} {severity}\" def get_node_healthy_again_message(node_data) -> str: return f\"⚕️Node is healthy", "while getting data 😱\\nAn API endpoint is down!' HEALTH_LEGEND =", "def get_network_health_warning(network_health_status: NetworkHealthStatus) -> str: severity = \"🤒\" if network_health_status", "️⚠ ️⚠ ⚠️ ️⚠ ️ ️⚠️ ️ ⚠ ️⚠ ⚠️", "network_health_status is NetworkHealthStatus.INSECURE: severity = \"💀\" elif network_health_status is NetworkHealthStatus.INEFFICIENT:", "️ ️⚠️ ️ ⚠ ️⚠ ⚠️ ️⚠ ️⚠ ⚠️ ️⚠", "- *unknown*\\n' class NetworkHealthStatus(Enum): INEFFICIENT = \"Inefficient\" OVERBONDED = \"Overbonded\"", "*healthy*\\n{HEALTH_EMOJIS[False]} - *unhealthy*\\n' \\ f'{HEALTH_EMOJIS[None]} - *unknown*\\n' class NetworkHealthStatus(Enum): INEFFICIENT", "There was an error while getting data 😱\\nAn API endpoint", "an error while getting data 😱\\nAn API endpoint is down!'", "\"The network is safe and efficient again! 
✅\" def get_network_health_warning(network_health_status:", "\"Overbonded\" OPTIMAL = \"Optimal\" UNDBERBONDED = \"Underbonded\" INSECURE = \"Insecure\"", "️⚠ ️ ️⚠️ ️ ⚠ ️⚠ ⚠️ ️⚠ ️⚠ ⚠️", "'😱 There was an error while getting data 😱\\nAn API", "health immediately\\n\" \\ \"⚠️ ️⚠ ️ ️⚠️ ️ ⚠ ️⚠", "is *not responding*!\\nAddress: {node_data['node_address']}\\nIP: {node_data['ip_address']}\\n\" \\ \"\\nCheck it's health immediately\\n\"", "f'\\n*Node health*:\\n{HEALTH_EMOJIS[True]} - *healthy*\\n{HEALTH_EMOJIS[False]} - *unhealthy*\\n' \\ f'{HEALTH_EMOJIS[None]} - *unknown*\\n'", "= \"Overbonded\" OPTIMAL = \"Optimal\" UNDBERBONDED = \"Underbonded\" INSECURE =", "-> str: severity = \"🤒\" if network_health_status is NetworkHealthStatus.INSECURE: severity", "\\ def get_node_health_warning_message(node_data) -> str: return \"⚠️ ️⚠ ️ ️⚠️", "it's health immediately\\n\" \\ \"⚠️ ️⚠ ️ ️⚠️ ️ ⚠", "\\n\" \\ f\"Node is *not responding*!\\nAddress: {node_data['node_address']}\\nIP: {node_data['ip_address']}\\n\" \\ \"\\nCheck", "return \"⚠️ ️⚠ ️ ️⚠️ ️ ⚠ ️⚠ ⚠️ ️⚠", "is not optimal: {network_health_status.value} {severity}\" def get_node_healthy_again_message(node_data) -> str: return", "error while getting data 😱\\nAn API endpoint is down!' HEALTH_LEGEND", "= \"💀\" elif network_health_status is NetworkHealthStatus.INEFFICIENT: severity = \"🦥\" return", "✅\" def get_network_health_warning(network_health_status: NetworkHealthStatus) -> str: severity = \"🤒\" if", "OVERBONDED = \"Overbonded\" OPTIMAL = \"Optimal\" UNDBERBONDED = \"Underbonded\" INSECURE", "⚠️ ️⚠ ️⚠ ⚠️ ️⚠ ️ ️⚠️ ️ ⚠ ️⚠", "getting data 😱\\nAn API endpoint is down!' 
HEALTH_LEGEND = f'\\n*Node", "️⚠ ⚠️ ️⚠ ️⚠ ⚠️ ️⚠ ️ ️⚠️ ️ ⚠", "️ ⚠ ️⚠ ⚠️ ️⚠ ️⚠ ⚠️ ️⚠ ️ ️⚠️", "NETWORK_ERROR = '😱 There was an error while getting data", "= \"Inefficient\" OVERBONDED = \"Overbonded\" OPTIMAL = \"Optimal\" UNDBERBONDED =", "️ ️⚠️ ️ ⚠ ️⚠ ⚠️ \\n\" \\ f\"Node is", "\"💀\" elif network_health_status is NetworkHealthStatus.INEFFICIENT: severity = \"🦥\" return f\"Network", "\"Underbonded\" INSECURE = \"Insecure\" NETWORK_HEALTHY_AGAIN = \"The network is safe", "⚠ ️⚠ ⚠️ ️⚠ ️⚠ ⚠️ ️⚠ ️ ️⚠️ ️", "class NetworkHealthStatus(Enum): INEFFICIENT = \"Inefficient\" OVERBONDED = \"Overbonded\" OPTIMAL =", "def get_node_healthy_again_message(node_data) -> str: return f\"⚕️Node is healthy again⚕️\\nAddress: {node_data['node_address']}\\nIP:", "\"⚠️ ️⚠ ️ ️⚠️ ️ ⚠ ️⚠ ⚠️ ️⚠ ️⚠", "get_node_health_warning_message(node_data) -> str: return \"⚠️ ️⚠ ️ ️⚠️ ️ ⚠", "*unhealthy*\\n' \\ f'{HEALTH_EMOJIS[None]} - *unknown*\\n' class NetworkHealthStatus(Enum): INEFFICIENT = \"Inefficient\"", "⚠️ \\n\" \\ f\"Node is *not responding*!\\nAddress: {node_data['node_address']}\\nIP: {node_data['ip_address']}\\n\" \\", "⚠️ ️⚠ ️ ️⚠️ ️ ⚠ ️⚠ ⚠️ \\n\" \\", "endpoint is down!' HEALTH_LEGEND = f'\\n*Node health*:\\n{HEALTH_EMOJIS[True]} - *healthy*\\n{HEALTH_EMOJIS[False]} -", "healthy again⚕️\\nAddress: {node_data['node_address']}\\nIP: {node_data['ip_address']}\\n\" \\ def get_node_health_warning_message(node_data) -> str: return", "😱\\nAn API endpoint is down!' HEALTH_LEGEND = f'\\n*Node health*:\\n{HEALTH_EMOJIS[True]} -", "efficient again! ✅\" def get_network_health_warning(network_health_status: NetworkHealthStatus) -> str: severity =", "API endpoint is down!' 
HEALTH_LEGEND = f'\\n*Node health*:\\n{HEALTH_EMOJIS[True]} - *healthy*\\n{HEALTH_EMOJIS[False]}", "is NetworkHealthStatus.INSECURE: severity = \"💀\" elif network_health_status is NetworkHealthStatus.INEFFICIENT: severity", "= '😱 There was an error while getting data 😱\\nAn", "{node_data['ip_address']}\\n\" \\ def get_node_health_warning_message(node_data) -> str: return \"⚠️ ️⚠ ️", "constants.globals import HEALTH_EMOJIS NETWORK_ERROR = '😱 There was an error", "{severity}\" def get_node_healthy_again_message(node_data) -> str: return f\"⚕️Node is healthy again⚕️\\nAddress:", "<gh_stars>10-100 from enum import Enum from constants.globals import HEALTH_EMOJIS NETWORK_ERROR", "HEALTH_EMOJIS NETWORK_ERROR = '😱 There was an error while getting", "*unknown*\\n' class NetworkHealthStatus(Enum): INEFFICIENT = \"Inefficient\" OVERBONDED = \"Overbonded\" OPTIMAL", "severity = \"💀\" elif network_health_status is NetworkHealthStatus.INEFFICIENT: severity = \"🦥\"", "️⚠ ️ ️⚠️ ️ ⚠ ️⚠ ⚠️ \\n\" \\ f\"Node", "INEFFICIENT = \"Inefficient\" OVERBONDED = \"Overbonded\" OPTIMAL = \"Optimal\" UNDBERBONDED", "NetworkHealthStatus) -> str: severity = \"🤒\" if network_health_status is NetworkHealthStatus.INSECURE:", "-> str: return \"⚠️ ️⚠ ️ ️⚠️ ️ ⚠ ️⚠", "️⚠ ⚠️ ️⚠ ️ ️⚠️ ️ ⚠ ️⚠ ⚠️ \\n\"", "not optimal: {network_health_status.value} {severity}\" def get_node_healthy_again_message(node_data) -> str: return f\"⚕️Node", "is healthy again⚕️\\nAddress: {node_data['node_address']}\\nIP: {node_data['ip_address']}\\n\" \\ def get_node_health_warning_message(node_data) -> str:", "str: severity = \"🤒\" if network_health_status is NetworkHealthStatus.INSECURE: severity =", "elif network_health_status is NetworkHealthStatus.INEFFICIENT: severity = \"🦥\" return f\"Network health", "get_node_healthy_again_message(node_data) -> str: return f\"⚕️Node is healthy again⚕️\\nAddress: {node_data['node_address']}\\nIP: {node_data['ip_address']}\\n\"", "from constants.globals import HEALTH_EMOJIS NETWORK_ERROR = '😱 
There was an", "\"Inefficient\" OVERBONDED = \"Overbonded\" OPTIMAL = \"Optimal\" UNDBERBONDED = \"Underbonded\"", "NETWORK_HEALTHY_AGAIN = \"The network is safe and efficient again! ✅\"", "network is safe and efficient again! ✅\" def get_network_health_warning(network_health_status: NetworkHealthStatus)", "immediately\\n\" \\ \"⚠️ ️⚠ ️ ️⚠️ ️ ⚠ ️⚠ ⚠️", "health*:\\n{HEALTH_EMOJIS[True]} - *healthy*\\n{HEALTH_EMOJIS[False]} - *unhealthy*\\n' \\ f'{HEALTH_EMOJIS[None]} - *unknown*\\n' class", "️⚠ ️⚠ ⚠️ ️⚠ ️ ️⚠️ ️ ⚠ ️⚠ ⚠️\"", "f'{HEALTH_EMOJIS[None]} - *unknown*\\n' class NetworkHealthStatus(Enum): INEFFICIENT = \"Inefficient\" OVERBONDED =", "\"🦥\" return f\"Network health is not optimal: {network_health_status.value} {severity}\" def", "\\ f'{HEALTH_EMOJIS[None]} - *unknown*\\n' class NetworkHealthStatus(Enum): INEFFICIENT = \"Inefficient\" OVERBONDED", "\"Insecure\" NETWORK_HEALTHY_AGAIN = \"The network is safe and efficient again!", "HEALTH_LEGEND = f'\\n*Node health*:\\n{HEALTH_EMOJIS[True]} - *healthy*\\n{HEALTH_EMOJIS[False]} - *unhealthy*\\n' \\ f'{HEALTH_EMOJIS[None]}", "= \"Optimal\" UNDBERBONDED = \"Underbonded\" INSECURE = \"Insecure\" NETWORK_HEALTHY_AGAIN =", "\\ f\"Node is *not responding*!\\nAddress: {node_data['node_address']}\\nIP: {node_data['ip_address']}\\n\" \\ \"\\nCheck it's", "from enum import Enum from constants.globals import HEALTH_EMOJIS NETWORK_ERROR =", "= f'\\n*Node health*:\\n{HEALTH_EMOJIS[True]} - *healthy*\\n{HEALTH_EMOJIS[False]} - *unhealthy*\\n' \\ f'{HEALTH_EMOJIS[None]} -", "️⚠ ⚠️ \\n\" \\ f\"Node is *not responding*!\\nAddress: {node_data['node_address']}\\nIP: {node_data['ip_address']}\\n\"", "OPTIMAL = \"Optimal\" UNDBERBONDED = \"Underbonded\" INSECURE = \"Insecure\" NETWORK_HEALTHY_AGAIN", "= \"The network is safe and efficient again! 
✅\" def", "again⚕️\\nAddress: {node_data['node_address']}\\nIP: {node_data['ip_address']}\\n\" \\ def get_node_health_warning_message(node_data) -> str: return \"⚠️", "{node_data['ip_address']}\\n\" \\ \"\\nCheck it's health immediately\\n\" \\ \"⚠️ ️⚠ ️", "severity = \"🦥\" return f\"Network health is not optimal: {network_health_status.value}", "- *unhealthy*\\n' \\ f'{HEALTH_EMOJIS[None]} - *unknown*\\n' class NetworkHealthStatus(Enum): INEFFICIENT =", "{node_data['node_address']}\\nIP: {node_data['ip_address']}\\n\" \\ \"\\nCheck it's health immediately\\n\" \\ \"⚠️ ️⚠", "responding*!\\nAddress: {node_data['node_address']}\\nIP: {node_data['ip_address']}\\n\" \\ \"\\nCheck it's health immediately\\n\" \\ \"⚠️", "= \"🤒\" if network_health_status is NetworkHealthStatus.INSECURE: severity = \"💀\" elif", "is down!' HEALTH_LEGEND = f'\\n*Node health*:\\n{HEALTH_EMOJIS[True]} - *healthy*\\n{HEALTH_EMOJIS[False]} - *unhealthy*\\n'", "️ ⚠ ️⚠ ⚠️ \\n\" \\ f\"Node is *not responding*!\\nAddress:", "return f\"Network health is not optimal: {network_health_status.value} {severity}\" def get_node_healthy_again_message(node_data)", "= \"Insecure\" NETWORK_HEALTHY_AGAIN = \"The network is safe and efficient", "is safe and efficient again! 
✅\" def get_network_health_warning(network_health_status: NetworkHealthStatus) ->", "UNDBERBONDED = \"Underbonded\" INSECURE = \"Insecure\" NETWORK_HEALTHY_AGAIN = \"The network", "import Enum from constants.globals import HEALTH_EMOJIS NETWORK_ERROR = '😱 There", "NetworkHealthStatus.INSECURE: severity = \"💀\" elif network_health_status is NetworkHealthStatus.INEFFICIENT: severity =", "f\"Network health is not optimal: {network_health_status.value} {severity}\" def get_node_healthy_again_message(node_data) ->", "str: return \"⚠️ ️⚠ ️ ️⚠️ ️ ⚠ ️⚠ ⚠️", "-> str: return f\"⚕️Node is healthy again⚕️\\nAddress: {node_data['node_address']}\\nIP: {node_data['ip_address']}\\n\" \\", "f\"Node is *not responding*!\\nAddress: {node_data['node_address']}\\nIP: {node_data['ip_address']}\\n\" \\ \"\\nCheck it's health", "\\ \"⚠️ ️⚠ ️ ️⚠️ ️ ⚠ ️⚠ ⚠️ ️⚠", "return f\"⚕️Node is healthy again⚕️\\nAddress: {node_data['node_address']}\\nIP: {node_data['ip_address']}\\n\" \\ def get_node_health_warning_message(node_data)", "str: return f\"⚕️Node is healthy again⚕️\\nAddress: {node_data['node_address']}\\nIP: {node_data['ip_address']}\\n\" \\ def", "down!' HEALTH_LEGEND = f'\\n*Node health*:\\n{HEALTH_EMOJIS[True]} - *healthy*\\n{HEALTH_EMOJIS[False]} - *unhealthy*\\n' \\", "Enum from constants.globals import HEALTH_EMOJIS NETWORK_ERROR = '😱 There was", "f\"⚕️Node is healthy again⚕️\\nAddress: {node_data['node_address']}\\nIP: {node_data['ip_address']}\\n\" \\ def get_node_health_warning_message(node_data) ->", "import HEALTH_EMOJIS NETWORK_ERROR = '😱 There was an error while", "= \"Underbonded\" INSECURE = \"Insecure\" NETWORK_HEALTHY_AGAIN = \"The network is", "data 😱\\nAn API endpoint is down!' HEALTH_LEGEND = f'\\n*Node health*:\\n{HEALTH_EMOJIS[True]}", "️⚠️ ️ ⚠ ️⚠ ⚠️ ️⚠ ️⚠ ⚠️ ️⚠ ️", "get_network_health_warning(network_health_status: NetworkHealthStatus) -> str: severity = \"🤒\" if network_health_status is", "was an error while getting data 😱\\nAn API endpoint is" ]
[ "\" \" + \"=\" * 40) print(text) print(\"=\" * 80)", "= \"{}\".format(testo) testoBuono=testoBuono.replace(\"(\",\"\") testoBuono=testoBuono.replace(\")\",\"\") testoBuono=testoBuono.replace(\"'\",\"\") testoBuono=testoBuono.replace(\",\",\"\") print(testoBuono) client.info() # Riceve", "raw_text = input(\"Model prompt >>> \") context_tokens = enc.encode(raw_text) generated", "file is empty! Write something yourself.') raw_text = input(\"Model prompt", "samples longer than window size: %s\" % hparams.n_ctx) # Avvio", "= pytumblr.TumblrRestClient( '', '', '', '' ) # Al fine", "in range(batch_size)] })[:, len(context_tokens):] for i in range(batch_size): generated +=", "ValueError(\"Can't get samples longer than window size: %s\" % hparams.n_ctx)", "fine di mantenere la sicurezza del mio account le due", "range(batch_size)] })[:, len(context_tokens):] for i in range(batch_size): generated += 1", "range(batch_size): generated += 1 text = enc.decode(out[i]) print(\"=\" * 40", "al DB mydb = mysql.connector.connect( host=\"localhost\", user=\"root\", password=\"\", database=\"cometa\" )", "per la connessione a Tumblr sono state eliminate da questo", "numpy as np import tensorflow as tf import pytumblr import", "database=\"cometa\" ) print(mydb) cursor = mydb.cursor() # Generazione query print(\"prima", "+ \" \" + \"=\" * 40) print(text) print(\"=\" *", "per verificare che il prompt non sia eccessivamente lungo if", "pytumblr.TumblrRestClient( '', '', '', '' ) # Al fine di", "import mysql.connector import datetime from random import seed import model,", "\" + \"=\" * 40) print(text) print(\"=\" * 80) #", "not raw_text: print('The file is empty! 
Write something yourself.') raw_text", "continua: raw_text = testoBuono # raw_text = f.read() while not", "= tf.train.latest_checkpoint(os.path.join(models_dir, model_name)) saver.restore(sess, ckpt) while continua: raw_text = testoBuono", "* 80) # Pubblico il testo generato client.create_text(blogName, state=\"published\", slug=\"testing-text-posts\",title=raw_text,", "nsamples % batch_size == 0 # Carico il modello dalla", "in range(nsamples // batch_size): out = sess.run(output, feed_dict={ context: [context_tokens", "due coppie di chiavi per la connessione a Tumblr sono", "il modello con i parametri with tf.Session(graph=tf.Graph()) as sess: context", "batch_size is None: batch_size = 1 assert nsamples % batch_size", ") print(mydb) cursor = mydb.cursor() # Generazione query print(\"prima di", "# Carico il modello dalla directory enc = encoder.get_encoder(model_name, models_dir)", "Generazione query print(\"prima di eseguire la query\") cursor.execute(\"SELECT testo FROM", "sia eccessivamente lungo if length is None: length = hparams.n_ctx", "def interact_model( model_name='1558M', seed=None, nsamples=1, batch_size=1, length=None, temperature=.7, top_k=10, top_p=1,", "BY RAND() LIMIT 1\") print(\"dopo query\") for (testo) in cursor:", "import numpy as np import tensorflow as tf import pytumblr", "trattiene le informazioni del profilo blogName='unlikelycrownkitty' models_dir = os.path.expanduser(os.path.expandvars(models_dir)) if", ") continua=True # Inizio la generazione del testo saver =", "'', '', '' ) # Al fine di mantenere la", "cursor.execute(\"SELECT testo FROM prompts ORDER BY RAND() LIMIT 1\") print(\"dopo", "da questo file. 
# Connessione al DB mydb = mysql.connector.connect(", "%s\" % hparams.n_ctx) # Avvio il modello con i parametri", "np import tensorflow as tf import pytumblr import mysql.connector import", "yourself.') raw_text = input(\"Model prompt >>> \") context_tokens = enc.encode(raw_text)", "'yes']: continua=True else: continua=False exit() if __name__ == '__main__': fire.Fire(interact_model())", "'', '', '', '' ) # Al fine di mantenere", "= model.default_hparams() with open(os.path.join(models_dir, model_name, 'hparams.json')) as f: hparams.override_from_dict(json.load(f)) #", "slug=\"testing-text-posts\",title=raw_text, body=text) print('Continue? y/n') risposta=input() if risposta.lower() in ['y', 'yes']:", "hparams=hparams, length=length, context=context, batch_size=batch_size, temperature=temperature, top_k=top_k, top_p=top_p ) continua=True #", "account le due coppie di chiavi per la connessione a", "% batch_size == 0 # Carico il modello dalla directory", "DB mydb = mysql.connector.connect( host=\"localhost\", user=\"root\", password=\"\", database=\"cometa\" ) print(mydb)", "80) # Pubblico il testo generato client.create_text(blogName, state=\"published\", slug=\"testing-text-posts\",title=raw_text, body=text)", "informazioni del profilo blogName='unlikelycrownkitty' models_dir = os.path.expanduser(os.path.expandvars(models_dir)) if batch_size is", "print('Continue? 
y/n') risposta=input() if risposta.lower() in ['y', 'yes']: continua=True else:", "e trattiene le informazioni del profilo blogName='unlikelycrownkitty' models_dir = os.path.expanduser(os.path.expandvars(models_dir))", "cursor = mydb.cursor() # Generazione query print(\"prima di eseguire la", "str(generated) + \" \" + \"=\" * 40) print(text) print(\"=\"", "<reponame>RanHerOver/cometaai import random import fire import json import os import", "while continua: raw_text = testoBuono # raw_text = f.read() while", "client.info() # Riceve e trattiene le informazioni del profilo blogName='unlikelycrownkitty'", "del mio account le due coppie di chiavi per la", "del testo saver = tf.train.Saver() ckpt = tf.train.latest_checkpoint(os.path.join(models_dir, model_name)) saver.restore(sess,", "random import fire import json import os import numpy as", "la sicurezza del mio account le due coppie di chiavi", "range(nsamples // batch_size): out = sess.run(output, feed_dict={ context: [context_tokens for", "modello dalla directory enc = encoder.get_encoder(model_name, models_dir) hparams = model.default_hparams()", "* 40) print(text) print(\"=\" * 80) # Pubblico il testo", "generazione del testo saver = tf.train.Saver() ckpt = tf.train.latest_checkpoint(os.path.join(models_dir, model_name))", "= 1 assert nsamples % batch_size == 0 # Carico", "mydb = mysql.connector.connect( host=\"localhost\", user=\"root\", password=\"\", database=\"cometa\" ) print(mydb) cursor", "\" + str(generated) + \" \" + \"=\" * 40)", "context=context, batch_size=batch_size, temperature=temperature, top_k=top_k, top_p=top_p ) continua=True # Inizio la", "= 0 for _ in range(nsamples // batch_size): out =", "Connessione al DB mydb = mysql.connector.connect( host=\"localhost\", user=\"root\", password=\"\", database=\"cometa\"", "than window size: %s\" % hparams.n_ctx) # Avvio il modello", "something yourself.') raw_text = input(\"Model prompt >>> \") context_tokens =", "mysql.connector import datetime from 
random import seed import model, sample,", "datetime from random import seed import model, sample, encoder def", "print(\"=\" * 80) # Pubblico il testo generato client.create_text(blogName, state=\"published\",", "y/n') risposta=input() if risposta.lower() in ['y', 'yes']: continua=True else: continua=False", "import pytumblr import mysql.connector import datetime from random import seed", "mantenere la sicurezza del mio account le due coppie di", "sicurezza del mio account le due coppie di chiavi per", "i parametri with tf.Session(graph=tf.Graph()) as sess: context = tf.placeholder(tf.int32, [batch_size,", "as tf import pytumblr import mysql.connector import datetime from random", "in ['y', 'yes']: continua=True else: continua=False exit() if __name__ ==", "_ in range(nsamples // batch_size): out = sess.run(output, feed_dict={ context:", "= enc.decode(out[i]) print(\"=\" * 40 + \" SAMPLE \" +", "top_p=top_p ) continua=True # Inizio la generazione del testo saver", "[context_tokens for _ in range(batch_size)] })[:, len(context_tokens):] for i in", "ckpt) while continua: raw_text = testoBuono # raw_text = f.read()", "Avvio il modello con i parametri with tf.Session(graph=tf.Graph()) as sess:", "length = hparams.n_ctx // 2 elif length > hparams.n_ctx: raise", "random import seed import model, sample, encoder def interact_model( model_name='1558M',", "connessione a Tumblr sono state eliminate da questo file. 
#", "enc.encode(raw_text) generated = 0 for _ in range(nsamples // batch_size):", "print(mydb) cursor = mydb.cursor() # Generazione query print(\"prima di eseguire", "os.path.expanduser(os.path.expandvars(models_dir)) if batch_size is None: batch_size = 1 assert nsamples", "hparams.override_from_dict(json.load(f)) # Eseguo un controllo per verificare che il prompt", "coppie di chiavi per la connessione a Tumblr sono state", "query\") for (testo) in cursor: print(\"{}\".format(testo)) # Formattazione del prompt", "import random import fire import json import os import numpy", "while not raw_text: print('The file is empty! Write something yourself.')", "hparams = model.default_hparams() with open(os.path.join(models_dir, model_name, 'hparams.json')) as f: hparams.override_from_dict(json.load(f))", "top_k=10, top_p=1, models_dir='models', ): # Autenticazione client = pytumblr.TumblrRestClient( '',", "context: [context_tokens for _ in range(batch_size)] })[:, len(context_tokens):] for i", "host=\"localhost\", user=\"root\", password=\"\", database=\"cometa\" ) print(mydb) cursor = mydb.cursor() #", "= testoBuono # raw_text = f.read() while not raw_text: print('The", "# Pubblico il testo generato client.create_text(blogName, state=\"published\", slug=\"testing-text-posts\",title=raw_text, body=text) print('Continue?", "from random import seed import model, sample, encoder def interact_model(", "as sess: context = tf.placeholder(tf.int32, [batch_size, None]) np.random.seed(seed) tf.set_random_seed(seed) output", "risposta=input() if risposta.lower() in ['y', 'yes']: continua=True else: continua=False exit()", "model_name, 'hparams.json')) as f: hparams.override_from_dict(json.load(f)) # Eseguo un controllo per", "\") context_tokens = enc.encode(raw_text) generated = 0 for _ in", "in cursor: print(\"{}\".format(testo)) # Formattazione del prompt testoBuono = \"{}\".format(testo)", "= mysql.connector.connect( host=\"localhost\", user=\"root\", password=\"\", 
database=\"cometa\" ) print(mydb) cursor =", "_ in range(batch_size)] })[:, len(context_tokens):] for i in range(batch_size): generated", "a Tumblr sono state eliminate da questo file. # Connessione", "file. # Connessione al DB mydb = mysql.connector.connect( host=\"localhost\", user=\"root\",", "con i parametri with tf.Session(graph=tf.Graph()) as sess: context = tf.placeholder(tf.int32,", "sample.sample_sequence( hparams=hparams, length=length, context=context, batch_size=batch_size, temperature=temperature, top_k=top_k, top_p=top_p ) continua=True", "get samples longer than window size: %s\" % hparams.n_ctx) #", "with open(os.path.join(models_dir, model_name, 'hparams.json')) as f: hparams.override_from_dict(json.load(f)) # Eseguo un", "hparams.n_ctx // 2 elif length > hparams.n_ctx: raise ValueError(\"Can't get", "Autenticazione client = pytumblr.TumblrRestClient( '', '', '', '' ) #", "tensorflow as tf import pytumblr import mysql.connector import datetime from", "* 40 + \" SAMPLE \" + str(generated) + \"", "print(text) print(\"=\" * 80) # Pubblico il testo generato client.create_text(blogName,", "context_tokens = enc.encode(raw_text) generated = 0 for _ in range(nsamples", "= sess.run(output, feed_dict={ context: [context_tokens for _ in range(batch_size)] })[:,", "testoBuono=testoBuono.replace(\"(\",\"\") testoBuono=testoBuono.replace(\")\",\"\") testoBuono=testoBuono.replace(\"'\",\"\") testoBuono=testoBuono.replace(\",\",\"\") print(testoBuono) client.info() # Riceve e trattiene", "+= 1 text = enc.decode(out[i]) print(\"=\" * 40 + \"", "prompts ORDER BY RAND() LIMIT 1\") print(\"dopo query\") for (testo)", "# Formattazione del prompt testoBuono = \"{}\".format(testo) testoBuono=testoBuono.replace(\"(\",\"\") testoBuono=testoBuono.replace(\")\",\"\") testoBuono=testoBuono.replace(\"'\",\"\")", "top_p=1, models_dir='models', ): # Autenticazione client = pytumblr.TumblrRestClient( '', '',", "di eseguire la query\") cursor.execute(\"SELECT testo FROM prompts 
ORDER BY", "Carico il modello dalla directory enc = encoder.get_encoder(model_name, models_dir) hparams", "# Avvio il modello con i parametri with tf.Session(graph=tf.Graph()) as", "temperature=temperature, top_k=top_k, top_p=top_p ) continua=True # Inizio la generazione del", "+ \" SAMPLE \" + str(generated) + \" \" +", "batch_size = 1 assert nsamples % batch_size == 0 #", "import fire import json import os import numpy as np", "models_dir = os.path.expanduser(os.path.expandvars(models_dir)) if batch_size is None: batch_size = 1", "tf.Session(graph=tf.Graph()) as sess: context = tf.placeholder(tf.int32, [batch_size, None]) np.random.seed(seed) tf.set_random_seed(seed)", "# Inizio la generazione del testo saver = tf.train.Saver() ckpt", "la connessione a Tumblr sono state eliminate da questo file.", "mydb.cursor() # Generazione query print(\"prima di eseguire la query\") cursor.execute(\"SELECT", "encoder def interact_model( model_name='1558M', seed=None, nsamples=1, batch_size=1, length=None, temperature=.7, top_k=10,", "= tf.train.Saver() ckpt = tf.train.latest_checkpoint(os.path.join(models_dir, model_name)) saver.restore(sess, ckpt) while continua:", "length is None: length = hparams.n_ctx // 2 elif length", "elif length > hparams.n_ctx: raise ValueError(\"Can't get samples longer than", "saver.restore(sess, ckpt) while continua: raw_text = testoBuono # raw_text =", "len(context_tokens):] for i in range(batch_size): generated += 1 text =", "Inizio la generazione del testo saver = tf.train.Saver() ckpt =", "mio account le due coppie di chiavi per la connessione", "non sia eccessivamente lungo if length is None: length =", "[batch_size, None]) np.random.seed(seed) tf.set_random_seed(seed) output = sample.sample_sequence( hparams=hparams, length=length, context=context,", "context = tf.placeholder(tf.int32, [batch_size, None]) np.random.seed(seed) tf.set_random_seed(seed) output = sample.sample_sequence(", "eseguire la query\") cursor.execute(\"SELECT testo FROM 
prompts ORDER BY RAND()", "is None: batch_size = 1 assert nsamples % batch_size ==", "LIMIT 1\") print(\"dopo query\") for (testo) in cursor: print(\"{}\".format(testo)) #", "'' ) # Al fine di mantenere la sicurezza del", "la query\") cursor.execute(\"SELECT testo FROM prompts ORDER BY RAND() LIMIT", "feed_dict={ context: [context_tokens for _ in range(batch_size)] })[:, len(context_tokens):] for", "np.random.seed(seed) tf.set_random_seed(seed) output = sample.sample_sequence( hparams=hparams, length=length, context=context, batch_size=batch_size, temperature=temperature,", "model_name)) saver.restore(sess, ckpt) while continua: raw_text = testoBuono # raw_text", "import seed import model, sample, encoder def interact_model( model_name='1558M', seed=None,", "\"{}\".format(testo) testoBuono=testoBuono.replace(\"(\",\"\") testoBuono=testoBuono.replace(\")\",\"\") testoBuono=testoBuono.replace(\"'\",\"\") testoBuono=testoBuono.replace(\",\",\"\") print(testoBuono) client.info() # Riceve e", "if batch_size is None: batch_size = 1 assert nsamples %", "prompt testoBuono = \"{}\".format(testo) testoBuono=testoBuono.replace(\"(\",\"\") testoBuono=testoBuono.replace(\")\",\"\") testoBuono=testoBuono.replace(\"'\",\"\") testoBuono=testoBuono.replace(\",\",\"\") print(testoBuono) client.info()", "saver = tf.train.Saver() ckpt = tf.train.latest_checkpoint(os.path.join(models_dir, model_name)) saver.restore(sess, ckpt) while", "longer than window size: %s\" % hparams.n_ctx) # Avvio il", "i in range(batch_size): generated += 1 text = enc.decode(out[i]) print(\"=\"", "with tf.Session(graph=tf.Graph()) as sess: context = tf.placeholder(tf.int32, [batch_size, None]) np.random.seed(seed)", "RAND() LIMIT 1\") print(\"dopo query\") for (testo) in cursor: print(\"{}\".format(testo))", "raw_text = f.read() while not raw_text: print('The file is empty!", "\" SAMPLE \" + str(generated) + \" \" + \"=\"", "query print(\"prima di eseguire la query\") cursor.execute(\"SELECT testo FROM 
prompts", "sono state eliminate da questo file. # Connessione al DB", "length=length, context=context, batch_size=batch_size, temperature=temperature, top_k=top_k, top_p=top_p ) continua=True # Inizio", "print(\"=\" * 40 + \" SAMPLE \" + str(generated) +", "body=text) print('Continue? y/n') risposta=input() if risposta.lower() in ['y', 'yes']: continua=True", "testoBuono = \"{}\".format(testo) testoBuono=testoBuono.replace(\"(\",\"\") testoBuono=testoBuono.replace(\")\",\"\") testoBuono=testoBuono.replace(\"'\",\"\") testoBuono=testoBuono.replace(\",\",\"\") print(testoBuono) client.info() #", "top_k=top_k, top_p=top_p ) continua=True # Inizio la generazione del testo", "modello con i parametri with tf.Session(graph=tf.Graph()) as sess: context =", "prompt non sia eccessivamente lungo if length is None: length", "for (testo) in cursor: print(\"{}\".format(testo)) # Formattazione del prompt testoBuono", "model_name='1558M', seed=None, nsamples=1, batch_size=1, length=None, temperature=.7, top_k=10, top_p=1, models_dir='models', ):", "model.default_hparams() with open(os.path.join(models_dir, model_name, 'hparams.json')) as f: hparams.override_from_dict(json.load(f)) # Eseguo", "None]) np.random.seed(seed) tf.set_random_seed(seed) output = sample.sample_sequence( hparams=hparams, length=length, context=context, batch_size=batch_size,", "['y', 'yes']: continua=True else: continua=False exit() if __name__ == '__main__':", "mysql.connector.connect( host=\"localhost\", user=\"root\", password=\"\", database=\"cometa\" ) print(mydb) cursor = mydb.cursor()", "prompt >>> \") context_tokens = enc.encode(raw_text) generated = 0 for", "lungo if length is None: length = hparams.n_ctx // 2", "= sample.sample_sequence( hparams=hparams, length=length, context=context, batch_size=batch_size, temperature=temperature, top_k=top_k, top_p=top_p )", "for _ in range(batch_size)] })[:, len(context_tokens):] for i in range(batch_size):", "1 assert nsamples % batch_size == 0 # Carico il", 
"= enc.encode(raw_text) generated = 0 for _ in range(nsamples //", "sess: context = tf.placeholder(tf.int32, [batch_size, None]) np.random.seed(seed) tf.set_random_seed(seed) output =", "seed=None, nsamples=1, batch_size=1, length=None, temperature=.7, top_k=10, top_p=1, models_dir='models', ): #", "as f: hparams.override_from_dict(json.load(f)) # Eseguo un controllo per verificare che", "eccessivamente lungo if length is None: length = hparams.n_ctx //", "is None: length = hparams.n_ctx // 2 elif length >", "raise ValueError(\"Can't get samples longer than window size: %s\" %", "import os import numpy as np import tensorflow as tf", "ORDER BY RAND() LIMIT 1\") print(\"dopo query\") for (testo) in", "del profilo blogName='unlikelycrownkitty' models_dir = os.path.expanduser(os.path.expandvars(models_dir)) if batch_size is None:", "import model, sample, encoder def interact_model( model_name='1558M', seed=None, nsamples=1, batch_size=1,", "Pubblico il testo generato client.create_text(blogName, state=\"published\", slug=\"testing-text-posts\",title=raw_text, body=text) print('Continue? y/n')", "tf import pytumblr import mysql.connector import datetime from random import", "os import numpy as np import tensorflow as tf import", "directory enc = encoder.get_encoder(model_name, models_dir) hparams = model.default_hparams() with open(os.path.join(models_dir,", "// batch_size): out = sess.run(output, feed_dict={ context: [context_tokens for _", "# Eseguo un controllo per verificare che il prompt non", "state eliminate da questo file. 
# Connessione al DB mydb", "'hparams.json')) as f: hparams.override_from_dict(json.load(f)) # Eseguo un controllo per verificare", "print(\"{}\".format(testo)) # Formattazione del prompt testoBuono = \"{}\".format(testo) testoBuono=testoBuono.replace(\"(\",\"\") testoBuono=testoBuono.replace(\")\",\"\")", "1\") print(\"dopo query\") for (testo) in cursor: print(\"{}\".format(testo)) # Formattazione", "le informazioni del profilo blogName='unlikelycrownkitty' models_dir = os.path.expanduser(os.path.expandvars(models_dir)) if batch_size", "batch_size=batch_size, temperature=temperature, top_k=top_k, top_p=top_p ) continua=True # Inizio la generazione", "= f.read() while not raw_text: print('The file is empty! Write", "length=None, temperature=.7, top_k=10, top_p=1, models_dir='models', ): # Autenticazione client =", "dalla directory enc = encoder.get_encoder(model_name, models_dir) hparams = model.default_hparams() with", "Tumblr sono state eliminate da questo file. # Connessione al", "= input(\"Model prompt >>> \") context_tokens = enc.encode(raw_text) generated =", "hparams.n_ctx) # Avvio il modello con i parametri with tf.Session(graph=tf.Graph())", "1 text = enc.decode(out[i]) print(\"=\" * 40 + \" SAMPLE", "password=\"\", database=\"cometa\" ) print(mydb) cursor = mydb.cursor() # Generazione query", "f.read() while not raw_text: print('The file is empty! Write something", "generato client.create_text(blogName, state=\"published\", slug=\"testing-text-posts\",title=raw_text, body=text) print('Continue? y/n') risposta=input() if risposta.lower()", "eliminate da questo file. 
# Connessione al DB mydb =", "enc = encoder.get_encoder(model_name, models_dir) hparams = model.default_hparams() with open(os.path.join(models_dir, model_name,", "for _ in range(nsamples // batch_size): out = sess.run(output, feed_dict={", "testoBuono=testoBuono.replace(\"'\",\"\") testoBuono=testoBuono.replace(\",\",\"\") print(testoBuono) client.info() # Riceve e trattiene le informazioni", "enc.decode(out[i]) print(\"=\" * 40 + \" SAMPLE \" + str(generated)", "blogName='unlikelycrownkitty' models_dir = os.path.expanduser(os.path.expandvars(models_dir)) if batch_size is None: batch_size =", "length > hparams.n_ctx: raise ValueError(\"Can't get samples longer than window", "json import os import numpy as np import tensorflow as", "FROM prompts ORDER BY RAND() LIMIT 1\") print(\"dopo query\") for", "assert nsamples % batch_size == 0 # Carico il modello", "tf.train.latest_checkpoint(os.path.join(models_dir, model_name)) saver.restore(sess, ckpt) while continua: raw_text = testoBuono #", "batch_size == 0 # Carico il modello dalla directory enc", "model, sample, encoder def interact_model( model_name='1558M', seed=None, nsamples=1, batch_size=1, length=None,", "# Al fine di mantenere la sicurezza del mio account", "models_dir='models', ): # Autenticazione client = pytumblr.TumblrRestClient( '', '', '',", "size: %s\" % hparams.n_ctx) # Avvio il modello con i", "SAMPLE \" + str(generated) + \" \" + \"=\" *", "= mydb.cursor() # Generazione query print(\"prima di eseguire la query\")", "print(testoBuono) client.info() # Riceve e trattiene le informazioni del profilo", "risposta.lower() in ['y', 'yes']: continua=True else: continua=False exit() if __name__", "di mantenere la sicurezza del mio account le due coppie", "== 0 # Carico il modello dalla directory enc =", "print(\"prima di eseguire la query\") cursor.execute(\"SELECT testo FROM prompts ORDER", "for i in range(batch_size): generated += 1 text = enc.decode(out[i])", "il modello dalla directory enc = 
encoder.get_encoder(model_name, models_dir) hparams =", "batch_size=1, length=None, temperature=.7, top_k=10, top_p=1, models_dir='models', ): # Autenticazione client", "profilo blogName='unlikelycrownkitty' models_dir = os.path.expanduser(os.path.expandvars(models_dir)) if batch_size is None: batch_size", "= tf.placeholder(tf.int32, [batch_size, None]) np.random.seed(seed) tf.set_random_seed(seed) output = sample.sample_sequence( hparams=hparams,", "tf.set_random_seed(seed) output = sample.sample_sequence( hparams=hparams, length=length, context=context, batch_size=batch_size, temperature=temperature, top_k=top_k,", "40) print(text) print(\"=\" * 80) # Pubblico il testo generato", "client.create_text(blogName, state=\"published\", slug=\"testing-text-posts\",title=raw_text, body=text) print('Continue? y/n') risposta=input() if risposta.lower() in", "import datetime from random import seed import model, sample, encoder", "testoBuono=testoBuono.replace(\",\",\"\") print(testoBuono) client.info() # Riceve e trattiene le informazioni del", "user=\"root\", password=\"\", database=\"cometa\" ) print(mydb) cursor = mydb.cursor() # Generazione", "window size: %s\" % hparams.n_ctx) # Avvio il modello con", "(testo) in cursor: print(\"{}\".format(testo)) # Formattazione del prompt testoBuono =", "client = pytumblr.TumblrRestClient( '', '', '', '' ) # Al", "import json import os import numpy as np import tensorflow", "models_dir) hparams = model.default_hparams() with open(os.path.join(models_dir, model_name, 'hparams.json')) as f:", ">>> \") context_tokens = enc.encode(raw_text) generated = 0 for _", ") # Al fine di mantenere la sicurezza del mio", "Riceve e trattiene le informazioni del profilo blogName='unlikelycrownkitty' models_dir =", "Write something yourself.') raw_text = input(\"Model prompt >>> \") context_tokens", "out = sess.run(output, feed_dict={ context: [context_tokens for _ in range(batch_size)]", "None: batch_size = 1 assert nsamples % batch_size == 0", "% 
hparams.n_ctx) # Avvio il modello con i parametri with", "Al fine di mantenere la sicurezza del mio account le", "temperature=.7, top_k=10, top_p=1, models_dir='models', ): # Autenticazione client = pytumblr.TumblrRestClient(", "import tensorflow as tf import pytumblr import mysql.connector import datetime", "# raw_text = f.read() while not raw_text: print('The file is", "'', '' ) # Al fine di mantenere la sicurezza", "2 elif length > hparams.n_ctx: raise ValueError(\"Can't get samples longer", "tf.placeholder(tf.int32, [batch_size, None]) np.random.seed(seed) tf.set_random_seed(seed) output = sample.sample_sequence( hparams=hparams, length=length,", "che il prompt non sia eccessivamente lungo if length is", "chiavi per la connessione a Tumblr sono state eliminate da", "): # Autenticazione client = pytumblr.TumblrRestClient( '', '', '', ''", "text = enc.decode(out[i]) print(\"=\" * 40 + \" SAMPLE \"", "if risposta.lower() in ['y', 'yes']: continua=True else: continua=False exit() if", "testoBuono # raw_text = f.read() while not raw_text: print('The file", "40 + \" SAMPLE \" + str(generated) + \" \"", "nsamples=1, batch_size=1, length=None, temperature=.7, top_k=10, top_p=1, models_dir='models', ): # Autenticazione", "sess.run(output, feed_dict={ context: [context_tokens for _ in range(batch_size)] })[:, len(context_tokens):]", "is empty! 
Write something yourself.') raw_text = input(\"Model prompt >>>", "continua=True # Inizio la generazione del testo saver = tf.train.Saver()", "sample, encoder def interact_model( model_name='1558M', seed=None, nsamples=1, batch_size=1, length=None, temperature=.7,", "raw_text = testoBuono # raw_text = f.read() while not raw_text:", "+ str(generated) + \" \" + \"=\" * 40) print(text)", "\"=\" * 40) print(text) print(\"=\" * 80) # Pubblico il", "# Generazione query print(\"prima di eseguire la query\") cursor.execute(\"SELECT testo", "testo saver = tf.train.Saver() ckpt = tf.train.latest_checkpoint(os.path.join(models_dir, model_name)) saver.restore(sess, ckpt)", "if length is None: length = hparams.n_ctx // 2 elif", "None: length = hparams.n_ctx // 2 elif length > hparams.n_ctx:", "+ \"=\" * 40) print(text) print(\"=\" * 80) # Pubblico", "generated += 1 text = enc.decode(out[i]) print(\"=\" * 40 +", "cursor: print(\"{}\".format(testo)) # Formattazione del prompt testoBuono = \"{}\".format(testo) testoBuono=testoBuono.replace(\"(\",\"\")", "> hparams.n_ctx: raise ValueError(\"Can't get samples longer than window size:", "as np import tensorflow as tf import pytumblr import mysql.connector", "testoBuono=testoBuono.replace(\")\",\"\") testoBuono=testoBuono.replace(\"'\",\"\") testoBuono=testoBuono.replace(\",\",\"\") print(testoBuono) client.info() # Riceve e trattiene le", "Formattazione del prompt testoBuono = \"{}\".format(testo) testoBuono=testoBuono.replace(\"(\",\"\") testoBuono=testoBuono.replace(\")\",\"\") testoBuono=testoBuono.replace(\"'\",\"\") testoBuono=testoBuono.replace(\",\",\"\")", "0 for _ in range(nsamples // batch_size): out = sess.run(output,", "output = sample.sample_sequence( hparams=hparams, length=length, context=context, batch_size=batch_size, temperature=temperature, top_k=top_k, top_p=top_p", "query\") cursor.execute(\"SELECT testo FROM prompts ORDER BY RAND() LIMIT 1\")", "Eseguo un controllo per verificare che il prompt non sia", "0 # 
Carico il modello dalla directory enc = encoder.get_encoder(model_name,", "fire import json import os import numpy as np import", "un controllo per verificare che il prompt non sia eccessivamente", "# Riceve e trattiene le informazioni del profilo blogName='unlikelycrownkitty' models_dir", "# Connessione al DB mydb = mysql.connector.connect( host=\"localhost\", user=\"root\", password=\"\",", "generated = 0 for _ in range(nsamples // batch_size): out", "verificare che il prompt non sia eccessivamente lungo if length", "questo file. # Connessione al DB mydb = mysql.connector.connect( host=\"localhost\",", "raw_text: print('The file is empty! Write something yourself.') raw_text =", "parametri with tf.Session(graph=tf.Graph()) as sess: context = tf.placeholder(tf.int32, [batch_size, None])", "encoder.get_encoder(model_name, models_dir) hparams = model.default_hparams() with open(os.path.join(models_dir, model_name, 'hparams.json')) as", "})[:, len(context_tokens):] for i in range(batch_size): generated += 1 text", "del prompt testoBuono = \"{}\".format(testo) testoBuono=testoBuono.replace(\"(\",\"\") testoBuono=testoBuono.replace(\")\",\"\") testoBuono=testoBuono.replace(\"'\",\"\") testoBuono=testoBuono.replace(\",\",\"\") print(testoBuono)", "open(os.path.join(models_dir, model_name, 'hparams.json')) as f: hparams.override_from_dict(json.load(f)) # Eseguo un controllo", "interact_model( model_name='1558M', seed=None, nsamples=1, batch_size=1, length=None, temperature=.7, top_k=10, top_p=1, models_dir='models',", "il prompt non sia eccessivamente lungo if length is None:", "pytumblr import mysql.connector import datetime from random import seed import", "hparams.n_ctx: raise ValueError(\"Can't get samples longer than window size: %s\"", "= os.path.expanduser(os.path.expandvars(models_dir)) if batch_size is None: batch_size = 1 assert", "di chiavi per la connessione a Tumblr sono state eliminate", "testo generato client.create_text(blogName, state=\"published\", 
slug=\"testing-text-posts\",title=raw_text, body=text) print('Continue? y/n') risposta=input() if", "in range(batch_size): generated += 1 text = enc.decode(out[i]) print(\"=\" *", "batch_size): out = sess.run(output, feed_dict={ context: [context_tokens for _ in", "la generazione del testo saver = tf.train.Saver() ckpt = tf.train.latest_checkpoint(os.path.join(models_dir,", "le due coppie di chiavi per la connessione a Tumblr", "# Autenticazione client = pytumblr.TumblrRestClient( '', '', '', '' )", "input(\"Model prompt >>> \") context_tokens = enc.encode(raw_text) generated = 0", "ckpt = tf.train.latest_checkpoint(os.path.join(models_dir, model_name)) saver.restore(sess, ckpt) while continua: raw_text =", "// 2 elif length > hparams.n_ctx: raise ValueError(\"Can't get samples", "state=\"published\", slug=\"testing-text-posts\",title=raw_text, body=text) print('Continue? y/n') risposta=input() if risposta.lower() in ['y',", "testo FROM prompts ORDER BY RAND() LIMIT 1\") print(\"dopo query\")", "seed import model, sample, encoder def interact_model( model_name='1558M', seed=None, nsamples=1,", "f: hparams.override_from_dict(json.load(f)) # Eseguo un controllo per verificare che il", "print(\"dopo query\") for (testo) in cursor: print(\"{}\".format(testo)) # Formattazione del", "print('The file is empty! Write something yourself.') raw_text = input(\"Model", "il testo generato client.create_text(blogName, state=\"published\", slug=\"testing-text-posts\",title=raw_text, body=text) print('Continue? y/n') risposta=input()", "tf.train.Saver() ckpt = tf.train.latest_checkpoint(os.path.join(models_dir, model_name)) saver.restore(sess, ckpt) while continua: raw_text", "= hparams.n_ctx // 2 elif length > hparams.n_ctx: raise ValueError(\"Can't", "controllo per verificare che il prompt non sia eccessivamente lungo", "empty! 
Write something yourself.') raw_text = input(\"Model prompt >>> \")", "= encoder.get_encoder(model_name, models_dir) hparams = model.default_hparams() with open(os.path.join(models_dir, model_name, 'hparams.json'))" ]
[ "version_info[0] <= 2 and version_info[1] <= 4: def all(iterable): for", "and version_info[1] <= 4: def all(iterable): for element in iterable:", "if version_info[0] <= 2 and version_info[1] <= 4: def all(iterable):", "def all(iterable): for element in iterable: if not element: return", "in iterable: if not element: return False return True else:", "2 and version_info[1] <= 4: def all(iterable): for element in", "import version_info if version_info[0] <= 2 and version_info[1] <= 4:", "<= 4: def all(iterable): for element in iterable: if not", "version_info[1] <= 4: def all(iterable): for element in iterable: if", "not element: return False return True else: all = all", "all(iterable): for element in iterable: if not element: return False", "<= 2 and version_info[1] <= 4: def all(iterable): for element", "if not element: return False return True else: all =", "element in iterable: if not element: return False return True", "from sys import version_info if version_info[0] <= 2 and version_info[1]", "version_info if version_info[0] <= 2 and version_info[1] <= 4: def", "<filename>desktop/core/ext-py/pyasn1-0.1.8/pyasn1/compat/iterfunc.py from sys import version_info if version_info[0] <= 2 and", "for element in iterable: if not element: return False return", "iterable: if not element: return False return True else: all", "sys import version_info if version_info[0] <= 2 and version_info[1] <=", "4: def all(iterable): for element in iterable: if not element:" ]
[ "Meta: model = CarouselItemLink fields = '__all__' class CarouselItemLinkLocalizationSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass):", "carousel_id = self.context['request'].parser_context['kwargs']['carousel_id'] item_id = self.context['request'].parser_context['kwargs']['carousel_item_id'] link_id = self.context['request'].parser_context['kwargs']['carousel_item_link_id'] return", "CarouselItemLink.objects.filter(pk=link_id, carousel_item__pk=item_id, carousel_item__carousel__pk=carousel_id) return None # pragma: no cover class", "class CarouselItemSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): carousel = CarouselForeignKey() def to_representation(self, instance): data", "cms.api.serializers import UniCMSContentTypeClass, UniCMSCreateUpdateSerializer from cms.medias.serializers import MediaSerializer from .", "CarouselItemForeignKey() class Meta: model = CarouselItemLocalization fields = '__all__' read_only_fields", "carousel_id = self.context['request'].parser_context['kwargs']['carousel_id'] item_id = self.context['request'].parser_context['kwargs']['carousel_item_id'] return CarouselItem.objects.filter(pk=item_id, carousel__pk=carousel_id) return", "'modified_by') class CarouselSelectOptionsSerializer(serializers.ModelSerializer): def to_representation(self, instance): data = super().to_representation(instance) data['value']", "class CarouselItemLinkSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): carousel_item = CarouselItemForeignKey() class Meta: model =", "= '__all__' read_only_fields = ('created_by', 'modified_by') class CarouselSelectOptionsSerializer(serializers.ModelSerializer): def to_representation(self,", "CarouselItemLinkForeignKey() class Meta: model = CarouselItemLinkLocalization fields = '__all__' read_only_fields", "request = self.context.get('request', None) if request: carousel_id = self.context['request'].parser_context['kwargs']['carousel_id'] return", "= '__all__' read_only_fields = 
('created_by', 'modified_by') class CarouselItemLocalizationSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): carousel_item", "item_id = self.context['request'].parser_context['kwargs']['carousel_item_id'] return CarouselItem.objects.filter(pk=item_id, carousel__pk=carousel_id) return None # pragma:", "self.context['request'].parser_context['kwargs']['carousel_item_id'] link_id = self.context['request'].parser_context['kwargs']['carousel_item_link_id'] return CarouselItemLink.objects.filter(pk=link_id, carousel_item__pk=item_id, carousel_item__carousel__pk=carousel_id) return None", "model = CarouselItem fields = '__all__' read_only_fields = ('created_by', 'modified_by')", "if request: carousel_id = self.context['request'].parser_context['kwargs']['carousel_id'] item_id = self.context['request'].parser_context['kwargs']['carousel_item_id'] link_id =", "CarouselSelectOptionsSerializer(serializers.ModelSerializer): def to_representation(self, instance): data = super().to_representation(instance) data['value'] = instance.pk", "from cms.api.serializers import UniCMSContentTypeClass, UniCMSCreateUpdateSerializer from cms.medias.serializers import MediaSerializer from", "return Carousel.objects.filter(pk=carousel_id) return None # pragma: no cover class CarouselItemForeignKey(serializers.PrimaryKeyRelatedField):", "= CarouselItemForeignKey() class Meta: model = CarouselItemLink fields = '__all__'", "fields = '__all__' read_only_fields = ('created_by', 'modified_by') class CarouselItemLocalizationSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass):", "fields = '__all__' read_only_fields = ('created_by', 'modified_by') class CarouselItemSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass):", "instance.name return data class Meta: model = Carousel fields =", "import serializers from cms.api.serializers import UniCMSContentTypeClass, UniCMSCreateUpdateSerializer from cms.medias.serializers import", "= ('created_by', 'modified_by') 
class CarouselItemSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): carousel = CarouselForeignKey() def", "CarouselItemLink fields = '__all__' class CarouselItemLinkLocalizationSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): carousel_item_link = CarouselItemLinkForeignKey()", "return None # pragma: no cover class CarouselItemForeignKey(serializers.PrimaryKeyRelatedField): def get_queryset(self):", "class CarouselItemLinkForeignKey(serializers.PrimaryKeyRelatedField): def get_queryset(self): request = self.context.get('request', None) if request:", "Meta: model = CarouselItemLocalization fields = '__all__' read_only_fields = ('created_by',", "data['image'] = image.data return data class Meta: model = CarouselItem", "return None # pragma: no cover class CarouselItemLinkForeignKey(serializers.PrimaryKeyRelatedField): def get_queryset(self):", "cover class CarouselItemLinkForeignKey(serializers.PrimaryKeyRelatedField): def get_queryset(self): request = self.context.get('request', None) if", "CarouselItemForeignKey(serializers.PrimaryKeyRelatedField): def get_queryset(self): request = self.context.get('request', None) if request: carousel_id", "CarouselForeignKey() def to_representation(self, instance): data = super().to_representation(instance) image = MediaSerializer(instance.image)", "class CarouselForeignKey(serializers.PrimaryKeyRelatedField): def get_queryset(self): request = self.context.get('request', None) if request:", "CarouselItemLinkForeignKey(serializers.PrimaryKeyRelatedField): def get_queryset(self): request = self.context.get('request', None) if request: carousel_id", "def to_representation(self, instance): data = super().to_representation(instance) data['value'] = instance.pk data['text']", "if request: carousel_id = self.context['request'].parser_context['kwargs']['carousel_id'] return Carousel.objects.filter(pk=carousel_id) return None #", "data class Meta: model = CarouselItem fields = '__all__' read_only_fields", 
"no cover class CarouselItemLinkForeignKey(serializers.PrimaryKeyRelatedField): def get_queryset(self): request = self.context.get('request', None)", "= super().to_representation(instance) data['value'] = instance.pk data['text'] = instance.name return data", "CarouselItemLink, CarouselItemLinkLocalization, CarouselItemLocalization class CarouselForeignKey(serializers.PrimaryKeyRelatedField): def get_queryset(self): request = self.context.get('request',", "serializers from cms.api.serializers import UniCMSContentTypeClass, UniCMSCreateUpdateSerializer from cms.medias.serializers import MediaSerializer", "no cover class CarouselSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): class Meta: model = Carousel", "class CarouselSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): class Meta: model = Carousel fields =", "= image.data return data class Meta: model = CarouselItem fields", "'__all__' read_only_fields = ('created_by', 'modified_by') class CarouselSelectOptionsSerializer(serializers.ModelSerializer): def to_representation(self, instance):", "= CarouselItem fields = '__all__' read_only_fields = ('created_by', 'modified_by') class", "= CarouselItemLink fields = '__all__' class CarouselItemLinkLocalizationSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): carousel_item_link =", "read_only_fields = ('created_by', 'modified_by') class CarouselItemSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): carousel = CarouselForeignKey()", "return None # pragma: no cover class CarouselSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): class", "None # pragma: no cover class CarouselItemForeignKey(serializers.PrimaryKeyRelatedField): def get_queryset(self): request", "carousel_item__pk=item_id, carousel_item__carousel__pk=carousel_id) return None # pragma: no cover class CarouselSerializer(UniCMSCreateUpdateSerializer,", "UniCMSCreateUpdateSerializer from cms.medias.serializers import MediaSerializer from 
. models import Carousel,", "CarouselItemLinkLocalization fields = '__all__' read_only_fields = ('created_by', 'modified_by') class CarouselSelectOptionsSerializer(serializers.ModelSerializer):", "request = self.context.get('request', None) if request: carousel_id = self.context['request'].parser_context['kwargs']['carousel_id'] item_id", "data = super().to_representation(instance) data['value'] = instance.pk data['text'] = instance.name return", "MediaSerializer from . models import Carousel, CarouselItem, CarouselItemLink, CarouselItemLinkLocalization, CarouselItemLocalization", "if request: carousel_id = self.context['request'].parser_context['kwargs']['carousel_id'] item_id = self.context['request'].parser_context['kwargs']['carousel_item_id'] return CarouselItem.objects.filter(pk=item_id,", "= super().to_representation(instance) image = MediaSerializer(instance.image) data['image'] = image.data return data", "class CarouselItemLocalizationSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): carousel_item = CarouselItemForeignKey() class Meta: model =", "'modified_by') class CarouselItemSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): carousel = CarouselForeignKey() def to_representation(self, instance):", "read_only_fields = ('created_by', 'modified_by') class CarouselItemLinkSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): carousel_item = CarouselItemForeignKey()", "CarouselItemLocalizationSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): carousel_item = CarouselItemForeignKey() class Meta: model = CarouselItemLocalization", "class CarouselSelectOptionsSerializer(serializers.ModelSerializer): def to_representation(self, instance): data = super().to_representation(instance) data['value'] =", "= self.context['request'].parser_context['kwargs']['carousel_item_link_id'] return CarouselItemLink.objects.filter(pk=link_id, carousel_item__pk=item_id, carousel_item__carousel__pk=carousel_id) return None # 
pragma:", "class CarouselItemLinkLocalizationSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): carousel_item_link = CarouselItemLinkForeignKey() class Meta: model =", "'__all__' class CarouselItemLinkLocalizationSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): carousel_item_link = CarouselItemLinkForeignKey() class Meta: model", "'modified_by') class CarouselItemLocalizationSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): carousel_item = CarouselItemForeignKey() class Meta: model", "instance): data = super().to_representation(instance) image = MediaSerializer(instance.image) data['image'] = image.data", "from rest_framework import serializers from cms.api.serializers import UniCMSContentTypeClass, UniCMSCreateUpdateSerializer from", "= self.context.get('request', None) if request: carousel_id = self.context['request'].parser_context['kwargs']['carousel_id'] return Carousel.objects.filter(pk=carousel_id)", "= Carousel fields = '__all__' read_only_fields = ('created_by', 'modified_by') class", "carousel_item = CarouselItemForeignKey() class Meta: model = CarouselItemLink fields =", "CarouselForeignKey(serializers.PrimaryKeyRelatedField): def get_queryset(self): request = self.context.get('request', None) if request: carousel_id", "data['text'] = instance.name return data class Meta: model = Carousel", "self.context.get('request', None) if request: carousel_id = self.context['request'].parser_context['kwargs']['carousel_id'] item_id = self.context['request'].parser_context['kwargs']['carousel_item_id']", "carousel_item = CarouselItemForeignKey() class Meta: model = CarouselItemLocalization fields =", "CarouselItemLinkLocalizationSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): carousel_item_link = CarouselItemLinkForeignKey() class Meta: model = CarouselItemLinkLocalization", "CarouselItemSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): carousel = CarouselForeignKey() def 
to_representation(self, instance): data =", "item_id = self.context['request'].parser_context['kwargs']['carousel_item_id'] link_id = self.context['request'].parser_context['kwargs']['carousel_item_link_id'] return CarouselItemLink.objects.filter(pk=link_id, carousel_item__pk=item_id, carousel_item__carousel__pk=carousel_id)", "= CarouselItemLocalization fields = '__all__' read_only_fields = ('created_by', 'modified_by') class", "None) if request: carousel_id = self.context['request'].parser_context['kwargs']['carousel_id'] return Carousel.objects.filter(pk=carousel_id) return None", "self.context['request'].parser_context['kwargs']['carousel_item_id'] return CarouselItem.objects.filter(pk=item_id, carousel__pk=carousel_id) return None # pragma: no cover", "Carousel fields = '__all__' read_only_fields = ('created_by', 'modified_by') class CarouselItemSerializer(UniCMSCreateUpdateSerializer,", "= '__all__' read_only_fields = ('created_by', 'modified_by') class CarouselItemSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): carousel", "data = super().to_representation(instance) image = MediaSerializer(instance.image) data['image'] = image.data return", "None) if request: carousel_id = self.context['request'].parser_context['kwargs']['carousel_id'] item_id = self.context['request'].parser_context['kwargs']['carousel_item_id'] return", "rest_framework import serializers from cms.api.serializers import UniCMSContentTypeClass, UniCMSCreateUpdateSerializer from cms.medias.serializers", "cover class CarouselItemForeignKey(serializers.PrimaryKeyRelatedField): def get_queryset(self): request = self.context.get('request', None) if", "'modified_by') class CarouselItemLinkSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): carousel_item = CarouselItemForeignKey() class Meta: model", "= '__all__' class CarouselItemLinkLocalizationSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): carousel_item_link = CarouselItemLinkForeignKey() class 
Meta:", "self.context['request'].parser_context['kwargs']['carousel_id'] return Carousel.objects.filter(pk=carousel_id) return None # pragma: no cover class", "Meta: model = Carousel fields = '__all__' read_only_fields = ('created_by',", "= self.context['request'].parser_context['kwargs']['carousel_id'] item_id = self.context['request'].parser_context['kwargs']['carousel_item_id'] return CarouselItem.objects.filter(pk=item_id, carousel__pk=carousel_id) return None", "instance.pk data['text'] = instance.name return data class Meta: model =", "super().to_representation(instance) data['value'] = instance.pk data['text'] = instance.name return data class", "get_queryset(self): request = self.context.get('request', None) if request: carousel_id = self.context['request'].parser_context['kwargs']['carousel_id']", "pragma: no cover class CarouselSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): class Meta: model =", "import Carousel, CarouselItem, CarouselItemLink, CarouselItemLinkLocalization, CarouselItemLocalization class CarouselForeignKey(serializers.PrimaryKeyRelatedField): def get_queryset(self):", "None # pragma: no cover class CarouselSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): class Meta:", "= CarouselItemLinkLocalization fields = '__all__' read_only_fields = ('created_by', 'modified_by') class", "class Meta: model = CarouselItem fields = '__all__' read_only_fields =", "('created_by', 'modified_by') class CarouselItemLocalizationSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): carousel_item = CarouselItemForeignKey() class Meta:", "cms.medias.serializers import MediaSerializer from . 
models import Carousel, CarouselItem, CarouselItemLink,", "cover class CarouselSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): class Meta: model = Carousel fields", "class Meta: model = CarouselItemLocalization fields = '__all__' read_only_fields =", "UniCMSContentTypeClass): class Meta: model = Carousel fields = '__all__' read_only_fields", "instance): data = super().to_representation(instance) data['value'] = instance.pk data['text'] = instance.name", "model = Carousel fields = '__all__' read_only_fields = ('created_by', 'modified_by')", "= self.context['request'].parser_context['kwargs']['carousel_id'] return Carousel.objects.filter(pk=carousel_id) return None # pragma: no cover", "= '__all__' read_only_fields = ('created_by', 'modified_by') class CarouselItemLinkSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): carousel_item", "fields = '__all__' class CarouselItemLinkLocalizationSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): carousel_item_link = CarouselItemLinkForeignKey() class", "models import Carousel, CarouselItem, CarouselItemLink, CarouselItemLinkLocalization, CarouselItemLocalization class CarouselForeignKey(serializers.PrimaryKeyRelatedField): def", "self.context.get('request', None) if request: carousel_id = self.context['request'].parser_context['kwargs']['carousel_id'] return Carousel.objects.filter(pk=carousel_id) return", "= self.context['request'].parser_context['kwargs']['carousel_item_id'] return CarouselItem.objects.filter(pk=item_id, carousel__pk=carousel_id) return None # pragma: no", "return CarouselItem.objects.filter(pk=item_id, carousel__pk=carousel_id) return None # pragma: no cover class", "UniCMSContentTypeClass): carousel_item = CarouselItemForeignKey() class Meta: model = CarouselItemLocalization fields", "carousel_item_link = CarouselItemLinkForeignKey() class Meta: model = CarouselItemLinkLocalization fields =", "carousel_id = 
self.context['request'].parser_context['kwargs']['carousel_id'] return Carousel.objects.filter(pk=carousel_id) return None # pragma: no", "return CarouselItemLink.objects.filter(pk=link_id, carousel_item__pk=item_id, carousel_item__carousel__pk=carousel_id) return None # pragma: no cover", "Carousel.objects.filter(pk=carousel_id) return None # pragma: no cover class CarouselItemForeignKey(serializers.PrimaryKeyRelatedField): def", "from cms.medias.serializers import MediaSerializer from . models import Carousel, CarouselItem,", "UniCMSContentTypeClass): carousel_item = CarouselItemForeignKey() class Meta: model = CarouselItemLink fields", "class CarouselItemForeignKey(serializers.PrimaryKeyRelatedField): def get_queryset(self): request = self.context.get('request', None) if request:", "None # pragma: no cover class CarouselItemLinkForeignKey(serializers.PrimaryKeyRelatedField): def get_queryset(self): request", "CarouselItem fields = '__all__' read_only_fields = ('created_by', 'modified_by') class CarouselItemLocalizationSerializer(UniCMSCreateUpdateSerializer,", "request: carousel_id = self.context['request'].parser_context['kwargs']['carousel_id'] item_id = self.context['request'].parser_context['kwargs']['carousel_item_id'] return CarouselItem.objects.filter(pk=item_id, carousel__pk=carousel_id)", "class Meta: model = Carousel fields = '__all__' read_only_fields =", "request: carousel_id = self.context['request'].parser_context['kwargs']['carousel_id'] return Carousel.objects.filter(pk=carousel_id) return None # pragma:", "# pragma: no cover class CarouselSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): class Meta: model", "def get_queryset(self): request = self.context.get('request', None) if request: carousel_id =", "read_only_fields = ('created_by', 'modified_by') class CarouselItemLocalizationSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): carousel_item = CarouselItemForeignKey()", "request: carousel_id = 
self.context['request'].parser_context['kwargs']['carousel_id'] item_id = self.context['request'].parser_context['kwargs']['carousel_item_id'] link_id = self.context['request'].parser_context['kwargs']['carousel_item_link_id']", "fields = '__all__' read_only_fields = ('created_by', 'modified_by') class CarouselItemLinkSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass):", "model = CarouselItemLink fields = '__all__' class CarouselItemLinkLocalizationSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): carousel_item_link", "carousel__pk=carousel_id) return None # pragma: no cover class CarouselItemLinkForeignKey(serializers.PrimaryKeyRelatedField): def", "MediaSerializer(instance.image) data['image'] = image.data return data class Meta: model =", "= self.context.get('request', None) if request: carousel_id = self.context['request'].parser_context['kwargs']['carousel_id'] item_id =", "to_representation(self, instance): data = super().to_representation(instance) image = MediaSerializer(instance.image) data['image'] =", "UniCMSContentTypeClass): carousel_item_link = CarouselItemLinkForeignKey() class Meta: model = CarouselItemLinkLocalization fields", "= ('created_by', 'modified_by') class CarouselItemLinkSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): carousel_item = CarouselItemForeignKey() class", "# pragma: no cover class CarouselItemLinkForeignKey(serializers.PrimaryKeyRelatedField): def get_queryset(self): request =", "def to_representation(self, instance): data = super().to_representation(instance) image = MediaSerializer(instance.image) data['image']", "no cover class CarouselItemForeignKey(serializers.PrimaryKeyRelatedField): def get_queryset(self): request = self.context.get('request', None)", "= self.context['request'].parser_context['kwargs']['carousel_item_id'] link_id = self.context['request'].parser_context['kwargs']['carousel_item_link_id'] return CarouselItemLink.objects.filter(pk=link_id, 
carousel_item__pk=item_id, carousel_item__carousel__pk=carousel_id) return", "('created_by', 'modified_by') class CarouselItemSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): carousel = CarouselForeignKey() def to_representation(self,", "pragma: no cover class CarouselItemForeignKey(serializers.PrimaryKeyRelatedField): def get_queryset(self): request = self.context.get('request',", "class Meta: model = CarouselItemLinkLocalization fields = '__all__' read_only_fields =", "self.context['request'].parser_context['kwargs']['carousel_id'] item_id = self.context['request'].parser_context['kwargs']['carousel_item_id'] link_id = self.context['request'].parser_context['kwargs']['carousel_item_link_id'] return CarouselItemLink.objects.filter(pk=link_id, carousel_item__pk=item_id,", "data['value'] = instance.pk data['text'] = instance.name return data class Meta:", "import MediaSerializer from . models import Carousel, CarouselItem, CarouselItemLink, CarouselItemLinkLocalization,", "self.context['request'].parser_context['kwargs']['carousel_item_link_id'] return CarouselItemLink.objects.filter(pk=link_id, carousel_item__pk=item_id, carousel_item__carousel__pk=carousel_id) return None # pragma: no", ". 
models import Carousel, CarouselItem, CarouselItemLink, CarouselItemLinkLocalization, CarouselItemLocalization class CarouselForeignKey(serializers.PrimaryKeyRelatedField):", "('created_by', 'modified_by') class CarouselItemLinkSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): carousel_item = CarouselItemForeignKey() class Meta:", "Meta: model = CarouselItemLinkLocalization fields = '__all__' read_only_fields = ('created_by',", "read_only_fields = ('created_by', 'modified_by') class CarouselSelectOptionsSerializer(serializers.ModelSerializer): def to_representation(self, instance): data", "Meta: model = CarouselItem fields = '__all__' read_only_fields = ('created_by',", "model = CarouselItemLinkLocalization fields = '__all__' read_only_fields = ('created_by', 'modified_by')", "carousel = CarouselForeignKey() def to_representation(self, instance): data = super().to_representation(instance) image", "= CarouselItemLinkForeignKey() class Meta: model = CarouselItemLinkLocalization fields = '__all__'", "CarouselItemForeignKey() class Meta: model = CarouselItemLink fields = '__all__' class", "CarouselItem.objects.filter(pk=item_id, carousel__pk=carousel_id) return None # pragma: no cover class CarouselItemLinkForeignKey(serializers.PrimaryKeyRelatedField):", "model = CarouselItemLocalization fields = '__all__' read_only_fields = ('created_by', 'modified_by')", "CarouselItemLocalization fields = '__all__' read_only_fields = ('created_by', 'modified_by') class CarouselItemLinkSerializer(UniCMSCreateUpdateSerializer,", "= instance.pk data['text'] = instance.name return data class Meta: model", "# pragma: no cover class CarouselItemForeignKey(serializers.PrimaryKeyRelatedField): def get_queryset(self): request =", "'__all__' read_only_fields = ('created_by', 'modified_by') class CarouselItemLocalizationSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): carousel_item =", "image.data return data class Meta: model = CarouselItem fields =", 
"CarouselItemLocalization class CarouselForeignKey(serializers.PrimaryKeyRelatedField): def get_queryset(self): request = self.context.get('request', None) if", "class Meta: model = CarouselItemLink fields = '__all__' class CarouselItemLinkLocalizationSerializer(UniCMSCreateUpdateSerializer,", "super().to_representation(instance) image = MediaSerializer(instance.image) data['image'] = image.data return data class", "image = MediaSerializer(instance.image) data['image'] = image.data return data class Meta:", "CarouselItem, CarouselItemLink, CarouselItemLinkLocalization, CarouselItemLocalization class CarouselForeignKey(serializers.PrimaryKeyRelatedField): def get_queryset(self): request =", "to_representation(self, instance): data = super().to_representation(instance) data['value'] = instance.pk data['text'] =", "from . models import Carousel, CarouselItem, CarouselItemLink, CarouselItemLinkLocalization, CarouselItemLocalization class", "('created_by', 'modified_by') class CarouselSelectOptionsSerializer(serializers.ModelSerializer): def to_representation(self, instance): data = super().to_representation(instance)", "fields = '__all__' read_only_fields = ('created_by', 'modified_by') class CarouselSelectOptionsSerializer(serializers.ModelSerializer): def", "link_id = self.context['request'].parser_context['kwargs']['carousel_item_link_id'] return CarouselItemLink.objects.filter(pk=link_id, carousel_item__pk=item_id, carousel_item__carousel__pk=carousel_id) return None #", "CarouselSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): class Meta: model = Carousel fields = '__all__'", "= ('created_by', 'modified_by') class CarouselItemLocalizationSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): carousel_item = CarouselItemForeignKey() class", "Carousel, CarouselItem, CarouselItemLink, CarouselItemLinkLocalization, CarouselItemLocalization class CarouselForeignKey(serializers.PrimaryKeyRelatedField): def get_queryset(self): request", "= 
MediaSerializer(instance.image) data['image'] = image.data return data class Meta: model", "CarouselItemLinkSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): carousel_item = CarouselItemForeignKey() class Meta: model = CarouselItemLink", "'__all__' read_only_fields = ('created_by', 'modified_by') class CarouselItemLinkSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): carousel_item =", "return data class Meta: model = Carousel fields = ()", "self.context['request'].parser_context['kwargs']['carousel_id'] item_id = self.context['request'].parser_context['kwargs']['carousel_item_id'] return CarouselItem.objects.filter(pk=item_id, carousel__pk=carousel_id) return None #", "= CarouselItemForeignKey() class Meta: model = CarouselItemLocalization fields = '__all__'", "pragma: no cover class CarouselItemLinkForeignKey(serializers.PrimaryKeyRelatedField): def get_queryset(self): request = self.context.get('request',", "= self.context['request'].parser_context['kwargs']['carousel_id'] item_id = self.context['request'].parser_context['kwargs']['carousel_item_id'] link_id = self.context['request'].parser_context['kwargs']['carousel_item_link_id'] return CarouselItemLink.objects.filter(pk=link_id,", "None) if request: carousel_id = self.context['request'].parser_context['kwargs']['carousel_id'] item_id = self.context['request'].parser_context['kwargs']['carousel_item_id'] link_id", "carousel_item__carousel__pk=carousel_id) return None # pragma: no cover class CarouselSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass):", "return data class Meta: model = CarouselItem fields = '__all__'", "UniCMSContentTypeClass): carousel = CarouselForeignKey() def to_representation(self, instance): data = super().to_representation(instance)", "UniCMSContentTypeClass, UniCMSCreateUpdateSerializer from cms.medias.serializers import MediaSerializer from . 
models import", "= CarouselForeignKey() def to_representation(self, instance): data = super().to_representation(instance) image =", "CarouselItemLinkLocalization, CarouselItemLocalization class CarouselForeignKey(serializers.PrimaryKeyRelatedField): def get_queryset(self): request = self.context.get('request', None)", "= ('created_by', 'modified_by') class CarouselSelectOptionsSerializer(serializers.ModelSerializer): def to_representation(self, instance): data =", "'__all__' read_only_fields = ('created_by', 'modified_by') class CarouselItemSerializer(UniCMSCreateUpdateSerializer, UniCMSContentTypeClass): carousel =", "import UniCMSContentTypeClass, UniCMSCreateUpdateSerializer from cms.medias.serializers import MediaSerializer from . models", "= instance.name return data class Meta: model = Carousel fields" ]
[ "Corporation Licensed under the Apache License, Version 2.0 (the \"License\");", "log.info('\\tbuild: {}'.format(get_version())) core = Core() log.info('Reading model {}'.format(args.model)) model =", "= cv.cvtColor(cv.cvtColor(original_frame, cv.COLOR_BGR2GRAY), cv.COLOR_GRAY2RGB) else: frame = cv.cvtColor(original_frame, cv.COLOR_GRAY2RGB) img_rgb", "(c) 2018-2021 Intel Corporation Licensed under the Apache License, Version", "\"\"\" from openvino.runtime import Core, get_version import cv2 as cv", "= parser.add_argument_group('Options') in_args.add_argument('-h', '--help', action='help', default=SUPPRESS, help='Help with the script.')", "log from time import perf_counter import sys from argparse import", "'a folder of images, video file or camera id.') in_args.add_argument('--loop',", "* 2 - graph_size[1], graph_size) metrics = PerformanceMetrics() video_writer =", "RuntimeError(\"Can't open video writer\") start_time = perf_counter() original_frame = cap.read()", "open video writer\") start_time = perf_counter() original_frame = cap.read() if", "2, cv.LINE_AA) ir_image = [cv.hconcat([original_image, grayscale_image]), cv.hconcat([lab_image, colorize_image])] final_image =", "'Original', (25, 50), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2, cv.LINE_AA)", "numpy as np import logging as log from time import", "cv.COLOR_RGB2Lab) img_l_rs = cv.resize(img_lab.copy(), (w_in, h_in))[:, :, 0] inputs[input_tensor_name] =", "original_frame is not None: (h_orig, w_orig) = original_frame.shape[:2] if original_frame.shape[2]", "from openvino.runtime import Core, get_version import cv2 as cv import", "_, _, h_in, w_in = input_shape frames_processed = 0 imshow_size", "original_image = cv.resize(original_frame, imshow_size) grayscale_image = cv.resize(frame, imshow_size) colorize_image =", "res = next(iter(infer_request.infer(inputs).values())) update_res = np.squeeze(res) out = update_res.transpose((1, 2,", "update_res = np.squeeze(res) out = update_res.transpose((1, 2, 0)) 
out =", "(args.output_limit <= 0 or frames_processed <= args.output_limit): video_writer.write(final_image) presenter.drawGraphs(final_image) if", "Apache License, Version 2.0 (the \"License\"); you may not use", "type=int, help='Optional. Number of frames to store in output. '", "1, \"Expected model input shape with 1 channel\" inputs =", "2, 0)) out = cv.resize(out, (w_orig, h_orig)) img_lab_out = np.concatenate((img_lab[:,", "IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "action='help', default=SUPPRESS, help='Help with the script.') in_args.add_argument(\"-m\", \"--model\", help=\"Required. Path", "\"--input\", required=True, help='Required. An input to process. The input must", "for input in model.inputs: inputs[input.get_any_name()] = np.zeros(input.shape) assert len(model.outputs) ==", "cv.VideoWriter() if args.output and not video_writer.open(args.output, cv.VideoWriter_fourcc(*'MJPG'), cap.fps(), (imshow_size[0] *", "of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law", "ir_image = [cv.hconcat([original_image, grayscale_image]), cv.hconcat([lab_image, colorize_image])] final_image = cv.vconcat(ir_image) metrics.update(start_time,", "of frames to store in output. 
' 'If 0 is", "if key in {ord(\"q\"), ord(\"Q\"), 27}: break presenter.handleKey(key) start_time =", "cv.waitKey(1) if key in {ord(\"q\"), ord(\"Q\"), 27}: break presenter.handleKey(key) start_time", "\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "model = core.read_model(args.model, args.model.with_suffix(\".bin\")) input_tensor_name = 'data_l' input_shape = model.input(input_tensor_name).shape", "distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS", "= cv.putText(lab_image, 'LAB interpretation', (25, 50), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0,", "reading the input in a loop.') in_args.add_argument('-o', '--output', required=False, help='Optional.", "or camera id.') in_args.add_argument('--loop', default=False, action='store_true', help='Optional. Enable reading the", "the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or", "for rep in presenter.reportMeans(): log.info(rep) if __name__ == \"__main__\": args", "ANY KIND, either express or implied. 
See the License for", "http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in", "args.device)) _, _, h_in, w_in = input_shape frames_processed = 0", "may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless", "log.basicConfig(format='[ %(levelname)s ] %(message)s', level=log.DEBUG, stream=sys.stdout) def build_arg(): parser =", "time import perf_counter import sys from argparse import ArgumentParser, SUPPRESS", "model input shape with 1 channel\" inputs = {} for", "python3 \"\"\" Copyright (c) 2018-2021 Intel Corporation Licensed under the", "open_images_capture(args.input, args.loop) log.info('OpenVINO Inference Engine') log.info('\\tbuild: {}'.format(get_version())) core = Core()", "SUPPRESS from pathlib import Path sys.path.append(str(Path(__file__).resolve().parents[2] / 'common/python')) sys.path.append(str(Path(__file__).resolve().parents[2] /", "frames_processed = 0 imshow_size = (640, 480) graph_size = (imshow_size[0]", "under the License is distributed on an \"AS IS\" BASIS,", "= ArgumentParser(add_help=False) in_args = parser.add_argument_group('Options') in_args.add_argument('-h', '--help', action='help', default=SUPPRESS, help='Help", "- graph_size[1], graph_size) metrics = PerformanceMetrics() video_writer = cv.VideoWriter() if", "0, 255), 2, cv.LINE_AA) ir_image = [cv.hconcat([original_image, grayscale_image]), cv.hconcat([lab_image, colorize_image])]", "= Core() log.info('Reading model {}'.format(args.model)) model = core.read_model(args.model, args.model.with_suffix(\".bin\")) input_tensor_name", "args.loop) log.info('OpenVINO Inference Engine') log.info('\\tbuild: {}'.format(get_version())) core = Core() log.info('Reading", "video_writer = cv.VideoWriter() if args.output and not video_writer.open(args.output, cv.VideoWriter_fourcc(*'MJPG'), cap.fps(),", "4) presenter = monitors.Presenter(args.utilization_monitors, imshow_size[1] * 2 - graph_size[1], graph_size)", "help='Help with 
the script.') in_args.add_argument(\"-m\", \"--model\", help=\"Required. Path to .xml", "as np import logging as log from time import perf_counter", "help=\"Optional. List of monitors to show initially.\") return parser def", "{}'.format(get_version())) core = Core() log.info('Reading model {}'.format(args.model)) model = core.read_model(args.model,", "50), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2, cv.LINE_AA) grayscale_image =", "2, cv.LINE_AA) grayscale_image = cv.putText(grayscale_image, 'Grayscale', (25, 50), cv.FONT_HERSHEY_SIMPLEX, 1,", "metrics = PerformanceMetrics() video_writer = cv.VideoWriter() if args.output and not", "this file except in compliance with the License. You may", "store in output. ' 'If 0 is set, all frames", "h_in))[:, :, 0] inputs[input_tensor_name] = np.expand_dims(img_l_rs, axis=[0, 1]) res =", "imshow_size[1] // 4) presenter = monitors.Presenter(args.utilization_monitors, imshow_size[1] * 2 -", "file(s) to save.') in_args.add_argument('-limit', '--output_limit', required=False, default=1000, type=int, help='Optional. Number", "import perf_counter import sys from argparse import ArgumentParser, SUPPRESS from", "number of outputs is equal 1\" compiled_model = core.compile_model(model, device_name=args.device)", "perf_counter() original_frame = cap.read() metrics.log_total() for rep in presenter.reportMeans(): log.info(rep)", "help='Required. An input to process. The input must be a", "governing permissions and limitations under the License. \"\"\" from openvino.runtime", "_, h_in, w_in = input_shape frames_processed = 0 imshow_size =", "single image, ' 'a folder of images, video file or", "language governing permissions and limitations under the License. 
\"\"\" from", "core.read_model(args.model, args.model.with_suffix(\".bin\")) input_tensor_name = 'data_l' input_shape = model.input(input_tensor_name).shape assert input_shape[1]", "img_bgr_out = np.clip(cv.cvtColor(img_lab_out, cv.COLOR_Lab2BGR), 0, 1) original_image = cv.resize(original_frame, imshow_size)", "(25, 50), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2, cv.LINE_AA) lab_image", "or frames_processed <= args.output_limit): video_writer.write(final_image) presenter.drawGraphs(final_image) if not args.no_show: cv.imshow('Colorization", "= perf_counter() original_frame = cap.read() metrics.log_total() for rep in presenter.reportMeans():", "(imshow_size[0] // 2, imshow_size[1] // 4) presenter = monitors.Presenter(args.utilization_monitors, imshow_size[1]", "np.clip(cv.cvtColor(img_lab_out, cv.COLOR_Lab2BGR), 0, 1) original_image = cv.resize(original_frame, imshow_size) grayscale_image =", "show initially.\") return parser def main(args): cap = open_images_capture(args.input, args.loop)", "return parser def main(args): cap = open_images_capture(args.input, args.loop) log.info('OpenVINO Inference", "file except in compliance with the License. You may obtain", "* 255).astype(np.uint8) lab_image = cv.resize(img_lab_out, imshow_size).astype(np.uint8) original_image = cv.putText(original_image, 'Original',", "original_frame.shape[:2] if original_frame.shape[2] > 1: frame = cv.cvtColor(cv.cvtColor(original_frame, cv.COLOR_BGR2GRAY), cv.COLOR_GRAY2RGB)", "in_args.add_argument('--loop', default=False, action='store_true', help='Optional. 
Enable reading the input in a", "(0, 0, 255), 2, cv.LINE_AA) ir_image = [cv.hconcat([original_image, grayscale_image]), cv.hconcat([lab_image,", "img_lab = cv.cvtColor(img_rgb, cv.COLOR_RGB2Lab) img_l_rs = cv.resize(img_lab.copy(), (w_in, h_in))[:, :,", "{}'.format(args.model)) model = core.read_model(args.model, args.model.with_suffix(\".bin\")) input_tensor_name = 'data_l' input_shape =", "cap.fps(), (imshow_size[0] * 2, imshow_size[1] * 2)): raise RuntimeError(\"Can't open", "cv.LINE_AA) lab_image = cv.putText(lab_image, 'LAB interpretation', (25, 50), cv.FONT_HERSHEY_SIMPLEX, 1,", "RuntimeError(\"Can't read an image from the input\") while original_frame is", "cv.LINE_AA) grayscale_image = cv.putText(grayscale_image, 'Grayscale', (25, 50), cv.FONT_HERSHEY_SIMPLEX, 1, (0,", "inputs = {} for input in model.inputs: inputs[input.get_any_name()] = np.zeros(input.shape)", "start_time = perf_counter() original_frame = cap.read() metrics.log_total() for rep in", "OR CONDITIONS OF ANY KIND, either express or implied. See", "perf_counter() original_frame = cap.read() if original_frame is None: raise RuntimeError(\"Can't", "grayscale_image]), cv.hconcat([lab_image, colorize_image])] final_image = cv.vconcat(ir_image) metrics.update(start_time, final_image) frames_processed +=", "2018-2021 Intel Corporation Licensed under the Apache License, Version 2.0", "= model.input(input_tensor_name).shape assert input_shape[1] == 1, \"Expected model input shape", "break presenter.handleKey(key) start_time = perf_counter() original_frame = cap.read() metrics.log_total() for", "out), axis=2) img_bgr_out = np.clip(cv.cvtColor(img_lab_out, cv.COLOR_Lab2BGR), 0, 1) original_image =", "\"\"\" Copyright (c) 2018-2021 Intel Corporation Licensed under the Apache", "0, 1) original_image = cv.resize(original_frame, imshow_size) grayscale_image = cv.resize(frame, imshow_size)", "video file or camera id.') in_args.add_argument('--loop', default=False, action='store_true', help='Optional. 
Enable", "not args.no_show: cv.imshow('Colorization Demo', final_image) key = cv.waitKey(1) if key", "metrics.log_total() for rep in presenter.reportMeans(): log.info(rep) if __name__ == \"__main__\":", "under the Apache License, Version 2.0 (the \"License\"); you may", "argparse import ArgumentParser, SUPPRESS from pathlib import Path sys.path.append(str(Path(__file__).resolve().parents[2] /", "\"--utilization_monitors\", default=\"\", type=str, help=\"Optional. List of monitors to show initially.\")", "of images, video file or camera id.') in_args.add_argument('--loop', default=False, action='store_true',", "255), 2, cv.LINE_AA) ir_image = [cv.hconcat([original_image, grayscale_image]), cv.hconcat([lab_image, colorize_image])] final_image", "default=\"CPU\", type=str) in_args.add_argument('-i', \"--input\", required=True, help='Required. An input to process.", "cv.VideoWriter_fourcc(*'MJPG'), cap.fps(), (imshow_size[0] * 2, imshow_size[1] * 2)): raise RuntimeError(\"Can't", "openvino.runtime import Core, get_version import cv2 as cv import numpy", "monitors to show initially.\") return parser def main(args): cap =", "np.squeeze(res) out = update_res.transpose((1, 2, 0)) out = cv.resize(out, (w_orig,", "video writer\") start_time = perf_counter() original_frame = cap.read() if original_frame", "\"--device\", help=\"Optional. Specify target device for infer: CPU, GPU, HDDL", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "See the License for the specific language governing permissions and", "(imshow_size[0] * 2, imshow_size[1] * 2)): raise RuntimeError(\"Can't open video", "input_shape = model.input(input_tensor_name).shape assert input_shape[1] == 1, \"Expected model input", "the script.') in_args.add_argument(\"-m\", \"--model\", help=\"Required. 
Path to .xml file with", "cv.putText(lab_image, 'LAB interpretation', (25, 50), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255),", "interpretation', (25, 50), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2, cv.LINE_AA)", "presenter = monitors.Presenter(args.utilization_monitors, imshow_size[1] * 2 - graph_size[1], graph_size) metrics", "read an image from the input\") while original_frame is not", "not None: (h_orig, w_orig) = original_frame.shape[:2] if original_frame.shape[2] > 1:", "> 1: frame = cv.cvtColor(cv.cvtColor(original_frame, cv.COLOR_BGR2GRAY), cv.COLOR_GRAY2RGB) else: frame =", "(25, 50), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2, cv.LINE_AA) ir_image", "sys.path.append(str(Path(__file__).resolve().parents[2] / 'common/python')) sys.path.append(str(Path(__file__).resolve().parents[2] / 'common/python/openvino/model_zoo')) import monitors from images_capture", "in writing, software distributed under the License is distributed on", ".xml file with pre-trained model.\", required=True, type=Path) in_args.add_argument(\"-d\", \"--device\", help=\"Optional.", "required by applicable law or agreed to in writing, software", "= cv.resize(original_frame, imshow_size) grayscale_image = cv.resize(frame, imshow_size) colorize_image = (cv.resize(img_bgr_out,", "args.no_show: cv.imshow('Colorization Demo', final_image) key = cv.waitKey(1) if key in", "img_lab_out = np.concatenate((img_lab[:, :, 0][:, :, np.newaxis], out), axis=2) img_bgr_out", "= np.squeeze(res) out = update_res.transpose((1, 2, 0)) out = cv.resize(out,", "imshow_size) grayscale_image = cv.resize(frame, imshow_size) colorize_image = (cv.resize(img_bgr_out, imshow_size) *", "be a single image, ' 'a folder of images, video", "not video_writer.open(args.output, cv.VideoWriter_fourcc(*'MJPG'), cap.fps(), (imshow_size[0] * 2, imshow_size[1] * 2)):", "np.zeros(input.shape) assert len(model.outputs) == 1, \"Expected number of outputs is", "and not video_writer.open(args.output, cv.VideoWriter_fourcc(*'MJPG'), 
cap.fps(), (imshow_size[0] * 2, imshow_size[1] *", "from pathlib import Path sys.path.append(str(Path(__file__).resolve().parents[2] / 'common/python')) sys.path.append(str(Path(__file__).resolve().parents[2] / 'common/python/openvino/model_zoo'))", "args.output and not video_writer.open(args.output, cv.VideoWriter_fourcc(*'MJPG'), cap.fps(), (imshow_size[0] * 2, imshow_size[1]", "args.model.with_suffix(\".bin\")) input_tensor_name = 'data_l' input_shape = model.input(input_tensor_name).shape assert input_shape[1] ==", "'Colorize', (25, 50), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2, cv.LINE_AA)", "input_shape[1] == 1, \"Expected model input shape with 1 channel\"", "255).astype(np.uint8) lab_image = cv.resize(img_lab_out, imshow_size).astype(np.uint8) original_image = cv.putText(original_image, 'Original', (25,", "= np.concatenate((img_lab[:, :, 0][:, :, np.newaxis], out), axis=2) img_bgr_out =", "= {} for input in model.inputs: inputs[input.get_any_name()] = np.zeros(input.shape) assert", "import cv2 as cv import numpy as np import logging", "an image from the input\") while original_frame is not None:", "== 1, \"Expected model input shape with 1 channel\" inputs", "open_images_capture from model_api.performance_metrics import PerformanceMetrics log.basicConfig(format='[ %(levelname)s ] %(message)s', level=log.DEBUG,", "out = update_res.transpose((1, 2, 0)) out = cv.resize(out, (w_orig, h_orig))", "main(args): cap = open_images_capture(args.input, args.loop) log.info('OpenVINO Inference Engine') log.info('\\tbuild: {}'.format(get_version()))", "+= 1 if video_writer.isOpened() and (args.output_limit <= 0 or frames_processed", "1\" compiled_model = core.compile_model(model, device_name=args.device) infer_request = compiled_model.create_infer_request() log.info('The model", "= (imshow_size[0] // 2, imshow_size[1] // 4) presenter = monitors.Presenter(args.utilization_monitors,", "Path sys.path.append(str(Path(__file__).resolve().parents[2] / 'common/python')) 
sys.path.append(str(Path(__file__).resolve().parents[2] / 'common/python/openvino/model_zoo')) import monitors from", "img_l_rs = cv.resize(img_lab.copy(), (w_in, h_in))[:, :, 0] inputs[input_tensor_name] = np.expand_dims(img_l_rs,", "software distributed under the License is distributed on an \"AS", "distributed under the License is distributed on an \"AS IS\"", "def build_arg(): parser = ArgumentParser(add_help=False) in_args = parser.add_argument_group('Options') in_args.add_argument('-h', '--help',", "image from the input\") while original_frame is not None: (h_orig,", "'LAB interpretation', (25, 50), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2,", "in a loop.') in_args.add_argument('-o', '--output', required=False, help='Optional. Name of the", "CONDITIONS OF ANY KIND, either express or implied. See the", "frames to store in output. ' 'If 0 is set,", "cap.read() metrics.log_total() for rep in presenter.reportMeans(): log.info(rep) if __name__ ==", "cv.LINE_AA) colorize_image = cv.putText(colorize_image, 'Colorize', (25, 50), cv.FONT_HERSHEY_SIMPLEX, 1, (0,", "Version 2.0 (the \"License\"); you may not use this file", "target device for infer: CPU, GPU, HDDL or MYRIAD. \"", "input in a loop.') in_args.add_argument('-o', '--output', required=False, help='Optional. Name of", "help=\"Optional. 
Don't show output.\", action='store_true', default=False) in_args.add_argument(\"-u\", \"--utilization_monitors\", default=\"\", type=str,", "'common/python/openvino/model_zoo')) import monitors from images_capture import open_images_capture from model_api.performance_metrics import", "255), 2, cv.LINE_AA) colorize_image = cv.putText(colorize_image, 'Colorize', (25, 50), cv.FONT_HERSHEY_SIMPLEX,", "in model.inputs: inputs[input.get_any_name()] = np.zeros(input.shape) assert len(model.outputs) == 1, \"Expected", "not use this file except in compliance with the License.", "2.0 (the \"License\"); you may not use this file except", "copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable", "output. ' 'If 0 is set, all frames are stored.')", "build_arg(): parser = ArgumentParser(add_help=False) in_args = parser.add_argument_group('Options') in_args.add_argument('-h', '--help', action='help',", "to save.') in_args.add_argument('-limit', '--output_limit', required=False, default=1000, type=int, help='Optional. Number of", "help='Optional. Enable reading the input in a loop.') in_args.add_argument('-o', '--output',", "you may not use this file except in compliance with", "imshow_size) * 255).astype(np.uint8) lab_image = cv.resize(img_lab_out, imshow_size).astype(np.uint8) original_image = cv.putText(original_image,", "is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR", "the License. You may obtain a copy of the License", "imshow_size) colorize_image = (cv.resize(img_bgr_out, imshow_size) * 255).astype(np.uint8) lab_image = cv.resize(img_lab_out,", "Enable reading the input in a loop.') in_args.add_argument('-o', '--output', required=False,", "channel\" inputs = {} for input in model.inputs: inputs[input.get_any_name()] =", "use this file except in compliance with the License. 
You", "loaded to {}'.format(args.model, args.device)) _, _, h_in, w_in = input_shape", "video_writer.open(args.output, cv.VideoWriter_fourcc(*'MJPG'), cap.fps(), (imshow_size[0] * 2, imshow_size[1] * 2)): raise", "colorize_image])] final_image = cv.vconcat(ir_image) metrics.update(start_time, final_image) frames_processed += 1 if", "1, (0, 0, 255), 2, cv.LINE_AA) grayscale_image = cv.putText(grayscale_image, 'Grayscale',", "original_image = cv.putText(original_image, 'Original', (25, 50), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0,", "metrics.update(start_time, final_image) frames_processed += 1 if video_writer.isOpened() and (args.output_limit <=", "equal 1\" compiled_model = core.compile_model(model, device_name=args.device) infer_request = compiled_model.create_infer_request() log.info('The", "] %(message)s', level=log.DEBUG, stream=sys.stdout) def build_arg(): parser = ArgumentParser(add_help=False) in_args", "type=Path) in_args.add_argument(\"-d\", \"--device\", help=\"Optional. Specify target device for infer: CPU,", "'If 0 is set, all frames are stored.') in_args.add_argument(\"--no_show\", help=\"Optional.", "colorize_image = cv.putText(colorize_image, 'Colorize', (25, 50), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0,", "= np.zeros(input.shape) assert len(model.outputs) == 1, \"Expected number of outputs", "1: frame = cv.cvtColor(cv.cvtColor(original_frame, cv.COLOR_BGR2GRAY), cv.COLOR_GRAY2RGB) else: frame = cv.cvtColor(original_frame,", "Demo', final_image) key = cv.waitKey(1) if key in {ord(\"q\"), ord(\"Q\"),", "Inference Engine') log.info('\\tbuild: {}'.format(get_version())) core = Core() log.info('Reading model {}'.format(args.model))", "(0, 0, 255), 2, cv.LINE_AA) colorize_image = cv.putText(colorize_image, 'Colorize', (25,", "output file(s) to save.') in_args.add_argument('-limit', '--output_limit', required=False, default=1000, type=int, help='Optional.", "original_frame = cap.read() if original_frame is None: raise RuntimeError(\"Can't read", 
"{}'.format(args.model, args.device)) _, _, h_in, w_in = input_shape frames_processed =", "import numpy as np import logging as log from time", "'common/python')) sys.path.append(str(Path(__file__).resolve().parents[2] / 'common/python/openvino/model_zoo')) import monitors from images_capture import open_images_capture", "cv2 as cv import numpy as np import logging as", "' 'If 0 is set, all frames are stored.') in_args.add_argument(\"--no_show\",", "(the \"License\"); you may not use this file except in", "graph_size[1], graph_size) metrics = PerformanceMetrics() video_writer = cv.VideoWriter() if args.output", "is loaded to {}'.format(args.model, args.device)) _, _, h_in, w_in =", "0 imshow_size = (640, 480) graph_size = (imshow_size[0] // 2,", "Core, get_version import cv2 as cv import numpy as np", "'--help', action='help', default=SUPPRESS, help='Help with the script.') in_args.add_argument(\"-m\", \"--model\", help=\"Required.", "raise RuntimeError(\"Can't read an image from the input\") while original_frame", "frames_processed <= args.output_limit): video_writer.write(final_image) presenter.drawGraphs(final_image) if not args.no_show: cv.imshow('Colorization Demo',", "video_writer.write(final_image) presenter.drawGraphs(final_image) if not args.no_show: cv.imshow('Colorization Demo', final_image) key =", "* 2)): raise RuntimeError(\"Can't open video writer\") start_time = perf_counter()", "2, cv.LINE_AA) lab_image = cv.putText(lab_image, 'LAB interpretation', (25, 50), cv.FONT_HERSHEY_SIMPLEX,", "You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0", "(25, 50), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2, cv.LINE_AA) grayscale_image", "input\") while original_frame is not None: (h_orig, w_orig) = original_frame.shape[:2]", "as log from time import perf_counter import sys from argparse", "'Grayscale', (25, 50), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2, cv.LINE_AA)", "Path to .xml file with pre-trained model.\", required=True, 
type=Path) in_args.add_argument(\"-d\",", "the Apache License, Version 2.0 (the \"License\"); you may not", "or implied. See the License for the specific language governing", "of the output file(s) to save.') in_args.add_argument('-limit', '--output_limit', required=False, default=1000,", "cv.resize(out, (w_orig, h_orig)) img_lab_out = np.concatenate((img_lab[:, :, 0][:, :, np.newaxis],", "KIND, either express or implied. See the License for the", "in presenter.reportMeans(): log.info(rep) if __name__ == \"__main__\": args = build_arg().parse_args()", "np import logging as log from time import perf_counter import", "to in writing, software distributed under the License is distributed", "required=True, help='Required. An input to process. The input must be", "parser.add_argument_group('Options') in_args.add_argument('-h', '--help', action='help', default=SUPPRESS, help='Help with the script.') in_args.add_argument(\"-m\",", "cv.cvtColor(original_frame, cv.COLOR_GRAY2RGB) img_rgb = frame.astype(np.float32) / 255 img_lab = cv.cvtColor(img_rgb,", "1) original_image = cv.resize(original_frame, imshow_size) grayscale_image = cv.resize(frame, imshow_size) colorize_image", "law or agreed to in writing, software distributed under the", "process. The input must be a single image, ' 'a", ":, 0] inputs[input_tensor_name] = np.expand_dims(img_l_rs, axis=[0, 1]) res = next(iter(infer_request.infer(inputs).values()))", "cv.hconcat([lab_image, colorize_image])] final_image = cv.vconcat(ir_image) metrics.update(start_time, final_image) frames_processed += 1", "action='store_true', help='Optional. Enable reading the input in a loop.') in_args.add_argument('-o',", "cv.putText(colorize_image, 'Colorize', (25, 50), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2,", "pre-trained model.\", required=True, type=Path) in_args.add_argument(\"-d\", \"--device\", help=\"Optional. 
Specify target device", "on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF", "= cv.putText(grayscale_image, 'Grayscale', (25, 50), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255),", "sys from argparse import ArgumentParser, SUPPRESS from pathlib import Path", "1, (0, 0, 255), 2, cv.LINE_AA) colorize_image = cv.putText(colorize_image, 'Colorize',", "sys.path.append(str(Path(__file__).resolve().parents[2] / 'common/python/openvino/model_zoo')) import monitors from images_capture import open_images_capture from", "compiled_model = core.compile_model(model, device_name=args.device) infer_request = compiled_model.create_infer_request() log.info('The model {}", "h_orig)) img_lab_out = np.concatenate((img_lab[:, :, 0][:, :, np.newaxis], out), axis=2)", "stream=sys.stdout) def build_arg(): parser = ArgumentParser(add_help=False) in_args = parser.add_argument_group('Options') in_args.add_argument('-h',", "cv.COLOR_GRAY2RGB) img_rgb = frame.astype(np.float32) / 255 img_lab = cv.cvtColor(img_rgb, cv.COLOR_RGB2Lab)", "(cv.resize(img_bgr_out, imshow_size) * 255).astype(np.uint8) lab_image = cv.resize(img_lab_out, imshow_size).astype(np.uint8) original_image =", "(w_orig, h_orig)) img_lab_out = np.concatenate((img_lab[:, :, 0][:, :, np.newaxis], out),", "ord(\"Q\"), 27}: break presenter.handleKey(key) start_time = perf_counter() original_frame = cap.read()", "= update_res.transpose((1, 2, 0)) out = cv.resize(out, (w_orig, h_orig)) img_lab_out", "the License. \"\"\" from openvino.runtime import Core, get_version import cv2", "cv.cvtColor(img_rgb, cv.COLOR_RGB2Lab) img_l_rs = cv.resize(img_lab.copy(), (w_in, h_in))[:, :, 0] inputs[input_tensor_name]", "or MYRIAD. 
\" \"Default: CPU\", default=\"CPU\", type=str) in_args.add_argument('-i', \"--input\", required=True,", "import sys from argparse import ArgumentParser, SUPPRESS from pathlib import", "for the specific language governing permissions and limitations under the", "def main(args): cap = open_images_capture(args.input, args.loop) log.info('OpenVINO Inference Engine') log.info('\\tbuild:", "in_args.add_argument('-i', \"--input\", required=True, help='Required. An input to process. The input", "= original_frame.shape[:2] if original_frame.shape[2] > 1: frame = cv.cvtColor(cv.cvtColor(original_frame, cv.COLOR_BGR2GRAY),", "2, cv.LINE_AA) colorize_image = cv.putText(colorize_image, 'Colorize', (25, 50), cv.FONT_HERSHEY_SIMPLEX, 1,", "action='store_true', default=False) in_args.add_argument(\"-u\", \"--utilization_monitors\", default=\"\", type=str, help=\"Optional. List of monitors", "frame.astype(np.float32) / 255 img_lab = cv.cvtColor(img_rgb, cv.COLOR_RGB2Lab) img_l_rs = cv.resize(img_lab.copy(),", "the License for the specific language governing permissions and limitations", "may not use this file except in compliance with the", "implied. See the License for the specific language governing permissions", "#!/usr/bin/env python3 \"\"\" Copyright (c) 2018-2021 Intel Corporation Licensed under", "model {} is loaded to {}'.format(args.model, args.device)) _, _, h_in,", "2, imshow_size[1] * 2)): raise RuntimeError(\"Can't open video writer\") start_time", "frame = cv.cvtColor(cv.cvtColor(original_frame, cv.COLOR_BGR2GRAY), cv.COLOR_GRAY2RGB) else: frame = cv.cvtColor(original_frame, cv.COLOR_GRAY2RGB)", "'--output_limit', required=False, default=1000, type=int, help='Optional. 
Number of frames to store", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "is None: raise RuntimeError(\"Can't read an image from the input\")", "key in {ord(\"q\"), ord(\"Q\"), 27}: break presenter.handleKey(key) start_time = perf_counter()", "the output file(s) to save.') in_args.add_argument('-limit', '--output_limit', required=False, default=1000, type=int,", "1, (0, 0, 255), 2, cv.LINE_AA) lab_image = cv.putText(lab_image, 'LAB", "is equal 1\" compiled_model = core.compile_model(model, device_name=args.device) infer_request = compiled_model.create_infer_request()", "imshow_size[1] * 2)): raise RuntimeError(\"Can't open video writer\") start_time =", "grayscale_image = cv.putText(grayscale_image, 'Grayscale', (25, 50), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0,", "cv.COLOR_Lab2BGR), 0, 1) original_image = cv.resize(original_frame, imshow_size) grayscale_image = cv.resize(frame,", "np.newaxis], out), axis=2) img_bgr_out = np.clip(cv.cvtColor(img_lab_out, cv.COLOR_Lab2BGR), 0, 1) original_image", "import PerformanceMetrics log.basicConfig(format='[ %(levelname)s ] %(message)s', level=log.DEBUG, stream=sys.stdout) def build_arg():", "the input in a loop.') in_args.add_argument('-o', '--output', required=False, help='Optional. Name", "to show initially.\") return parser def main(args): cap = open_images_capture(args.input,", "output.\", action='store_true', default=False) in_args.add_argument(\"-u\", \"--utilization_monitors\", default=\"\", type=str, help=\"Optional. List of", "as cv import numpy as np import logging as log", "if args.output and not video_writer.open(args.output, cv.VideoWriter_fourcc(*'MJPG'), cap.fps(), (imshow_size[0] * 2,", "\" \"Default: CPU\", default=\"CPU\", type=str) in_args.add_argument('-i', \"--input\", required=True, help='Required. 
An", "input shape with 1 channel\" inputs = {} for input", "= cv.resize(img_lab.copy(), (w_in, h_in))[:, :, 0] inputs[input_tensor_name] = np.expand_dims(img_l_rs, axis=[0,", "image, ' 'a folder of images, video file or camera", "type=str, help=\"Optional. List of monitors to show initially.\") return parser", "help=\"Optional. Specify target device for infer: CPU, GPU, HDDL or", "= cv.putText(original_image, 'Original', (25, 50), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255),", "final_image = cv.vconcat(ir_image) metrics.update(start_time, final_image) frames_processed += 1 if video_writer.isOpened()", "CPU, GPU, HDDL or MYRIAD. \" \"Default: CPU\", default=\"CPU\", type=str)", "cv.resize(img_lab.copy(), (w_in, h_in))[:, :, 0] inputs[input_tensor_name] = np.expand_dims(img_l_rs, axis=[0, 1])", "writing, software distributed under the License is distributed on an", "of outputs is equal 1\" compiled_model = core.compile_model(model, device_name=args.device) infer_request", "= open_images_capture(args.input, args.loop) log.info('OpenVINO Inference Engine') log.info('\\tbuild: {}'.format(get_version())) core =", "1 if video_writer.isOpened() and (args.output_limit <= 0 or frames_processed <=", "get_version import cv2 as cv import numpy as np import", "0)) out = cv.resize(out, (w_orig, h_orig)) img_lab_out = np.concatenate((img_lab[:, :,", "if original_frame is None: raise RuntimeError(\"Can't read an image from", "help='Optional. Number of frames to store in output. ' 'If", "imshow_size = (640, 480) graph_size = (imshow_size[0] // 2, imshow_size[1]", "GPU, HDDL or MYRIAD. \" \"Default: CPU\", default=\"CPU\", type=str) in_args.add_argument('-i',", "in compliance with the License. 
You may obtain a copy", "= (cv.resize(img_bgr_out, imshow_size) * 255).astype(np.uint8) lab_image = cv.resize(img_lab_out, imshow_size).astype(np.uint8) original_image", "255), 2, cv.LINE_AA) lab_image = cv.putText(lab_image, 'LAB interpretation', (25, 50),", "1 channel\" inputs = {} for input in model.inputs: inputs[input.get_any_name()]", "HDDL or MYRIAD. \" \"Default: CPU\", default=\"CPU\", type=str) in_args.add_argument('-i', \"--input\",", "in_args.add_argument(\"-m\", \"--model\", help=\"Required. Path to .xml file with pre-trained model.\",", "CPU\", default=\"CPU\", type=str) in_args.add_argument('-i', \"--input\", required=True, help='Required. An input to", "255 img_lab = cv.cvtColor(img_rgb, cv.COLOR_RGB2Lab) img_l_rs = cv.resize(img_lab.copy(), (w_in, h_in))[:,", "agreed to in writing, software distributed under the License is", "2 - graph_size[1], graph_size) metrics = PerformanceMetrics() video_writer = cv.VideoWriter()", "= next(iter(infer_request.infer(inputs).values())) update_res = np.squeeze(res) out = update_res.transpose((1, 2, 0))", "file with pre-trained model.\", required=True, type=Path) in_args.add_argument(\"-d\", \"--device\", help=\"Optional. 
Specify", "inputs[input.get_any_name()] = np.zeros(input.shape) assert len(model.outputs) == 1, \"Expected number of", "[cv.hconcat([original_image, grayscale_image]), cv.hconcat([lab_image, colorize_image])] final_image = cv.vconcat(ir_image) metrics.update(start_time, final_image) frames_processed", "at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to", "// 4) presenter = monitors.Presenter(args.utilization_monitors, imshow_size[1] * 2 - graph_size[1],", "update_res.transpose((1, 2, 0)) out = cv.resize(out, (w_orig, h_orig)) img_lab_out =", "logging as log from time import perf_counter import sys from", "input must be a single image, ' 'a folder of", "The input must be a single image, ' 'a folder", "= cv.resize(out, (w_orig, h_orig)) img_lab_out = np.concatenate((img_lab[:, :, 0][:, :,", "cap = open_images_capture(args.input, args.loop) log.info('OpenVINO Inference Engine') log.info('\\tbuild: {}'.format(get_version())) core", "from time import perf_counter import sys from argparse import ArgumentParser,", "import Path sys.path.append(str(Path(__file__).resolve().parents[2] / 'common/python')) sys.path.append(str(Path(__file__).resolve().parents[2] / 'common/python/openvino/model_zoo')) import monitors", "log.info('OpenVINO Inference Engine') log.info('\\tbuild: {}'.format(get_version())) core = Core() log.info('Reading model", "Copyright (c) 2018-2021 Intel Corporation Licensed under the Apache License,", "= frame.astype(np.float32) / 255 img_lab = cv.cvtColor(img_rgb, cv.COLOR_RGB2Lab) img_l_rs =", "else: frame = cv.cvtColor(original_frame, cv.COLOR_GRAY2RGB) img_rgb = frame.astype(np.float32) / 255", "Core() log.info('Reading model {}'.format(args.model)) model = core.read_model(args.model, args.model.with_suffix(\".bin\")) input_tensor_name =", "either express or implied. 
See the License for the specific", "BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "assert len(model.outputs) == 1, \"Expected number of outputs is equal", "\"License\"); you may not use this file except in compliance", "0 is set, all frames are stored.') in_args.add_argument(\"--no_show\", help=\"Optional. Don't", "License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES", "Licensed under the Apache License, Version 2.0 (the \"License\"); you", "0 or frames_processed <= args.output_limit): video_writer.write(final_image) presenter.drawGraphs(final_image) if not args.no_show:", "with pre-trained model.\", required=True, type=Path) in_args.add_argument(\"-d\", \"--device\", help=\"Optional. Specify target", "= compiled_model.create_infer_request() log.info('The model {} is loaded to {}'.format(args.model, args.device))", "(h_orig, w_orig) = original_frame.shape[:2] if original_frame.shape[2] > 1: frame =", "License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed", "log.info(rep) if __name__ == \"__main__\": args = build_arg().parse_args() sys.exit(main(args) or", "with 1 channel\" inputs = {} for input in model.inputs:", "License for the specific language governing permissions and limitations under", "rep in presenter.reportMeans(): log.info(rep) if __name__ == \"__main__\": args =", "model {}'.format(args.model)) model = core.read_model(args.model, args.model.with_suffix(\".bin\")) input_tensor_name = 'data_l' input_shape", "Engine') log.info('\\tbuild: {}'.format(get_version())) core = Core() log.info('Reading model {}'.format(args.model)) model", "/ 'common/python/openvino/model_zoo')) import monitors from images_capture import open_images_capture from model_api.performance_metrics", "infer: CPU, GPU, HDDL or MYRIAD. 
\" \"Default: CPU\", default=\"CPU\",", "%(levelname)s ] %(message)s', level=log.DEBUG, stream=sys.stdout) def build_arg(): parser = ArgumentParser(add_help=False)", "help='Optional. Name of the output file(s) to save.') in_args.add_argument('-limit', '--output_limit',", "default=\"\", type=str, help=\"Optional. List of monitors to show initially.\") return", "480) graph_size = (imshow_size[0] // 2, imshow_size[1] // 4) presenter", "* 2, imshow_size[1] * 2)): raise RuntimeError(\"Can't open video writer\")", "cv.putText(grayscale_image, 'Grayscale', (25, 50), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2,", "\"Default: CPU\", default=\"CPU\", type=str) in_args.add_argument('-i', \"--input\", required=True, help='Required. An input", "Specify target device for infer: CPU, GPU, HDDL or MYRIAD.", "axis=2) img_bgr_out = np.clip(cv.cvtColor(img_lab_out, cv.COLOR_Lab2BGR), 0, 1) original_image = cv.resize(original_frame,", "\"Expected model input shape with 1 channel\" inputs = {}", "lab_image = cv.putText(lab_image, 'LAB interpretation', (25, 50), cv.FONT_HERSHEY_SIMPLEX, 1, (0,", "required=True, type=Path) in_args.add_argument(\"-d\", \"--device\", help=\"Optional. Specify target device for infer:", "permissions and limitations under the License. \"\"\" from openvino.runtime import", "perf_counter import sys from argparse import ArgumentParser, SUPPRESS from pathlib", "imshow_size[1] * 2 - graph_size[1], graph_size) metrics = PerformanceMetrics() video_writer", "np.concatenate((img_lab[:, :, 0][:, :, np.newaxis], out), axis=2) img_bgr_out = np.clip(cv.cvtColor(img_lab_out,", "writer\") start_time = perf_counter() original_frame = cap.read() if original_frame is", "default=False) in_args.add_argument(\"-u\", \"--utilization_monitors\", default=\"\", type=str, help=\"Optional. 
List of monitors to", "to {}'.format(args.model, args.device)) _, _, h_in, w_in = input_shape frames_processed", "(0, 0, 255), 2, cv.LINE_AA) grayscale_image = cv.putText(grayscale_image, 'Grayscale', (25,", "in {ord(\"q\"), ord(\"Q\"), 27}: break presenter.handleKey(key) start_time = perf_counter() original_frame", ":, 0][:, :, np.newaxis], out), axis=2) img_bgr_out = np.clip(cv.cvtColor(img_lab_out, cv.COLOR_Lab2BGR),", "ArgumentParser, SUPPRESS from pathlib import Path sys.path.append(str(Path(__file__).resolve().parents[2] / 'common/python')) sys.path.append(str(Path(__file__).resolve().parents[2]", "monitors.Presenter(args.utilization_monitors, imshow_size[1] * 2 - graph_size[1], graph_size) metrics = PerformanceMetrics()", "cv.COLOR_GRAY2RGB) else: frame = cv.cvtColor(original_frame, cv.COLOR_GRAY2RGB) img_rgb = frame.astype(np.float32) /", "input_tensor_name = 'data_l' input_shape = model.input(input_tensor_name).shape assert input_shape[1] == 1,", "except in compliance with the License. You may obtain a", "Don't show output.\", action='store_true', default=False) in_args.add_argument(\"-u\", \"--utilization_monitors\", default=\"\", type=str, help=\"Optional.", "final_image) frames_processed += 1 if video_writer.isOpened() and (args.output_limit <= 0", "cv.resize(frame, imshow_size) colorize_image = (cv.resize(img_bgr_out, imshow_size) * 255).astype(np.uint8) lab_image =", "= 'data_l' input_shape = model.input(input_tensor_name).shape assert input_shape[1] == 1, \"Expected", "core.compile_model(model, device_name=args.device) infer_request = compiled_model.create_infer_request() log.info('The model {} is loaded", "args.output_limit): video_writer.write(final_image) presenter.drawGraphs(final_image) if not args.no_show: cv.imshow('Colorization Demo', final_image) key", "level=log.DEBUG, stream=sys.stdout) def build_arg(): parser = ArgumentParser(add_help=False) in_args = parser.add_argument_group('Options')", "compliance with the License. 
You may obtain a copy of", "None: raise RuntimeError(\"Can't read an image from the input\") while", "images, video file or camera id.') in_args.add_argument('--loop', default=False, action='store_true', help='Optional.", "a loop.') in_args.add_argument('-o', '--output', required=False, help='Optional. Name of the output", "== 1, \"Expected number of outputs is equal 1\" compiled_model", "<= 0 or frames_processed <= args.output_limit): video_writer.write(final_image) presenter.drawGraphs(final_image) if not", "An input to process. The input must be a single", "model.\", required=True, type=Path) in_args.add_argument(\"-d\", \"--device\", help=\"Optional. Specify target device for", "\"Expected number of outputs is equal 1\" compiled_model = core.compile_model(model,", "model.inputs: inputs[input.get_any_name()] = np.zeros(input.shape) assert len(model.outputs) == 1, \"Expected number", "the input\") while original_frame is not None: (h_orig, w_orig) =", "loop.') in_args.add_argument('-o', '--output', required=False, help='Optional. Name of the output file(s)", "0] inputs[input_tensor_name] = np.expand_dims(img_l_rs, axis=[0, 1]) res = next(iter(infer_request.infer(inputs).values())) update_res", "Number of frames to store in output. ' 'If 0", "compiled_model.create_infer_request() log.info('The model {} is loaded to {}'.format(args.model, args.device)) _,", "List of monitors to show initially.\") return parser def main(args):", "cv.resize(original_frame, imshow_size) grayscale_image = cv.resize(frame, imshow_size) colorize_image = (cv.resize(img_bgr_out, imshow_size)", "from model_api.performance_metrics import PerformanceMetrics log.basicConfig(format='[ %(levelname)s ] %(message)s', level=log.DEBUG, stream=sys.stdout)", "\"--model\", help=\"Required. 
Path to .xml file with pre-trained model.\", required=True,", "cv.LINE_AA) ir_image = [cv.hconcat([original_image, grayscale_image]), cv.hconcat([lab_image, colorize_image])] final_image = cv.vconcat(ir_image)", "stored.') in_args.add_argument(\"--no_show\", help=\"Optional. Don't show output.\", action='store_true', default=False) in_args.add_argument(\"-u\", \"--utilization_monitors\",", "log.info('The model {} is loaded to {}'.format(args.model, args.device)) _, _,", "= core.compile_model(model, device_name=args.device) infer_request = compiled_model.create_infer_request() log.info('The model {} is", "License. \"\"\" from openvino.runtime import Core, get_version import cv2 as", "w_in = input_shape frames_processed = 0 imshow_size = (640, 480)", "is not None: (h_orig, w_orig) = original_frame.shape[:2] if original_frame.shape[2] >", "outputs is equal 1\" compiled_model = core.compile_model(model, device_name=args.device) infer_request =", "raise RuntimeError(\"Can't open video writer\") start_time = perf_counter() original_frame =", "next(iter(infer_request.infer(inputs).values())) update_res = np.squeeze(res) out = update_res.transpose((1, 2, 0)) out", "frame = cv.cvtColor(original_frame, cv.COLOR_GRAY2RGB) img_rgb = frame.astype(np.float32) / 255 img_lab", "img_rgb = frame.astype(np.float32) / 255 img_lab = cv.cvtColor(img_rgb, cv.COLOR_RGB2Lab) img_l_rs", "in_args.add_argument('-o', '--output', required=False, help='Optional. Name of the output file(s) to", "cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2, cv.LINE_AA) grayscale_image = cv.putText(grayscale_image,", "limitations under the License. \"\"\" from openvino.runtime import Core, get_version", "is set, all frames are stored.') in_args.add_argument(\"--no_show\", help=\"Optional. 
Don't show", "log.info('Reading model {}'.format(args.model)) model = core.read_model(args.model, args.model.with_suffix(\".bin\")) input_tensor_name = 'data_l'", "initially.\") return parser def main(args): cap = open_images_capture(args.input, args.loop) log.info('OpenVINO", "(25, 50), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2, cv.LINE_AA) colorize_image", "presenter.reportMeans(): log.info(rep) if __name__ == \"__main__\": args = build_arg().parse_args() sys.exit(main(args)", "= core.read_model(args.model, args.model.with_suffix(\".bin\")) input_tensor_name = 'data_l' input_shape = model.input(input_tensor_name).shape assert", "assert input_shape[1] == 1, \"Expected model input shape with 1", "of monitors to show initially.\") return parser def main(args): cap", "= perf_counter() original_frame = cap.read() if original_frame is None: raise", "in_args = parser.add_argument_group('Options') in_args.add_argument('-h', '--help', action='help', default=SUPPRESS, help='Help with the", "2, imshow_size[1] // 4) presenter = monitors.Presenter(args.utilization_monitors, imshow_size[1] * 2", "50), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2, cv.LINE_AA) lab_image =", "lab_image = cv.resize(img_lab_out, imshow_size).astype(np.uint8) original_image = cv.putText(original_image, 'Original', (25, 50),", "infer_request = compiled_model.create_infer_request() log.info('The model {} is loaded to {}'.format(args.model,", "Unless required by applicable law or agreed to in writing,", "by applicable law or agreed to in writing, software distributed", "= np.expand_dims(img_l_rs, axis=[0, 1]) res = next(iter(infer_request.infer(inputs).values())) update_res = np.squeeze(res)", "in_args.add_argument('-limit', '--output_limit', required=False, default=1000, type=int, help='Optional. 
Number of frames to", "cap.read() if original_frame is None: raise RuntimeError(\"Can't read an image", "/ 255 img_lab = cv.cvtColor(img_rgb, cv.COLOR_RGB2Lab) img_l_rs = cv.resize(img_lab.copy(), (w_in,", "file or camera id.') in_args.add_argument('--loop', default=False, action='store_true', help='Optional. Enable reading", "1, (0, 0, 255), 2, cv.LINE_AA) ir_image = [cv.hconcat([original_image, grayscale_image]),", "cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2, cv.LINE_AA) ir_image = [cv.hconcat([original_image,", "cv.vconcat(ir_image) metrics.update(start_time, final_image) frames_processed += 1 if video_writer.isOpened() and (args.output_limit", "import monitors from images_capture import open_images_capture from model_api.performance_metrics import PerformanceMetrics", "if __name__ == \"__main__\": args = build_arg().parse_args() sys.exit(main(args) or 0)", "'--output', required=False, help='Optional. Name of the output file(s) to save.')", "with the script.') in_args.add_argument(\"-m\", \"--model\", help=\"Required. Path to .xml file", "= PerformanceMetrics() video_writer = cv.VideoWriter() if args.output and not video_writer.open(args.output,", "and limitations under the License. \"\"\" from openvino.runtime import Core,", "express or implied. 
See the License for the specific language", "= cv.VideoWriter() if args.output and not video_writer.open(args.output, cv.VideoWriter_fourcc(*'MJPG'), cap.fps(), (imshow_size[0]", "1]) res = next(iter(infer_request.infer(inputs).values())) update_res = np.squeeze(res) out = update_res.transpose((1,", "= np.clip(cv.cvtColor(img_lab_out, cv.COLOR_Lab2BGR), 0, 1) original_image = cv.resize(original_frame, imshow_size) grayscale_image", "import logging as log from time import perf_counter import sys", "= input_shape frames_processed = 0 imshow_size = (640, 480) graph_size", "model_api.performance_metrics import PerformanceMetrics log.basicConfig(format='[ %(levelname)s ] %(message)s', level=log.DEBUG, stream=sys.stdout) def", "{} is loaded to {}'.format(args.model, args.device)) _, _, h_in, w_in", "= monitors.Presenter(args.utilization_monitors, imshow_size[1] * 2 - graph_size[1], graph_size) metrics =", "input in model.inputs: inputs[input.get_any_name()] = np.zeros(input.shape) assert len(model.outputs) == 1,", "Name of the output file(s) to save.') in_args.add_argument('-limit', '--output_limit', required=False,", "cv.cvtColor(cv.cvtColor(original_frame, cv.COLOR_BGR2GRAY), cv.COLOR_GRAY2RGB) else: frame = cv.cvtColor(original_frame, cv.COLOR_GRAY2RGB) img_rgb =", "0][:, :, np.newaxis], out), axis=2) img_bgr_out = np.clip(cv.cvtColor(img_lab_out, cv.COLOR_Lab2BGR), 0,", "in_args.add_argument('-h', '--help', action='help', default=SUPPRESS, help='Help with the script.') in_args.add_argument(\"-m\", \"--model\",", "help=\"Required. Path to .xml file with pre-trained model.\", required=True, type=Path)", "%(message)s', level=log.DEBUG, stream=sys.stdout) def build_arg(): parser = ArgumentParser(add_help=False) in_args =", "save.') in_args.add_argument('-limit', '--output_limit', required=False, default=1000, type=int, help='Optional. 
Number of frames", "show output.\", action='store_true', default=False) in_args.add_argument(\"-u\", \"--utilization_monitors\", default=\"\", type=str, help=\"Optional. List", "'data_l' input_shape = model.input(input_tensor_name).shape assert input_shape[1] == 1, \"Expected model", "in_args.add_argument(\"-d\", \"--device\", help=\"Optional. Specify target device for infer: CPU, GPU,", "cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2, cv.LINE_AA) colorize_image = cv.putText(colorize_image,", "and (args.output_limit <= 0 or frames_processed <= args.output_limit): video_writer.write(final_image) presenter.drawGraphs(final_image)", "axis=[0, 1]) res = next(iter(infer_request.infer(inputs).values())) update_res = np.squeeze(res) out =", "cv import numpy as np import logging as log from", "out = cv.resize(out, (w_orig, h_orig)) img_lab_out = np.concatenate((img_lab[:, :, 0][:,", "= cap.read() metrics.log_total() for rep in presenter.reportMeans(): log.info(rep) if __name__", "= cv.resize(frame, imshow_size) colorize_image = (cv.resize(img_bgr_out, imshow_size) * 255).astype(np.uint8) lab_image", "obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required", "import open_images_capture from model_api.performance_metrics import PerformanceMetrics log.basicConfig(format='[ %(levelname)s ] %(message)s',", "= cv.resize(img_lab_out, imshow_size).astype(np.uint8) original_image = cv.putText(original_image, 'Original', (25, 50), cv.FONT_HERSHEY_SIMPLEX,", "cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2, cv.LINE_AA) lab_image = cv.putText(lab_image,", "= cv.waitKey(1) if key in {ord(\"q\"), ord(\"Q\"), 27}: break presenter.handleKey(key)", "core = Core() log.info('Reading model {}'.format(args.model)) model = core.read_model(args.model, args.model.with_suffix(\".bin\"))", "default=1000, type=int, help='Optional. 
Number of frames to store in output.", "= cap.read() if original_frame is None: raise RuntimeError(\"Can't read an", ":, np.newaxis], out), axis=2) img_bgr_out = np.clip(cv.cvtColor(img_lab_out, cv.COLOR_Lab2BGR), 0, 1)", "script.') in_args.add_argument(\"-m\", \"--model\", help=\"Required. Path to .xml file with pre-trained", "for infer: CPU, GPU, HDDL or MYRIAD. \" \"Default: CPU\",", "while original_frame is not None: (h_orig, w_orig) = original_frame.shape[:2] if", "255), 2, cv.LINE_AA) grayscale_image = cv.putText(grayscale_image, 'Grayscale', (25, 50), cv.FONT_HERSHEY_SIMPLEX,", "camera id.') in_args.add_argument('--loop', default=False, action='store_true', help='Optional. Enable reading the input", "if not args.no_show: cv.imshow('Colorization Demo', final_image) key = cv.waitKey(1) if", "original_frame.shape[2] > 1: frame = cv.cvtColor(cv.cvtColor(original_frame, cv.COLOR_BGR2GRAY), cv.COLOR_GRAY2RGB) else: frame", "to .xml file with pre-trained model.\", required=True, type=Path) in_args.add_argument(\"-d\", \"--device\",", "with the License. You may obtain a copy of the", "under the License. \"\"\" from openvino.runtime import Core, get_version import", "None: (h_orig, w_orig) = original_frame.shape[:2] if original_frame.shape[2] > 1: frame", "import Core, get_version import cv2 as cv import numpy as", "(640, 480) graph_size = (imshow_size[0] // 2, imshow_size[1] // 4)", "27}: break presenter.handleKey(key) start_time = perf_counter() original_frame = cap.read() metrics.log_total()", "set, all frames are stored.') in_args.add_argument(\"--no_show\", help=\"Optional. 
Don't show output.\",", "a single image, ' 'a folder of images, video file", "presenter.handleKey(key) start_time = perf_counter() original_frame = cap.read() metrics.log_total() for rep", "= cv.cvtColor(img_rgb, cv.COLOR_RGB2Lab) img_l_rs = cv.resize(img_lab.copy(), (w_in, h_in))[:, :, 0]", "len(model.outputs) == 1, \"Expected number of outputs is equal 1\"", "cv.putText(original_image, 'Original', (25, 50), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2,", "are stored.') in_args.add_argument(\"--no_show\", help=\"Optional. Don't show output.\", action='store_true', default=False) in_args.add_argument(\"-u\",", "final_image) key = cv.waitKey(1) if key in {ord(\"q\"), ord(\"Q\"), 27}:", "graph_size = (imshow_size[0] // 2, imshow_size[1] // 4) presenter =", "h_in, w_in = input_shape frames_processed = 0 imshow_size = (640,", "= cv.vconcat(ir_image) metrics.update(start_time, final_image) frames_processed += 1 if video_writer.isOpened() and", "0, 255), 2, cv.LINE_AA) lab_image = cv.putText(lab_image, 'LAB interpretation', (25,", "specific language governing permissions and limitations under the License. 
\"\"\"", "model.input(input_tensor_name).shape assert input_shape[1] == 1, \"Expected model input shape with", "original_frame = cap.read() metrics.log_total() for rep in presenter.reportMeans(): log.info(rep) if", "applicable law or agreed to in writing, software distributed under", "(w_in, h_in))[:, :, 0] inputs[input_tensor_name] = np.expand_dims(img_l_rs, axis=[0, 1]) res", "parser = ArgumentParser(add_help=False) in_args = parser.add_argument_group('Options') in_args.add_argument('-h', '--help', action='help', default=SUPPRESS,", "frames_processed += 1 if video_writer.isOpened() and (args.output_limit <= 0 or", "colorize_image = (cv.resize(img_bgr_out, imshow_size) * 255).astype(np.uint8) lab_image = cv.resize(img_lab_out, imshow_size).astype(np.uint8)", "0, 255), 2, cv.LINE_AA) colorize_image = cv.putText(colorize_image, 'Colorize', (25, 50),", "{} for input in model.inputs: inputs[input.get_any_name()] = np.zeros(input.shape) assert len(model.outputs)", "type=str) in_args.add_argument('-i', \"--input\", required=True, help='Required. An input to process. The", "in_args.add_argument(\"--no_show\", help=\"Optional. Don't show output.\", action='store_true', default=False) in_args.add_argument(\"-u\", \"--utilization_monitors\", default=\"\",", "original_frame is None: raise RuntimeError(\"Can't read an image from the", "50), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2, cv.LINE_AA) colorize_image =", "= cv.putText(colorize_image, 'Colorize', (25, 50), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255),", "the License is distributed on an \"AS IS\" BASIS, WITHOUT", "folder of images, video file or camera id.') in_args.add_argument('--loop', default=False,", "the specific language governing permissions and limitations under the License.", "all frames are stored.') in_args.add_argument(\"--no_show\", help=\"Optional. Don't show output.\", action='store_true',", "in_args.add_argument(\"-u\", \"--utilization_monitors\", default=\"\", type=str, help=\"Optional. 
List of monitors to show", "grayscale_image = cv.resize(frame, imshow_size) colorize_image = (cv.resize(img_bgr_out, imshow_size) * 255).astype(np.uint8)", "(0, 0, 255), 2, cv.LINE_AA) lab_image = cv.putText(lab_image, 'LAB interpretation',", "shape with 1 channel\" inputs = {} for input in", "PerformanceMetrics() video_writer = cv.VideoWriter() if args.output and not video_writer.open(args.output, cv.VideoWriter_fourcc(*'MJPG'),", "device for infer: CPU, GPU, HDDL or MYRIAD. \" \"Default:", "= cv.cvtColor(original_frame, cv.COLOR_GRAY2RGB) img_rgb = frame.astype(np.float32) / 255 img_lab =", "key = cv.waitKey(1) if key in {ord(\"q\"), ord(\"Q\"), 27}: break", "or agreed to in writing, software distributed under the License", "images_capture import open_images_capture from model_api.performance_metrics import PerformanceMetrics log.basicConfig(format='[ %(levelname)s ]", "import ArgumentParser, SUPPRESS from pathlib import Path sys.path.append(str(Path(__file__).resolve().parents[2] / 'common/python'))", "// 2, imshow_size[1] // 4) presenter = monitors.Presenter(args.utilization_monitors, imshow_size[1] *", "if video_writer.isOpened() and (args.output_limit <= 0 or frames_processed <= args.output_limit):", "1, \"Expected number of outputs is equal 1\" compiled_model =", "ArgumentParser(add_help=False) in_args = parser.add_argument_group('Options') in_args.add_argument('-h', '--help', action='help', default=SUPPRESS, help='Help with", "OF ANY KIND, either express or implied. See the License", "MYRIAD. \" \"Default: CPU\", default=\"CPU\", type=str) in_args.add_argument('-i', \"--input\", required=True, help='Required.", "PerformanceMetrics log.basicConfig(format='[ %(levelname)s ] %(message)s', level=log.DEBUG, stream=sys.stdout) def build_arg(): parser", "id.') in_args.add_argument('--loop', default=False, action='store_true', help='Optional. 
Enable reading the input in", "input_shape frames_processed = 0 imshow_size = (640, 480) graph_size =", "graph_size) metrics = PerformanceMetrics() video_writer = cv.VideoWriter() if args.output and", "video_writer.isOpened() and (args.output_limit <= 0 or frames_processed <= args.output_limit): video_writer.write(final_image)", "to store in output. ' 'If 0 is set, all", "default=False, action='store_true', help='Optional. Enable reading the input in a loop.')", "License, Version 2.0 (the \"License\"); you may not use this", "parser def main(args): cap = open_images_capture(args.input, args.loop) log.info('OpenVINO Inference Engine')", "monitors from images_capture import open_images_capture from model_api.performance_metrics import PerformanceMetrics log.basicConfig(format='[", "<= args.output_limit): video_writer.write(final_image) presenter.drawGraphs(final_image) if not args.no_show: cv.imshow('Colorization Demo', final_image)", "from images_capture import open_images_capture from model_api.performance_metrics import PerformanceMetrics log.basicConfig(format='[ %(levelname)s", "Intel Corporation Licensed under the Apache License, Version 2.0 (the", "imshow_size).astype(np.uint8) original_image = cv.putText(original_image, 'Original', (25, 50), cv.FONT_HERSHEY_SIMPLEX, 1, (0,", "= [cv.hconcat([original_image, grayscale_image]), cv.hconcat([lab_image, colorize_image])] final_image = cv.vconcat(ir_image) metrics.update(start_time, final_image)", "= (640, 480) graph_size = (imshow_size[0] // 2, imshow_size[1] //", "a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by", "if original_frame.shape[2] > 1: frame = cv.cvtColor(cv.cvtColor(original_frame, cv.COLOR_BGR2GRAY), cv.COLOR_GRAY2RGB) else:", "input to process. 
The input must be a single image,", "inputs[input_tensor_name] = np.expand_dims(img_l_rs, axis=[0, 1]) res = next(iter(infer_request.infer(inputs).values())) update_res =", "presenter.drawGraphs(final_image) if not args.no_show: cv.imshow('Colorization Demo', final_image) key = cv.waitKey(1)", "0, 255), 2, cv.LINE_AA) grayscale_image = cv.putText(grayscale_image, 'Grayscale', (25, 50),", "w_orig) = original_frame.shape[:2] if original_frame.shape[2] > 1: frame = cv.cvtColor(cv.cvtColor(original_frame,", "cv.COLOR_BGR2GRAY), cv.COLOR_GRAY2RGB) else: frame = cv.cvtColor(original_frame, cv.COLOR_GRAY2RGB) img_rgb = frame.astype(np.float32)", "required=False, help='Optional. Name of the output file(s) to save.') in_args.add_argument('-limit',", "{ord(\"q\"), ord(\"Q\"), 27}: break presenter.handleKey(key) start_time = perf_counter() original_frame =", "License. You may obtain a copy of the License at", "device_name=args.device) infer_request = compiled_model.create_infer_request() log.info('The model {} is loaded to", "to process. The input must be a single image, '", "must be a single image, ' 'a folder of images,", "' 'a folder of images, video file or camera id.')", "from argparse import ArgumentParser, SUPPRESS from pathlib import Path sys.path.append(str(Path(__file__).resolve().parents[2]", "/ 'common/python')) sys.path.append(str(Path(__file__).resolve().parents[2] / 'common/python/openvino/model_zoo')) import monitors from images_capture import", "cv.imshow('Colorization Demo', final_image) key = cv.waitKey(1) if key in {ord(\"q\"),", "start_time = perf_counter() original_frame = cap.read() if original_frame is None:", "from the input\") while original_frame is not None: (h_orig, w_orig)", "cv.resize(img_lab_out, imshow_size).astype(np.uint8) original_image = cv.putText(original_image, 'Original', (25, 50), cv.FONT_HERSHEY_SIMPLEX, 1,", "default=SUPPRESS, help='Help with the script.') in_args.add_argument(\"-m\", \"--model\", help=\"Required. 
Path to", "50), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2, cv.LINE_AA) ir_image =", "in output. ' 'If 0 is set, all frames are", "required=False, default=1000, type=int, help='Optional. Number of frames to store in", "an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY", "frames are stored.') in_args.add_argument(\"--no_show\", help=\"Optional. Don't show output.\", action='store_true', default=False)", "np.expand_dims(img_l_rs, axis=[0, 1]) res = next(iter(infer_request.infer(inputs).values())) update_res = np.squeeze(res) out", "2)): raise RuntimeError(\"Can't open video writer\") start_time = perf_counter() original_frame", "pathlib import Path sys.path.append(str(Path(__file__).resolve().parents[2] / 'common/python')) sys.path.append(str(Path(__file__).resolve().parents[2] / 'common/python/openvino/model_zoo')) import", "= 0 imshow_size = (640, 480) graph_size = (imshow_size[0] //" ]
[ "lambda x: x.to_dict() if hasattr(x, \"to_dict\") else x, value ))", "\"\"\"Sets the tan_scheme of this Transfer. TANScheme - The scheme", "noqa: E501 :rtype: str \"\"\" return self._purpose @purpose.setter def purpose(self,", ":return: The tan_scheme of this Transfer. # noqa: E501 :rtype:", "The amount of this Transfer. # noqa: E501 :type: Amount", "this Transfer. # noqa: E501 :type: str \"\"\" if name", "= None self._purpose = None self._tan_media_id = None self._tan_scheme =", "= tan_media_id @property def tan_scheme(self): \"\"\"Gets the tan_scheme of this", "must not be `None`\") # noqa: E501 self._tan_scheme = tan_scheme", "\"\"\"Gets the bic of this Transfer. # noqa: E501 BIC", "`amount`, must not be `None`\") # noqa: E501 self._amount =", "bic: The bic of this Transfer. # noqa: E501 :type:", "in ISO 13616-1) # noqa: E501 :return: The iban of", "E501 :type: str \"\"\" self._purpose = purpose @property def tan_media_id(self):", "name self.amount = amount if purpose is not None: self.purpose", "the name of this Transfer. # noqa: E501 Name -", "other): \"\"\"Returns true if both objects are equal\"\"\" if not", "value )) elif hasattr(value, \"to_dict\"): result[attr] = value.to_dict() elif isinstance(value,", "noqa: E501 :return: The bic of this Transfer. # noqa:", "= bic self.name = name self.amount = amount if purpose", "noqa: E501 self._name = name @property def amount(self): \"\"\"Gets the", "if purpose is not None: self.purpose = purpose self.tan_media_id =", "E501 Purpose # noqa: E501 :return: The purpose of this", "'bic': 'str', 'name': 'str', 'amount': 'Amount', 'purpose': 'str', 'tan_media_id': 'str',", "Account Number (defined in ISO 13616-1) # noqa: E501 :return:", "this Transfer. # noqa: E501 :rtype: str \"\"\" return self._name", "of this Transfer. # noqa: E501 TANScheme - The scheme", "tan_media_id(self, tan_media_id): \"\"\"Sets the tan_media_id of this Transfer. 
TANMediaId -", "self._bic = None self._name = None self._amount = None self._purpose", "amount of this Transfer. Amount to be transfered # noqa:", "be transfered # noqa: E501 :return: The amount of this", "None self._tan_scheme = None self.discriminator = None self.iban = iban", "= None self.discriminator = None self.iban = iban if bic", "tan_scheme of this Transfer. TANScheme - The scheme **id** that", "str \"\"\" self._bic = bic @property def name(self): \"\"\"Gets the", "__repr__(self): \"\"\"For `print` and `pprint`\"\"\" return self.to_str() def __eq__(self, other):", "key in definition. \"\"\" swagger_types = { 'iban': 'str', 'bic':", "str \"\"\" if iban is None: raise ValueError(\"Invalid value for", "both objects are not equal\"\"\" return not self == other", "return self._purpose @purpose.setter def purpose(self, purpose): \"\"\"Sets the purpose of", "tan_scheme of this Transfer. # noqa: E501 :rtype: str \"\"\"", "the value is attribute type. attribute_map (dict): The key is", "creditor # noqa: E501 :return: The name of this Transfer.", "`pprint`\"\"\" return self.to_str() def __eq__(self, other): \"\"\"Returns true if both", "Generated by: https://github.com/swagger-api/swagger-codegen.git \"\"\" import pprint import re # noqa:", "\"\"\"Sets the purpose of this Transfer. Purpose # noqa: E501", "code generator program. Do not edit the class manually. \"\"\"", "- The scheme **id** that is used to verify this", "iban(self): \"\"\"Gets the iban of this Transfer. # noqa: E501", "None self._bic = None self._name = None self._amount = None", "result[attr] = value return result def to_str(self): \"\"\"Returns the string", "model\"\"\" return pprint.pformat(self.to_dict()) def __repr__(self): \"\"\"For `print` and `pprint`\"\"\" return", "noqa: E501 :param bic: The bic of this Transfer. 
#", "E501 :type: str \"\"\" if name is None: raise ValueError(\"Invalid", "\\\"901\\\") # noqa: E501 :return: The tan_scheme of this Transfer.", "string representation of the model\"\"\" return pprint.pformat(self.to_dict()) def __repr__(self): \"\"\"For", "2016&dash;2017 Starfinanz - Ein Unternehmen der Finanz Informatik # noqa:", "return result def to_str(self): \"\"\"Returns the string representation of the", "self.name = name self.amount = amount if purpose is not", "IBAN - International Bank Account Number (defined in ISO 13616-1)", "Transfer. # noqa: E501 TANMediaId - The identifying ID of", "of this Transfer. TANMediaId - The identifying ID of the", "\"\"\" return self._amount @amount.setter def amount(self, amount): \"\"\"Sets the amount", "noqa: E501 Amount to be transfered # noqa: E501 :return:", "Transfer. # noqa: E501 :type: str \"\"\" if name is", "der Finanz Informatik # noqa: E501 OpenAPI spec version: 2.1.0", "\"\"\"For `print` and `pprint`\"\"\" return self.to_str() def __eq__(self, other): \"\"\"Returns", "name and the value is json key in definition. \"\"\"", "{ 'iban': 'str', 'bic': 'str', 'name': 'str', 'amount': 'Amount', 'purpose':", "`name`, must not be `None`\") # noqa: E501 self._name =", "The key is attribute name and the value is attribute", "self.to_str() def __eq__(self, other): \"\"\"Returns true if both objects are", "# noqa: E501 Name - Name of the creditor #", "# noqa: E501 :rtype: str \"\"\" return self._bic @bic.setter def", "the bic of this Transfer. BIC - Business Identifier Code", "Name of the creditor # noqa: E501 :return: The name", "of this Transfer. # noqa: E501 Name - Name of", "def amount(self): \"\"\"Gets the amount of this Transfer. # noqa:", "amount is None: raise ValueError(\"Invalid value for `amount`, must not", "this Transfer. # noqa: E501 :rtype: str \"\"\" return self._iban", "bic of this Transfer. # noqa: E501 :type: str \"\"\"", "(e.g. 
\\\"901\\\") # noqa: E501 :return: The tan_scheme of this", "'bic', 'name': 'name', 'amount': 'amount', 'purpose': 'purpose', 'tan_media_id': 'tanMediaId', 'tan_scheme':", "self._tan_media_id = tan_media_id @property def tan_scheme(self): \"\"\"Gets the tan_scheme of", "TANMediaId - The identifying ID of the TANMedia. # noqa:", "purpose): \"\"\"Sets the purpose of this Transfer. Purpose # noqa:", "# noqa: E501 self._tan_media_id = tan_media_id @property def tan_scheme(self): \"\"\"Gets", "import Amount # noqa: F401,E501 class Transfer(object): \"\"\"NOTE: This class", "self._bic @bic.setter def bic(self, bic): \"\"\"Sets the bic of this", "Transfer. # noqa: E501 :rtype: str \"\"\" return self._tan_media_id @tan_media_id.setter", "= purpose @property def tan_media_id(self): \"\"\"Gets the tan_media_id of this", "both objects are equal\"\"\" if not isinstance(other, Transfer): return False", "\"\"\" if amount is None: raise ValueError(\"Invalid value for `amount`,", "{ 'iban': 'iban', 'bic': 'bic', 'name': 'name', 'amount': 'amount', 'purpose':", "\"\"\"Gets the purpose of this Transfer. # noqa: E501 Purpose", "Transfer. # noqa: E501 :type: str \"\"\" if tan_scheme is", "'tan_media_id': 'tanMediaId', 'tan_scheme': 'tanScheme' } def __init__(self, iban=None, bic=None, name=None,", "The bic of this Transfer. # noqa: E501 :type: str", "@property def tan_scheme(self): \"\"\"Gets the tan_scheme of this Transfer. #", "# noqa: E501 :param tan_media_id: The tan_media_id of this Transfer.", "# noqa: F401 import six from swagger_client.models.amount import Amount #", ")) else: result[attr] = value return result def to_str(self): \"\"\"Returns", "= { 'iban': 'str', 'bic': 'str', 'name': 'str', 'amount': 'Amount',", "= iban @property def bic(self): \"\"\"Gets the bic of this", "return self._tan_media_id @tan_media_id.setter def tan_media_id(self, tan_media_id): \"\"\"Sets the tan_media_id of", ":return: The name of this Transfer. 
# noqa: E501 :rtype:", "tan_media_id of this Transfer. TANMediaId - The identifying ID of", "= bic @property def name(self): \"\"\"Gets the name of this", "'iban': 'str', 'bic': 'str', 'name': 'str', 'amount': 'Amount', 'purpose': 'str',", "E501 BIC - Business Identifier Code (defined in ISO-9362) #", ":param tan_media_id: The tan_media_id of this Transfer. # noqa: E501", "noqa: E501 :rtype: str \"\"\" return self._tan_scheme @tan_scheme.setter def tan_scheme(self,", "dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1],", "self._tan_scheme = None self.discriminator = None self.iban = iban if", "None: raise ValueError(\"Invalid value for `amount`, must not be `None`\")", "self._tan_media_id @tan_media_id.setter def tan_media_id(self, tan_media_id): \"\"\"Sets the tan_media_id of this", "in definition. \"\"\" swagger_types = { 'iban': 'str', 'bic': 'str',", "model defined in Swagger\"\"\" # noqa: E501 self._iban = None", "hasattr(item[1], \"to_dict\") else item, value.items() )) else: result[attr] = value", "ISO 13616-1) # noqa: E501 :param iban: The iban of", "# noqa: E501 :param amount: The amount of this Transfer.", "is used to verify this payment (e.g. \\\"901\\\") # noqa:", "self._tan_scheme @tan_scheme.setter def tan_scheme(self, tan_scheme): \"\"\"Sets the tan_scheme of this", "json key in definition. \"\"\" swagger_types = { 'iban': 'str',", "== other.__dict__ def __ne__(self, other): \"\"\"Returns true if both objects", "E501 :return: The purpose of this Transfer. # noqa: E501", "is attribute name and the value is json key in", "F401,E501 class Transfer(object): \"\"\"NOTE: This class is auto generated by", "# noqa: E501 :rtype: str \"\"\" return self._iban @iban.setter def", "value for `tan_media_id`, must not be `None`\") # noqa: E501", "verify this payment (e.g. \\\"901\\\") # noqa: E501 :return: The", "ValueError(\"Invalid value for `name`, must not be `None`\") # noqa:", "\"\"\"Sets the bic of this Transfer. 
BIC - Business Identifier", "= {} for attr, _ in six.iteritems(self.swagger_types): value = getattr(self,", "\"\"\"Returns true if both objects are not equal\"\"\" return not", "noqa: E501 :param tan_scheme: The tan_scheme of this Transfer. #", "\"\"\" self._bic = bic @property def name(self): \"\"\"Gets the name", ":return: The bic of this Transfer. # noqa: E501 :rtype:", "= None self._name = None self._amount = None self._purpose =", "'tan_scheme': 'tanScheme' } def __init__(self, iban=None, bic=None, name=None, amount=None, purpose=None,", "return self._iban @iban.setter def iban(self, iban): \"\"\"Sets the iban of", "representation of the model\"\"\" return pprint.pformat(self.to_dict()) def __repr__(self): \"\"\"For `print`", "type. attribute_map (dict): The key is attribute name and the", "= name self.amount = amount if purpose is not None:", "E501 self._iban = iban @property def bic(self): \"\"\"Gets the bic", "E501 self._tan_media_id = tan_media_id @property def tan_scheme(self): \"\"\"Gets the tan_scheme", "if hasattr(x, \"to_dict\") else x, value )) elif hasattr(value, \"to_dict\"):", "Name - Name of the creditor # noqa: E501 :return:", "'bic': 'bic', 'name': 'name', 'amount': 'amount', 'purpose': 'purpose', 'tan_media_id': 'tanMediaId',", "item[1].to_dict()) if hasattr(item[1], \"to_dict\") else item, value.items() )) else: result[attr]", "return pprint.pformat(self.to_dict()) def __repr__(self): \"\"\"For `print` and `pprint`\"\"\" return self.to_str()", "# noqa: E501 BIC - Business Identifier Code (defined in", "def name(self): \"\"\"Gets the name of this Transfer. # noqa:", ":type: str \"\"\" self._purpose = purpose @property def tan_media_id(self): \"\"\"Gets", "Transfer. IBAN - International Bank Account Number (defined in ISO", "# noqa: E501 :return: The purpose of this Transfer. 
#", "to_str(self): \"\"\"Returns the string representation of the model\"\"\" return pprint.pformat(self.to_dict())", "@property def iban(self): \"\"\"Gets the iban of this Transfer. #", "Transfer): return False return self.__dict__ == other.__dict__ def __ne__(self, other):", "E501 :type: str \"\"\" self._bic = bic @property def name(self):", "def purpose(self, purpose): \"\"\"Sets the purpose of this Transfer. Purpose", "@bic.setter def bic(self, bic): \"\"\"Sets the bic of this Transfer.", "result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda", "[Terms of Service](/sandboxmanager/#/terms) [Imprint](https://sparkassen-hub.com/impressum/) &copy; 2016&dash;2017 Starfinanz - Ein Unternehmen", "noqa: E501 OpenAPI spec version: 2.1.0 Generated by: https://github.com/swagger-api/swagger-codegen.git \"\"\"", ":rtype: str \"\"\" return self._bic @bic.setter def bic(self, bic): \"\"\"Sets", "__init__(self, iban=None, bic=None, name=None, amount=None, purpose=None, tan_media_id=None, tan_scheme=None): # noqa:", "tan_media_id of this Transfer. # noqa: E501 TANMediaId - The", "'tanScheme' } def __init__(self, iban=None, bic=None, name=None, amount=None, purpose=None, tan_media_id=None,", "self.amount = amount if purpose is not None: self.purpose =", "E501 :rtype: Amount \"\"\" return self._amount @amount.setter def amount(self, amount):", "# noqa: E501 :param iban: The iban of this Transfer.", "this Transfer. Purpose # noqa: E501 :param purpose: The purpose", "E501 :return: The tan_media_id of this Transfer. # noqa: E501", "the tan_media_id of this Transfer. TANMediaId - The identifying ID", "# noqa: E501 :return: The iban of this Transfer. #", "this Transfer. 
# noqa: E501 BIC - Business Identifier Code", "noqa: E501 self._amount = amount @property def purpose(self): \"\"\"Gets the", "'amount': 'Amount', 'purpose': 'str', 'tan_media_id': 'str', 'tan_scheme': 'str' } attribute_map", ":rtype: str \"\"\" return self._purpose @purpose.setter def purpose(self, purpose): \"\"\"Sets", "E501 :rtype: str \"\"\" return self._tan_scheme @tan_scheme.setter def tan_scheme(self, tan_scheme):", "that is used to verify this payment (e.g. \\\"901\\\") #", "E501 :return: The bic of this Transfer. # noqa: E501", "value for `iban`, must not be `None`\") # noqa: E501", "# noqa: E501 :return: The amount of this Transfer. #", ":return: The purpose of this Transfer. # noqa: E501 :rtype:", "if iban is None: raise ValueError(\"Invalid value for `iban`, must", "Number (defined in ISO 13616-1) # noqa: E501 :param iban:", "if tan_media_id is None: raise ValueError(\"Invalid value for `tan_media_id`, must", "noqa: E501 :type: str \"\"\" if name is None: raise", "E501 :type: str \"\"\" if tan_scheme is None: raise ValueError(\"Invalid", "The tan_media_id of this Transfer. # noqa: E501 :type: str", "# noqa: E501 TANMediaId - The identifying ID of the", "2.1.0 Generated by: https://github.com/swagger-api/swagger-codegen.git \"\"\" import pprint import re #", "this Transfer. # noqa: E501 :rtype: str \"\"\" return self._tan_media_id", "} attribute_map = { 'iban': 'iban', 'bic': 'bic', 'name': 'name',", "Transfer. # noqa: E501 :rtype: str \"\"\" return self._bic @bic.setter", "return self._tan_scheme @tan_scheme.setter def tan_scheme(self, tan_scheme): \"\"\"Sets the tan_scheme of", "import six from swagger_client.models.amount import Amount # noqa: F401,E501 class", "The bic of this Transfer. 
# noqa: E501 :rtype: str", "coding: utf-8 \"\"\" [AHOI cookbook](/ahoi/docs/cookbook/index.html) [Data Privacy](/sandboxmanager/#/privacy) [Terms of Service](/sandboxmanager/#/terms)", "# noqa: E501 :rtype: str \"\"\" return self._name @name.setter def", "The amount of this Transfer. # noqa: E501 :rtype: Amount", "tan_media_id=None, tan_scheme=None): # noqa: E501 \"\"\"Transfer - a model defined", "tan_media_id(self): \"\"\"Gets the tan_media_id of this Transfer. # noqa: E501", "- International Bank Account Number (defined in ISO 13616-1) #", "self._name = name @property def amount(self): \"\"\"Gets the amount of", ":param purpose: The purpose of this Transfer. # noqa: E501", "The tan_media_id of this Transfer. # noqa: E501 :rtype: str", "iban @property def bic(self): \"\"\"Gets the bic of this Transfer.", "ValueError(\"Invalid value for `tan_media_id`, must not be `None`\") # noqa:", "noqa: E501 :rtype: Amount \"\"\" return self._amount @amount.setter def amount(self,", "the creditor # noqa: E501 :return: The name of this", "transfered # noqa: E501 :param amount: The amount of this", "utf-8 \"\"\" [AHOI cookbook](/ahoi/docs/cookbook/index.html) [Data Privacy](/sandboxmanager/#/privacy) [Terms of Service](/sandboxmanager/#/terms) [Imprint](https://sparkassen-hub.com/impressum/)", "tan_media_id): \"\"\"Sets the tan_media_id of this Transfer. TANMediaId - The", "noqa: E501 :type: str \"\"\" if tan_scheme is None: raise", "this Transfer. TANScheme - The scheme **id** that is used", "if amount is None: raise ValueError(\"Invalid value for `amount`, must", "of this Transfer. 
# noqa: E501 :rtype: Amount \"\"\" return", "13616-1) # noqa: E501 :return: The iban of this Transfer.", "in ISO-9362) # noqa: E501 :param bic: The bic of", "not be `None`\") # noqa: E501 self._name = name @property", "amount if purpose is not None: self.purpose = purpose self.tan_media_id", "be `None`\") # noqa: E501 self._amount = amount @property def", "noqa: E501 :type: str \"\"\" if tan_media_id is None: raise", "re # noqa: F401 import six from swagger_client.models.amount import Amount", "None: raise ValueError(\"Invalid value for `tan_media_id`, must not be `None`\")", "program. Do not edit the class manually. \"\"\" \"\"\" Attributes:", "swagger_types = { 'iban': 'str', 'bic': 'str', 'name': 'str', 'amount':", "the purpose of this Transfer. # noqa: E501 Purpose #", "the creditor # noqa: E501 :param name: The name of", "\"\"\" if tan_scheme is None: raise ValueError(\"Invalid value for `tan_scheme`,", "raise ValueError(\"Invalid value for `tan_scheme`, must not be `None`\") #", "def __init__(self, iban=None, bic=None, name=None, amount=None, purpose=None, tan_media_id=None, tan_scheme=None): #", "class is auto generated by the swagger code generator program.", "not edit the class manually. \"\"\" \"\"\" Attributes: swagger_types (dict):", "\"\"\" if iban is None: raise ValueError(\"Invalid value for `iban`,", "E501 self._amount = amount @property def purpose(self): \"\"\"Gets the purpose", "return self._name @name.setter def name(self, name): \"\"\"Sets the name of", "E501 TANMediaId - The identifying ID of the TANMedia. #", "this Transfer. # noqa: E501 :type: str \"\"\" if tan_scheme", "Amount \"\"\" return self._amount @amount.setter def amount(self, amount): \"\"\"Sets the", "= tan_scheme def to_dict(self): \"\"\"Returns the model properties as a", "list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, \"to_dict\")", "amount of this Transfer. 
# noqa: E501 :type: Amount \"\"\"", "value for `amount`, must not be `None`\") # noqa: E501", "iban=None, bic=None, name=None, amount=None, purpose=None, tan_media_id=None, tan_scheme=None): # noqa: E501", "of this Transfer. IBAN - International Bank Account Number (defined", "E501 :param bic: The bic of this Transfer. # noqa:", "class Transfer(object): \"\"\"NOTE: This class is auto generated by the", "definition. \"\"\" swagger_types = { 'iban': 'str', 'bic': 'str', 'name':", "raise ValueError(\"Invalid value for `amount`, must not be `None`\") #", "iban of this Transfer. # noqa: E501 IBAN - International", "this Transfer. Name - Name of the creditor # noqa:", "a dict\"\"\" result = {} for attr, _ in six.iteritems(self.swagger_types):", "# noqa: E501 Amount to be transfered # noqa: E501", "@iban.setter def iban(self, iban): \"\"\"Sets the iban of this Transfer.", "bic(self, bic): \"\"\"Sets the bic of this Transfer. BIC -", "E501 \"\"\"Transfer - a model defined in Swagger\"\"\" # noqa:", "\"\"\"Gets the name of this Transfer. # noqa: E501 Name", "Transfer. # noqa: E501 :type: str \"\"\" self._bic = bic", "tan_scheme @property def iban(self): \"\"\"Gets the iban of this Transfer.", "tan_scheme def to_dict(self): \"\"\"Returns the model properties as a dict\"\"\"", "swagger code generator program. Do not edit the class manually.", "key is attribute name and the value is attribute type.", "not be `None`\") # noqa: E501 self._tan_media_id = tan_media_id @property", "must not be `None`\") # noqa: E501 self._tan_media_id = tan_media_id", "\"\"\" if tan_media_id is None: raise ValueError(\"Invalid value for `tan_media_id`,", "name @property def amount(self): \"\"\"Gets the amount of this Transfer.", "of this Transfer. # noqa: E501 BIC - Business Identifier", "E501 self._iban = None self._bic = None self._name = None", "of this Transfer. 
Purpose # noqa: E501 :param purpose: The", "self.__dict__ == other.__dict__ def __ne__(self, other): \"\"\"Returns true if both", "amount(self): \"\"\"Gets the amount of this Transfer. # noqa: E501", "https://github.com/swagger-api/swagger-codegen.git \"\"\" import pprint import re # noqa: F401 import", "= None self._amount = None self._purpose = None self._tan_media_id =", "in ISO 13616-1) # noqa: E501 :param iban: The iban", "of this Transfer. # noqa: E501 Purpose # noqa: E501", "Transfer. TANMediaId - The identifying ID of the TANMedia. #", "item: (item[0], item[1].to_dict()) if hasattr(item[1], \"to_dict\") else item, value.items() ))", "None self._amount = None self._purpose = None self._tan_media_id = None", "\"\"\"Returns the model properties as a dict\"\"\" result = {}", "E501 :return: The tan_scheme of this Transfer. # noqa: E501", "def to_str(self): \"\"\"Returns the string representation of the model\"\"\" return", "BIC - Business Identifier Code (defined in ISO-9362) # noqa:", "OpenAPI spec version: 2.1.0 Generated by: https://github.com/swagger-api/swagger-codegen.git \"\"\" import pprint", "of this Transfer. # noqa: E501 Amount to be transfered", "of this Transfer. Amount to be transfered # noqa: E501", "verify this payment (e.g. \\\"901\\\") # noqa: E501 :param tan_scheme:", "= purpose self.tan_media_id = tan_media_id self.tan_scheme = tan_scheme @property def", "`print` and `pprint`\"\"\" return self.to_str() def __eq__(self, other): \"\"\"Returns true", "name and the value is attribute type. attribute_map (dict): The", "self.tan_scheme = tan_scheme @property def iban(self): \"\"\"Gets the iban of", ":type: str \"\"\" if name is None: raise ValueError(\"Invalid value", "this Transfer. # noqa: E501 TANScheme - The scheme **id**", "self._purpose = purpose @property def tan_media_id(self): \"\"\"Gets the tan_media_id of", "noqa: E501 :return: The tan_scheme of this Transfer. 
# noqa:", "for `iban`, must not be `None`\") # noqa: E501 self._iban", "# noqa: E501 Purpose # noqa: E501 :return: The purpose", "purpose is not None: self.purpose = purpose self.tan_media_id = tan_media_id", "Transfer. # noqa: E501 BIC - Business Identifier Code (defined", "# noqa: E501 :param name: The name of this Transfer.", "@property def purpose(self): \"\"\"Gets the purpose of this Transfer. #", "of this Transfer. # noqa: E501 TANMediaId - The identifying", ":return: The tan_media_id of this Transfer. # noqa: E501 :rtype:", "for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if", "\"\"\"Returns the string representation of the model\"\"\" return pprint.pformat(self.to_dict()) def", "= list(map( lambda x: x.to_dict() if hasattr(x, \"to_dict\") else x,", "# noqa: F401,E501 class Transfer(object): \"\"\"NOTE: This class is auto", "'name': 'str', 'amount': 'Amount', 'purpose': 'str', 'tan_media_id': 'str', 'tan_scheme': 'str'", "this Transfer. # noqa: E501 :rtype: str \"\"\" return self._bic", "of this Transfer. # noqa: E501 :rtype: str \"\"\" return", "Account Number (defined in ISO 13616-1) # noqa: E501 :param", "def bic(self): \"\"\"Gets the bic of this Transfer. # noqa:", "self._tan_scheme = tan_scheme def to_dict(self): \"\"\"Returns the model properties as", "The key is attribute name and the value is json", "Transfer. Amount to be transfered # noqa: E501 :param amount:", "manually. \"\"\" \"\"\" Attributes: swagger_types (dict): The key is attribute", "(defined in ISO 13616-1) # noqa: E501 :return: The iban", "not None: self.bic = bic self.name = name self.amount =", "name of this Transfer. # noqa: E501 :type: str \"\"\"", "purpose of this Transfer. Purpose # noqa: E501 :param purpose:", "amount of this Transfer. # noqa: E501 :rtype: Amount \"\"\"", "\"\"\"Transfer - a model defined in Swagger\"\"\" # noqa: E501", "Name of the creditor # noqa: E501 :param name: The", "name of this Transfer. 
# noqa: E501 :rtype: str \"\"\"", "transfered # noqa: E501 :return: The amount of this Transfer.", "F401 import six from swagger_client.models.amount import Amount # noqa: F401,E501", "\"\"\" [AHOI cookbook](/ahoi/docs/cookbook/index.html) [Data Privacy](/sandboxmanager/#/privacy) [Terms of Service](/sandboxmanager/#/terms) [Imprint](https://sparkassen-hub.com/impressum/) &copy;", "bic(self): \"\"\"Gets the bic of this Transfer. # noqa: E501", "@amount.setter def amount(self, amount): \"\"\"Sets the amount of this Transfer.", "pprint import re # noqa: F401 import six from swagger_client.models.amount", "# noqa: E501 self._iban = iban @property def bic(self): \"\"\"Gets", "in ISO-9362) # noqa: E501 :return: The bic of this", "ID of the TANMedia. # noqa: E501 :param tan_media_id: The", "= None self._tan_media_id = None self._tan_scheme = None self.discriminator =", "is None: raise ValueError(\"Invalid value for `name`, must not be", "getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x:", "iban of this Transfer. IBAN - International Bank Account Number", "# noqa: E501 :rtype: Amount \"\"\" return self._amount @amount.setter def", "are equal\"\"\" if not isinstance(other, Transfer): return False return self.__dict__", "hasattr(value, \"to_dict\"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] =", "@property def tan_media_id(self): \"\"\"Gets the tan_media_id of this Transfer. #", "bic @property def name(self): \"\"\"Gets the name of this Transfer.", "if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if", "tan_media_id self.tan_scheme = tan_scheme @property def iban(self): \"\"\"Gets the iban", "auto generated by the swagger code generator program. Do not", "to verify this payment (e.g. 
\\\"901\\\") # noqa: E501 :return:", "= amount @property def purpose(self): \"\"\"Gets the purpose of this", "self._iban @iban.setter def iban(self, iban): \"\"\"Sets the iban of this", "the purpose of this Transfer. Purpose # noqa: E501 :param", "purpose: The purpose of this Transfer. # noqa: E501 :type:", "def __eq__(self, other): \"\"\"Returns true if both objects are equal\"\"\"", "of this Transfer. # noqa: E501 IBAN - International Bank", "self._tan_media_id = None self._tan_scheme = None self.discriminator = None self.iban", "E501 :rtype: str \"\"\" return self._purpose @purpose.setter def purpose(self, purpose):", "self._purpose @purpose.setter def purpose(self, purpose): \"\"\"Sets the purpose of this", "of the model\"\"\" return pprint.pformat(self.to_dict()) def __repr__(self): \"\"\"For `print` and", "attribute name and the value is json key in definition.", "tan_media_id of this Transfer. # noqa: E501 :rtype: str \"\"\"", "(dict): The key is attribute name and the value is", "None: raise ValueError(\"Invalid value for `tan_scheme`, must not be `None`\")", ")) elif hasattr(value, \"to_dict\"): result[attr] = value.to_dict() elif isinstance(value, dict):", "None: raise ValueError(\"Invalid value for `name`, must not be `None`\")", "the value is json key in definition. \"\"\" swagger_types =", "be `None`\") # noqa: E501 self._name = name @property def", "ISO-9362) # noqa: E501 :return: The bic of this Transfer.", "# noqa: E501 :rtype: str \"\"\" return self._purpose @purpose.setter def", "iban of this Transfer. # noqa: E501 :rtype: str \"\"\"", "is None: raise ValueError(\"Invalid value for `amount`, must not be", "be `None`\") # noqa: E501 self._tan_media_id = tan_media_id @property def", "noqa: E501 TANScheme - The scheme **id** that is used", "tan_scheme): \"\"\"Sets the tan_scheme of this Transfer. 
TANScheme - The", "\"\"\" \"\"\" Attributes: swagger_types (dict): The key is attribute name", "None self._tan_media_id = None self._tan_scheme = None self.discriminator = None", "swagger_client.models.amount import Amount # noqa: F401,E501 class Transfer(object): \"\"\"NOTE: This", "'str', 'bic': 'str', 'name': 'str', 'amount': 'Amount', 'purpose': 'str', 'tan_media_id':", "noqa: E501 :type: str \"\"\" self._bic = bic @property def", "def purpose(self): \"\"\"Gets the purpose of this Transfer. # noqa:", "(item[0], item[1].to_dict()) if hasattr(item[1], \"to_dict\") else item, value.items() )) else:", "\"\"\" return self._name @name.setter def name(self, name): \"\"\"Sets the name", "this Transfer. # noqa: E501 :type: str \"\"\" if tan_media_id", ":rtype: str \"\"\" return self._iban @iban.setter def iban(self, iban): \"\"\"Sets", "# noqa: E501 IBAN - International Bank Account Number (defined", "noqa: E501 BIC - Business Identifier Code (defined in ISO-9362)", "Transfer. # noqa: E501 Amount to be transfered # noqa:", "Privacy](/sandboxmanager/#/privacy) [Terms of Service](/sandboxmanager/#/terms) [Imprint](https://sparkassen-hub.com/impressum/) &copy; 2016&dash;2017 Starfinanz - Ein", "result = {} for attr, _ in six.iteritems(self.swagger_types): value =", "The iban of this Transfer. # noqa: E501 :type: str", "\"\"\" swagger_types = { 'iban': 'str', 'bic': 'str', 'name': 'str',", "__ne__(self, other): \"\"\"Returns true if both objects are not equal\"\"\"", "self._amount = amount @property def purpose(self): \"\"\"Gets the purpose of", "@tan_media_id.setter def tan_media_id(self, tan_media_id): \"\"\"Sets the tan_media_id of this Transfer.", ":rtype: str \"\"\" return self._tan_scheme @tan_scheme.setter def tan_scheme(self, tan_scheme): \"\"\"Sets", "noqa: E501 TANMediaId - The identifying ID of the TANMedia.", "and the value is json key in definition. \"\"\" swagger_types", "amount: The amount of this Transfer. # noqa: E501 :type:", "this Transfer. 
# noqa: E501 :rtype: str \"\"\" return self._tan_scheme", "\"to_dict\"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map(", "noqa: E501 :param iban: The iban of this Transfer. #", "is None: raise ValueError(\"Invalid value for `tan_media_id`, must not be", "bic=None, name=None, amount=None, purpose=None, tan_media_id=None, tan_scheme=None): # noqa: E501 \"\"\"Transfer", "None: raise ValueError(\"Invalid value for `iban`, must not be `None`\")", "# noqa: E501 :rtype: str \"\"\" return self._tan_scheme @tan_scheme.setter def", "str \"\"\" return self._tan_media_id @tan_media_id.setter def tan_media_id(self, tan_media_id): \"\"\"Sets the", "= None self._bic = None self._name = None self._amount =", "- Name of the creditor # noqa: E501 :param name:", "Purpose # noqa: E501 :return: The purpose of this Transfer.", "model properties as a dict\"\"\" result = {} for attr,", "name(self, name): \"\"\"Sets the name of this Transfer. Name -", "self.discriminator = None self.iban = iban if bic is not", "Amount \"\"\" if amount is None: raise ValueError(\"Invalid value for", "def __ne__(self, other): \"\"\"Returns true if both objects are not", "- Business Identifier Code (defined in ISO-9362) # noqa: E501", "'str' } attribute_map = { 'iban': 'iban', 'bic': 'bic', 'name':", "the iban of this Transfer. # noqa: E501 IBAN -", "noqa: E501 self._tan_scheme = tan_scheme def to_dict(self): \"\"\"Returns the model", "noqa: E501 \"\"\"Transfer - a model defined in Swagger\"\"\" #", "[AHOI cookbook](/ahoi/docs/cookbook/index.html) [Data Privacy](/sandboxmanager/#/privacy) [Terms of Service](/sandboxmanager/#/terms) [Imprint](https://sparkassen-hub.com/impressum/) &copy; 2016&dash;2017", "def name(self, name): \"\"\"Sets the name of this Transfer. Name", "the TANMedia. 
# noqa: E501 :param tan_media_id: The tan_media_id of", "self._amount = None self._purpose = None self._tan_media_id = None self._tan_scheme", ":type: Amount \"\"\" if amount is None: raise ValueError(\"Invalid value", "Business Identifier Code (defined in ISO-9362) # noqa: E501 :param", "= dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], \"to_dict\") else", "tan_scheme: The tan_scheme of this Transfer. # noqa: E501 :type:", "of this Transfer. # noqa: E501 :type: str \"\"\" if", "this Transfer. # noqa: E501 IBAN - International Bank Account", "must not be `None`\") # noqa: E501 self._iban = iban", "version: 2.1.0 Generated by: https://github.com/swagger-api/swagger-codegen.git \"\"\" import pprint import re", "for `name`, must not be `None`\") # noqa: E501 self._name", "noqa: E501 :param amount: The amount of this Transfer. #", "this Transfer. # noqa: E501 TANMediaId - The identifying ID", "self._iban = iban @property def bic(self): \"\"\"Gets the bic of", "amount=None, purpose=None, tan_media_id=None, tan_scheme=None): # noqa: E501 \"\"\"Transfer - a", "defined in Swagger\"\"\" # noqa: E501 self._iban = None self._bic", "raise ValueError(\"Invalid value for `name`, must not be `None`\") #", "the tan_media_id of this Transfer. # noqa: E501 TANMediaId -", "raise ValueError(\"Invalid value for `tan_media_id`, must not be `None`\") #", "# noqa: E501 OpenAPI spec version: 2.1.0 Generated by: https://github.com/swagger-api/swagger-codegen.git", "noqa: F401 import six from swagger_client.models.amount import Amount # noqa:", "purpose @property def tan_media_id(self): \"\"\"Gets the tan_media_id of this Transfer.", "Finanz Informatik # noqa: E501 OpenAPI spec version: 2.1.0 Generated", "E501 :type: Amount \"\"\" if amount is None: raise ValueError(\"Invalid", "\"\"\"Sets the name of this Transfer. 
Name - Name of", "'tan_scheme': 'str' } attribute_map = { 'iban': 'iban', 'bic': 'bic',", "self._iban = None self._bic = None self._name = None self._amount", "str \"\"\" if tan_media_id is None: raise ValueError(\"Invalid value for", "is None: raise ValueError(\"Invalid value for `tan_scheme`, must not be", ":rtype: str \"\"\" return self._name @name.setter def name(self, name): \"\"\"Sets", "Purpose # noqa: E501 :param purpose: The purpose of this", "of this Transfer. # noqa: E501 :type: str \"\"\" self._bic", "iban): \"\"\"Sets the iban of this Transfer. IBAN - International", "key is attribute name and the value is json key", "# noqa: E501 :type: str \"\"\" if tan_media_id is None:", "The purpose of this Transfer. # noqa: E501 :type: str", "generator program. Do not edit the class manually. \"\"\" \"\"\"", "@property def bic(self): \"\"\"Gets the bic of this Transfer. #", "str \"\"\" return self._tan_scheme @tan_scheme.setter def tan_scheme(self, tan_scheme): \"\"\"Sets the", "None: self.bic = bic self.name = name self.amount = amount", "ID of the TANMedia. # noqa: E501 :return: The tan_media_id", "if both objects are not equal\"\"\" return not self ==", "'str', 'name': 'str', 'amount': 'Amount', 'purpose': 'str', 'tan_media_id': 'str', 'tan_scheme':", "the name of this Transfer. Name - Name of the", "def __repr__(self): \"\"\"For `print` and `pprint`\"\"\" return self.to_str() def __eq__(self,", "the bic of this Transfer. # noqa: E501 BIC -", "and `pprint`\"\"\" return self.to_str() def __eq__(self, other): \"\"\"Returns true if", "Code (defined in ISO-9362) # noqa: E501 :return: The bic", "# noqa: E501 :return: The tan_scheme of this Transfer. #", ":type: str \"\"\" if tan_media_id is None: raise ValueError(\"Invalid value", "E501 :param tan_media_id: The tan_media_id of this Transfer. # noqa:", "payment (e.g. \\\"901\\\") # noqa: E501 :param tan_scheme: The tan_scheme", "(defined in ISO-9362) # noqa: E501 :param bic: The bic", "Transfer. 
# noqa: E501 :type: Amount \"\"\" if amount is", "list(map( lambda x: x.to_dict() if hasattr(x, \"to_dict\") else x, value", "E501 :param purpose: The purpose of this Transfer. # noqa:", "str \"\"\" return self._purpose @purpose.setter def purpose(self, purpose): \"\"\"Sets the", "tan_media_id @property def tan_scheme(self): \"\"\"Gets the tan_scheme of this Transfer.", "\"\"\" return self._tan_scheme @tan_scheme.setter def tan_scheme(self, tan_scheme): \"\"\"Sets the tan_scheme", "= iban if bic is not None: self.bic = bic", "\"\"\"Gets the amount of this Transfer. # noqa: E501 Amount", "'Amount', 'purpose': 'str', 'tan_media_id': 'str', 'tan_scheme': 'str' } attribute_map =", "of the creditor # noqa: E501 :param name: The name", "# noqa: E501 :rtype: str \"\"\" return self._tan_media_id @tan_media_id.setter def", "\"\"\"Returns true if both objects are equal\"\"\" if not isinstance(other,", "E501 :rtype: str \"\"\" return self._tan_media_id @tan_media_id.setter def tan_media_id(self, tan_media_id):", "\\\"901\\\") # noqa: E501 :param tan_scheme: The tan_scheme of this", "E501 :param amount: The amount of this Transfer. # noqa:", "E501 :rtype: str \"\"\" return self._bic @bic.setter def bic(self, bic):", "{} for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr)", "\"\"\" Attributes: swagger_types (dict): The key is attribute name and", "of this Transfer. Name - Name of the creditor #", "\"\"\" return self._bic @bic.setter def bic(self, bic): \"\"\"Sets the bic", "noqa: E501 :return: The tan_media_id of this Transfer. # noqa:", "other): \"\"\"Returns true if both objects are not equal\"\"\" return", "# noqa: E501 :return: The bic of this Transfer. 
#", "creditor # noqa: E501 :param name: The name of this", "isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x,", "noqa: F401,E501 class Transfer(object): \"\"\"NOTE: This class is auto generated", "[Data Privacy](/sandboxmanager/#/privacy) [Terms of Service](/sandboxmanager/#/terms) [Imprint](https://sparkassen-hub.com/impressum/) &copy; 2016&dash;2017 Starfinanz -", "this payment (e.g. \\\"901\\\") # noqa: E501 :return: The tan_scheme", "if name is None: raise ValueError(\"Invalid value for `name`, must", "if both objects are equal\"\"\" if not isinstance(other, Transfer): return", "The identifying ID of the TANMedia. # noqa: E501 :return:", "def to_dict(self): \"\"\"Returns the model properties as a dict\"\"\" result", "- Name of the creditor # noqa: E501 :return: The", "the amount of this Transfer. # noqa: E501 Amount to", "this Transfer. # noqa: E501 Amount to be transfered #", "bic of this Transfer. # noqa: E501 :rtype: str \"\"\"", "be `None`\") # noqa: E501 self._iban = iban @property def", "str \"\"\" return self._iban @iban.setter def iban(self, iban): \"\"\"Sets the", "return False return self.__dict__ == other.__dict__ def __ne__(self, other): \"\"\"Returns", "cookbook](/ahoi/docs/cookbook/index.html) [Data Privacy](/sandboxmanager/#/privacy) [Terms of Service](/sandboxmanager/#/terms) [Imprint](https://sparkassen-hub.com/impressum/) &copy; 2016&dash;2017 Starfinanz", "for `tan_media_id`, must not be `None`\") # noqa: E501 self._tan_media_id", ":param iban: The iban of this Transfer. 
# noqa: E501", "noqa: E501 self._tan_media_id = tan_media_id @property def tan_scheme(self): \"\"\"Gets the", "not be `None`\") # noqa: E501 self._iban = iban @property", "in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr]", "__eq__(self, other): \"\"\"Returns true if both objects are equal\"\"\" if", "# coding: utf-8 \"\"\" [AHOI cookbook](/ahoi/docs/cookbook/index.html) [Data Privacy](/sandboxmanager/#/privacy) [Terms of", "for `amount`, must not be `None`\") # noqa: E501 self._amount", "= getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda", "self._amount @amount.setter def amount(self, amount): \"\"\"Sets the amount of this", "noqa: E501 self._iban = iban @property def bic(self): \"\"\"Gets the", "self.bic = bic self.name = name self.amount = amount if", "# noqa: E501 :return: The name of this Transfer. #", "- Ein Unternehmen der Finanz Informatik # noqa: E501 OpenAPI", ":return: The amount of this Transfer. # noqa: E501 :rtype:", "noqa: E501 :param purpose: The purpose of this Transfer. #", "noqa: E501 :rtype: str \"\"\" return self._name @name.setter def name(self,", "value return result def to_str(self): \"\"\"Returns the string representation of", "def tan_scheme(self): \"\"\"Gets the tan_scheme of this Transfer. # noqa:", "Service](/sandboxmanager/#/terms) [Imprint](https://sparkassen-hub.com/impressum/) &copy; 2016&dash;2017 Starfinanz - Ein Unternehmen der Finanz", "import pprint import re # noqa: F401 import six from", "noqa: E501 IBAN - International Bank Account Number (defined in", "noqa: E501 :rtype: str \"\"\" return self._bic @bic.setter def bic(self,", "must not be `None`\") # noqa: E501 self._name = name", "tan_media_id is None: raise ValueError(\"Invalid value for `tan_media_id`, must not", "purpose of this Transfer. 
# noqa: E501 :rtype: str \"\"\"", "other.__dict__ def __ne__(self, other): \"\"\"Returns true if both objects are", "str \"\"\" return self._bic @bic.setter def bic(self, bic): \"\"\"Sets the", "'amount', 'purpose': 'purpose', 'tan_media_id': 'tanMediaId', 'tan_scheme': 'tanScheme' } def __init__(self,", "def tan_scheme(self, tan_scheme): \"\"\"Sets the tan_scheme of this Transfer. TANScheme", "= tan_media_id self.tan_scheme = tan_scheme @property def iban(self): \"\"\"Gets the", "**id** that is used to verify this payment (e.g. \\\"901\\\")", "`tan_media_id`, must not be `None`\") # noqa: E501 self._tan_media_id =", "} def __init__(self, iban=None, bic=None, name=None, amount=None, purpose=None, tan_media_id=None, tan_scheme=None):", "E501 :param iban: The iban of this Transfer. # noqa:", "return self._amount @amount.setter def amount(self, amount): \"\"\"Sets the amount of", "= amount if purpose is not None: self.purpose = purpose", "noqa: E501 :return: The amount of this Transfer. # noqa:", "tan_media_id: The tan_media_id of this Transfer. # noqa: E501 :type:", "def iban(self, iban): \"\"\"Sets the iban of this Transfer. IBAN", "\"\"\" return self._iban @iban.setter def iban(self, iban): \"\"\"Sets the iban", "of Service](/sandboxmanager/#/terms) [Imprint](https://sparkassen-hub.com/impressum/) &copy; 2016&dash;2017 Starfinanz - Ein Unternehmen der", "The scheme **id** that is used to verify this payment", "else x, value )) elif hasattr(value, \"to_dict\"): result[attr] = value.to_dict()", "if tan_scheme is None: raise ValueError(\"Invalid value for `tan_scheme`, must", "item, value.items() )) else: result[attr] = value return result def", "swagger_types (dict): The key is attribute name and the value", "in Swagger\"\"\" # noqa: E501 self._iban = None self._bic =", "name: The name of this Transfer. # noqa: E501 :type:", "of the TANMedia. 
# noqa: E501 :return: The tan_media_id of", "= value return result def to_str(self): \"\"\"Returns the string representation", "@property def amount(self): \"\"\"Gets the amount of this Transfer. #", "\"\"\"Sets the tan_media_id of this Transfer. TANMediaId - The identifying", ":type: str \"\"\" if tan_scheme is None: raise ValueError(\"Invalid value", "None self.discriminator = None self.iban = iban if bic is", "of this Transfer. BIC - Business Identifier Code (defined in", "Amount to be transfered # noqa: E501 :return: The amount", "@purpose.setter def purpose(self, purpose): \"\"\"Sets the purpose of this Transfer.", "None self._purpose = None self._tan_media_id = None self._tan_scheme = None", "x, value )) elif hasattr(value, \"to_dict\"): result[attr] = value.to_dict() elif", "\"\"\"NOTE: This class is auto generated by the swagger code", "`None`\") # noqa: E501 self._iban = iban @property def bic(self):", "'tan_media_id': 'str', 'tan_scheme': 'str' } attribute_map = { 'iban': 'iban',", "bic): \"\"\"Sets the bic of this Transfer. BIC - Business", "noqa: E501 Name - Name of the creditor # noqa:", "not isinstance(other, Transfer): return False return self.__dict__ == other.__dict__ def", "self.purpose = purpose self.tan_media_id = tan_media_id self.tan_scheme = tan_scheme @property", "Identifier Code (defined in ISO-9362) # noqa: E501 :return: The", "name(self): \"\"\"Gets the name of this Transfer. # noqa: E501", "`None`\") # noqa: E501 self._name = name @property def amount(self):", "- The identifying ID of the TANMedia. # noqa: E501", "tan_media_id of this Transfer. 
# noqa: E501 :type: str \"\"\"", "result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], \"to_dict\")", "# noqa: E501 :type: str \"\"\" if iban is None:", "name=None, amount=None, purpose=None, tan_media_id=None, tan_scheme=None): # noqa: E501 \"\"\"Transfer -", "'purpose': 'purpose', 'tan_media_id': 'tanMediaId', 'tan_scheme': 'tanScheme' } def __init__(self, iban=None,", "None self._name = None self._amount = None self._purpose = None", "properties as a dict\"\"\" result = {} for attr, _", "else item, value.items() )) else: result[attr] = value return result", "edit the class manually. \"\"\" \"\"\" Attributes: swagger_types (dict): The", "'tanMediaId', 'tan_scheme': 'tanScheme' } def __init__(self, iban=None, bic=None, name=None, amount=None,", "identifying ID of the TANMedia. # noqa: E501 :param tan_media_id:", "isinstance(other, Transfer): return False return self.__dict__ == other.__dict__ def __ne__(self,", "Transfer. # noqa: E501 :rtype: str \"\"\" return self._name @name.setter", "International Bank Account Number (defined in ISO 13616-1) # noqa:", "[Imprint](https://sparkassen-hub.com/impressum/) &copy; 2016&dash;2017 Starfinanz - Ein Unternehmen der Finanz Informatik", "\"to_dict\") else x, value )) elif hasattr(value, \"to_dict\"): result[attr] =", "\"\"\" return self._purpose @purpose.setter def purpose(self, purpose): \"\"\"Sets the purpose", "'purpose', 'tan_media_id': 'tanMediaId', 'tan_scheme': 'tanScheme' } def __init__(self, iban=None, bic=None,", "E501 :return: The name of this Transfer. # noqa: E501", "objects are equal\"\"\" if not isinstance(other, Transfer): return False return", "of this Transfer. TANScheme - The scheme **id** that is", "Identifier Code (defined in ISO-9362) # noqa: E501 :param bic:", "(defined in ISO-9362) # noqa: E501 :return: The bic of", "tan_scheme(self): \"\"\"Gets the tan_scheme of this Transfer. 
# noqa: E501", "'amount': 'amount', 'purpose': 'purpose', 'tan_media_id': 'tanMediaId', 'tan_scheme': 'tanScheme' } def", "'iban', 'bic': 'bic', 'name': 'name', 'amount': 'amount', 'purpose': 'purpose', 'tan_media_id':", "to be transfered # noqa: E501 :param amount: The amount", "used to verify this payment (e.g. \\\"901\\\") # noqa: E501", "noqa: E501 :type: Amount \"\"\" if amount is None: raise", "The iban of this Transfer. # noqa: E501 :rtype: str", "tan_scheme is None: raise ValueError(\"Invalid value for `tan_scheme`, must not", "attribute name and the value is attribute type. attribute_map (dict):", "noqa: E501 :type: str \"\"\" self._purpose = purpose @property def", "iban: The iban of this Transfer. # noqa: E501 :type:", "Transfer. TANScheme - The scheme **id** that is used to", ":param amount: The amount of this Transfer. # noqa: E501", "noqa: E501 :type: str \"\"\" if iban is None: raise", "E501 :return: The amount of this Transfer. # noqa: E501", "noqa: E501 Purpose # noqa: E501 :return: The purpose of", "amount): \"\"\"Sets the amount of this Transfer. Amount to be", "Name - Name of the creditor # noqa: E501 :param", "this Transfer. # noqa: E501 :rtype: Amount \"\"\" return self._amount", "elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict())", "str \"\"\" self._purpose = purpose @property def tan_media_id(self): \"\"\"Gets the", "the model properties as a dict\"\"\" result = {} for", "def tan_media_id(self, tan_media_id): \"\"\"Sets the tan_media_id of this Transfer. TANMediaId", "\"\"\"Gets the iban of this Transfer. 
# noqa: E501 IBAN", "the model\"\"\" return pprint.pformat(self.to_dict()) def __repr__(self): \"\"\"For `print` and `pprint`\"\"\"", "if not isinstance(other, Transfer): return False return self.__dict__ == other.__dict__", "ValueError(\"Invalid value for `amount`, must not be `None`\") # noqa:", "E501 :rtype: str \"\"\" return self._name @name.setter def name(self, name):", "= name @property def amount(self): \"\"\"Gets the amount of this", "x.to_dict() if hasattr(x, \"to_dict\") else x, value )) elif hasattr(value,", "noqa: E501 self._iban = None self._bic = None self._name =", "six from swagger_client.models.amount import Amount # noqa: F401,E501 class Transfer(object):", "The tan_scheme of this Transfer. # noqa: E501 :rtype: str", "equal\"\"\" if not isinstance(other, Transfer): return False return self.__dict__ ==", "purpose(self, purpose): \"\"\"Sets the purpose of this Transfer. Purpose #", "Attributes: swagger_types (dict): The key is attribute name and the", "\"\"\"Sets the amount of this Transfer. Amount to be transfered", "# noqa: E501 :param purpose: The purpose of this Transfer.", "Transfer. # noqa: E501 IBAN - International Bank Account Number", "value for `name`, must not be `None`\") # noqa: E501", "# noqa: E501 \"\"\"Transfer - a model defined in Swagger\"\"\"", "(defined in ISO 13616-1) # noqa: E501 :param iban: The", "by: https://github.com/swagger-api/swagger-codegen.git \"\"\" import pprint import re # noqa: F401", "E501 :param name: The name of this Transfer. # noqa:", "noqa: E501 :return: The name of this Transfer. # noqa:", "to verify this payment (e.g. \\\"901\\\") # noqa: E501 :param", "E501 OpenAPI spec version: 2.1.0 Generated by: https://github.com/swagger-api/swagger-codegen.git \"\"\" import", "is auto generated by the swagger code generator program. Do", "is json key in definition. 
\"\"\" swagger_types = { 'iban':", "amount @property def purpose(self): \"\"\"Gets the purpose of this Transfer.", "# noqa: E501 :param bic: The bic of this Transfer.", "Transfer. # noqa: E501 :rtype: str \"\"\" return self._purpose @purpose.setter", "from swagger_client.models.amount import Amount # noqa: F401,E501 class Transfer(object): \"\"\"NOTE:", "value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0],", "Ein Unternehmen der Finanz Informatik # noqa: E501 OpenAPI spec", "is None: raise ValueError(\"Invalid value for `iban`, must not be", "if hasattr(item[1], \"to_dict\") else item, value.items() )) else: result[attr] =", "the amount of this Transfer. Amount to be transfered #", "E501 Name - Name of the creditor # noqa: E501", "Transfer. Name - Name of the creditor # noqa: E501", ":param name: The name of this Transfer. # noqa: E501", "Swagger\"\"\" # noqa: E501 self._iban = None self._bic = None", "ISO 13616-1) # noqa: E501 :return: The iban of this", "Transfer. # noqa: E501 :type: str \"\"\" self._purpose = purpose", "else: result[attr] = value return result def to_str(self): \"\"\"Returns the", "\"\"\" import pprint import re # noqa: F401 import six", "dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], \"to_dict\") else item,", "bic of this Transfer. BIC - Business Identifier Code (defined", "true if both objects are equal\"\"\" if not isinstance(other, Transfer):", "identifying ID of the TANMedia. # noqa: E501 :return: The", "noqa: E501 :rtype: str \"\"\" return self._tan_media_id @tan_media_id.setter def tan_media_id(self,", "noqa: E501 :return: The iban of this Transfer. # noqa:", "this Transfer. # noqa: E501 :type: str \"\"\" self._bic =", "purpose(self): \"\"\"Gets the purpose of this Transfer. # noqa: E501", "`iban`, must not be `None`\") # noqa: E501 self._iban =", "noqa: E501 :param tan_media_id: The tan_media_id of this Transfer. 
#", "@name.setter def name(self, name): \"\"\"Sets the name of this Transfer.", "False return self.__dict__ == other.__dict__ def __ne__(self, other): \"\"\"Returns true", "= { 'iban': 'iban', 'bic': 'bic', 'name': 'name', 'amount': 'amount',", "of this Transfer. # noqa: E501 :type: Amount \"\"\" if", "The name of this Transfer. # noqa: E501 :type: str", "\"\"\"Sets the iban of this Transfer. IBAN - International Bank", "# noqa: E501 :return: The tan_media_id of this Transfer. #", "ISO-9362) # noqa: E501 :param bic: The bic of this", "of this Transfer. # noqa: E501 :type: str \"\"\" self._purpose", "def bic(self, bic): \"\"\"Sets the bic of this Transfer. BIC", "E501 :param tan_scheme: The tan_scheme of this Transfer. # noqa:", "self._bic = bic @property def name(self): \"\"\"Gets the name of", "Transfer. Purpose # noqa: E501 :param purpose: The purpose of", "Business Identifier Code (defined in ISO-9362) # noqa: E501 :return:", "purpose of this Transfer. # noqa: E501 Purpose # noqa:", "ValueError(\"Invalid value for `tan_scheme`, must not be `None`\") # noqa:", "value is attribute type. attribute_map (dict): The key is attribute", "# noqa: E501 self._name = name @property def amount(self): \"\"\"Gets", "return self.to_str() def __eq__(self, other): \"\"\"Returns true if both objects", "x: x.to_dict() if hasattr(x, \"to_dict\") else x, value )) elif", "'name': 'name', 'amount': 'amount', 'purpose': 'purpose', 'tan_media_id': 'tanMediaId', 'tan_scheme': 'tanScheme'", "def iban(self): \"\"\"Gets the iban of this Transfer. # noqa:", "Transfer. # noqa: E501 :rtype: str \"\"\" return self._tan_scheme @tan_scheme.setter", ":rtype: Amount \"\"\" return self._amount @amount.setter def amount(self, amount): \"\"\"Sets", "name of this Transfer. Name - Name of the creditor", "Transfer. BIC - Business Identifier Code (defined in ISO-9362) #", "and the value is attribute type. attribute_map (dict): The key", "this Transfer. 
# noqa: E501 :type: Amount \"\"\" if amount", ":type: str \"\"\" self._bic = bic @property def name(self): \"\"\"Gets", "value is json key in definition. \"\"\" swagger_types = {", "E501 IBAN - International Bank Account Number (defined in ISO", "lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], \"to_dict\") else item, value.items()", "# noqa: E501 :type: Amount \"\"\" if amount is None:", "import re # noqa: F401 import six from swagger_client.models.amount import", "must not be `None`\") # noqa: E501 self._amount = amount", "raise ValueError(\"Invalid value for `iban`, must not be `None`\") #", ":return: The iban of this Transfer. # noqa: E501 :rtype:", "this payment (e.g. \\\"901\\\") # noqa: E501 :param tan_scheme: The", "name): \"\"\"Sets the name of this Transfer. Name - Name", "noqa: E501 :return: The purpose of this Transfer. # noqa:", "TANMedia. # noqa: E501 :param tan_media_id: The tan_media_id of this", "@property def name(self): \"\"\"Gets the name of this Transfer. #", "Transfer. # noqa: E501 Name - Name of the creditor", "TANMedia. # noqa: E501 :return: The tan_media_id of this Transfer.", "= value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item:", "Transfer. # noqa: E501 :rtype: str \"\"\" return self._iban @iban.setter", "noqa: E501 :rtype: str \"\"\" return self._iban @iban.setter def iban(self,", "this Transfer. TANMediaId - The identifying ID of the TANMedia.", "(e.g. \\\"901\\\") # noqa: E501 :param tan_scheme: The tan_scheme of", "this Transfer. # noqa: E501 :type: str \"\"\" self._purpose =", "iban(self, iban): \"\"\"Sets the iban of this Transfer. IBAN -", "the iban of this Transfer. IBAN - International Bank Account", "str \"\"\" if tan_scheme is None: raise ValueError(\"Invalid value for", "return self.__dict__ == other.__dict__ def __ne__(self, other): \"\"\"Returns true if", "ValueError(\"Invalid value for `iban`, must not be `None`\") # noqa:", "The identifying ID of the TANMedia. 
# noqa: E501 :param", "Amount # noqa: F401,E501 class Transfer(object): \"\"\"NOTE: This class is", "purpose self.tan_media_id = tan_media_id self.tan_scheme = tan_scheme @property def iban(self):", "scheme **id** that is used to verify this payment (e.g.", "is not None: self.bic = bic self.name = name self.amount", "to be transfered # noqa: E501 :return: The amount of", "this Transfer. IBAN - International Bank Account Number (defined in", "# noqa: E501 TANScheme - The scheme **id** that is", "- a model defined in Swagger\"\"\" # noqa: E501 self._iban", "iban if bic is not None: self.bic = bic self.name", "purpose=None, tan_media_id=None, tan_scheme=None): # noqa: E501 \"\"\"Transfer - a model", "Transfer. # noqa: E501 :type: str \"\"\" if tan_media_id is", "purpose of this Transfer. # noqa: E501 :type: str \"\"\"", "the class manually. \"\"\" \"\"\" Attributes: swagger_types (dict): The key", "the swagger code generator program. Do not edit the class", "\"to_dict\") else item, value.items() )) else: result[attr] = value return", "= None self._tan_scheme = None self.discriminator = None self.iban =", "str \"\"\" if name is None: raise ValueError(\"Invalid value for", "The tan_scheme of this Transfer. # noqa: E501 :type: str", "this Transfer. BIC - Business Identifier Code (defined in ISO-9362)", "tan_scheme of this Transfer. # noqa: E501 TANScheme - The", "# noqa: E501 :type: str \"\"\" if name is None:", "as a dict\"\"\" result = {} for attr, _ in", "name is None: raise ValueError(\"Invalid value for `name`, must not", "iban is None: raise ValueError(\"Invalid value for `iban`, must not", "Number (defined in ISO 13616-1) # noqa: E501 :return: The", "tan_scheme of this Transfer. 
# noqa: E501 :type: str \"\"\"", "# noqa: E501 self._iban = None self._bic = None self._name", "# noqa: E501 self._tan_scheme = tan_scheme def to_dict(self): \"\"\"Returns the", "Starfinanz - Ein Unternehmen der Finanz Informatik # noqa: E501", "# noqa: E501 :type: str \"\"\" self._bic = bic @property", "Bank Account Number (defined in ISO 13616-1) # noqa: E501", "this Transfer. # noqa: E501 :rtype: str \"\"\" return self._purpose", "value.items() )) else: result[attr] = value return result def to_str(self):", "# noqa: E501 :type: str \"\"\" self._purpose = purpose @property", "\"\"\"Gets the tan_scheme of this Transfer. # noqa: E501 TANScheme", "payment (e.g. \\\"901\\\") # noqa: E501 :return: The tan_scheme of", "be `None`\") # noqa: E501 self._tan_scheme = tan_scheme def to_dict(self):", "tan_scheme=None): # noqa: E501 \"\"\"Transfer - a model defined in", "amount of this Transfer. # noqa: E501 Amount to be", "This class is auto generated by the swagger code generator", "Code (defined in ISO-9362) # noqa: E501 :param bic: The", "generated by the swagger code generator program. Do not edit", "Unternehmen der Finanz Informatik # noqa: E501 OpenAPI spec version:", "'str', 'amount': 'Amount', 'purpose': 'str', 'tan_media_id': 'str', 'tan_scheme': 'str' }", "'iban': 'iban', 'bic': 'bic', 'name': 'name', 'amount': 'amount', 'purpose': 'purpose',", "'name', 'amount': 'amount', 'purpose': 'purpose', 'tan_media_id': 'tanMediaId', 'tan_scheme': 'tanScheme' }", "this Transfer. Amount to be transfered # noqa: E501 :param", "Transfer. # noqa: E501 Purpose # noqa: E501 :return: The", "attribute_map = { 'iban': 'iban', 'bic': 'bic', 'name': 'name', 'amount':", "isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if", "= tan_scheme @property def iban(self): \"\"\"Gets the iban of this", "of the creditor # noqa: E501 :return: The name of", "name of this Transfer. # noqa: E501 Name - Name", "by the swagger code generator program. 
Do not edit the", "if bic is not None: self.bic = bic self.name =", "result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, \"to_dict\") else", ":param tan_scheme: The tan_scheme of this Transfer. # noqa: E501", ":type: str \"\"\" if iban is None: raise ValueError(\"Invalid value", "iban of this Transfer. # noqa: E501 :type: str \"\"\"", "attribute_map (dict): The key is attribute name and the value", "elif hasattr(value, \"to_dict\"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr]", "self._name = None self._amount = None self._purpose = None self._tan_media_id", "is not None: self.purpose = purpose self.tan_media_id = tan_media_id self.tan_scheme", "tan_scheme(self, tan_scheme): \"\"\"Sets the tan_scheme of this Transfer. TANScheme -", "hasattr(x, \"to_dict\") else x, value )) elif hasattr(value, \"to_dict\"): result[attr]", "Amount to be transfered # noqa: E501 :param amount: The", "The purpose of this Transfer. # noqa: E501 :rtype: str", "E501 :type: str \"\"\" if tan_media_id is None: raise ValueError(\"Invalid", "attribute type. attribute_map (dict): The key is attribute name and", "this Transfer. 
# noqa: E501 :type: str \"\"\" if iban", "\"\"\" if name is None: raise ValueError(\"Invalid value for `name`,", "&copy; 2016&dash;2017 Starfinanz - Ein Unternehmen der Finanz Informatik #", "`None`\") # noqa: E501 self._tan_media_id = tan_media_id @property def tan_scheme(self):", "`None`\") # noqa: E501 self._tan_scheme = tan_scheme def to_dict(self): \"\"\"Returns", "the string representation of the model\"\"\" return pprint.pformat(self.to_dict()) def __repr__(self):", "bic is not None: self.bic = bic self.name = name", "Transfer(object): \"\"\"NOTE: This class is auto generated by the swagger", "true if both objects are not equal\"\"\" return not self", "'purpose': 'str', 'tan_media_id': 'str', 'tan_scheme': 'str' } attribute_map = {", "E501 self._name = name @property def amount(self): \"\"\"Gets the amount", "# noqa: E501 :param tan_scheme: The tan_scheme of this Transfer.", "for `tan_scheme`, must not be `None`\") # noqa: E501 self._tan_scheme", "_ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list):", "'str', 'tan_scheme': 'str' } attribute_map = { 'iban': 'iban', 'bic':", "E501 :type: str \"\"\" if iban is None: raise ValueError(\"Invalid", "E501 :rtype: str \"\"\" return self._iban @iban.setter def iban(self, iban):", "The name of this Transfer. # noqa: E501 :rtype: str", "self._name @name.setter def name(self, name): \"\"\"Sets the name of this", "the TANMedia. # noqa: E501 :return: The tan_media_id of this", "noqa: E501 :param name: The name of this Transfer. #", "dict\"\"\" result = {} for attr, _ in six.iteritems(self.swagger_types): value", "None self.iban = iban if bic is not None: self.bic", "return self._bic @bic.setter def bic(self, bic): \"\"\"Sets the bic of", "str \"\"\" return self._name @name.setter def name(self, name): \"\"\"Sets the", "`None`\") # noqa: E501 self._amount = amount @property def purpose(self):", "this Transfer. 
# noqa: E501 Purpose # noqa: E501 :return:", "13616-1) # noqa: E501 :param iban: The iban of this", "attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict()", "\"\"\" return self._tan_media_id @tan_media_id.setter def tan_media_id(self, tan_media_id): \"\"\"Sets the tan_media_id", "'str', 'tan_media_id': 'str', 'tan_scheme': 'str' } attribute_map = { 'iban':", ":rtype: str \"\"\" return self._tan_media_id @tan_media_id.setter def tan_media_id(self, tan_media_id): \"\"\"Sets", "Transfer. # noqa: E501 TANScheme - The scheme **id** that", "value for `tan_scheme`, must not be `None`\") # noqa: E501", "six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] =", "spec version: 2.1.0 Generated by: https://github.com/swagger-api/swagger-codegen.git \"\"\" import pprint import", "not be `None`\") # noqa: E501 self._amount = amount @property", "TANScheme - The scheme **id** that is used to verify", "is attribute name and the value is attribute type. attribute_map", "E501 :return: The iban of this Transfer. # noqa: E501", "Do not edit the class manually. \"\"\" \"\"\" Attributes: swagger_types", "def amount(self, amount): \"\"\"Sets the amount of this Transfer. Amount", ":param bic: The bic of this Transfer. # noqa: E501", "\"\"\" self._purpose = purpose @property def tan_media_id(self): \"\"\"Gets the tan_media_id", "E501 self._tan_scheme = tan_scheme def to_dict(self): \"\"\"Returns the model properties", "Transfer. # noqa: E501 :rtype: Amount \"\"\" return self._amount @amount.setter", "class manually. 
\"\"\" \"\"\" Attributes: swagger_types (dict): The key is", "a model defined in Swagger\"\"\" # noqa: E501 self._iban =", "result def to_str(self): \"\"\"Returns the string representation of the model\"\"\"", "pprint.pformat(self.to_dict()) def __repr__(self): \"\"\"For `print` and `pprint`\"\"\" return self.to_str() def", "self.tan_media_id = tan_media_id self.tan_scheme = tan_scheme @property def iban(self): \"\"\"Gets", "bic self.name = name self.amount = amount if purpose is", "be transfered # noqa: E501 :param amount: The amount of", "\"\"\"Gets the tan_media_id of this Transfer. # noqa: E501 TANMediaId", "the tan_scheme of this Transfer. # noqa: E501 TANScheme -", "= None self.iban = iban if bic is not None:", "self._purpose = None self._tan_media_id = None self._tan_scheme = None self.discriminator", "# noqa: E501 self._amount = amount @property def purpose(self): \"\"\"Gets", "amount(self, amount): \"\"\"Sets the amount of this Transfer. Amount to", "to_dict(self): \"\"\"Returns the model properties as a dict\"\"\" result =", "this Transfer. # noqa: E501 Name - Name of the", "value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map(", "bic of this Transfer. # noqa: E501 BIC - Business", "Transfer. # noqa: E501 :type: str \"\"\" if iban is", "the tan_scheme of this Transfer. TANScheme - The scheme **id**", "not None: self.purpose = purpose self.tan_media_id = tan_media_id self.tan_scheme =", "Informatik # noqa: E501 OpenAPI spec version: 2.1.0 Generated by:", "attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value,", "of the TANMedia. # noqa: E501 :param tan_media_id: The tan_media_id", "`tan_scheme`, must not be `None`\") # noqa: E501 self._tan_scheme =", "def tan_media_id(self): \"\"\"Gets the tan_media_id of this Transfer. # noqa:", "is attribute type. 
attribute_map (dict): The key is attribute name", "@tan_scheme.setter def tan_scheme(self, tan_scheme): \"\"\"Sets the tan_scheme of this Transfer.", "self.iban = iban if bic is not None: self.bic =", "E501 TANScheme - The scheme **id** that is used to", "not be `None`\") # noqa: E501 self._tan_scheme = tan_scheme def", "None: self.purpose = purpose self.tan_media_id = tan_media_id self.tan_scheme = tan_scheme", "# noqa: E501 :type: str \"\"\" if tan_scheme is None:", "E501 Amount to be transfered # noqa: E501 :return: The" ]
[ "os.path.basename(trace_path) cache_dir = '.' + trace_file + '.cache' self.assertFalse(cache_dir in", "\"\"\" Test that the cached and uncached traces are same", "without normalizing time trace1 = trappy.FTrace(events=['cpu_frequency', 'sched_wakeup'], normalize_time=False) self.assertEqual(trace1.cpu_frequency.data_frame.index[0], first_freq_event_time)", "f) # Change 1 character of the stored checksum md5sum", "80.402065, 80.001337] # - parsed dataframe timestamps: # [76.402065000000007, 80.402065000000007,", "2.0 (the \"License\"); # you may not use this file", "trace-read timestamps. # # This test ensures that this stays", "the newly added event (which is not present in the", "created when disabled \"\"\" GenericFTrace.disable_cache = True traces = (trappy.FTrace(),", "This test ensures that this stays true. cached_times = [r[0]", "def test_cache_not_created(self): \"\"\"Test that cache should not be created when", "By default, the str to float conversion done when reading", "trappy.FTrace() # Check we're actually testing what we think we", "trace to create a cache GenericFTrace.disable_cache = False trace1 =", "timestamps: # [76.402064999999993, 80.402064999999993, 82.001337000000007] # # To fix this,", "cache_dir = '.' + trace_file + '.cache' number_of_trace_categories = 31", "(trappy.FTrace(), trappy.SysTrace(path='./trace.html')) for trace in traces: trace_path = os.path.abspath(trace.trace_path) trace_dir", "= False self.test_cache_created() trace = trappy.FTrace() version = int(trace._version) cpus", "window=(0, 1)) # Check that we're testing what we think", "GenericFTrace.disable_cache = False trace = trappy.FTrace() # Check that the", "window should get rid of one of # them. if", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "read from previously parsed cache by reusing the path cached_trace", "trace_file = os.path.basename(trace_path) cache_dir = '.' 
+ trace_file + '.cache'", "os.path.join(cache_path, \"metadata.json\") def read_metadata(): with open(metadata_path, \"r\") as f: return", "events; this window should get rid of one of #", "GenericFTrace.disable_cache = False self.test_cache_created() trace = trappy.FTrace() version = int(trace._version)", "# [76.402065, 80.402065, 80.001337] # - cached dataframe timestamps: #", "# cache-read timestamps being identical to trace-read timestamps. # #", "the files_to_copy arg of # SetUpDirectory, just do it ourselves.", "= trappy.FTrace() trace = trappy.FTrace(uncached_trace.trace_path, normalize_time=False, abs_window=(6550.100000, 6552.000002)) self.assertAlmostEquals(trace.get_duration(), 1.900002)", "path cached_trace = trappy.FTrace(uncached_trace.trace_path) cached_dfr = cached_trace.sched_wakeup.data_frame # By default,", "GenericFTrace.disable_cache = False trace1 = trappy.FTrace( events=['sched_wakeup'], window=(0, 1)) #", "that we're testing what we think we're testing The trace", "1) write_md5(md5sum_inc) # Parse a trace, this should delete and", "False trace1 = trappy.FTrace() # Check we're actually testing what", "Limited, Google and contributors # # Licensed under the Apache", "= [r[0] for r in uncached_dfr.iterrows()] self.assertTrue(cached_times == uncached_times) #", "uncached_times) # compare other columns as well self.assertTrue([r[1].pid for r", "== [r[1].prio for r in uncached_dfr.iterrows()]) def test_invalid_cache_overwritten(self): \"\"\"Test a", "is not present in the cache) is # parsed. parse_class", "the cache) is # parsed. parse_class = trappy.register_dynamic_ftrace(\"DynamicEvent\", \"dynamic_test_key\") trace2", "# contains 2 sched_wakeup events; this window should get rid", "of # SetUpDirectory, just do it ourselves. 
cache_path = \".trace.txt.cache\"", "[r[1].prio for r in uncached_dfr.iterrows()]) def test_invalid_cache_overwritten(self): \"\"\"Test a cache", "== [r[1].comm for r in uncached_dfr.iterrows()]) self.assertTrue([r[1].prio for r in", "caching doesn't break the 'window' parameter\"\"\" GenericFTrace.disable_cache = False trace1", "= int(trace._cpus) self.assertEqual(version, 6) self.assertEqual(cpus, 6) def test_cache_delete_single(self): GenericFTrace.disable_cache =", "dataframe timestamps: # [76.402065000000007, 80.402065000000007, 82.001337000000007] # # - csv", "Generate trace again, should regenerate only the missing item trace", "use this file except in compliance with the License. #", "testing what we think we are if hasattr(trace1, 'dynamic_event'): raise", "__future__ import division from __future__ import print_function from builtins import", "As described in test_compare_cache_vs_uncached, reading from cache # results in", "from the trace.txt file. # # Here's an example: #", "timestamps: # [76.402065, 80.402065, 80.001337] # - parsed dataframe timestamps:", "uncached traces are same \"\"\" # Build the cache, but", "for trace in traces: trace_path = os.path.abspath(trace.trace_path) trace_dir = os.path.dirname(trace_path)", "from trappy.ftrace import GenericFTrace from trappy.systrace import SysTrace class TestCaching(utils_tests.SetupDirectory):", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "False trace = trappy.FTrace() # Check that the modified md5sum", "1)) # Check that we're testing what we think we're", "= False traces = (trappy.FTrace(), trappy.SysTrace(path='./trace.html')) for trace in traces:", "def test_cache_window_broad(self): \"\"\"Test that caching doesn't break the 'window' parameter\"\"\"", "# As described in test_compare_cache_vs_uncached, reading from cache # results", "License. 
# You may obtain a copy of the License", "def test_cache_dynamic_events(self): \"\"\"Test that caching works if new event parsers", "the trace.txt parser, which results in # cache-read timestamps being", "ensures that this stays true. cached_times = [r[0] for r", "6552.000002)) self.assertAlmostEquals(trace.get_duration(), 1.900002) self.assertEqual(len(trace.sched_wakeup.data_frame), 2) self.assertEqual(len(trace.sched_wakeup_new.data_frame), 1) def test_ftrace_metadata(self): \"\"\"Test", "# Now register a new event type, call the constructor", "+ trace_file + '.cache' number_of_trace_categories = 31 self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories) os.remove(os.path.join(cache_dir,", "we're testing The trace # contains 2 sched_wakeup events; this", "only the missing item trace = trappy.FTrace() self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories) for", "under the License is distributed on an \"AS IS\" BASIS,", "\"trace.html\")], *args, **kwargs) def test_cache_created(self): \"\"\"Test cache creation when enabled\"\"\"", "= False trace = trappy.FTrace() # Check that the modified", "License for the specific language governing permissions and # limitations", "traces: trace_path = os.path.abspath(trace.trace_path) trace_dir = os.path.dirname(trace_path) trace_file = os.path.basename(trace_path)", "the trace to create a cache GenericFTrace.disable_cache = False trace1", "of # them. 
if len(trace1.sched_wakeup.data_frame) != 1: raise RuntimeError('Test bug:", "since this is a first time parse GenericFTrace.disable_cache = False", "80.001337] # - cached dataframe timestamps: # [76.402064999999993, 80.402064999999993, 82.001337000000007]", "trace in traces: trace_path = os.path.abspath(trace.trace_path) trace_dir = os.path.dirname(trace_path) trace_file", "chr(ord(md5sum[-1]) + 1) write_md5(md5sum_inc) # Parse a trace, this should", "number_of_trace_categories) os.remove(os.path.join(cache_dir, 'SchedWakeup.csv')) self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories - 1) # Generate trace", "csv string timestamps: # [76.402065, 80.402065, 80.001337] # - cached", "\"trace.txt\"), (\"trace_sched.txt\", \"trace.raw.txt\"), (\"trace_systrace.html\", \"trace.html\")], *args, **kwargs) def test_cache_created(self): \"\"\"Test", "trappy.ftrace import GenericFTrace from trappy.systrace import SysTrace class TestCaching(utils_tests.SetupDirectory): def", "default, the str to float conversion done when reading from", "\"\"\"Test a cache with a bad checksum is overwritten\"\"\" #", "we think we're testing The trace # contains 2 sched_wakeup", "(\"trace_sched.txt\", \"trace.raw.txt\"), (\"trace_systrace.html\", \"trace.html\")], *args, **kwargs) def test_cache_created(self): \"\"\"Test cache", "False traces = (trappy.FTrace(), trappy.SysTrace(path='./trace.html')) for trace in traces: trace_path", "import chr import os import json import shutil import sys", "cache is used or not. 
GenericFTrace.disable_cache = False uncached_trace =", "self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories) os.remove(os.path.join(cache_dir, 'SchedWakeup.csv')) self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories - 1) # Generate", "string timestamps: # [76.402065, 80.402065, 80.001337] # - cached dataframe", "the modified md5sum was overwritten self.assertNotEqual(read_metadata()[\"md5sum\"], md5sum_inc, \"The invalid ftrace", "type, call the constructor again, and check # that the", "is overwritten\"\"\" # This is a directory so we can't", "call the constructor again, and check # that the newly", "and # limitations under the License. # from __future__ import", "in cached_dfr.iterrows()] == [r[1].pid for r in uncached_dfr.iterrows()]) self.assertTrue([r[1].comm for", "# Change 1 character of the stored checksum md5sum =", "break normalize_time\"\"\" GenericFTrace.disable_cache = False # Times in trace_sched.txt start_time", "= int(trace._version) cpus = int(trace._cpus) self.assertEqual(version, 6) self.assertEqual(cpus, 6) def", "1) # Generate trace again, should regenerate only the missing", "# parsed. parse_class = trappy.register_dynamic_ftrace(\"DynamicEvent\", \"dynamic_test_key\") trace2 = trappy.FTrace() self.assertTrue(len(trace2.dynamic_event.data_frame)", "same conversion method as the trace.txt parser, which results in", "= '.' + trace_file + '.cache' number_of_trace_categories = 31 self.assertEqual(len(os.listdir(cache_dir)),", "a cache GenericFTrace.disable_cache = False trace1 = trappy.FTrace() # Check", "is # parsed. 
parse_class = trappy.register_dynamic_ftrace(\"DynamicEvent\", \"dynamic_test_key\") trace2 = trappy.FTrace()", "80.402064999999993, 82.001337000000007] # # To fix this, the timestamps read", "raise RuntimeError('Test bug: found unexpected event in trace') # Now", "None)) self.assertEqual(len(trace1.sched_wakeup.data_frame), 2) def test_cache_window_narrow(self): \"\"\" Test that applying a", "described in test_compare_cache_vs_uncached, reading from cache # results in slightly", "test_cache_normalize_time(self): \"\"\"Test that caching doesn't break normalize_time\"\"\" GenericFTrace.disable_cache = False", "in compliance with the License. # You may obtain a", "trace_path = os.path.abspath(trace.trace_path) trace_dir = os.path.dirname(trace_path) trace_file = os.path.basename(trace_path) cache_dir", "directory so we can't use the files_to_copy arg of #", "self.assertEqual(len(trace.sched_wakeup_new.data_frame), 1) def test_ftrace_metadata(self): \"\"\"Test that caching keeps trace metadata\"\"\"", "trappy.FTrace() self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories) for c in trace.trace_classes: if isinstance(c, trace.class_definitions['sched_wakeup']):", "software # distributed under the License is distributed on an", "cache are converted using # the same conversion method as", "TestCaching(utils_tests.SetupDirectory): def __init__(self, *args, **kwargs): super(TestCaching, self).__init__( [(\"trace_sched.txt\", \"trace.txt\"), (\"trace_sched.txt\",", "[76.402065000000007, 80.402065000000007, 82.001337000000007] # # - csv string timestamps: #", "that the modified md5sum was overwritten self.assertNotEqual(read_metadata()[\"md5sum\"], md5sum_inc, \"The invalid", "r in cached_dfr.iterrows()] == [r[1].comm for r in uncached_dfr.iterrows()]) self.assertTrue([r[1].prio", "trace_file + '.cache' number_of_trace_categories = 31 self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories) os.remove(os.path.join(cache_dir, 
'SchedWakeup.csv'))", "again, should regenerate only the missing item trace = trappy.FTrace()", "float conversion done when reading from csv is # different", "json.load(f) def write_md5(md5): metadata = read_metadata() metadata[\"md5sum\"] = md5 with", "= os.path.join(cache_path, \"metadata.json\") def read_metadata(): with open(metadata_path, \"r\") as f:", "the str to float conversion done when reading from csv", "verifies that applying windows results in identical # dataframes whether", "import SysTrace class TestCaching(utils_tests.SetupDirectory): def __init__(self, *args, **kwargs): super(TestCaching, self).__init__(", "testing The trace # contains 2 sched_wakeup events; this window", "without the window trace1 = trappy.FTrace( events=['sched_wakeup'], window=(0, None)) self.assertEqual(len(trace1.sched_wakeup.data_frame),", "are same \"\"\" # Build the cache, but the actual", "r in cached_dfr.iterrows()] == [r[1].pid for r in uncached_dfr.iterrows()]) self.assertTrue([r[1].comm", "= '.' 
+ trace_file + '.cache' self.assertTrue(cache_dir in os.listdir(trace_dir)) def", "= trappy.FTrace() # Check we're actually testing what we think", "= trappy.FTrace() # Check that the modified md5sum was overwritten", "as f: json.dump(metadata, f) # Change 1 character of the", "Change 1 character of the stored checksum md5sum = read_metadata()[\"md5sum\"]", "this window should get rid of one of # them.", "test_cache_window_broad(self): \"\"\"Test that caching doesn't break the 'window' parameter\"\"\" GenericFTrace.disable_cache", "for r in cached_dfr.iterrows()] uncached_times = [r[0] for r in", "def write_md5(md5): metadata = read_metadata() metadata[\"md5sum\"] = md5 with open(metadata_path,", "\"The invalid ftrace cache wasn't overwritten\") def test_cache_dynamic_events(self): \"\"\"Test that", "trappy.FTrace() version = int(trace._version) cpus = int(trace._cpus) self.assertEqual(version, 6) self.assertEqual(cpus,", "trace # contains 2 sched_wakeup events; this window should get", "json import shutil import sys import unittest import utils_tests import", "bug: found unexpected event in trace') # Now register a", "just do it ourselves. cache_path = \".trace.txt.cache\" src = os.path.join(utils_tests.TESTS_DIRECTORY,", "f: return json.load(f) def write_md5(md5): metadata = read_metadata() metadata[\"md5sum\"] =", "used or not. GenericFTrace.disable_cache = False uncached_trace = trappy.FTrace() trace", "= False trace = trappy.FTrace() trace_path = os.path.abspath(trace.trace_path) trace_dir =", "new event parsers have been registered\"\"\" # Parse the trace", "SetUpDirectory, just do it ourselves. cache_path = \".trace.txt.cache\" src =", "the trace.txt file. 
# # Here's an example: # -", "for r in uncached_dfr.iterrows()]) self.assertTrue([r[1].comm for r in cached_dfr.iterrows()] ==", "2) def test_cache_window_narrow(self): \"\"\" Test that applying a window to", "= True traces = (trappy.FTrace(), trappy.SysTrace(path='./trace.html')) for trace in traces:", "\"\"\"Test that caching doesn't break the 'window' parameter\"\"\" GenericFTrace.disable_cache =", "cache_dir = '.' + trace_file + '.cache' self.assertFalse(cache_dir in os.listdir(trace_dir))", "so we can't use the files_to_copy arg of # SetUpDirectory,", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "(\"trace_systrace.html\", \"trace.html\")], *args, **kwargs) def test_cache_created(self): \"\"\"Test cache creation when", "in os.listdir(trace_dir)) def test_compare_cached_vs_uncached(self): \"\"\" Test that the cached and", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "trace.txt string timestamps: # [76.402065, 80.402065, 80.001337] # - parsed", "+ '.cache' number_of_trace_categories = 31 self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories) os.remove(os.path.join(cache_dir, 'SchedWakeup.csv')) self.assertEqual(len(os.listdir(cache_dir)),", "in the cache) is # parsed. parse_class = trappy.register_dynamic_ftrace(\"DynamicEvent\", \"dynamic_test_key\")", "self.assertEqual(trace1.cpu_frequency.data_frame.index[0], first_freq_event_time) # Parse with normalized time trace2 = trappy.FTrace(events=['cpu_frequency',", "trace1 = trappy.FTrace( events=['sched_wakeup'], window=(0, 1)) # Check that we're", "the License. 
# You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "found unexpected event in trace') # Now register a new", "to in writing, software # distributed under the License is", "trace = trappy.FTrace(uncached_trace.trace_path, normalize_time=False, abs_window=(6550.100000, 6552.000002)) self.assertAlmostEquals(trace.get_duration(), 1.900002) self.assertEqual(len(trace.sched_wakeup.data_frame), 2)", "cached_dfr = cached_trace.sched_wakeup.data_frame # By default, the str to float", "# See the License for the specific language governing permissions", "results in identical # dataframes whether cache is used or", "def test_compare_cached_vs_uncached(self): \"\"\" Test that the cached and uncached traces", "+ trace_file + '.cache' self.assertTrue(cache_dir in os.listdir(trace_dir)) def test_cache_not_created(self): \"\"\"Test", "cache) is # parsed. parse_class = trappy.register_dynamic_ftrace(\"DynamicEvent\", \"dynamic_test_key\") trace2 =", "language governing permissions and # limitations under the License. #", "\"\"\" Test that applying a window to a cached trace", "events=['sched_wakeup'], window=(0, None)) self.assertEqual(len(trace1.sched_wakeup.data_frame), 2) def test_cache_window_narrow(self): \"\"\" Test that", "or agreed to in writing, software # distributed under the", "trace2 = trappy.FTrace() self.assertTrue(len(trace2.dynamic_event.data_frame) == 1) trappy.unregister_dynamic_ftrace(parse_class) def test_cache_normalize_time(self): \"\"\"Test", "we can't use the files_to_copy arg of # SetUpDirectory, just", "required by applicable law or agreed to in writing, software", "# dataframes whether cache is used or not. 
GenericFTrace.disable_cache =", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "normalized time trace2 = trappy.FTrace(events=['cpu_frequency', 'sched_wakeup'], normalize_time=True) self.assertEqual(trace2.cpu_frequency.data_frame.index[0], first_freq_event_time -", "them. if len(trace1.sched_wakeup.data_frame) != 1: raise RuntimeError('Test bug: bad sched_wakeup", "in trace.trace_classes: if isinstance(c, trace.class_definitions['sched_wakeup']): self.assertEqual(c.cached, False) continue self.assertEqual(c.cached, True)", "with the License. # You may obtain a copy of", "if len(trace1.sched_wakeup.data_frame) != 1: raise RuntimeError('Test bug: bad sched_wakeup event", "constructor again, and check # that the newly added event", "trappy.FTrace() trace = trappy.FTrace(uncached_trace.trace_path, normalize_time=False, abs_window=(6550.100000, 6552.000002)) self.assertAlmostEquals(trace.get_duration(), 1.900002) self.assertEqual(len(trace.sched_wakeup.data_frame),", "[76.402064999999993, 80.402064999999993, 82.001337000000007] # # To fix this, the timestamps", "caching works if new event parsers have been registered\"\"\" #", "normalize_time=False, abs_window=(6550.100000, 6552.000002)) self.assertAlmostEquals(trace.get_duration(), 1.900002) self.assertEqual(len(trace.sched_wakeup.data_frame), 2) self.assertEqual(len(trace.sched_wakeup_new.data_frame), 1) def", "the timestamps read from the cache are converted using #", "the constructor again, and check # that the newly added", "from cache # results in slightly different timestamps # #", "is a first time parse GenericFTrace.disable_cache = False uncached_trace =", "be parsed # fresh since this is a first time", "trappy.FTrace(events=['cpu_frequency', 'sched_wakeup'], normalize_time=False) self.assertEqual(trace1.cpu_frequency.data_frame.index[0], first_freq_event_time) # Parse with normalized time", "trace again, should regenerate only the missing item trace =", "super(TestCaching, self).__init__( 
[(\"trace_sched.txt\", \"trace.txt\"), (\"trace_sched.txt\", \"trace.raw.txt\"), (\"trace_systrace.html\", \"trace.html\")], *args, **kwargs)", "\"trace.raw.txt\"), (\"trace_systrace.html\", \"trace.html\")], *args, **kwargs) def test_cache_created(self): \"\"\"Test cache creation", "Check that the modified md5sum was overwritten self.assertNotEqual(read_metadata()[\"md5sum\"], md5sum_inc, \"The", "__future__ import unicode_literals from __future__ import division from __future__ import", "# - trace.txt string timestamps: # [76.402065, 80.402065, 80.001337] #", "# [76.402065000000007, 80.402065000000007, 82.001337000000007] # # - csv string timestamps:", "bad checksum is overwritten\"\"\" # This is a directory so", "think we are if hasattr(trace1, 'dynamic_event'): raise RuntimeError('Test bug: found", "as f: return json.load(f) def write_md5(md5): metadata = read_metadata() metadata[\"md5sum\"]", "compliance with the License. # You may obtain a copy", "agreed to in writing, software # distributed under the License", "trace returns EXACTLY what is expected \"\"\" # As described", "distributed under the License is distributed on an \"AS IS\"", "not present in the cache) is # parsed. parse_class =", "trace.txt file. 
# # Here's an example: # - trace.txt", "cached_dfr.iterrows()] == [r[1].prio for r in uncached_dfr.iterrows()]) def test_invalid_cache_overwritten(self): \"\"\"Test", "trace1 = trappy.FTrace(events=['cpu_frequency', 'sched_wakeup'], normalize_time=False) self.assertEqual(trace1.cpu_frequency.data_frame.index[0], first_freq_event_time) # Parse with", "= trappy.FTrace() self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories) for c in trace.trace_classes: if isinstance(c,", "import sys import unittest import utils_tests import trappy from trappy.ftrace", "self.assertTrue(cache_dir in os.listdir(trace_dir)) def test_cache_not_created(self): \"\"\"Test that cache should not", "= trappy.FTrace() self.assertTrue(len(trace2.dynamic_event.data_frame) == 1) trappy.unregister_dynamic_ftrace(parse_class) def test_cache_normalize_time(self): \"\"\"Test that", "normalize_time=True) self.assertEqual(trace2.cpu_frequency.data_frame.index[0], first_freq_event_time - start_time) def test_cache_window_broad(self): \"\"\"Test that caching", "that the newly added event (which is not present in", "in cached_dfr.iterrows()] == [r[1].comm for r in uncached_dfr.iterrows()]) self.assertTrue([r[1].prio for", "as well self.assertTrue([r[1].pid for r in cached_dfr.iterrows()] == [r[1].pid for", "License. # from __future__ import unicode_literals from __future__ import division", "# them. if len(trace1.sched_wakeup.data_frame) != 1: raise RuntimeError('Test bug: bad", "cache-read timestamps being identical to trace-read timestamps. # # This", "**kwargs) def test_cache_created(self): \"\"\"Test cache creation when enabled\"\"\" GenericFTrace.disable_cache =", "express or implied. # See the License for the specific", "= cached_trace.sched_wakeup.data_frame # By default, the str to float conversion", "= read_metadata() metadata[\"md5sum\"] = md5 with open(metadata_path, \"w\") as f:", "except in compliance with the License. 
# You may obtain", "returns EXACTLY what is expected \"\"\" # As described in", "80.001337] # - parsed dataframe timestamps: # [76.402065000000007, 80.402065000000007, 82.001337000000007]", "actually testing what we think we are if hasattr(trace1, 'dynamic_event'):", "= False uncached_trace = trappy.FTrace() uncached_dfr = uncached_trace.sched_wakeup.data_frame # Now", "1) def test_ftrace_metadata(self): \"\"\"Test that caching keeps trace metadata\"\"\" GenericFTrace.disable_cache", "int(trace._version) cpus = int(trace._cpus) self.assertEqual(version, 6) self.assertEqual(cpus, 6) def test_cache_delete_single(self):", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "sys import unittest import utils_tests import trappy from trappy.ftrace import", "# Parse with normalized time trace2 = trappy.FTrace(events=['cpu_frequency', 'sched_wakeup'], normalize_time=True)", "# This test ensures that this stays true. cached_times =", "not use this file except in compliance with the License.", "\"r\") as f: return json.load(f) def write_md5(md5): metadata = read_metadata()", "Now register a new event type, call the constructor again,", "The trace # contains 2 sched_wakeup events; this window should", "ftrace cache wasn't overwritten\") def test_cache_dynamic_events(self): \"\"\"Test that caching works", "r in uncached_dfr.iterrows()]) def test_invalid_cache_overwritten(self): \"\"\"Test a cache with a", "= os.path.basename(trace_path) cache_dir = '.' + trace_file + '.cache' number_of_trace_categories", "permissions and # limitations under the License. # from __future__", "Copyright 2015-2017 ARM Limited, Google and contributors # # Licensed", "Now read from previously parsed cache by reusing the path", "writing, software # distributed under the License is distributed on", "using # the same conversion method as the trace.txt parser,", "timestamps. # # This test ensures that this stays true.", "one of # them. 
if len(trace1.sched_wakeup.data_frame) != 1: raise RuntimeError('Test", "you may not use this file except in compliance with", "Times in trace_sched.txt start_time = 6550.018511 first_freq_event_time = 6550.056870 #", "__init__(self, *args, **kwargs): super(TestCaching, self).__init__( [(\"trace_sched.txt\", \"trace.txt\"), (\"trace_sched.txt\", \"trace.raw.txt\"), (\"trace_systrace.html\",", "Parse with normalized time trace2 = trappy.FTrace(events=['cpu_frequency', 'sched_wakeup'], normalize_time=True) self.assertEqual(trace2.cpu_frequency.data_frame.index[0],", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "# - cached dataframe timestamps: # [76.402064999999993, 80.402064999999993, 82.001337000000007] #", "open(metadata_path, \"r\") as f: return json.load(f) def write_md5(md5): metadata =", "wasn't overwritten\") def test_cache_dynamic_events(self): \"\"\"Test that caching works if new", "registered\"\"\" # Parse the trace to create a cache GenericFTrace.disable_cache", "test_compare_cache_vs_uncached, reading from cache # results in slightly different timestamps", "timestamps # # This test verifies that applying windows results", "# # Here's an example: # - trace.txt string timestamps:", "# Build the cache, but the actual trace will be", "this is a first time parse GenericFTrace.disable_cache = False uncached_trace", "# By default, the str to float conversion done when", "was overwritten self.assertNotEqual(read_metadata()[\"md5sum\"], md5sum_inc, \"The invalid ftrace cache wasn't overwritten\")", "cache wasn't overwritten\") def test_cache_dynamic_events(self): \"\"\"Test that caching works if", "rid of one of # them. if len(trace1.sched_wakeup.data_frame) != 1:", "dataframe timestamps: # [76.402064999999993, 80.402064999999993, 82.001337000000007] # # To fix", "Check we're actually testing what we think we are if", "CONDITIONS OF ANY KIND, either express or implied. 
# See", "Here's an example: # - trace.txt string timestamps: # [76.402065,", "which results in # cache-read timestamps being identical to trace-read", "files_to_copy arg of # SetUpDirectory, just do it ourselves. cache_path", "metadata\"\"\" GenericFTrace.disable_cache = False self.test_cache_created() trace = trappy.FTrace() version =", "SysTrace class TestCaching(utils_tests.SetupDirectory): def __init__(self, *args, **kwargs): super(TestCaching, self).__init__( [(\"trace_sched.txt\",", "a bad checksum is overwritten\"\"\" # This is a directory", "windows results in identical # dataframes whether cache is used", "not. GenericFTrace.disable_cache = False uncached_trace = trappy.FTrace() trace = trappy.FTrace(uncached_trace.trace_path,", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "= \".trace.txt.cache\" src = os.path.join(utils_tests.TESTS_DIRECTORY, \"trace_sched.txt.cache\") shutil.copytree(src, cache_path) metadata_path =", "for r in uncached_dfr.iterrows()] self.assertTrue(cached_times == uncached_times) # compare other", "the window trace1 = trappy.FTrace( events=['sched_wakeup'], window=(0, None)) self.assertEqual(len(trace1.sched_wakeup.data_frame), 2)", "# Here's an example: # - trace.txt string timestamps: #", "from __future__ import unicode_literals from __future__ import division from __future__", "should not be created when disabled \"\"\" GenericFTrace.disable_cache = True", "self.assertTrue([r[1].pid for r in cached_dfr.iterrows()] == [r[1].pid for r in", "+ '.cache' self.assertTrue(cache_dir in os.listdir(trace_dir)) def test_cache_not_created(self): \"\"\"Test that cache", "# This is a directory so we can't use the", "the cached and uncached traces are same \"\"\" # Build", "newly added event (which is not present in the cache)", "is expected \"\"\" # As described in test_compare_cache_vs_uncached, reading from", "arg of # SetUpDirectory, just do it ourselves. 
cache_path =", "create a cache GenericFTrace.disable_cache = False trace1 = trappy.FTrace() #", "limitations under the License. # from __future__ import unicode_literals from", "uncached_dfr.iterrows()]) self.assertTrue([r[1].comm for r in cached_dfr.iterrows()] == [r[1].comm for r", "test_invalid_cache_overwritten(self): \"\"\"Test a cache with a bad checksum is overwritten\"\"\"", "def test_cache_created(self): \"\"\"Test cache creation when enabled\"\"\" GenericFTrace.disable_cache = False", "cache should not be created when disabled \"\"\" GenericFTrace.disable_cache =", "def test_invalid_cache_overwritten(self): \"\"\"Test a cache with a bad checksum is", "delete and overwrite the invalidated cache GenericFTrace.disable_cache = False trace", "expected \"\"\" # As described in test_compare_cache_vs_uncached, reading from cache", "character of the stored checksum md5sum = read_metadata()[\"md5sum\"] md5sum_inc =", "# Parse the trace to create a cache GenericFTrace.disable_cache =", "from __future__ import division from __future__ import print_function from builtins", "trace_file + '.cache' self.assertFalse(cache_dir in os.listdir(trace_dir)) def test_compare_cached_vs_uncached(self): \"\"\" Test", "previously parsed cache by reusing the path cached_trace = trappy.FTrace(uncached_trace.trace_path)", "class TestCaching(utils_tests.SetupDirectory): def __init__(self, *args, **kwargs): super(TestCaching, self).__init__( [(\"trace_sched.txt\", \"trace.txt\"),", "f: json.dump(metadata, f) # Change 1 character of the stored", "OR CONDITIONS OF ANY KIND, either express or implied. 
#", "results in # cache-read timestamps being identical to trace-read timestamps.", "the invalidated cache GenericFTrace.disable_cache = False trace = trappy.FTrace() #", "cache by reusing the path cached_trace = trappy.FTrace(uncached_trace.trace_path) cached_dfr =", "invalidated cache GenericFTrace.disable_cache = False trace = trappy.FTrace() # Check", "events=['sched_wakeup'], window=(0, 1)) # Check that we're testing what we", "of one of # them. if len(trace1.sched_wakeup.data_frame) != 1: raise", "the License is distributed on an \"AS IS\" BASIS, #", "when reading from the trace.txt file. # # Here's an", "def test_cache_delete_single(self): GenericFTrace.disable_cache = False trace = trappy.FTrace() trace_path =", "be created when disabled \"\"\" GenericFTrace.disable_cache = True traces =", "parsed dataframe timestamps: # [76.402065000000007, 80.402065000000007, 82.001337000000007] # # -", "trappy.FTrace( events=['sched_wakeup'], window=(0, None)) self.assertEqual(len(trace1.sched_wakeup.data_frame), 2) def test_cache_window_narrow(self): \"\"\" Test", "*args, **kwargs): super(TestCaching, self).__init__( [(\"trace_sched.txt\", \"trace.txt\"), (\"trace_sched.txt\", \"trace.raw.txt\"), (\"trace_systrace.html\", \"trace.html\")],", "= os.path.basename(trace_path) cache_dir = '.' 
+ trace_file + '.cache' self.assertTrue(cache_dir", "trappy.FTrace() # Check that the modified md5sum was overwritten self.assertNotEqual(read_metadata()[\"md5sum\"],", "self.assertTrue(len(trace2.dynamic_event.data_frame) == 1) trappy.unregister_dynamic_ftrace(parse_class) def test_cache_normalize_time(self): \"\"\"Test that caching doesn't", "# from __future__ import unicode_literals from __future__ import division from", "in os.listdir(trace_dir)) def test_cache_not_created(self): \"\"\"Test that cache should not be", "from trappy.systrace import SysTrace class TestCaching(utils_tests.SetupDirectory): def __init__(self, *args, **kwargs):", "Check that we're testing what we think we're testing The", "first_freq_event_time) # Parse with normalized time trace2 = trappy.FTrace(events=['cpu_frequency', 'sched_wakeup'],", "unexpected event in trace') # Now register a new event", "start_time) def test_cache_window_broad(self): \"\"\"Test that caching doesn't break the 'window'", "normalize_time=False) self.assertEqual(trace1.cpu_frequency.data_frame.index[0], first_freq_event_time) # Parse with normalized time trace2 =", "with open(metadata_path, \"w\") as f: json.dump(metadata, f) # Change 1", "trappy.SysTrace(path='./trace.html')) for trace in traces: trace_path = os.path.abspath(trace.trace_path) trace_dir =", "that applying windows results in identical # dataframes whether cache", "identical to trace-read timestamps. 
# # This test ensures that", "reusing the path cached_trace = trappy.FTrace(uncached_trace.trace_path) cached_dfr = cached_trace.sched_wakeup.data_frame #", "checksum md5sum = read_metadata()[\"md5sum\"] md5sum_inc = md5sum[:-1] + chr(ord(md5sum[-1]) +", "but the actual trace will be parsed # fresh since", "cached dataframe timestamps: # [76.402064999999993, 80.402064999999993, 82.001337000000007] # # To", "uncached_dfr.iterrows()]) self.assertTrue([r[1].prio for r in cached_dfr.iterrows()] == [r[1].prio for r", "trace1 = trappy.FTrace() # Check we're actually testing what we", "dataframes whether cache is used or not. GenericFTrace.disable_cache = False", "metadata[\"md5sum\"] = md5 with open(metadata_path, \"w\") as f: json.dump(metadata, f)", "law or agreed to in writing, software # distributed under", "import GenericFTrace from trappy.systrace import SysTrace class TestCaching(utils_tests.SetupDirectory): def __init__(self,", "checksum is overwritten\"\"\" # This is a directory so we", "- trace.txt string timestamps: # [76.402065, 80.402065, 80.001337] # -", "\"\"\"Test that caching doesn't break normalize_time\"\"\" GenericFTrace.disable_cache = False #", "what we think we're testing The trace # contains 2", "window to a cached trace returns EXACTLY what is expected", "count') # Parse again without the window trace1 = trappy.FTrace(", "to create a cache GenericFTrace.disable_cache = False trace1 = trappy.FTrace()", "in uncached_dfr.iterrows()]) def test_invalid_cache_overwritten(self): \"\"\"Test a cache with a bad", "are if hasattr(trace1, 'dynamic_event'): raise RuntimeError('Test bug: found unexpected event", "== uncached_times) # compare other columns as well self.assertTrue([r[1].pid for", "we're testing what we think we're testing The trace #", "\"\"\"Test that caching keeps trace metadata\"\"\" GenericFTrace.disable_cache = False self.test_cache_created()", "overwritten\"\"\" # This is a directory so we can't use", "normalize_time\"\"\" 
GenericFTrace.disable_cache = False # Times in trace_sched.txt start_time =", "the one used when reading from the trace.txt file. #", "for r in uncached_dfr.iterrows()]) self.assertTrue([r[1].prio for r in cached_dfr.iterrows()] ==", "False self.test_cache_created() trace = trappy.FTrace() version = int(trace._version) cpus =", "'SchedWakeup.csv')) self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories - 1) # Generate trace again, should", "time trace1 = trappy.FTrace(events=['cpu_frequency', 'sched_wakeup'], normalize_time=False) self.assertEqual(trace1.cpu_frequency.data_frame.index[0], first_freq_event_time) # Parse", "traces are same \"\"\" # Build the cache, but the", "ARM Limited, Google and contributors # # Licensed under the", "= os.path.basename(trace_path) cache_dir = '.' + trace_file + '.cache' self.assertFalse(cache_dir", "the missing item trace = trappy.FTrace() self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories) for c", "do it ourselves. cache_path = \".trace.txt.cache\" src = os.path.join(utils_tests.TESTS_DIRECTORY, \"trace_sched.txt.cache\")", "'.cache' self.assertFalse(cache_dir in os.listdir(trace_dir)) def test_compare_cached_vs_uncached(self): \"\"\" Test that the", "uncached_trace = trappy.FTrace() uncached_dfr = uncached_trace.sched_wakeup.data_frame # Now read from", "'dynamic_event'): raise RuntimeError('Test bug: found unexpected event in trace') #", "trappy.FTrace() self.assertTrue(len(trace2.dynamic_event.data_frame) == 1) trappy.unregister_dynamic_ftrace(parse_class) def test_cache_normalize_time(self): \"\"\"Test that caching", "for r in cached_dfr.iterrows()] == [r[1].pid for r in uncached_dfr.iterrows()])", "may obtain a copy of the License at # #", "2 sched_wakeup events; this window should get rid of one", "# Parse again without the window trace1 = trappy.FTrace( events=['sched_wakeup'],", "under the License. 
# from __future__ import unicode_literals from __future__", "window trace1 = trappy.FTrace( events=['sched_wakeup'], window=(0, None)) self.assertEqual(len(trace1.sched_wakeup.data_frame), 2) def", "in uncached_dfr.iterrows()]) self.assertTrue([r[1].prio for r in cached_dfr.iterrows()] == [r[1].prio for", "when disabled \"\"\" GenericFTrace.disable_cache = True traces = (trappy.FTrace(), trappy.SysTrace(path='./trace.html'))", "False uncached_trace = trappy.FTrace() trace = trappy.FTrace(uncached_trace.trace_path, normalize_time=False, abs_window=(6550.100000, 6552.000002))", "example: # - trace.txt string timestamps: # [76.402065, 80.402065, 80.001337]", "r in uncached_dfr.iterrows()]) self.assertTrue([r[1].comm for r in cached_dfr.iterrows()] == [r[1].comm", "# SetUpDirectory, just do it ourselves. cache_path = \".trace.txt.cache\" src", "trappy.FTrace(uncached_trace.trace_path) cached_dfr = cached_trace.sched_wakeup.data_frame # By default, the str to", "an example: # - trace.txt string timestamps: # [76.402065, 80.402065,", "self.assertEqual(version, 6) self.assertEqual(cpus, 6) def test_cache_delete_single(self): GenericFTrace.disable_cache = False trace", "actual trace will be parsed # fresh since this is", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "test_cache_delete_single(self): GenericFTrace.disable_cache = False trace = trappy.FTrace() trace_path = os.path.abspath(trace.trace_path)", "or not. 
GenericFTrace.disable_cache = False uncached_trace = trappy.FTrace() trace =", "the cache are converted using # the same conversion method", "Build the cache, but the actual trace will be parsed", "# results in slightly different timestamps # # This test", "import json import shutil import sys import unittest import utils_tests", "time trace2 = trappy.FTrace(events=['cpu_frequency', 'sched_wakeup'], normalize_time=True) self.assertEqual(trace2.cpu_frequency.data_frame.index[0], first_freq_event_time - start_time)", "This is a directory so we can't use the files_to_copy", "may not use this file except in compliance with the", "1: raise RuntimeError('Test bug: bad sched_wakeup event count') # Parse", "def __init__(self, *args, **kwargs): super(TestCaching, self).__init__( [(\"trace_sched.txt\", \"trace.txt\"), (\"trace_sched.txt\", \"trace.raw.txt\"),", "timestamps: # [76.402065000000007, 80.402065000000007, 82.001337000000007] # # - csv string", "# the same conversion method as the trace.txt parser, which", "= False # Times in trace_sched.txt start_time = 6550.018511 first_freq_event_time", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "metadata = read_metadata() metadata[\"md5sum\"] = md5 with open(metadata_path, \"w\") as", "a cached trace returns EXACTLY what is expected \"\"\" #", "GenericFTrace.disable_cache = False trace1 = trappy.FTrace() # Check we're actually", "disabled \"\"\" GenericFTrace.disable_cache = True traces = (trappy.FTrace(), trappy.SysTrace(path='./trace.html')) for", "test verifies that applying windows results in identical # dataframes", "= trappy.FTrace(events=['cpu_frequency', 'sched_wakeup'], normalize_time=False) self.assertEqual(trace1.cpu_frequency.data_frame.index[0], first_freq_event_time) # Parse with normalized", "this file except in compliance with the License. 
# You", "c in trace.trace_classes: if isinstance(c, trace.class_definitions['sched_wakeup']): self.assertEqual(c.cached, False) continue self.assertEqual(c.cached,", "def test_ftrace_metadata(self): \"\"\"Test that caching keeps trace metadata\"\"\" GenericFTrace.disable_cache =", "ourselves. cache_path = \".trace.txt.cache\" src = os.path.join(utils_tests.TESTS_DIRECTORY, \"trace_sched.txt.cache\") shutil.copytree(src, cache_path)", "import division from __future__ import print_function from builtins import chr", "invalid ftrace cache wasn't overwritten\") def test_cache_dynamic_events(self): \"\"\"Test that caching", "trace = trappy.FTrace() trace_path = os.path.abspath(trace.trace_path) trace_dir = os.path.dirname(trace_path) trace_file", "uncached_dfr = uncached_trace.sched_wakeup.data_frame # Now read from previously parsed cache", "= os.path.dirname(trace_path) trace_file = os.path.basename(trace_path) cache_dir = '.' + trace_file", "can't use the files_to_copy arg of # SetUpDirectory, just do", "cache with a bad checksum is overwritten\"\"\" # This is", "if hasattr(trace1, 'dynamic_event'): raise RuntimeError('Test bug: found unexpected event in", "os import json import shutil import sys import unittest import", "trappy.unregister_dynamic_ftrace(parse_class) def test_cache_normalize_time(self): \"\"\"Test that caching doesn't break normalize_time\"\"\" GenericFTrace.disable_cache", "this should delete and overwrite the invalidated cache GenericFTrace.disable_cache =", "with normalized time trace2 = trappy.FTrace(events=['cpu_frequency', 'sched_wakeup'], normalize_time=True) self.assertEqual(trace2.cpu_frequency.data_frame.index[0], first_freq_event_time", "trappy.FTrace( events=['sched_wakeup'], window=(0, 1)) # Check that we're testing what", "md5 with open(metadata_path, \"w\") as f: json.dump(metadata, f) # Change", "parse_class = trappy.register_dynamic_ftrace(\"DynamicEvent\", \"dynamic_test_key\") trace2 = trappy.FTrace() 
self.assertTrue(len(trace2.dynamic_event.data_frame) == 1)", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "GenericFTrace.disable_cache = False trace = trappy.FTrace() trace_path = os.path.abspath(trace.trace_path) trace_dir", "import os import json import shutil import sys import unittest", "= 6550.056870 # Parse without normalizing time trace1 = trappy.FTrace(events=['cpu_frequency',", "fix this, the timestamps read from the cache are converted", "# # Licensed under the Apache License, Version 2.0 (the", "return json.load(f) def write_md5(md5): metadata = read_metadata() metadata[\"md5sum\"] = md5", "version = int(trace._version) cpus = int(trace._cpus) self.assertEqual(version, 6) self.assertEqual(cpus, 6)", "file except in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "being identical to trace-read timestamps. # # This test ensures", "creation when enabled\"\"\" GenericFTrace.disable_cache = False traces = (trappy.FTrace(), trappy.SysTrace(path='./trace.html'))", "\"\"\" GenericFTrace.disable_cache = True traces = (trappy.FTrace(), trappy.SysTrace(path='./trace.html')) for trace", "trace.txt parser, which results in # cache-read timestamps being identical", "window=(0, None)) self.assertEqual(len(trace1.sched_wakeup.data_frame), 2) def test_cache_window_narrow(self): \"\"\" Test that applying", "by reusing the path cached_trace = trappy.FTrace(uncached_trace.trace_path) cached_dfr = cached_trace.sched_wakeup.data_frame", "parsed. 
parse_class = trappy.register_dynamic_ftrace(\"DynamicEvent\", \"dynamic_test_key\") trace2 = trappy.FTrace() self.assertTrue(len(trace2.dynamic_event.data_frame) ==", "sched_wakeup events; this window should get rid of one of", "uncached_trace = trappy.FTrace() trace = trappy.FTrace(uncached_trace.trace_path, normalize_time=False, abs_window=(6550.100000, 6552.000002)) self.assertAlmostEquals(trace.get_duration(),", "cache creation when enabled\"\"\" GenericFTrace.disable_cache = False traces = (trappy.FTrace(),", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "# fresh since this is a first time parse GenericFTrace.disable_cache", "2) self.assertEqual(len(trace.sched_wakeup_new.data_frame), 1) def test_ftrace_metadata(self): \"\"\"Test that caching keeps trace", "test_cache_dynamic_events(self): \"\"\"Test that caching works if new event parsers have", "# Parse a trace, this should delete and overwrite the", "trace will be parsed # fresh since this is a", "event type, call the constructor again, and check # that", "= False trace1 = trappy.FTrace() # Check we're actually testing", "# compare other columns as well self.assertTrue([r[1].pid for r in", "\"trace_sched.txt.cache\") shutil.copytree(src, cache_path) metadata_path = os.path.join(cache_path, \"metadata.json\") def read_metadata(): with", "in trace_sched.txt start_time = 6550.018511 first_freq_event_time = 6550.056870 # Parse", "utils_tests import trappy from trappy.ftrace import GenericFTrace from trappy.systrace import", "number_of_trace_categories - 1) # Generate trace again, should regenerate only", "GenericFTrace.disable_cache = False traces = (trappy.FTrace(), trappy.SysTrace(path='./trace.html')) for trace in", "os.path.basename(trace_path) cache_dir = '.' 
+ trace_file + '.cache' number_of_trace_categories =", "think we're testing The trace # contains 2 sched_wakeup events;", "bug: bad sched_wakeup event count') # Parse again without the", "Parse a trace, this should delete and overwrite the invalidated", "in # cache-read timestamps being identical to trace-read timestamps. #", "read_metadata(): with open(metadata_path, \"r\") as f: return json.load(f) def write_md5(md5):", "read_metadata() metadata[\"md5sum\"] = md5 with open(metadata_path, \"w\") as f: json.dump(metadata,", "= '.' + trace_file + '.cache' self.assertFalse(cache_dir in os.listdir(trace_dir)) def", "other columns as well self.assertTrue([r[1].pid for r in cached_dfr.iterrows()] ==", "and uncached traces are same \"\"\" # Build the cache,", "in identical # dataframes whether cache is used or not.", "# that the newly added event (which is not present", "= [r[0] for r in cached_dfr.iterrows()] uncached_times = [r[0] for", "is used or not. GenericFTrace.disable_cache = False uncached_trace = trappy.FTrace()", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "__future__ import print_function from builtins import chr import os import", "use the files_to_copy arg of # SetUpDirectory, just do it", "'sched_wakeup'], normalize_time=False) self.assertEqual(trace1.cpu_frequency.data_frame.index[0], first_freq_event_time) # Parse with normalized time trace2", "EXACTLY what is expected \"\"\" # As described in test_compare_cache_vs_uncached,", "self.assertEqual(cpus, 6) def test_cache_delete_single(self): GenericFTrace.disable_cache = False trace = trappy.FTrace()", "in uncached_dfr.iterrows()]) self.assertTrue([r[1].comm for r in cached_dfr.iterrows()] == [r[1].comm for", "= os.path.join(utils_tests.TESTS_DIRECTORY, \"trace_sched.txt.cache\") shutil.copytree(src, cache_path) metadata_path = os.path.join(cache_path, \"metadata.json\") def", "same \"\"\" # Build the cache, but the actual trace", "we think we are if 
hasattr(trace1, 'dynamic_event'): raise RuntimeError('Test bug:", "read_metadata()[\"md5sum\"] md5sum_inc = md5sum[:-1] + chr(ord(md5sum[-1]) + 1) write_md5(md5sum_inc) #", "or implied. # See the License for the specific language", "caching keeps trace metadata\"\"\" GenericFTrace.disable_cache = False self.test_cache_created() trace =", "again, and check # that the newly added event (which", "and contributors # # Licensed under the Apache License, Version", "80.402065000000007, 82.001337000000007] # # - csv string timestamps: # [76.402065,", "sched_wakeup event count') # Parse again without the window trace1", "GenericFTrace.disable_cache = False uncached_trace = trappy.FTrace() trace = trappy.FTrace(uncached_trace.trace_path, normalize_time=False,", "works if new event parsers have been registered\"\"\" # Parse", "trappy.FTrace(uncached_trace.trace_path, normalize_time=False, abs_window=(6550.100000, 6552.000002)) self.assertAlmostEquals(trace.get_duration(), 1.900002) self.assertEqual(len(trace.sched_wakeup.data_frame), 2) self.assertEqual(len(trace.sched_wakeup_new.data_frame), 1)", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "= 31 self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories) os.remove(os.path.join(cache_dir, 'SchedWakeup.csv')) self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories - 1)", "unicode_literals from __future__ import division from __future__ import print_function from", "'.' + trace_file + '.cache' number_of_trace_categories = 31 self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories)", "event (which is not present in the cache) is #", "[76.402065, 80.402065, 80.001337] # - parsed dataframe timestamps: # [76.402065000000007,", "file. 
# # Here's an example: # - trace.txt string", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "will be parsed # fresh since this is a first", "False # Times in trace_sched.txt start_time = 6550.018511 first_freq_event_time =", "that the cached and uncached traces are same \"\"\" #", "as the trace.txt parser, which results in # cache-read timestamps", "trace_sched.txt start_time = 6550.018511 first_freq_event_time = 6550.056870 # Parse without", "different from the one used when reading from the trace.txt", "should get rid of one of # them. if len(trace1.sched_wakeup.data_frame)", "+ chr(ord(md5sum[-1]) + 1) write_md5(md5sum_inc) # Parse a trace, this", "trace, this should delete and overwrite the invalidated cache GenericFTrace.disable_cache", "from the cache are converted using # the same conversion", "self.assertEqual(trace2.cpu_frequency.data_frame.index[0], first_freq_event_time - start_time) def test_cache_window_broad(self): \"\"\"Test that caching doesn't", "Parse again without the window trace1 = trappy.FTrace( events=['sched_wakeup'], window=(0,", "r in cached_dfr.iterrows()] uncached_times = [r[0] for r in uncached_dfr.iterrows()]", "r in uncached_dfr.iterrows()]) self.assertTrue([r[1].prio for r in cached_dfr.iterrows()] == [r[1].prio", "write_md5(md5): metadata = read_metadata() metadata[\"md5sum\"] = md5 with open(metadata_path, \"w\")", "- 1) # Generate trace again, should regenerate only the", "the stored checksum md5sum = read_metadata()[\"md5sum\"] md5sum_inc = md5sum[:-1] +", "(the \"License\"); # you may not use this file except", "82.001337000000007] # # To fix this, the timestamps read from", "whether cache is used or not. 
GenericFTrace.disable_cache = False uncached_trace", "self.assertAlmostEquals(trace.get_duration(), 1.900002) self.assertEqual(len(trace.sched_wakeup.data_frame), 2) self.assertEqual(len(trace.sched_wakeup_new.data_frame), 1) def test_ftrace_metadata(self): \"\"\"Test that", "trappy.FTrace() uncached_dfr = uncached_trace.sched_wakeup.data_frame # Now read from previously parsed", "# you may not use this file except in compliance", "test_cache_created(self): \"\"\"Test cache creation when enabled\"\"\" GenericFTrace.disable_cache = False traces", "cached_dfr.iterrows()] == [r[1].comm for r in uncached_dfr.iterrows()]) self.assertTrue([r[1].prio for r", "+ trace_file + '.cache' self.assertFalse(cache_dir in os.listdir(trace_dir)) def test_compare_cached_vs_uncached(self): \"\"\"", "overwritten\") def test_cache_dynamic_events(self): \"\"\"Test that caching works if new event", "have been registered\"\"\" # Parse the trace to create a", "hasattr(trace1, 'dynamic_event'): raise RuntimeError('Test bug: found unexpected event in trace')", "a window to a cached trace returns EXACTLY what is", "= trappy.FTrace() trace_path = os.path.abspath(trace.trace_path) trace_dir = os.path.dirname(trace_path) trace_file =", "GenericFTrace from trappy.systrace import SysTrace class TestCaching(utils_tests.SetupDirectory): def __init__(self, *args,", "traces = (trappy.FTrace(), trappy.SysTrace(path='./trace.html')) for trace in traces: trace_path =", "contributors # # Licensed under the Apache License, Version 2.0", "cached and uncached traces are same \"\"\" # Build the", "event parsers have been registered\"\"\" # Parse the trace to", "def test_cache_window_narrow(self): \"\"\" Test that applying a window to a", "timestamps being identical to trace-read timestamps. # # This test", "present in the cache) is # parsed. 
parse_class = trappy.register_dynamic_ftrace(\"DynamicEvent\",", "+ '.cache' self.assertFalse(cache_dir in os.listdir(trace_dir)) def test_compare_cached_vs_uncached(self): \"\"\" Test that", "the cache, but the actual trace will be parsed #", "reading from the trace.txt file. # # Here's an example:", "division from __future__ import print_function from builtins import chr import", "cache, but the actual trace will be parsed # fresh", "register a new event type, call the constructor again, and", "of the stored checksum md5sum = read_metadata()[\"md5sum\"] md5sum_inc = md5sum[:-1]", "md5sum = read_metadata()[\"md5sum\"] md5sum_inc = md5sum[:-1] + chr(ord(md5sum[-1]) + 1)", "if new event parsers have been registered\"\"\" # Parse the", "parameter\"\"\" GenericFTrace.disable_cache = False trace1 = trappy.FTrace( events=['sched_wakeup'], window=(0, 1))", "# # Unless required by applicable law or agreed to", "cache_dir = '.' + trace_file + '.cache' self.assertTrue(cache_dir in os.listdir(trace_dir))", "for r in cached_dfr.iterrows()] == [r[1].comm for r in uncached_dfr.iterrows()])", "'.cache' number_of_trace_categories = 31 self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories) os.remove(os.path.join(cache_dir, 'SchedWakeup.csv')) self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories", "[r[1].pid for r in uncached_dfr.iterrows()]) self.assertTrue([r[1].comm for r in cached_dfr.iterrows()]", "first_freq_event_time = 6550.056870 # Parse without normalizing time trace1 =", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "testing what we think we're testing The trace # contains", "trappy.FTrace() trace_path = os.path.abspath(trace.trace_path) trace_dir = os.path.dirname(trace_path) trace_file = os.path.basename(trace_path)", "= md5sum[:-1] + chr(ord(md5sum[-1]) + 1) write_md5(md5sum_inc) # Parse a", "Version 2.0 (the \"License\"); # you may not use this", "'sched_wakeup'], normalize_time=True) 
self.assertEqual(trace2.cpu_frequency.data_frame.index[0], first_freq_event_time - start_time) def test_cache_window_broad(self): \"\"\"Test that", "stored checksum md5sum = read_metadata()[\"md5sum\"] md5sum_inc = md5sum[:-1] + chr(ord(md5sum[-1])", "*args, **kwargs) def test_cache_created(self): \"\"\"Test cache creation when enabled\"\"\" GenericFTrace.disable_cache", "= trappy.FTrace( events=['sched_wakeup'], window=(0, 1)) # Check that we're testing", "self.assertNotEqual(read_metadata()[\"md5sum\"], md5sum_inc, \"The invalid ftrace cache wasn't overwritten\") def test_cache_dynamic_events(self):", "print_function from builtins import chr import os import json import", "time parse GenericFTrace.disable_cache = False uncached_trace = trappy.FTrace() uncached_dfr =", "os.path.dirname(trace_path) trace_file = os.path.basename(trace_path) cache_dir = '.' + trace_file +", "shutil.copytree(src, cache_path) metadata_path = os.path.join(cache_path, \"metadata.json\") def read_metadata(): with open(metadata_path,", "# This test verifies that applying windows results in identical", "implied. # See the License for the specific language governing", "item trace = trappy.FTrace() self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories) for c in trace.trace_classes:", "read from the cache are converted using # the same", "under the Apache License, Version 2.0 (the \"License\"); # you", "md5sum was overwritten self.assertNotEqual(read_metadata()[\"md5sum\"], md5sum_inc, \"The invalid ftrace cache wasn't", "we are if hasattr(trace1, 'dynamic_event'): raise RuntimeError('Test bug: found unexpected", "def read_metadata(): with open(metadata_path, \"r\") as f: return json.load(f) def", "stays true. 
cached_times = [r[0] for r in cached_dfr.iterrows()] uncached_times", "what is expected \"\"\" # As described in test_compare_cache_vs_uncached, reading", "# - csv string timestamps: # [76.402065, 80.402065, 80.001337] #", "from builtins import chr import os import json import shutil", "parse GenericFTrace.disable_cache = False uncached_trace = trappy.FTrace() uncached_dfr = uncached_trace.sched_wakeup.data_frame", "builtins import chr import os import json import shutil import", "+ 1) write_md5(md5sum_inc) # Parse a trace, this should delete", "by applicable law or agreed to in writing, software #", "been registered\"\"\" # Parse the trace to create a cache", "reading from csv is # different from the one used", "compare other columns as well self.assertTrue([r[1].pid for r in cached_dfr.iterrows()]", "Test that applying a window to a cached trace returns", "self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories) for c in trace.trace_classes: if isinstance(c, trace.class_definitions['sched_wakeup']): self.assertEqual(c.cached,", "self.test_cache_created() trace = trappy.FTrace() version = int(trace._version) cpus = int(trace._cpus)", "contains 2 sched_wakeup events; this window should get rid of", "src = os.path.join(utils_tests.TESTS_DIRECTORY, \"trace_sched.txt.cache\") shutil.copytree(src, cache_path) metadata_path = os.path.join(cache_path, \"metadata.json\")", "number_of_trace_categories = 31 self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories) os.remove(os.path.join(cache_dir, 'SchedWakeup.csv')) self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories -", "(which is not present in the cache) is # parsed.", "that caching doesn't break the 'window' parameter\"\"\" GenericFTrace.disable_cache = False", "slightly different timestamps # # This test verifies that applying", "Test that the cached and uncached traces are same \"\"\"", "first time parse GenericFTrace.disable_cache = False uncached_trace = 
trappy.FTrace() uncached_dfr", "uncached_trace.sched_wakeup.data_frame # Now read from previously parsed cache by reusing", "1) trappy.unregister_dynamic_ftrace(parse_class) def test_cache_normalize_time(self): \"\"\"Test that caching doesn't break normalize_time\"\"\"", "def test_cache_normalize_time(self): \"\"\"Test that caching doesn't break normalize_time\"\"\" GenericFTrace.disable_cache =", "cache # results in slightly different timestamps # # This", "the path cached_trace = trappy.FTrace(uncached_trace.trace_path) cached_dfr = cached_trace.sched_wakeup.data_frame # By", "modified md5sum was overwritten self.assertNotEqual(read_metadata()[\"md5sum\"], md5sum_inc, \"The invalid ftrace cache", "that caching works if new event parsers have been registered\"\"\"", "shutil import sys import unittest import utils_tests import trappy from", "[r[0] for r in cached_dfr.iterrows()] uncached_times = [r[0] for r", "# To fix this, the timestamps read from the cache", "what we think we are if hasattr(trace1, 'dynamic_event'): raise RuntimeError('Test", "False trace = trappy.FTrace() trace_path = os.path.abspath(trace.trace_path) trace_dir = os.path.dirname(trace_path)", "self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories - 1) # Generate trace again, should regenerate", "= trappy.FTrace() version = int(trace._version) cpus = int(trace._cpus) self.assertEqual(version, 6)", "in test_compare_cache_vs_uncached, reading from cache # results in slightly different", "= read_metadata()[\"md5sum\"] md5sum_inc = md5sum[:-1] + chr(ord(md5sum[-1]) + 1) write_md5(md5sum_inc)", "md5sum[:-1] + chr(ord(md5sum[-1]) + 1) write_md5(md5sum_inc) # Parse a trace,", "timestamps read from the cache are converted using # the", "# # This test verifies that applying windows results in", "test ensures that this stays true. 
cached_times = [r[0] for", "import print_function from builtins import chr import os import json", "This test verifies that applying windows results in identical #", "a new event type, call the constructor again, and check", "int(trace._cpus) self.assertEqual(version, 6) self.assertEqual(cpus, 6) def test_cache_delete_single(self): GenericFTrace.disable_cache = False", "identical # dataframes whether cache is used or not. GenericFTrace.disable_cache", "RuntimeError('Test bug: found unexpected event in trace') # Now register", "for r in uncached_dfr.iterrows()]) def test_invalid_cache_overwritten(self): \"\"\"Test a cache with", "cache_path = \".trace.txt.cache\" src = os.path.join(utils_tests.TESTS_DIRECTORY, \"trace_sched.txt.cache\") shutil.copytree(src, cache_path) metadata_path", "parser, which results in # cache-read timestamps being identical to", "applying windows results in identical # dataframes whether cache is", "in uncached_dfr.iterrows()] self.assertTrue(cached_times == uncached_times) # compare other columns as", "= md5 with open(metadata_path, \"w\") as f: json.dump(metadata, f) #", "'.' 
+ trace_file + '.cache' self.assertFalse(cache_dir in os.listdir(trace_dir)) def test_compare_cached_vs_uncached(self):", "2015-2017 ARM Limited, Google and contributors # # Licensed under", "Google and contributors # # Licensed under the Apache License,", "31 self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories) os.remove(os.path.join(cache_dir, 'SchedWakeup.csv')) self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories - 1) #", "import unittest import utils_tests import trappy from trappy.ftrace import GenericFTrace", "GenericFTrace.disable_cache = False uncached_trace = trappy.FTrace() uncached_dfr = uncached_trace.sched_wakeup.data_frame #", "trace metadata\"\"\" GenericFTrace.disable_cache = False self.test_cache_created() trace = trappy.FTrace() version", "1 character of the stored checksum md5sum = read_metadata()[\"md5sum\"] md5sum_inc", "is # different from the one used when reading from", "break the 'window' parameter\"\"\" GenericFTrace.disable_cache = False trace1 = trappy.FTrace(", "columns as well self.assertTrue([r[1].pid for r in cached_dfr.iterrows()] == [r[1].pid", "== 1) trappy.unregister_dynamic_ftrace(parse_class) def test_cache_normalize_time(self): \"\"\"Test that caching doesn't break", "unittest import utils_tests import trappy from trappy.ftrace import GenericFTrace from", "and check # that the newly added event (which is", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "cpus = int(trace._cpus) self.assertEqual(version, 6) self.assertEqual(cpus, 6) def test_cache_delete_single(self): GenericFTrace.disable_cache", "Unless required by applicable law or agreed to in writing,", "\"\"\" # Build the cache, but the actual trace will", "# Times in trace_sched.txt start_time = 6550.018511 first_freq_event_time = 6550.056870", "\".trace.txt.cache\" src = os.path.join(utils_tests.TESTS_DIRECTORY, \"trace_sched.txt.cache\") shutil.copytree(src, cache_path) metadata_path = os.path.join(cache_path,", 
"True traces = (trappy.FTrace(), trappy.SysTrace(path='./trace.html')) for trace in traces: trace_path", "# # This test ensures that this stays true. cached_times", "to trace-read timestamps. # # This test ensures that this", "overwritten self.assertNotEqual(read_metadata()[\"md5sum\"], md5sum_inc, \"The invalid ftrace cache wasn't overwritten\") def", "= False trace1 = trappy.FTrace( events=['sched_wakeup'], window=(0, 1)) # Check", "not be created when disabled \"\"\" GenericFTrace.disable_cache = True traces", "metadata_path = os.path.join(cache_path, \"metadata.json\") def read_metadata(): with open(metadata_path, \"r\") as", "# different from the one used when reading from the", "import unicode_literals from __future__ import division from __future__ import print_function", "the specific language governing permissions and # limitations under the", "that cache should not be created when disabled \"\"\" GenericFTrace.disable_cache", "\"\"\"Test cache creation when enabled\"\"\" GenericFTrace.disable_cache = False traces =", "overwrite the invalidated cache GenericFTrace.disable_cache = False trace = trappy.FTrace()", "chr import os import json import shutil import sys import", "applicable law or agreed to in writing, software # distributed", "to float conversion done when reading from csv is #", "with open(metadata_path, \"r\") as f: return json.load(f) def write_md5(md5): metadata", "string timestamps: # [76.402065, 80.402065, 80.001337] # - parsed dataframe", "number_of_trace_categories) for c in trace.trace_classes: if isinstance(c, trace.class_definitions['sched_wakeup']): self.assertEqual(c.cached, False)", "\"metadata.json\") def read_metadata(): with open(metadata_path, \"r\") as f: return json.load(f)", "a directory so we can't use the files_to_copy arg of", "new event type, call the constructor again, and check #", "# Check that we're testing what we think we're testing", "csv is # different from the one used when reading", "in traces: trace_path = 
os.path.abspath(trace.trace_path) trace_dir = os.path.dirname(trace_path) trace_file =", "= trappy.FTrace(uncached_trace.trace_path) cached_dfr = cached_trace.sched_wakeup.data_frame # By default, the str", "well self.assertTrue([r[1].pid for r in cached_dfr.iterrows()] == [r[1].pid for r", "the License. # from __future__ import unicode_literals from __future__ import", "a cache with a bad checksum is overwritten\"\"\" # This", "# Check we're actually testing what we think we are", "- parsed dataframe timestamps: # [76.402065000000007, 80.402065000000007, 82.001337000000007] # #", "check # that the newly added event (which is not", "= trappy.FTrace(uncached_trace.trace_path, normalize_time=False, abs_window=(6550.100000, 6552.000002)) self.assertAlmostEquals(trace.get_duration(), 1.900002) self.assertEqual(len(trace.sched_wakeup.data_frame), 2) self.assertEqual(len(trace.sched_wakeup_new.data_frame),", "abs_window=(6550.100000, 6552.000002)) self.assertAlmostEquals(trace.get_duration(), 1.900002) self.assertEqual(len(trace.sched_wakeup.data_frame), 2) self.assertEqual(len(trace.sched_wakeup_new.data_frame), 1) def test_ftrace_metadata(self):", "To fix this, the timestamps read from the cache are", "from csv is # different from the one used when", "in writing, software # distributed under the License is distributed", "r in uncached_dfr.iterrows()] self.assertTrue(cached_times == uncached_times) # compare other columns", "done when reading from csv is # different from the", "trappy.register_dynamic_ftrace(\"DynamicEvent\", \"dynamic_test_key\") trace2 = trappy.FTrace() self.assertTrue(len(trace2.dynamic_event.data_frame) == 1) trappy.unregister_dynamic_ftrace(parse_class) def", "# Copyright 2015-2017 ARM Limited, Google and contributors # #", "[r[0] for r in uncached_dfr.iterrows()] self.assertTrue(cached_times == uncached_times) # compare", "doesn't break the 'window' parameter\"\"\" GenericFTrace.disable_cache = False trace1 =", "get rid of one of # them. 
if len(trace1.sched_wakeup.data_frame) !=", "method as the trace.txt parser, which results in # cache-read", "'window' parameter\"\"\" GenericFTrace.disable_cache = False trace1 = trappy.FTrace( events=['sched_wakeup'], window=(0,", "6) self.assertEqual(cpus, 6) def test_cache_delete_single(self): GenericFTrace.disable_cache = False trace =", "\"w\") as f: json.dump(metadata, f) # Change 1 character of", "the 'window' parameter\"\"\" GenericFTrace.disable_cache = False trace1 = trappy.FTrace( events=['sched_wakeup'],", "for r in cached_dfr.iterrows()] == [r[1].prio for r in uncached_dfr.iterrows()])", "that caching doesn't break normalize_time\"\"\" GenericFTrace.disable_cache = False # Times", "# # To fix this, the timestamps read from the", "md5sum_inc = md5sum[:-1] + chr(ord(md5sum[-1]) + 1) write_md5(md5sum_inc) # Parse", "'.' + trace_file + '.cache' self.assertTrue(cache_dir in os.listdir(trace_dir)) def test_cache_not_created(self):", "fresh since this is a first time parse GenericFTrace.disable_cache =", "a trace, this should delete and overwrite the invalidated cache", "self).__init__( [(\"trace_sched.txt\", \"trace.txt\"), (\"trace_sched.txt\", \"trace.raw.txt\"), (\"trace_systrace.html\", \"trace.html\")], *args, **kwargs) def", "this, the timestamps read from the cache are converted using", "test_ftrace_metadata(self): \"\"\"Test that caching keeps trace metadata\"\"\" GenericFTrace.disable_cache = False", "# Generate trace again, should regenerate only the missing item", "cached_trace.sched_wakeup.data_frame # By default, the str to float conversion done", "converted using # the same conversion method as the trace.txt", "- csv string timestamps: # [76.402065, 80.402065, 80.001337] # -", "for c in trace.trace_classes: if isinstance(c, trace.class_definitions['sched_wakeup']): self.assertEqual(c.cached, False) continue", "the actual trace will be parsed # fresh since this", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "trappy.systrace import 
SysTrace class TestCaching(utils_tests.SetupDirectory): def __init__(self, *args, **kwargs): super(TestCaching,", "License, Version 2.0 (the \"License\"); # you may not use", "# Now read from previously parsed cache by reusing the", "6550.018511 first_freq_event_time = 6550.056870 # Parse without normalizing time trace1", "# You may obtain a copy of the License at", "from previously parsed cache by reusing the path cached_trace =", "import trappy from trappy.ftrace import GenericFTrace from trappy.systrace import SysTrace", "uncached_dfr.iterrows()]) def test_invalid_cache_overwritten(self): \"\"\"Test a cache with a bad checksum", "== [r[1].pid for r in uncached_dfr.iterrows()]) self.assertTrue([r[1].comm for r in", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "# # - csv string timestamps: # [76.402065, 80.402065, 80.001337]", "conversion method as the trace.txt parser, which results in #", "uncached_times = [r[0] for r in uncached_dfr.iterrows()] self.assertTrue(cached_times == uncached_times)", "import utils_tests import trappy from trappy.ftrace import GenericFTrace from trappy.systrace", "to a cached trace returns EXACTLY what is expected \"\"\"", "that caching keeps trace metadata\"\"\" GenericFTrace.disable_cache = False self.test_cache_created() trace", "json.dump(metadata, f) # Change 1 character of the stored checksum", "caching doesn't break normalize_time\"\"\" GenericFTrace.disable_cache = False # Times in", "= trappy.FTrace() uncached_dfr = uncached_trace.sched_wakeup.data_frame # Now read from previously", "= trappy.register_dynamic_ftrace(\"DynamicEvent\", \"dynamic_test_key\") trace2 = trappy.FTrace() self.assertTrue(len(trace2.dynamic_event.data_frame) == 1) trappy.unregister_dynamic_ftrace(parse_class)", "missing item trace = trappy.FTrace() self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories) for c in", "the License for the specific language governing permissions and #", "regenerate only the 
missing item trace = trappy.FTrace() self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories)", "is a directory so we can't use the files_to_copy arg", "Apache License, Version 2.0 (the \"License\"); # you may not", "normalizing time trace1 = trappy.FTrace(events=['cpu_frequency', 'sched_wakeup'], normalize_time=False) self.assertEqual(trace1.cpu_frequency.data_frame.index[0], first_freq_event_time) #", "# - parsed dataframe timestamps: # [76.402065000000007, 80.402065000000007, 82.001337000000007] #", "either express or implied. # See the License for the", "os.listdir(trace_dir)) def test_compare_cached_vs_uncached(self): \"\"\" Test that the cached and uncached", "should delete and overwrite the invalidated cache GenericFTrace.disable_cache = False", "trace') # Now register a new event type, call the", "when enabled\"\"\" GenericFTrace.disable_cache = False traces = (trappy.FTrace(), trappy.SysTrace(path='./trace.html')) for", "one used when reading from the trace.txt file. # #", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "test_compare_cached_vs_uncached(self): \"\"\" Test that the cached and uncached traces are", "from the one used when reading from the trace.txt file.", "uncached_dfr.iterrows()] self.assertTrue(cached_times == uncached_times) # compare other columns as well", "it ourselves. cache_path = \".trace.txt.cache\" src = os.path.join(utils_tests.TESTS_DIRECTORY, \"trace_sched.txt.cache\") shutil.copytree(src,", "# Parse without normalizing time trace1 = trappy.FTrace(events=['cpu_frequency', 'sched_wakeup'], normalize_time=False)", "cached_times = [r[0] for r in cached_dfr.iterrows()] uncached_times = [r[0]", "test_cache_window_narrow(self): \"\"\" Test that applying a window to a cached", "governing permissions and # limitations under the License. 
# from", "GenericFTrace.disable_cache = True traces = (trappy.FTrace(), trappy.SysTrace(path='./trace.html')) for trace in", "parsed # fresh since this is a first time parse", "in cached_dfr.iterrows()] == [r[1].prio for r in uncached_dfr.iterrows()]) def test_invalid_cache_overwritten(self):", "\"\"\"Test that caching works if new event parsers have been", "trace_file + '.cache' self.assertTrue(cache_dir in os.listdir(trace_dir)) def test_cache_not_created(self): \"\"\"Test that", "test_cache_not_created(self): \"\"\"Test that cache should not be created when disabled", "start_time = 6550.018511 first_freq_event_time = 6550.056870 # Parse without normalizing", "cache_path) metadata_path = os.path.join(cache_path, \"metadata.json\") def read_metadata(): with open(metadata_path, \"r\")", "enabled\"\"\" GenericFTrace.disable_cache = False traces = (trappy.FTrace(), trappy.SysTrace(path='./trace.html')) for trace", "6550.056870 # Parse without normalizing time trace1 = trappy.FTrace(events=['cpu_frequency', 'sched_wakeup'],", "RuntimeError('Test bug: bad sched_wakeup event count') # Parse again without", "os.path.basename(trace_path) cache_dir = '.' 
+ trace_file + '.cache' self.assertTrue(cache_dir in", "self.assertTrue(cached_times == uncached_times) # compare other columns as well self.assertTrue([r[1].pid", "'.cache' self.assertTrue(cache_dir in os.listdir(trace_dir)) def test_cache_not_created(self): \"\"\"Test that cache should", "False uncached_trace = trappy.FTrace() uncached_dfr = uncached_trace.sched_wakeup.data_frame # Now read", "= False uncached_trace = trappy.FTrace() trace = trappy.FTrace(uncached_trace.trace_path, normalize_time=False, abs_window=(6550.100000,", "the same conversion method as the trace.txt parser, which results", "keeps trace metadata\"\"\" GenericFTrace.disable_cache = False self.test_cache_created() trace = trappy.FTrace()", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "False trace1 = trappy.FTrace( events=['sched_wakeup'], window=(0, 1)) # Check that", "used when reading from the trace.txt file. # # Here's", "when reading from csv is # different from the one", "in trace') # Now register a new event type, call", "results in slightly different timestamps # # This test verifies", "trappy from trappy.ftrace import GenericFTrace from trappy.systrace import SysTrace class", "= (trappy.FTrace(), trappy.SysTrace(path='./trace.html')) for trace in traces: trace_path = os.path.abspath(trace.trace_path)", "= os.path.abspath(trace.trace_path) trace_dir = os.path.dirname(trace_path) trace_file = os.path.basename(trace_path) cache_dir =", "and overwrite the invalidated cache GenericFTrace.disable_cache = False trace =", "- cached dataframe timestamps: # [76.402064999999993, 80.402064999999993, 82.001337000000007] # #", "self.assertEqual(len(trace.sched_wakeup.data_frame), 2) self.assertEqual(len(trace.sched_wakeup_new.data_frame), 1) def test_ftrace_metadata(self): \"\"\"Test that caching keeps", "os.path.join(utils_tests.TESTS_DIRECTORY, \"trace_sched.txt.cache\") shutil.copytree(src, cache_path) metadata_path = os.path.join(cache_path, \"metadata.json\") 
def read_metadata():", "len(trace1.sched_wakeup.data_frame) != 1: raise RuntimeError('Test bug: bad sched_wakeup event count')", "with a bad checksum is overwritten\"\"\" # This is a", "GenericFTrace.disable_cache = False # Times in trace_sched.txt start_time = 6550.018511", "Parse the trace to create a cache GenericFTrace.disable_cache = False", "trace_dir = os.path.dirname(trace_path) trace_file = os.path.basename(trace_path) cache_dir = '.' +", "doesn't break normalize_time\"\"\" GenericFTrace.disable_cache = False # Times in trace_sched.txt", "from __future__ import print_function from builtins import chr import os", "os.path.abspath(trace.trace_path) trace_dir = os.path.dirname(trace_path) trace_file = os.path.basename(trace_path) cache_dir = '.'", "cache GenericFTrace.disable_cache = False trace = trappy.FTrace() # Check that", "open(metadata_path, \"w\") as f: json.dump(metadata, f) # Change 1 character", "trace1 = trappy.FTrace( events=['sched_wakeup'], window=(0, None)) self.assertEqual(len(trace1.sched_wakeup.data_frame), 2) def test_cache_window_narrow(self):", "82.001337000000007] # # - csv string timestamps: # [76.402065, 80.402065,", "bad sched_wakeup event count') # Parse again without the window", "parsed cache by reusing the path cached_trace = trappy.FTrace(uncached_trace.trace_path) cached_dfr", "cached trace returns EXACTLY what is expected \"\"\" # As", "# [76.402064999999993, 80.402064999999993, 82.001337000000007] # # To fix this, the", "this stays true. 
cached_times = [r[0] for r in cached_dfr.iterrows()]", "# Check that the modified md5sum was overwritten self.assertNotEqual(read_metadata()[\"md5sum\"], md5sum_inc,", "are converted using # the same conversion method as the", "added event (which is not present in the cache) is", "\"License\"); # you may not use this file except in", "- start_time) def test_cache_window_broad(self): \"\"\"Test that caching doesn't break the", "[76.402065, 80.402065, 80.001337] # - cached dataframe timestamps: # [76.402064999999993,", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "raise RuntimeError('Test bug: bad sched_wakeup event count') # Parse again", "\"\"\"Test that cache should not be created when disabled \"\"\"", "different timestamps # # This test verifies that applying windows", "self.assertTrue([r[1].prio for r in cached_dfr.iterrows()] == [r[1].prio for r in", "6) def test_cache_delete_single(self): GenericFTrace.disable_cache = False trace = trappy.FTrace() trace_path", "cached_dfr.iterrows()] uncached_times = [r[0] for r in uncached_dfr.iterrows()] self.assertTrue(cached_times ==", "[(\"trace_sched.txt\", \"trace.txt\"), (\"trace_sched.txt\", \"trace.raw.txt\"), (\"trace_systrace.html\", \"trace.html\")], *args, **kwargs) def test_cache_created(self):", "conversion done when reading from csv is # different from", "1.900002) self.assertEqual(len(trace.sched_wakeup.data_frame), 2) self.assertEqual(len(trace.sched_wakeup_new.data_frame), 1) def test_ftrace_metadata(self): \"\"\"Test that caching", "str to float conversion done when reading from csv is", "= trappy.FTrace( events=['sched_wakeup'], window=(0, None)) self.assertEqual(len(trace1.sched_wakeup.data_frame), 2) def test_cache_window_narrow(self): \"\"\"", "write_md5(md5sum_inc) # Parse a trace, this should delete and overwrite", "# distributed under the License is distributed on an \"AS", "that this stays true. 
cached_times = [r[0] for r in", "= 6550.018511 first_freq_event_time = 6550.056870 # Parse without normalizing time", "trace = trappy.FTrace() version = int(trace._version) cpus = int(trace._cpus) self.assertEqual(version,", "trace2 = trappy.FTrace(events=['cpu_frequency', 'sched_wakeup'], normalize_time=True) self.assertEqual(trace2.cpu_frequency.data_frame.index[0], first_freq_event_time - start_time) def", "# [76.402065, 80.402065, 80.001337] # - parsed dataframe timestamps: #", "# Unless required by applicable law or agreed to in", "should regenerate only the missing item trace = trappy.FTrace() self.assertEqual(len(os.listdir(cache_dir)),", "**kwargs): super(TestCaching, self).__init__( [(\"trace_sched.txt\", \"trace.txt\"), (\"trace_sched.txt\", \"trace.raw.txt\"), (\"trace_systrace.html\", \"trace.html\")], *args,", "self.assertTrue([r[1].comm for r in cached_dfr.iterrows()] == [r[1].comm for r in", "r in cached_dfr.iterrows()] == [r[1].prio for r in uncached_dfr.iterrows()]) def", "parsers have been registered\"\"\" # Parse the trace to create", "md5sum_inc, \"The invalid ftrace cache wasn't overwritten\") def test_cache_dynamic_events(self): \"\"\"Test", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "\"\"\" # As described in test_compare_cache_vs_uncached, reading from cache #", "true. 
cached_times = [r[0] for r in cached_dfr.iterrows()] uncached_times =", "timestamps: # [76.402065, 80.402065, 80.001337] # - cached dataframe timestamps:", "!= 1: raise RuntimeError('Test bug: bad sched_wakeup event count') #", "os.remove(os.path.join(cache_dir, 'SchedWakeup.csv')) self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories - 1) # Generate trace again,", "first_freq_event_time - start_time) def test_cache_window_broad(self): \"\"\"Test that caching doesn't break", "that applying a window to a cached trace returns EXACTLY", "in slightly different timestamps # # This test verifies that", "trace = trappy.FTrace() # Check that the modified md5sum was", "self.assertFalse(cache_dir in os.listdir(trace_dir)) def test_compare_cached_vs_uncached(self): \"\"\" Test that the cached", "import shutil import sys import unittest import utils_tests import trappy", "cached_trace = trappy.FTrace(uncached_trace.trace_path) cached_dfr = cached_trace.sched_wakeup.data_frame # By default, the", "You may obtain a copy of the License at #", "in cached_dfr.iterrows()] uncached_times = [r[0] for r in uncached_dfr.iterrows()] self.assertTrue(cached_times", "= uncached_trace.sched_wakeup.data_frame # Now read from previously parsed cache by", "again without the window trace1 = trappy.FTrace( events=['sched_wakeup'], window=(0, None))", "event count') # Parse again without the window trace1 =", "cached_dfr.iterrows()] == [r[1].pid for r in uncached_dfr.iterrows()]) self.assertTrue([r[1].comm for r", "cache GenericFTrace.disable_cache = False trace1 = trappy.FTrace() # Check we're", "trace = trappy.FTrace() self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories) for c in trace.trace_classes: if", "reading from cache # results in slightly different timestamps #", "a first time parse GenericFTrace.disable_cache = False uncached_trace = trappy.FTrace()", "# limitations under the License. 
# from __future__ import unicode_literals", "80.402065, 80.001337] # - cached dataframe timestamps: # [76.402064999999993, 80.402064999999993,", "os.listdir(trace_dir)) def test_cache_not_created(self): \"\"\"Test that cache should not be created", "we're actually testing what we think we are if hasattr(trace1,", "event in trace') # Now register a new event type,", "self.assertEqual(len(trace1.sched_wakeup.data_frame), 2) def test_cache_window_narrow(self): \"\"\" Test that applying a window", "trappy.FTrace(events=['cpu_frequency', 'sched_wakeup'], normalize_time=True) self.assertEqual(trace2.cpu_frequency.data_frame.index[0], first_freq_event_time - start_time) def test_cache_window_broad(self): \"\"\"Test", "the Apache License, Version 2.0 (the \"License\"); # you may", "= trappy.FTrace(events=['cpu_frequency', 'sched_wakeup'], normalize_time=True) self.assertEqual(trace2.cpu_frequency.data_frame.index[0], first_freq_event_time - start_time) def test_cache_window_broad(self):", "Parse without normalizing time trace1 = trappy.FTrace(events=['cpu_frequency', 'sched_wakeup'], normalize_time=False) self.assertEqual(trace1.cpu_frequency.data_frame.index[0],", "applying a window to a cached trace returns EXACTLY what", "[r[1].comm for r in uncached_dfr.iterrows()]) self.assertTrue([r[1].prio for r in cached_dfr.iterrows()]", "\"dynamic_test_key\") trace2 = trappy.FTrace() self.assertTrue(len(trace2.dynamic_event.data_frame) == 1) trappy.unregister_dynamic_ftrace(parse_class) def test_cache_normalize_time(self):" ]
[ "the output is a feature set that has encoded past", "forecast target, at a specified point in the future. Unlike", "A. Forecast Specification B. Stationarization C. Feature Generation D. Feature", "signals in the frequency domain, extracting historic lags that will", "stages of a nonlinear pipeline are as follows: A. Forecast", "feature_discovery = \"feature_discovery\" signal_encoding = \"signal_encoding\" stationarisation = \"stationarisation\" time_series_regression", "D. Feature Filtering E. Linear Backtesting F. Linear Prediction Feature", "generate forecasts that are at a specific target in the", "selection with a nonlinear regressor to generate forecasts that are", "Linear Regressor The stages of a linear pipeline are as", "the feature selection pipeline, it does not assume that the", "pre-encoded signal set. Small Data Forecasting =============================================================================================================== Time-series pipeline for", "Refinement G. Linear Prediction Variational Forecasting =============================================================================================================== Creates a stacked", "Prediction Variational Forecasting =============================================================================================================== Creates a stacked lag-embedding matrix by", "feature selection are then used in order to maximize predictive", "Prediction Linear =============================================================================================================== A nonlinear pipeline combines nonlinear feature generation", "does not include Feature Refinement. The stages of a linear", "that can be exported from the platform The stages of", "and selection with a nonlinear regressor to generate forecasts that", "selection pipeline, it does not assume that the signal set", "specific target in the future. 
The regressor used is a", "Unlike the feature selection pipeline, it does not assume that", "combining a two-stage feature generation and selection process, with lag-only", "linear = \"linear\" fast_forecasting = \"fast_forecast\" feature_selection = \"feature_selection\" feature_discovery", "a signal encoding pipeline are as follows: A. Forecast Specification", "feature set on specified future points for a given target", "on specified future points for a given target The stages", "within them. The signal encoding pipeline allows for this functionality", "generation with a nonlinear regressor to generate forecasts that are", "regression sense to map to a target value at a", "does not assume that the signal set has already encoded", "process, with lag-only feature generation. A. Forecast Specification B. Stationarization", "Obs3 ------------------------------------- t-2 ... ..................................... ... ..................................... ObsN ------------------------------------- t-N", "enum import Enum class BlueprintType(Enum): \"\"\" A blueprint is a", "\"\"\" nonlinear = \"nonlinear\" linear = \"linear\" fast_forecasting = \"fast_forecast\"", "\"feature_discovery\" signal_encoding = \"signal_encoding\" stationarisation = \"stationarisation\" time_series_regression = \"regression\"", "set that has encoded past information about a signal that", "domain, extracting historic lags that will efficiently represent the information", "ObsN ------------------------------------- t-N Two stages of feature selection are then", "stages of a signal encoding pipeline are as follows: A.", "pipeline, it does not assume that the signal set has", "Feature Generation D. Feature Filtering E. Linear Backtesting F. Linear", "A nonlinear pipeline combines nonlinear feature generation and selection with", "Specification B. Stationarization C. Nonlinear Feature Generation D. 
Feature Filtering", "where the output is a feature set that has encoded", "Linear Feature Generation F. Feature Filtering G. Linear Backtesting H.", "= \"feature_discovery\" signal_encoding = \"signal_encoding\" stationarisation = \"stationarisation\" time_series_regression =", "... ..................................... ... ..................................... ObsN ------------------------------------- t-N Two stages of", "them. The signal encoding pipeline allows for this functionality to", "Refinement F. Linear Backtesting G. Linear Prediction Fast Forecasting ===============================================================================================================", "feature generation methods is to encode signals in the frequency", "historical information about the original data's past. The stages of", "... ..................................... ObsN ------------------------------------- t-N Two stages of feature selection", "= \"fast_forecast\" feature_selection = \"feature_selection\" feature_discovery = \"feature_discovery\" signal_encoding =", "a pipeline template in horizon, and must be specified when", "Feature Generation D. Feature Filtering E. Feature Refinement G. Linear", "recommended nonlinear regressor of choice. 2. XG Boost 3. Random", "A number of different regressor types are available here: 1.", "follows: A. Forecast Specification B. Feature Filtering E. Feature Refinement", "the information contained within them. The signal encoding pipeline allows", "matrix by combining a two-stage feature generation and selection process,", "to be used as a quick assessment of a dataset's", "The feature selection pipeline assumes that the input data set", "a forecast specification stage for adding stages manually. N.B. There", "Dicky Fuller analysis, and a detrending method for the specified", "horizontal observation vector may be used in a traditional regression", "Run Horizon's regression algorithms on a pre-encoded signal set. 
Small", "Variational Forecasting =============================================================================================================== Creates a stacked lag-embedding matrix by combining", "specified target. The stages of a stationarization pipeline are as", "a stationarization pipeline are as follows: A. Forecast Specification B.", "set already encodes information about a signal's past, such that", "specified target using Augmented Dicky Fuller analysis, and a detrending", "Stationarization C. Linear Feature Generation D. Feature Filtering E. Feature", "Linear Feature Generation D. Feature Filtering E. Feature Refinement G.", "this functionality to be isolated, where the output is a", "the input data set already encodes information about a signal's", "Stationarization C. Feature Generation D. Feature Filtering E. Feature Refinement", "that the signal set has already encoded historical information about", "Feature Refinement G. Linear Prediction Variational Forecasting =============================================================================================================== Creates a", "2. XG Boost 3. Random Forest. The stages of a", "Feature Filtering Stationarization =============================================================================================================== Stationarize a signal set and specified", "a new pipeline Nonlinear =============================================================================================================== A nonlinear pipeline combines nonlinear", "a dataset's predictive performance It is identical to the linear", "pipeline are as follows: A. Forecast Specification B. Feature Generation", "Time-Series Regression =============================================================================================================== Run Horizon's regression algorithms on a pre-encoded", "nonlinear feature generation and selection with a nonlinear regressor to", "that are at a specific target in the future. 
A", "identical to the linear pipeline, but does not include Feature", "C. Feature Filtering Stationarization =============================================================================================================== Stationarize a signal set and", "lag-embedding matrix by combining a two-stage feature generation and selection", "feature generation and selection with a nonlinear regressor to generate", "blueprint is a pipeline template in horizon, and must be", "be exported from the platform The stages of a signal", "\"fast_forecast\" feature_selection = \"feature_selection\" feature_discovery = \"feature_discovery\" signal_encoding = \"signal_encoding\"", "of feature selection are then used in order to maximize", "are as follows: A. Forecast Specification B. Stationarization Time-Series Regression", "Filtering E. Linear Backtesting F. Linear Prediction Feature Selection ===============================================================================================================", "pipeline are as follows: A. Forecast Specification B. Stationarization C.", "to the linear pipeline, but does not include Feature Refinement.", "information contained within them. The signal encoding pipeline allows for", "about a signal that can be exported from the platform", "Generation D. Feature Filtering E. Feature Refinement F. Linear Backtesting", "| .... | FeatP Obs1 ------------------------------------- t Obs2 ------------------------------------- t-1", "types are available here: 1. Mondrian Forest. An adaptation of", "extracting historic lags that will efficiently represent the information contained", "G. 
Nonlinear Prediction Linear =============================================================================================================== A nonlinear pipeline combines nonlinear", "=============================================================================================================== Run Horizon's regression algorithms on a pre-encoded signal set.", "has encoded past information about a signal that can be", "t-1 Obs3 ------------------------------------- t-2 ... ..................................... ... ..................................... ObsN -------------------------------------", "target. The stages of a stationarization pipeline are as follows:", "feature generation with a nonlinear regressor to generate forecasts that", "Linear Prediction Variational Forecasting =============================================================================================================== Creates a stacked lag-embedding matrix", "Linear Feature Generation D. Feature Filtering E. Linear Feature Generation", "Forecast Specification B. Stationarization C. Feature Generation D. Feature Filtering", "Linear Prediction Fast Forecasting =============================================================================================================== The fast forecasting pipeline is", "feature_selection = \"feature_selection\" feature_discovery = \"feature_discovery\" signal_encoding = \"signal_encoding\" stationarisation", "D. Feature Filtering E. Feature Refinement F. Linear Backtesting G.", "observation vector may be used in a traditional regression sense", "the platform The stages of a signal encoding pipeline are", "the future. 
Unlike the feature selection pipeline, it does not", "time_series_regression = \"regression\" variational_forecasting = \"variational_forecasting\" custom = \"custom\" small_data", "creating a new pipeline Nonlinear =============================================================================================================== A nonlinear pipeline combines", "Specification B. Stationarization C. Linear Feature Generation D. Feature Filtering", "Discovery =============================================================================================================== The feature discovery pipeline discovers features to maximize", "in the frequency domain, extracting historic lags that will efficiently", "=============================================================================================================== Creates a stacked lag-embedding matrix by combining a two-stage", "C. Linear Feature Generation D. Feature Filtering E. Linear Feature", "when creating a new pipeline Nonlinear =============================================================================================================== A nonlinear pipeline", "future. Feat1 | Feat2 | Feat3 | .... | FeatP", "Obs1 ------------------------------------- t Obs2 ------------------------------------- t-1 Obs3 ------------------------------------- t-2 ...", "C. Nonlinear Feature Generation D. Feature Filtering E. Linear Backtesting", "used in a traditional regression sense to map to a", "B. Feature Filtering E. Feature Refinement Feature Discovery =============================================================================================================== The", "feature discovery pipeline discovers features to maximize performance for a", "pipeline template in horizon, and must be specified when creating", "for adding stages manually. N.B. There is no validation on", "original data's past. 
The stages of a feature discovery pipeline", "for this functionality to be isolated, where the output is", "stages of a linear pipeline are as follows: A. Forecast", "Feature Refinement Signal Encoding =============================================================================================================== One of Horizon's feature generation", "discovery pipeline are as follows: A. Forecast Specification B. Feature", "to maximize performance for a particular forecast target, at a", "Feature Generation D. Feature Filtering E. Linear Feature Generation F.", "as follows: A. Forecast Specification B. Stationarization Time-Series Regression ===============================================================================================================", "is a pipeline template in horizon, and must be specified", "include Feature Refinement. The stages of a linear pipeline are", "training. A. Forecast Specification B. Stationarization C. Linear Feature Generation", "horizon, and must be specified when creating a new pipeline", "in the future. The regressor used is a Variational Bayesian", "Filtering E. Feature Refinement Feature Discovery =============================================================================================================== The feature discovery", "| FeatP Obs1 ------------------------------------- t Obs2 ------------------------------------- t-1 Obs3 -------------------------------------", "Forecast Specification B. Stationarization Time-Series Regression =============================================================================================================== Run Horizon's regression", "linear pipeline, but does not include Feature Refinement. The stages", "by combining a two-stage feature generation and selection process, with", "as follows: A. Forecast Specification B. Feature Filtering E. Feature", "G. Linear Backtesting H. 
Linear Prediction Custom =============================================================================================================== Advanced: Contains", "adding stages manually. N.B. There is no validation on stage", "a nonlinear regressor to generate forecasts that are at a", "as a quick assessment of a dataset's predictive performance It", "at a specified point in the future. Unlike the feature", "G. Linear Prediction Fast Forecasting =============================================================================================================== The fast forecasting pipeline", "Feature Filtering E. Feature Refinement F. Linear Backtesting G. Linear", "at a specific target in the future. A number of", "Feature Filtering E. Feature Refinement Feature Discovery =============================================================================================================== The feature", "Feature Discovery =============================================================================================================== The feature discovery pipeline discovers features to", "past. The stages of a feature discovery pipeline are as", "a specified point in the future. Unlike the feature selection", "Feature Refinement Feature Discovery =============================================================================================================== The feature discovery pipeline discovers", "in the future. Feat1 | Feat2 | Feat3 | ....", "signal set. Small Data Forecasting =============================================================================================================== Time-series pipeline for small", "C. Nonlinear Feature Generation D. Feature Filtering E. Feature Refinement", "is no validation on stage addition. \"\"\" nonlinear = \"nonlinear\"", "template in horizon, and must be specified when creating a", "- https://arxiv.org/abs/1406.2673 Provides Bayesian-esque error bounds, and is our recommended", "Generation C. 
Feature Filtering Stationarization =============================================================================================================== Stationarize a signal set", "= \"signal_encoding\" stationarisation = \"stationarisation\" time_series_regression = \"regression\" variational_forecasting =", "combines nonlinear feature generation and selection with a nonlinear regressor", "follows: A. Forecast Specification B. Stationarization Time-Series Regression =============================================================================================================== Run", "Feat2 | Feat3 | .... | FeatP Obs1 ------------------------------------- t", "performance for a particular forecast target, at a specified point", "lags that will efficiently represent the information contained within them.", "are at a specific target in the future. A number", "is our recommended nonlinear regressor of choice. 2. XG Boost", "import Enum class BlueprintType(Enum): \"\"\" A blueprint is a pipeline", "about the original data's past. The stages of a feature", "Filtering Stationarization =============================================================================================================== Stationarize a signal set and specified target", "assume that the signal set has already encoded historical information", "------------------------------------- t-2 ... ..................................... ... ..................................... ObsN ------------------------------------- t-N Two", "stages of a stationarization pipeline are as follows: A. Forecast", "it does not assume that the signal set has already", "point in the future. Unlike the feature selection pipeline, it", ".... 
| FeatP Obs1 ------------------------------------- t Obs2 ------------------------------------- t-1 Obs3", "a stacked lag-embedding matrix by combining a two-stage feature generation", "=============================================================================================================== Time-series pipeline for small data. Does not contain any", "is intended to be used as a quick assessment of", "..................................... ObsN ------------------------------------- t-N Two stages of feature selection are", "be used as a quick assessment of a dataset's predictive", "adaptation of the probabilistic Mondrian Forest algorithm - https://arxiv.org/abs/1406.2673 Provides", "E. Feature Refinement G. Linear Prediction Variational Forecasting =============================================================================================================== Creates", "E. Feature Refinement F. Linear Backtesting G. Linear Prediction Fast", "be isolated, where the output is a feature set that", "target using Augmented Dicky Fuller analysis, and a detrending method", "B. Stationarization C. Linear Feature Generation D. Feature Filtering E.", "The stages of a nonlinear pipeline are as follows: A.", "Variational Bayesian Linear Regressor The stages of a linear pipeline", "to map to a target value at a point in", "here: 1. Mondrian Forest. An adaptation of the probabilistic Mondrian", "specified point in the future. Unlike the feature selection pipeline,", "The stages of a stationarization pipeline are as follows: A.", "generation and selection with a nonlinear regressor to generate forecasts", "discovery pipeline discovers features to maximize performance for a particular", "and must be specified when creating a new pipeline Nonlinear", "H. 
Linear Prediction Custom =============================================================================================================== Advanced: Contains only a forecast", "input data set already encodes information about a signal's past,", "sense to map to a target value at a point", "for a given target The stages of a linear pipeline", "Feature Filtering E. Feature Refinement G. Linear Prediction Variational Forecasting", "set and specified target using Augmented Dicky Fuller analysis, and", "Stationarization C. Nonlinear Feature Generation D. Feature Filtering E. Feature", "the data for model training. A. Forecast Specification B. Stationarization", "Forest. The stages of a nonlinear pipeline are as follows:", "..................................... ... ..................................... ObsN ------------------------------------- t-N Two stages of feature", "G. Linear Prediction Variational Forecasting =============================================================================================================== Creates a stacked lag-embedding", "used is a Variational Bayesian Linear Regressor The stages of", "already encodes information about a signal's past, such that a", "past information about a signal that can be exported from", "are then used in order to maximize predictive performance of", "Feat3 | .... | FeatP Obs1 ------------------------------------- t Obs2 -------------------------------------", "frequency domain, extracting historic lags that will efficiently represent the", "pipeline for small data. Does not contain any backtesting, and", "addition. \"\"\" nonlinear = \"nonlinear\" linear = \"linear\" fast_forecasting =", "target in the future. The regressor used is a Variational", "choice. 2. XG Boost 3. Random Forest. 
The stages of", "The stages of a linear pipeline are as follows: A.", "feature set that has encoded past information about a signal", "maximize predictive performance of the feature set on specified future", "Nonlinear Feature Generation D. Feature Filtering E. Feature Refinement F.", "all the data for model training. A. Forecast Specification B.", "target, at a specified point in the future. Unlike the", "a signal set and specified target using Augmented Dicky Fuller", "a two-stage feature generation and selection process, with lag-only feature", "=============================================================================================================== The fast forecasting pipeline is intended to be used", "A. Forecast Specification B. Stationarization C. Nonlinear Feature Generation D.", "can be exported from the platform The stages of a", "B. Stationarization Time-Series Regression =============================================================================================================== Run Horizon's regression algorithms on", "must be specified when creating a new pipeline Nonlinear ===============================================================================================================", "be used in a traditional regression sense to map to", "\"feature_selection\" feature_discovery = \"feature_discovery\" signal_encoding = \"signal_encoding\" stationarisation = \"stationarisation\"", "Feature Refinement. The stages of a linear pipeline are as", "of a feature discovery pipeline are as follows: A. Forecast", "F. Nonlinear Backtesting G. Nonlinear Prediction Linear =============================================================================================================== A nonlinear", "then used in order to maximize predictive performance of the", "encoding pipeline allows for this functionality to be isolated, where", "encoded historical information about the original data's past. The stages", "A. Forecast Specification B. 
Stationarization Time-Series Regression =============================================================================================================== Run Horizon's", "that the input data set already encodes information about a", "future points for a given target The stages of a", "The stages of a feature discovery pipeline are as follows:", "Linear Backtesting F. Linear Prediction Feature Selection =============================================================================================================== The feature", "Regression =============================================================================================================== Run Horizon's regression algorithms on a pre-encoded signal", "specification stage for adding stages manually. N.B. There is no", "Bayesian Linear Regressor The stages of a linear pipeline are", "Generation C. Feature Filtering D. Feature Refinement Signal Encoding ===============================================================================================================", "= \"feature_selection\" feature_discovery = \"feature_discovery\" signal_encoding = \"signal_encoding\" stationarisation =", "Forecasting =============================================================================================================== Time-series pipeline for small data. Does not contain", "Linear Prediction Custom =============================================================================================================== Advanced: Contains only a forecast specification", "a signal that can be exported from the platform The", "stages of feature selection are then used in order to", "specific target in the future. A number of different regressor", "a signal's past, such that a horizontal observation vector may", "regressor types are available here: 1. Mondrian Forest. 
An adaptation", "signal_encoding = \"signal_encoding\" stationarisation = \"stationarisation\" time_series_regression = \"regression\" variational_forecasting", "| Feat2 | Feat3 | .... | FeatP Obs1 -------------------------------------", "Backtesting G. Nonlinear Prediction Linear =============================================================================================================== A nonlinear pipeline combines", "the future. The regressor used is a Variational Bayesian Linear", "a target value at a point in the future. Feat1", "regressor of choice. 2. XG Boost 3. Random Forest. The", "nonlinear pipeline combines nonlinear feature generation and selection with a", "not assume that the signal set has already encoded historical", "Forecast Specification B. Stationarization C. Linear Feature Generation D. Feature", "Backtesting H. Linear Prediction Custom =============================================================================================================== Advanced: Contains only a", "points for a given target The stages of a linear", "to maximize predictive performance of the feature set on specified", "allows for this functionality to be isolated, where the output", "contain any backtesting, and uses all the data for model", "Prediction Custom =============================================================================================================== Advanced: Contains only a forecast specification stage", "information about the original data's past. The stages of a", "Forecasting =============================================================================================================== Creates a stacked lag-embedding matrix by combining a", "and specified target using Augmented Dicky Fuller analysis, and a", "signal's past, such that a horizontal observation vector may be", "Feat1 | Feat2 | Feat3 | .... | FeatP Obs1", "E. Linear Feature Generation F. Feature Filtering G. 
Linear Backtesting", "=============================================================================================================== A nonlinear pipeline combines nonlinear feature generation and selection", "that are at a specific target in the future. The", "a quick assessment of a dataset's predictive performance It is", "used in order to maximize predictive performance of the feature", "signal set and specified target using Augmented Dicky Fuller analysis,", "in a traditional regression sense to map to a target", "the feature set on specified future points for a given", "no validation on stage addition. \"\"\" nonlinear = \"nonlinear\" linear", "information about a signal that can be exported from the", "signal encoding pipeline allows for this functionality to be isolated,", "a specific target in the future. The regressor used is", "specified future points for a given target The stages of", "map to a target value at a point in the", "a Variational Bayesian Linear Regressor The stages of a linear", "Backtesting F. Linear Prediction Feature Selection =============================================================================================================== The feature selection", "Selection =============================================================================================================== The feature selection pipeline assumes that the input", "D. Feature Filtering E. Linear Feature Generation F. Feature Filtering", "=============================================================================================================== Advanced: Contains only a forecast specification stage for adding", "future. The regressor used is a Variational Bayesian Linear Regressor", "isolated, where the output is a feature set that has", "D. Feature Filtering E. Feature Refinement F. Nonlinear Backtesting G.", "only a forecast specification stage for adding stages manually. N.B.", "C. Feature Generation D. Feature Filtering E. 
Feature Refinement F.", "A nonlinear pipeline combines nonlinear feature generation with a nonlinear", "set on specified future points for a given target The", "Refinement F. Nonlinear Backtesting G. Nonlinear Prediction Linear =============================================================================================================== A", "Nonlinear Prediction Linear =============================================================================================================== A nonlinear pipeline combines nonlinear feature", "Encoding =============================================================================================================== One of Horizon's feature generation methods is to", "Stationarize a signal set and specified target using Augmented Dicky", "as follows: A. Forecast Specification B. Stationarization C. Nonlinear Feature", "N.B. There is no validation on stage addition. \"\"\" nonlinear", "regressor used is a Variational Bayesian Linear Regressor The stages", "nonlinear pipeline combines nonlinear feature generation with a nonlinear regressor", "B. Feature Generation C. Feature Filtering D. Feature Refinement Signal", "our recommended nonlinear regressor of choice. 2. XG Boost 3.", "nonlinear regressor to generate forecasts that are at a specific", "a linear pipeline are as follows: A. Forecast Specification B.", "Contains only a forecast specification stage for adding stages manually.", "linear pipeline are as follows: A. Forecast Specification B. Feature", "discovers features to maximize performance for a particular forecast target,", "BlueprintType(Enum): \"\"\" A blueprint is a pipeline template in horizon,", "Forecast Specification B. Stationarization C. Nonlinear Feature Generation D. Feature", "Horizon's regression algorithms on a pre-encoded signal set. Small Data", "Generation D. Feature Filtering E. Feature Refinement F. Nonlinear Backtesting", "but does not include Feature Refinement. 
The stages of a", "is a feature set that has encoded past information about", "methods is to encode signals in the frequency domain, extracting", "Feature Refinement F. Nonlinear Backtesting G. Nonlinear Prediction Linear ===============================================================================================================", "Data Forecasting =============================================================================================================== Time-series pipeline for small data. Does not", "method for the specified target. The stages of a stationarization", "=============================================================================================================== The feature discovery pipeline discovers features to maximize performance", "of a signal encoding pipeline are as follows: A. Forecast", "stationarisation = \"stationarisation\" time_series_regression = \"regression\" variational_forecasting = \"variational_forecasting\" custom", "Bayesian-esque error bounds, and is our recommended nonlinear regressor of", "predictive performance of the feature set on specified future points", "features to maximize performance for a particular forecast target, at", "algorithms on a pre-encoded signal set. Small Data Forecasting ===============================================================================================================", "regressor to generate forecasts that are at a specific target", "pipeline are as follows: A. Forecast Specification B. Stationarization Time-Series", "Augmented Dicky Fuller analysis, and a detrending method for the", "= \"regression\" variational_forecasting = \"variational_forecasting\" custom = \"custom\" small_data =", "are as follows: A. Forecast Specification B. Stationarization C. 
Nonlinear", "Refinement Feature Discovery =============================================================================================================== The feature discovery pipeline discovers features", "exported from the platform The stages of a signal encoding", "a feature discovery pipeline are as follows: A. Forecast Specification", "is a Variational Bayesian Linear Regressor The stages of a", "on a pre-encoded signal set. Small Data Forecasting =============================================================================================================== Time-series", "given target The stages of a linear pipeline are as", "The regressor used is a Variational Bayesian Linear Regressor The", "of choice. 2. XG Boost 3. Random Forest. The stages", "A. Forecast Specification B. Feature Generation C. Feature Filtering Stationarization", "vector may be used in a traditional regression sense to", "are as follows: A. Forecast Specification B. Stationarization C. Feature", "follows: A. Forecast Specification B. Stationarization C. Feature Generation D.", "future. Unlike the feature selection pipeline, it does not assume", "about a signal's past, such that a horizontal observation vector", "data. Does not contain any backtesting, and uses all the", "and uses all the data for model training. A. Forecast", "Does not contain any backtesting, and uses all the data", "selection pipeline assumes that the input data set already encodes", "the future. A number of different regressor types are available", "Generation D. Feature Filtering E. Linear Feature Generation F. Feature", "Custom =============================================================================================================== Advanced: Contains only a forecast specification stage for", "Mondrian Forest. 
An adaptation of the probabilistic Mondrian Forest algorithm", "a given target The stages of a linear pipeline are", "pipeline combines nonlinear feature generation and selection with a nonlinear", "Specification B. Feature Generation C. Feature Filtering D. Feature Refinement", "It is identical to the linear pipeline, but does not", "particular forecast target, at a specified point in the future.", "the original data's past. The stages of a feature discovery", "Refinement Signal Encoding =============================================================================================================== One of Horizon's feature generation methods", "= \"stationarisation\" time_series_regression = \"regression\" variational_forecasting = \"variational_forecasting\" custom =", "for a particular forecast target, at a specified point in", "traditional regression sense to map to a target value at", "F. Linear Backtesting G. Linear Prediction Fast Forecasting =============================================================================================================== The", "generation methods is to encode signals in the frequency domain,", "the future. Feat1 | Feat2 | Feat3 | .... |", "data set already encodes information about a signal's past, such", "\"signal_encoding\" stationarisation = \"stationarisation\" time_series_regression = \"regression\" variational_forecasting = \"variational_forecasting\"", "as follows: A. Forecast Specification B. Stationarization C. Feature Generation", "the specified target. The stages of a stationarization pipeline are", "nonlinear pipeline are as follows: A. Forecast Specification B. Stationarization", "is identical to the linear pipeline, but does not include", "| Feat3 | .... | FeatP Obs1 ------------------------------------- t Obs2", "class BlueprintType(Enum): \"\"\" A blueprint is a pipeline template in", "1. Mondrian Forest. 
An adaptation of the probabilistic Mondrian Forest", "\"nonlinear\" linear = \"linear\" fast_forecasting = \"fast_forecast\" feature_selection = \"feature_selection\"", "A blueprint is a pipeline template in horizon, and must", "a particular forecast target, at a specified point in the", "functionality to be isolated, where the output is a feature", "a nonlinear pipeline are as follows: A. Forecast Specification B.", "=============================================================================================================== A nonlinear pipeline combines nonlinear feature generation with a", "stages of a feature discovery pipeline are as follows: A.", "stationarization pipeline are as follows: A. Forecast Specification B. Stationarization", "combines nonlinear feature generation with a nonlinear regressor to generate", "t-N Two stages of feature selection are then used in", "nonlinear regressor of choice. 2. XG Boost 3. Random Forest.", "selection process, with lag-only feature generation. A. Forecast Specification B.", "Feature Filtering G. Linear Backtesting H. Linear Prediction Custom ===============================================================================================================", "any backtesting, and uses all the data for model training.", "\"stationarisation\" time_series_regression = \"regression\" variational_forecasting = \"variational_forecasting\" custom = \"custom\"", "Generation D. Feature Filtering E. Linear Backtesting F. Linear Prediction", "B. Stationarization C. Nonlinear Feature Generation D. Feature Filtering E.", "Filtering E. Feature Refinement G. Linear Prediction Variational Forecasting ===============================================================================================================", "Generation F. Feature Filtering G. Linear Backtesting H. 
Linear Prediction", "probabilistic Mondrian Forest algorithm - https://arxiv.org/abs/1406.2673 Provides Bayesian-esque error bounds,", "used as a quick assessment of a dataset's predictive performance", "Feature Generation C. Feature Filtering Stationarization =============================================================================================================== Stationarize a signal", "=============================================================================================================== Stationarize a signal set and specified target using Augmented", "Fuller analysis, and a detrending method for the specified target.", "algorithm - https://arxiv.org/abs/1406.2673 Provides Bayesian-esque error bounds, and is our", "regression algorithms on a pre-encoded signal set. Small Data Forecasting", "Specification B. Feature Filtering E. Feature Refinement Feature Discovery ===============================================================================================================", "different regressor types are available here: 1. Mondrian Forest. An", "intended to be used as a quick assessment of a", "of the feature set on specified future points for a", "Prediction Fast Forecasting =============================================================================================================== The fast forecasting pipeline is intended", "past, such that a horizontal observation vector may be used", "set. Small Data Forecasting =============================================================================================================== Time-series pipeline for small data.", "performance It is identical to the linear pipeline, but does", "D. Feature Refinement Signal Encoding =============================================================================================================== One of Horizon's feature", "a traditional regression sense to map to a target value", "a pre-encoded signal set. 
Small Data Forecasting =============================================================================================================== Time-series pipeline", "predictive performance It is identical to the linear pipeline, but", "The signal encoding pipeline allows for this functionality to be", "pipeline discovers features to maximize performance for a particular forecast", "pipeline combines nonlinear feature generation with a nonlinear regressor to", "analysis, and a detrending method for the specified target. The", "a horizontal observation vector may be used in a traditional", "the frequency domain, extracting historic lags that will efficiently represent", "pipeline, but does not include Feature Refinement. The stages of", "target The stages of a linear pipeline are as follows:", "in order to maximize predictive performance of the feature set", "be specified when creating a new pipeline Nonlinear =============================================================================================================== A", "feature discovery pipeline are as follows: A. Forecast Specification B.", "Forecast Specification B. Feature Generation C. Feature Filtering D. Feature", "manually. N.B. There is no validation on stage addition. \"\"\"", "Provides Bayesian-esque error bounds, and is our recommended nonlinear regressor", "nonlinear feature generation with a nonlinear regressor to generate forecasts", "of a dataset's predictive performance It is identical to the", "selection are then used in order to maximize predictive performance", "in the future. Unlike the feature selection pipeline, it does", "Specification B. Stationarization Time-Series Regression =============================================================================================================== Run Horizon's regression algorithms", "Refinement. 
The stages of a linear pipeline are as follows:", "historic lags that will efficiently represent the information contained within", "to generate forecasts that are at a specific target in", "B. Stationarization C. Feature Generation D. Feature Filtering E. Feature", "Feature Filtering D. Feature Refinement Signal Encoding =============================================================================================================== One of", "not include Feature Refinement. The stages of a linear pipeline", "Creates a stacked lag-embedding matrix by combining a two-stage feature", "Filtering G. Linear Backtesting H. Linear Prediction Custom =============================================================================================================== Advanced:", "Feature Filtering E. Linear Backtesting F. Linear Prediction Feature Selection", "Obs2 ------------------------------------- t-1 Obs3 ------------------------------------- t-2 ... ..................................... ... .....................................", "encodes information about a signal's past, such that a horizontal", "from the platform The stages of a signal encoding pipeline", "Linear Backtesting H. Linear Prediction Custom =============================================================================================================== Advanced: Contains only", "from enum import Enum class BlueprintType(Enum): \"\"\" A blueprint is", "Nonlinear Backtesting G. Nonlinear Prediction Linear =============================================================================================================== A nonlinear pipeline", "assumes that the input data set already encodes information about", "order to maximize predictive performance of the feature set on", "has already encoded historical information about the original data's past.", "------------------------------------- t-1 Obs3 ------------------------------------- t-2 ... ..................................... ... 
..................................... ObsN", "that has encoded past information about a signal that can", "pipeline are as follows: A. Forecast Specification B. Feature Filtering", "follows: A. Forecast Specification B. Stationarization C. Nonlinear Feature Generation", "signal that can be exported from the platform The stages", "=============================================================================================================== The feature selection pipeline assumes that the input data", "with a nonlinear regressor to generate forecasts that are at", "to a target value at a point in the future.", "value at a point in the future. Feat1 | Feat2", "Forecasting =============================================================================================================== The fast forecasting pipeline is intended to be", "pipeline allows for this functionality to be isolated, where the", "the probabilistic Mondrian Forest algorithm - https://arxiv.org/abs/1406.2673 Provides Bayesian-esque error", "Feature Filtering E. Linear Feature Generation F. Feature Filtering G.", "linear pipeline are as follows: A. Forecast Specification B. Stationarization", "signal set has already encoded historical information about the original", "dataset's predictive performance It is identical to the linear pipeline,", "Feature Generation F. Feature Filtering G. Linear Backtesting H. Linear", "represent the information contained within them. The signal encoding pipeline", "model training. A. Forecast Specification B. Stationarization C. Linear Feature", "Generation D. Feature Filtering E. Feature Refinement G. Linear Prediction", "of Horizon's feature generation methods is to encode signals in", "Forecast Specification B. Feature Generation C. 
Feature Filtering Stationarization ===============================================================================================================", "------------------------------------- t Obs2 ------------------------------------- t-1 Obs3 ------------------------------------- t-2 ... .....................................", "pipeline assumes that the input data set already encodes information", "= \"nonlinear\" linear = \"linear\" fast_forecasting = \"fast_forecast\" feature_selection =", "Backtesting G. Linear Prediction Fast Forecasting =============================================================================================================== The fast forecasting", "Nonlinear =============================================================================================================== A nonlinear pipeline combines nonlinear feature generation and", "contained within them. The signal encoding pipeline allows for this", "of a nonlinear pipeline are as follows: A. Forecast Specification", "fast_forecasting = \"fast_forecast\" feature_selection = \"feature_selection\" feature_discovery = \"feature_discovery\" signal_encoding", "and is our recommended nonlinear regressor of choice. 2. XG", "number of different regressor types are available here: 1. Mondrian", "is to encode signals in the frequency domain, extracting historic", "Filtering E. Linear Feature Generation F. Feature Filtering G. Linear", "=============================================================================================================== One of Horizon's feature generation methods is to encode", "C. Linear Feature Generation D. Feature Filtering E. Feature Refinement", "Forest. An adaptation of the probabilistic Mondrian Forest algorithm -", "A. Forecast Specification B. Stationarization C. Linear Feature Generation D.", "= \"linear\" fast_forecasting = \"fast_forecast\" feature_selection = \"feature_selection\" feature_discovery =", "for small data. 
Does not contain any backtesting, and uses", "feature selection pipeline assumes that the input data set already", "output is a feature set that has encoded past information", "C. Feature Filtering D. Feature Refinement Signal Encoding =============================================================================================================== One", "stages manually. N.B. There is no validation on stage addition.", "There is no validation on stage addition. \"\"\" nonlinear =", "target value at a point in the future. Feat1 |", "forecasts that are at a specific target in the future.", "may be used in a traditional regression sense to map", "A. Forecast Specification B. Feature Filtering E. Feature Refinement Feature", "and selection process, with lag-only feature generation. A. Forecast Specification", "set has already encoded historical information about the original data's", "Prediction Feature Selection =============================================================================================================== The feature selection pipeline assumes that", "of different regressor types are available here: 1. Mondrian Forest.", "future. A number of different regressor types are available here:", "feature selection pipeline, it does not assume that the signal", "feature generation and selection process, with lag-only feature generation. A.", "Linear =============================================================================================================== A nonlinear pipeline combines nonlinear feature generation with", "pipeline Nonlinear =============================================================================================================== A nonlinear pipeline combines nonlinear feature generation", "Feature Generation D. Feature Filtering E. Feature Refinement F. Nonlinear", "in the future. 
A number of different regressor types are", "performance of the feature set on specified future points for", "pipeline is intended to be used as a quick assessment", "stage addition. \"\"\" nonlinear = \"nonlinear\" linear = \"linear\" fast_forecasting", "nonlinear = \"nonlinear\" linear = \"linear\" fast_forecasting = \"fast_forecast\" feature_selection", "the linear pipeline, but does not include Feature Refinement. The", "The feature discovery pipeline discovers features to maximize performance for", "The fast forecasting pipeline is intended to be used as", "will efficiently represent the information contained within them. The signal", "forecast specification stage for adding stages manually. N.B. There is", "D. Feature Filtering E. Feature Refinement G. Linear Prediction Variational", "https://arxiv.org/abs/1406.2673 Provides Bayesian-esque error bounds, and is our recommended nonlinear", "Stationarization Time-Series Regression =============================================================================================================== Run Horizon's regression algorithms on a", "using Augmented Dicky Fuller analysis, and a detrending method for", "are available here: 1. Mondrian Forest. An adaptation of the", "a detrending method for the specified target. The stages of", "generation and selection process, with lag-only feature generation. A. Forecast", "of a linear pipeline are as follows: A. Forecast Specification", "follows: A. Forecast Specification B. Feature Generation C. Feature Filtering", "of the probabilistic Mondrian Forest algorithm - https://arxiv.org/abs/1406.2673 Provides Bayesian-esque", "Mondrian Forest algorithm - https://arxiv.org/abs/1406.2673 Provides Bayesian-esque error bounds, and", "the signal set has already encoded historical information about the", "efficiently represent the information contained within them. 
The signal encoding", "in horizon, and must be specified when creating a new", "to be isolated, where the output is a feature set", "maximize performance for a particular forecast target, at a specified", "are as follows: A. Forecast Specification B. Feature Generation C.", "Specification B. Stationarization C. Feature Generation D. Feature Filtering E.", "for model training. A. Forecast Specification B. Stationarization C. Linear", "\"\"\" A blueprint is a pipeline template in horizon, and", "------------------------------------- t-N Two stages of feature selection are then used", "Advanced: Contains only a forecast specification stage for adding stages", "Linear Backtesting G. Linear Prediction Fast Forecasting =============================================================================================================== The fast", "and a detrending method for the specified target. The stages", "Feature Generation D. Feature Filtering E. Feature Refinement F. Linear", "Boost 3. Random Forest. The stages of a nonlinear pipeline", "Feature Selection =============================================================================================================== The feature selection pipeline assumes that the", "Linear Prediction Feature Selection =============================================================================================================== The feature selection pipeline assumes", "Forest algorithm - https://arxiv.org/abs/1406.2673 Provides Bayesian-esque error bounds, and is", "Fast Forecasting =============================================================================================================== The fast forecasting pipeline is intended to", "forecasting pipeline is intended to be used as a quick", "Time-series pipeline for small data. Does not contain any backtesting,", "for the specified target. The stages of a stationarization pipeline", "signal encoding pipeline are as follows: A. 
Forecast Specification B.", "detrending method for the specified target. The stages of a", "of a stationarization pipeline are as follows: A. Forecast Specification", "with lag-only feature generation. A. Forecast Specification B. Stationarization C.", "encoding pipeline are as follows: A. Forecast Specification B. Feature", "as follows: A. Forecast Specification B. Feature Generation C. Feature", "bounds, and is our recommended nonlinear regressor of choice. 2.", "already encoded historical information about the original data's past. The", "Small Data Forecasting =============================================================================================================== Time-series pipeline for small data. Does", "3. Random Forest. The stages of a nonlinear pipeline are", "target in the future. A number of different regressor types", "One of Horizon's feature generation methods is to encode signals", "fast forecasting pipeline is intended to be used as a", "are as follows: A. Forecast Specification B. Feature Filtering E.", "uses all the data for model training. A. Forecast Specification", "validation on stage addition. \"\"\" nonlinear = \"nonlinear\" linear =", "Enum class BlueprintType(Enum): \"\"\" A blueprint is a pipeline template", "E. Linear Backtesting F. Linear Prediction Feature Selection =============================================================================================================== The", "Specification B. Feature Generation C. Feature Filtering Stationarization =============================================================================================================== Stationarize", "\"regression\" variational_forecasting = \"variational_forecasting\" custom = \"custom\" small_data = \"small_data\"", "such that a horizontal observation vector may be used in", "data's past. 
The stages of a feature discovery pipeline are", "FeatP Obs1 ------------------------------------- t Obs2 ------------------------------------- t-1 Obs3 ------------------------------------- t-2", "two-stage feature generation and selection process, with lag-only feature generation.", "a point in the future. Feat1 | Feat2 | Feat3", "stacked lag-embedding matrix by combining a two-stage feature generation and", "Two stages of feature selection are then used in order", "Horizon's feature generation methods is to encode signals in the", "backtesting, and uses all the data for model training. A.", "Nonlinear Feature Generation D. Feature Filtering E. Linear Backtesting F.", "at a point in the future. Feat1 | Feat2 |", "E. Feature Refinement F. Nonlinear Backtesting G. Nonlinear Prediction Linear", "available here: 1. Mondrian Forest. An adaptation of the probabilistic", "information about a signal's past, such that a horizontal observation", "The stages of a signal encoding pipeline are as follows:", "data for model training. A. Forecast Specification B. Stationarization C.", "Signal Encoding =============================================================================================================== One of Horizon's feature generation methods is", "\"linear\" fast_forecasting = \"fast_forecast\" feature_selection = \"feature_selection\" feature_discovery = \"feature_discovery\"", "XG Boost 3. Random Forest. The stages of a nonlinear", "t-2 ... ..................................... ... ..................................... ObsN ------------------------------------- t-N Two stages", "Stationarization C. Linear Feature Generation D. Feature Filtering E. Linear", "that will efficiently represent the information contained within them. 
The", "Stationarization =============================================================================================================== Stationarize a signal set and specified target using", "a feature set that has encoded past information about a", "F. Feature Filtering G. Linear Backtesting H. Linear Prediction Custom", "encoded past information about a signal that can be exported", "error bounds, and is our recommended nonlinear regressor of choice.", "F. Linear Prediction Feature Selection =============================================================================================================== The feature selection pipeline", "B. Feature Generation C. Feature Filtering Stationarization =============================================================================================================== Stationarize a", "Random Forest. The stages of a nonlinear pipeline are as", "quick assessment of a dataset's predictive performance It is identical", "Feature Generation C. Feature Filtering D. Feature Refinement Signal Encoding", "platform The stages of a signal encoding pipeline are as", "encode signals in the frequency domain, extracting historic lags that", "point in the future. Feat1 | Feat2 | Feat3 |", "Filtering D. Feature Refinement Signal Encoding =============================================================================================================== One of Horizon's", "t Obs2 ------------------------------------- t-1 Obs3 ------------------------------------- t-2 ... ..................................... ...", "assessment of a dataset's predictive performance It is identical to", "Stationarization C. Nonlinear Feature Generation D. Feature Filtering E. Linear", "not contain any backtesting, and uses all the data for", "are at a specific target in the future. The regressor", "at a specific target in the future. The regressor used", "feature generation. A. Forecast Specification B. Stationarization C. 
Linear Feature", "to encode signals in the frequency domain, extracting historic lags", "Feature Filtering E. Feature Refinement F. Nonlinear Backtesting G. Nonlinear", "Feature Refinement F. Linear Backtesting G. Linear Prediction Fast Forecasting", "small data. Does not contain any backtesting, and uses all", "Forecast Specification B. Feature Filtering E. Feature Refinement Feature Discovery", "lag-only feature generation. A. Forecast Specification B. Stationarization C. Linear", "specified when creating a new pipeline Nonlinear =============================================================================================================== A nonlinear", "new pipeline Nonlinear =============================================================================================================== A nonlinear pipeline combines nonlinear feature", "Filtering E. Feature Refinement F. Nonlinear Backtesting G. Nonlinear Prediction", "on stage addition. \"\"\" nonlinear = \"nonlinear\" linear = \"linear\"", "E. Feature Refinement Feature Discovery =============================================================================================================== The feature discovery pipeline", "Regressor The stages of a linear pipeline are as follows:", "generation. A. Forecast Specification B. Stationarization C. Linear Feature Generation", "a specific target in the future. A number of different", "A. Forecast Specification B. Feature Generation C. Feature Filtering D.", "stage for adding stages manually. N.B. There is no validation", "Filtering E. Feature Refinement F. Linear Backtesting G. Linear Prediction", "that a horizontal observation vector may be used in a", "An adaptation of the probabilistic Mondrian Forest algorithm - https://arxiv.org/abs/1406.2673" ]
[ "username = request.POST.get ( 'username' ) user = User.objects.filter (", "( 'olaf:resend_activation_email' ) ) def resend_activation_email ( request ): if", "): if ( request.POST.get ( 'game_id' ) is not None", ") return form_operation ( request, 'password_reset_request' ) def reset_password_action (", "def move ( request ): proccess_move ( request ) return", "{}, 'index', { 'message' : \"An email containing the password", "): args [ 'message' ] = message if ( request.user.is_authenticated", "( request ): if ( request.user.is_authenticated ): return HttpResponseRedirect (", "( 'olaf:reset_password' ) ) def activate_account ( request, token ):", "= lambda a : str ( a.date () ) +", ": ( usertools.login_user, LoginForm, 'olaf/login.html', {}, 'index', { 'message' :", ") ) return form_operation ( request, 'password_reset_request' ) def reset_password_action", "'message' : \"You're logged in. :)\"} ), 'register' : (", "token = token ).first () if ( tk is None", "[ 'game_board' ] = None return render ( request, 'olaf/index_logged_in.html',", "( usertools.init_pass_reset_token, ForgotPasswordUsernameOrEmailForm, 'olaf/password_reset_request.html', {}, 'index', { 'message' : \"An", "'username' ) user = User.objects.filter ( username = username ).first", "reverse ( 'index' ) ) return form_operation ( request, 'register'", "if ( request.method == 'POST' ): form = FORM (", "HttpResponseRedirect ( reverse ( 'olaf:resend_activation_email' ) ) def resend_activation_email (", "HttpResponseRedirect, HttpResponse from django.utils import timezone from olaf.models import *", "import proccess_move def index ( request ): args = {}", "= request.POST.get ( 'game_id' ) if ( game_id == '-1'", "{}, 'index', { 'message' : \"An activation email has been", "( reverse ( 'index' ) ) else: if ( timezone.now", "try getting a new one\" return HttpResponseRedirect ( reverse (", "[ 'game_id' ] = game_id else: request.session.pop ( 'game_id', default", "] = RegisterForm () args [ 'score' ] = 
list", ") def reset_password_action ( request, token ): if ( request.user.is_authenticated", "): return form_operation ( request, 'reset_password', token ) else: request.session", "sent to your email\" } ), } def form_operation (", "if ( timezone.now () <= tk.expiration_time ): return form_operation (", "token ) else: request.session [ 'message' ] = \"Link expired,", "): args [ 'game_board' ] = usertools.get_translated_game_board ( request )", ": \"Activation email successfully sent to your email\" } ),", "() ) + \" - \" + str ( a.hour", "*args ): func, FORM, fail_template, fail_args, success_url, success_args = form_operation_dict", "form, *args ) for key in success_args: request.session [ key", "): fail_args [ 'message' ] = message fail_args [ 'form'", "= form_operation_dict [ oper ] if ( request.method == 'POST'", "'game_list' ] = list ([str ( game.id ), f (", "): form = FORM ( request.POST ) if ( form.is_valid", "new one\" return HttpResponseRedirect ( reverse ( 'olaf:reset_password' ) )", "def activate_account ( request, token ): if ( request.user.is_authenticated ):", "( request ): if ( request.method == 'POST' ): username", "if ( request.user.is_authenticated ): return HttpResponseRedirect ( reverse ( 'index'", "== 'POST' ): username = request.POST.get ( 'username' ) user", ") if ( form.is_valid () ): func ( request, form,", "( request.user.is_authenticated ): args [ 'logged_in' ] = True return", "successfully sent to your email\" } ), } def form_operation", "return form_operation ( request, 'login' ) def register_user ( request", "\":\" + str ( a.minute ) + \":\" + str", "activated successfully\" return HttpResponseRedirect ( reverse ( 'olaf:login' ) )", "] = usertools.get_translated_game_board ( request ) else: args [ 'game_board'", "to you\" } ), 'password_reset_request' : ( usertools.init_pass_reset_token, ForgotPasswordUsernameOrEmailForm, 'olaf/password_reset_request.html',", "= usertools.new_game ( request ) request.session [ 'game_id' ] =", 
").order_by ( '-creation_time' ) ) if ( request.session.get ( 'game_id'", "getting a new one\" return HttpResponseRedirect ( reverse ( 'olaf:resend_activation_email'", "a.second ) args [ 'game_list' ] = list ([str (", "\"Password successfully changed, you can login now\" } ), 'resend_activation_email'", "\"Account already active\" return HttpResponseRedirect ( reverse ( 'index' )", "email successfully sent to your email\" } ), } def", "request.method == 'POST' ): if ( request.POST.get ( 'game_id' )", "( usertools.register_user, RegisterForm, 'olaf/register.html', {}, 'index', { 'message' : \"An", "*args ) for key in success_args: request.session [ key ]", "args ) def move ( request ): proccess_move ( request", "'message' ] = message fail_args [ 'form' ] = form", "+ \" - \" + str ( a.hour ) +", "= (username, ) ) ) else: args = {} message", "import User from django.shortcuts import render from django.urls import reverse", "'message' ] = \"Account already active\" return HttpResponseRedirect ( reverse", "from django.shortcuts import render from django.urls import reverse from django.http", "RegisterForm, 'olaf/register.html', {}, 'index', { 'message' : \"An activation email", "( request ) else: args [ 'game_board' ] = None", "() message = request.session.pop ( 'message', default = None )", "): game_id = request.POST.get ( 'game_id' ) if ( game_id", "] = LoginForm () args [ 'register_form' ] = RegisterForm", "request.user.is_authenticated ): if ( request.method == 'POST' ): if (", "= FORM ( request.POST ) if ( form.is_valid () ):", "HttpResponseRedirect ( reverse ( 'index' ) ) return form_operation (", "] = \"Broken link\" return HttpResponseRedirect ( reverse ( 'index'", "'olaf/scoreboard.html', args ) def move ( request ): proccess_move (", "{ 'message' : \"Activation email successfully sent to your email\"", "): request.session [ 'message' ] = \"Broken link\" return HttpResponseRedirect", "[ key ] return HttpResponseRedirect ( reverse ( success_url )", "( 
[user.master.username, user.wins, user.loses, user.ties] for user in UserData.objects.filter (", "+ str ( a.minute ) + \":\" + str (", "form_operation ( request, 'register' ) def password_reset_request ( request ):", "): return HttpResponseRedirect ( reverse ( 'index' ) ) tk", "] = \"Goodbye :)\" return HttpResponseRedirect ( reverse ( 'index'", "'message' ] = \"Link expired, try getting a new one\"", "render ( request, 'olaf/scoreboard.html', args ) def move ( request", "(user.master.username, user.wins, user.loses, user.ties) for user in UserData.objects.filter ( is_active", ") #view functions def login_user ( request ): if (", ") ) def activate_account ( request, token ): if (", "( game_id == '-1' ): game_id = usertools.new_game ( request", "request.POST ) if ( form.is_valid () ): func ( request,", "from django.contrib.auth.models import User from django.shortcuts import render from django.urls", "[ 'register_form' ] = RegisterForm () args [ 'score' ]", "} ), } def form_operation ( request, oper, *args ):", "form_operation ( request, 'password_reset_request' ) def reset_password_action ( request, token", "if ( tk is None ): request.session [ 'message' ]", "( request.user.is_authenticated ): if ( request.method == 'POST' ): if", "'game_id' ) if ( game_id == '-1' ): game_id =", "): if ( request.user.is_authenticated ): return HttpResponseRedirect ( reverse (", "] = None return render ( request, 'olaf/index_logged_in.html', args )", "request, 'password_reset_request' ) def reset_password_action ( request, token ): if", ") is not None ): args [ 'game_board' ] =", ") return form_operation ( request, 'login' ) def register_user (", "reset link will be sent to your email\"} ), 'reset_password'", ") args [ 'game_list' ] = list ([str ( game.id", "] = form return render ( request, fail_template, fail_args )", "( request, 'login' ) def register_user ( request ): if", ") ) else: if ( timezone.now () <= tk.expiration_time ):", "args [ 'register_form' ] = RegisterForm () args 
[ 'score'", "'reset_password', token ) else: request.session [ 'message' ] = \"Link", "scoreboard ( request ): if ( request.method == 'POST' ):", "): func, FORM, fail_template, fail_args, success_url, success_args = form_operation_dict [", "( timezone.now () <= tk.expiration_time ): if ( tk.user.is_active ):", "import * from olaf.utility import usertools from olaf.chess.controller import proccess_move", "if ( user is None ): request.session [ 'message' ]", ").first () if ( user is None ): request.session [", "'password_reset_request' ) def reset_password_action ( request, token ): if (", "key in success_args: request.session [ key ] = success_args [", "oper, *args ): func, FORM, fail_template, fail_args, success_url, success_args =", "django.utils import timezone from olaf.models import * from olaf.forms import", "( request, 'password_reset_request' ) def reset_password_action ( request, token ):", "( request, oper, *args ): func, FORM, fail_template, fail_args, success_url,", "game in request.user.userdata.game_history.filter ( result = 0 ).order_by ( '-creation_time'", "( 'index' ) ) else: if ( timezone.now () <=", "UserData.objects.filter ( is_active = True ) ] args [ 'lst'", "sent to your email\"} ), 'reset_password' : ( usertools.reset_password_action, PasswordChangeForm,", ") if ( game_id == '-1' ): game_id = usertools.new_game", "message lst = [ (user.master.username, user.wins, user.loses, user.ties) for user", "return form_operation ( request, 'password_reset_request' ) def reset_password_action ( request,", ") return form_operation ( request, 'register' ) def password_reset_request (", ") def move ( request ): proccess_move ( request )", "request ) else: args [ 'game_board' ] = None return", "* from olaf.forms import * from olaf.utility import usertools from", "'olaf/index_logged_in.html', args ) else: args [ 'login_form' ] = LoginForm", "game_id = usertools.new_game ( request ) request.session [ 'game_id' ]", "request, 'register' ) def 
password_reset_request ( request ): if (", "return HttpResponseRedirect ( reverse ( 'olaf:login' ) ) else: request.session", "success_url ) ) else: form = FORM () message =", "return HttpResponseRedirect ( reverse ( 'index' ) ) return form_operation", "\"User not found\" return HttpResponseRedirect ( reverse ( 'olaf:scoreboard' )", "request.session [ 'message' ] = \"Account already active\" return HttpResponseRedirect", "login_user ( request ): if ( request.user.is_authenticated ): return HttpResponseRedirect", "[ (user.master.username, user.wins, user.loses, user.ties) for user in UserData.objects.filter (", "def resend_activation_email ( request ): if ( request.user.is_authenticated ): return", "= request.POST.get ( 'username' ) user = User.objects.filter ( username", "return HttpResponseRedirect ( reverse ( 'index' ) ) tk =", "= lst if ( request.user.is_authenticated ): args [ 'logged_in' ]", "from django.contrib.auth.decorators import login_required from django.contrib.auth.models import User from django.shortcuts", "): if ( request.method == 'POST' ): if ( request.POST.get", "return form_operation ( request, 'reset_password', token ) else: request.session [", "( request.POST ) if ( form.is_valid () ): func (", "( 'index' ) ) return form_operation ( request, 'login' )", "request.session.pop ( 'message', default = None ) if ( message", ") else: args [ 'login_form' ] = LoginForm () args", ":)\"} ), 'register' : ( usertools.register_user, RegisterForm, 'olaf/register.html', {}, 'index',", "( request, fail_template, fail_args ) #view functions def login_user (", "request.session [ 'message' ] = \"Link expired, try getting a", "\"Your account has been activated successfully\" return HttpResponseRedirect ( reverse", "message is not None ): args [ 'message' ] =", "from olaf.models import * from olaf.forms import * from olaf.utility", "key ] = success_args [ key ] return HttpResponseRedirect (", "email\"} ), 'reset_password' : ( usertools.reset_password_action, 
PasswordChangeForm, 'olaf/reset_password.html', {}, 'olaf:login',", "return HttpResponseRedirect ( reverse ( 'olaf:user_profile', args = (username, )", ") + \":\" + str ( a.minute ) + \":\"", "+ \":\" + str ( a.second ) args [ 'game_list'", "from olaf.forms import * from olaf.utility import usertools from olaf.chess.controller", "[ 'message' ] = \"Broken link\" return HttpResponseRedirect ( reverse", "() if ( user is None ): request.session [ 'message'", "now\" } ), 'resend_activation_email' : ( usertools.resend_activation_email, ResendActivationUsernameOrEmailForm, 'olaf/resend_activation_email.html', {},", "request.POST.get ( 'game_id' ) if ( game_id == '-1' ):", "f = lambda a : str ( a.date () )", "request.session [ 'message' ] = \"Broken link\" return HttpResponseRedirect (", "( request ): usertools.logout_user ( request ) request.session [ 'message'", "request ): usertools.logout_user ( request ) request.session [ 'message' ]", "return HttpResponseRedirect ( reverse ( 'olaf:scoreboard' ) ) else: return", ") else: args = {} message = request.session.pop ( 'message',", "reverse from django.http import HttpResponseRedirect, HttpResponse from django.utils import timezone", "render ( request, 'olaf/index_not_logged_in.html', args ) form_operation_dict = { 'login'", "( request ): args = {} message = request.session.pop (", "'-1' ): game_id = usertools.new_game ( request ) request.session [", "if ( message is not None ): args [ 'message'", "'game_board' ] = usertools.get_translated_game_board ( request ) else: args [", "render from django.urls import reverse from django.http import HttpResponseRedirect, HttpResponse", "index ( request ): args = {} message = request.session.pop", "{}, 'index', { 'message' : \"You're logged in. 
:)\"} ),", "request.session [ key ] = success_args [ key ] return", "password reset link will be sent to your email\"} ),", "), 'password_reset_request' : ( usertools.init_pass_reset_token, ForgotPasswordUsernameOrEmailForm, 'olaf/password_reset_request.html', {}, 'index', {", "( game.creation_time )] for game in request.user.userdata.game_history.filter ( result =", "FORM ( request.POST ) if ( form.is_valid () ): func", "request.user.is_authenticated ): return HttpResponseRedirect ( reverse ( 'index' ) )", "FORM, fail_template, fail_args, success_url, success_args = form_operation_dict [ oper ]", "= RegisterForm () args [ 'score' ] = list (", "args ) form_operation_dict = { 'login' : ( usertools.login_user, LoginForm,", "been activated successfully\" return HttpResponseRedirect ( reverse ( 'olaf:login' )", ") else: userdata = tk.user userdata.is_active = True userdata.save ()", "True ) ) return render ( request, 'olaf/index_not_logged_in.html', args )", "else: userdata = tk.user userdata.is_active = True userdata.save () request.session", "None ): fail_args [ 'message' ] = message fail_args [", "resend_activation_email ( request ): if ( request.user.is_authenticated ): return HttpResponseRedirect", "to your email\" } ), } def form_operation ( request,", ":)\" return HttpResponseRedirect ( reverse ( 'index' ) ) def", "request.session [ 'message' ] = \"Goodbye :)\" return HttpResponseRedirect (", "import HttpResponseRedirect, HttpResponse from django.utils import timezone from olaf.models import", "( message is not None ): args [ 'message' ]", "None ): request.session [ 'message' ] = \"User not found\"", "form = FORM () message = request.session.pop ( 'message', default", "else: args [ 'login_form' ] = LoginForm () args [", "): if ( tk.user.is_active ): request.session [ 'message' ] =", "in success_args: request.session [ key ] = success_args [ key", "FORM () message = request.session.pop ( 'message', default = None", "= \"Your account has been activated 
successfully\" return HttpResponseRedirect (", "else: args [ 'game_board' ] = None return render (", "args [ 'logged_in' ] = True return render ( request,", "\"You're logged in. :)\"} ), 'register' : ( usertools.register_user, RegisterForm,", "( request.POST.get ( 'game_id' ) is not None ): game_id", "( request.method == 'POST' ): if ( request.POST.get ( 'game_id'", "( is_active = True ) ) return render ( request,", "( is_active = True ) ] args [ 'lst' ]", "import login_required from django.contrib.auth.models import User from django.shortcuts import render", "request ): proccess_move ( request ) return HttpResponseRedirect ( reverse", "lambda a : str ( a.date () ) + \"", "if ( request.method == 'POST' ): username = request.POST.get (", "username ).first () if ( user is None ): request.session", "( request, form, *args ) for key in success_args: request.session", "( 'olaf:scoreboard' ) ) else: return HttpResponseRedirect ( reverse (", "request, fail_template, fail_args ) #view functions def login_user ( request", "'message' ] = message if ( request.user.is_authenticated ): if (", "not None ): args [ 'message' ] = message if", "request.user.is_authenticated ): args [ 'logged_in' ] = True return render", "] if ( request.method == 'POST' ): form = FORM", "( reverse ( success_url ) ) else: form = FORM", "( usertools.login_user, LoginForm, 'olaf/login.html', {}, 'index', { 'message' : \"You're", "] = success_args [ key ] return HttpResponseRedirect ( reverse", ") def activate_account ( request, token ): if ( request.user.is_authenticated", "getting a new one\" return HttpResponseRedirect ( reverse ( 'olaf:reset_password'", "() args [ 'score' ] = list ( [user.master.username, user.wins,", "a new one\" return HttpResponseRedirect ( reverse ( 'olaf:resend_activation_email' )", "message fail_args [ 'form' ] = form return render (", "} ), 'resend_activation_email' : ( usertools.resend_activation_email, ResendActivationUsernameOrEmailForm, 'olaf/resend_activation_email.html', 
{}, 'index',", "not None ): fail_args [ 'message' ] = message fail_args", ") ) return render ( request, 'olaf/index_not_logged_in.html', args ) form_operation_dict", "'index', { 'message' : \"An activation email has been sent", "def reset_password_action ( request, token ): if ( request.user.is_authenticated ):", "= tk.user userdata.is_active = True userdata.save () request.session [ 'message'", "else: return HttpResponseRedirect ( reverse ( 'olaf:user_profile', args = (username,", "request, 'login' ) def register_user ( request ): if (", "tk.expiration_time ): return form_operation ( request, 'reset_password', token ) else:", "proccess_move def index ( request ): args = {} message", "return HttpResponseRedirect ( reverse ( 'olaf:resend_activation_email' ) ) def resend_activation_email", "for user in UserData.objects.filter ( is_active = True ) ]", "request, 'resend_activation_email' ) def logout_user ( request ): usertools.logout_user (", "( 'game_id' ) is not None ): game_id = request.POST.get", ") tk = ExpirableTokenField.objects.filter ( token = token ).first ()", "args = {} message = request.session.pop ( 'message', default =", "be sent to your email\"} ), 'reset_password' : ( usertools.reset_password_action,", "from django.utils import timezone from olaf.models import * from olaf.forms", "): args = {} message = request.session.pop ( 'message', default", "successfully\" return HttpResponseRedirect ( reverse ( 'olaf:login' ) ) else:", "usertools.register_user, RegisterForm, 'olaf/register.html', {}, 'index', { 'message' : \"An activation", "(username, ) ) ) else: args = {} message =", "( request, 'olaf/index_logged_in.html', args ) else: args [ 'login_form' ]", "'resend_activation_email' ) def logout_user ( request ): usertools.logout_user ( request", "= User.objects.filter ( username = username ).first () if (", "'index' ) ) return form_operation ( request, 'login' ) def", "): args [ 'logged_in' ] = True return render (", "([str ( game.id ), f ( 
game.creation_time )] for game", "usertools from olaf.chess.controller import proccess_move def index ( request ):", "( reverse ( 'index' ) ) def scoreboard ( request", "] = message fail_args [ 'form' ] = form return", "None ): request.session [ 'message' ] = \"Broken link\" return", "= True ) ) return render ( request, 'olaf/index_not_logged_in.html', args", "'message' : \"Password successfully changed, you can login now\" }", ") ) else: return HttpResponseRedirect ( reverse ( 'olaf:user_profile', args", "request.method == 'POST' ): username = request.POST.get ( 'username' )", ") return render ( request, 'olaf/index_not_logged_in.html', args ) form_operation_dict =", "found\" return HttpResponseRedirect ( reverse ( 'olaf:scoreboard' ) ) else:", "message if ( request.user.is_authenticated ): if ( request.method == 'POST'", "user.wins, user.loses, user.ties] for user in UserData.objects.filter ( is_active =", "import timezone from olaf.models import * from olaf.forms import *", "): func ( request, form, *args ) for key in", "from olaf.chess.controller import proccess_move def index ( request ): args", ")] for game in request.user.userdata.game_history.filter ( result = 0 ).order_by", "[ 'message' ] = \"Your account has been activated successfully\"", "userdata.is_active = True userdata.save () request.session [ 'message' ] =", "olaf.chess.controller import proccess_move def index ( request ): args =", "def logout_user ( request ): usertools.logout_user ( request ) request.session", "reverse ( 'olaf:scoreboard' ) ) else: return HttpResponseRedirect ( reverse", "{}, 'index', { 'message' : \"Activation email successfully sent to", "str ( a.minute ) + \":\" + str ( a.second", "is None ): request.session [ 'message' ] = \"User not", "form return render ( request, fail_template, fail_args ) #view functions", "lst = [ (user.master.username, user.wins, user.loses, user.ties) for user in", "( a.date () ) + \" - \" + str", ") ) ) else: args = {} message = 
request.session.pop", ") ) else: form = FORM () message = request.session.pop", "func ( request, form, *args ) for key in success_args:", "register_user ( request ): if ( request.user.is_authenticated ): return HttpResponseRedirect", "user.loses, user.ties) for user in UserData.objects.filter ( is_active = True", "HttpResponseRedirect ( reverse ( 'index' ) ) tk = ExpirableTokenField.objects.filter", "if ( request.session.get ( 'game_id' ) is not None ):", "'score' ] = list ( [user.master.username, user.wins, user.loses, user.ties] for", "'password_reset_request' : ( usertools.init_pass_reset_token, ForgotPasswordUsernameOrEmailForm, 'olaf/password_reset_request.html', {}, 'index', { 'message'", ") ) return form_operation ( request, 'register' ) def password_reset_request", "return HttpResponseRedirect ( reverse ( 'index' ) ) def scoreboard", "request ) request.session [ 'game_id' ] = game_id else: request.session.pop", "( 'index' ) ) return form_operation ( request, 'password_reset_request' )", ") return form_operation ( request, 'resend_activation_email' ) def logout_user (", "= success_args [ key ] return HttpResponseRedirect ( reverse (", "User from django.shortcuts import render from django.urls import reverse from", "[ key ] = success_args [ key ] return HttpResponseRedirect", "HttpResponseRedirect ( reverse ( success_url ) ) else: form =", "= \"Goodbye :)\" return HttpResponseRedirect ( reverse ( 'index' )", "already active\" return HttpResponseRedirect ( reverse ( 'index' ) )", "args [ 'login_form' ] = LoginForm () args [ 'register_form'", "def login_user ( request ): if ( request.user.is_authenticated ): return", "( tk.user.is_active ): request.session [ 'message' ] = \"Account already", "{} message = request.session.pop ( 'message', default = None )", "( 'index' ) ) return form_operation ( request, 'register' )", ") ] args [ 'lst' ] = lst if (", "request ) return HttpResponseRedirect ( reverse ( 'index' ) )", "request.session [ 'message' ] = \"User not 
found\" return HttpResponseRedirect", "email containing the password reset link will be sent to", "request, 'reset_password', token ) else: request.session [ 'message' ] =", ") else: request.session [ 'message' ] = \"Link expired, try", "( 'olaf:login' ) ) else: request.session [ 'message' ] =", "has been activated successfully\" return HttpResponseRedirect ( reverse ( 'olaf:login'", "( 'index' ) ) return form_operation ( request, 'resend_activation_email' )", "email has been sent to you\" } ), 'password_reset_request' :", "= FORM () message = request.session.pop ( 'message', default =", "+ str ( a.second ) args [ 'game_list' ] =", "form_operation ( request, 'reset_password', token ) else: request.session [ 'message'", "= message lst = [ (user.master.username, user.wins, user.loses, user.ties) for", "{ 'login' : ( usertools.login_user, LoginForm, 'olaf/login.html', {}, 'index', {", "render ( request, 'olaf/index_logged_in.html', args ) else: args [ 'login_form'", "\"Broken link\" return HttpResponseRedirect ( reverse ( 'index' ) )", "): usertools.logout_user ( request ) request.session [ 'message' ] =", "olaf.utility import usertools from olaf.chess.controller import proccess_move def index (", "{}, 'olaf:login', { 'message' : \"Password successfully changed, you can", "reverse ( 'index' ) ) return form_operation ( request, 'resend_activation_email'", "is_active = True ) ] args [ 'lst' ] =", "None ): args [ 'message' ] = message if (", "[ 'login_form' ] = LoginForm () args [ 'register_form' ]", "containing the password reset link will be sent to your", "): request.session [ 'message' ] = \"Account already active\" return", "\":\" + str ( a.second ) args [ 'game_list' ]", "): proccess_move ( request ) return HttpResponseRedirect ( reverse (", "{ 'message' : \"An activation email has been sent to", "str ( a.hour ) + \":\" + str ( a.minute", ") def password_reset_request ( request ): if ( request.user.is_authenticated ):", "( reverse ( 'olaf:resend_activation_email' ) 
) def resend_activation_email ( request", "UserData.objects.filter ( is_active = True ) ) return render (", "reset_password_action ( request, token ): if ( request.user.is_authenticated ): return", "\" - \" + str ( a.hour ) + \":\"", "[ 'game_list' ] = list ([str ( game.id ), f", "\"An activation email has been sent to you\" } ),", "( reverse ( 'olaf:login' ) ) else: request.session [ 'message'", "game_id == '-1' ): game_id = usertools.new_game ( request )", "form_operation ( request, oper, *args ): func, FORM, fail_template, fail_args,", "( request, 'olaf/index_not_logged_in.html', args ) form_operation_dict = { 'login' :", "( request.session.get ( 'game_id' ) is not None ): args", "message is not None ): fail_args [ 'message' ] =", "return form_operation ( request, 'resend_activation_email' ) def logout_user ( request", "ExpirableTokenField.objects.filter ( token = token ).first () if ( tk", "'index' ) ) else: if ( timezone.now () <= tk.expiration_time", "login now\" } ), 'resend_activation_email' : ( usertools.resend_activation_email, ResendActivationUsernameOrEmailForm, 'olaf/resend_activation_email.html',", "render ( request, fail_template, fail_args ) #view functions def login_user", "if ( tk.user.is_active ): request.session [ 'message' ] = \"Account", "() <= tk.expiration_time ): return form_operation ( request, 'reset_password', token", "olaf.models import * from olaf.forms import * from olaf.utility import", "<= tk.expiration_time ): if ( tk.user.is_active ): request.session [ 'message'", "login_required from django.contrib.auth.models import User from django.shortcuts import render from", "= form return render ( request, fail_template, fail_args ) #view", "a.hour ) + \":\" + str ( a.minute ) +", ": ( usertools.reset_password_action, PasswordChangeForm, 'olaf/reset_password.html', {}, 'olaf:login', { 'message' :", ") else: form = FORM () message = request.session.pop (", "= \"Broken link\" return HttpResponseRedirect ( reverse ( 'index' )", 
"'resend_activation_email' : ( usertools.resend_activation_email, ResendActivationUsernameOrEmailForm, 'olaf/resend_activation_email.html', {}, 'index', { 'message'", "( 'index' ) ) else: userdata = tk.user userdata.is_active =", "tk is None ): request.session [ 'message' ] = \"Broken", ") request.session [ 'message' ] = \"Goodbye :)\" return HttpResponseRedirect", "- \" + str ( a.hour ) + \":\" +", "success_args: request.session [ key ] = success_args [ key ]", "is None ): request.session [ 'message' ] = \"Broken link\"", "( username = username ).first () if ( user is", "reverse ( 'olaf:user_profile', args = (username, ) ) ) else:", "activation email has been sent to you\" } ), 'password_reset_request'", "() if ( tk is None ): request.session [ 'message'", "[ 'logged_in' ] = True return render ( request, 'olaf/scoreboard.html',", "( a.second ) args [ 'game_list' ] = list ([str", ") if ( message is not None ): fail_args [", "not None ): args [ 'message' ] = message lst", "( request ) request.session [ 'message' ] = \"Goodbye :)\"", "'login' ) def register_user ( request ): if ( request.user.is_authenticated", "in UserData.objects.filter ( is_active = True ) ] args [", "None ) if ( message is not None ): fail_args", "'index' ) ) return form_operation ( request, 'password_reset_request' ) def", "to your email\"} ), 'reset_password' : ( usertools.reset_password_action, PasswordChangeForm, 'olaf/reset_password.html',", "move ( request ): proccess_move ( request ) return HttpResponseRedirect", "from olaf.utility import usertools from olaf.chess.controller import proccess_move def index", "in. 
:)\"} ), 'register' : ( usertools.register_user, RegisterForm, 'olaf/register.html', {},", "[ 'lst' ] = lst if ( request.user.is_authenticated ): args", "request ): if ( request.method == 'POST' ): username =", "fail_template, fail_args ) #view functions def login_user ( request ):", "userdata.save () request.session [ 'message' ] = \"Your account has", "you\" } ), 'password_reset_request' : ( usertools.init_pass_reset_token, ForgotPasswordUsernameOrEmailForm, 'olaf/password_reset_request.html', {},", "] = list ([str ( game.id ), f ( game.creation_time", ": \"You're logged in. :)\"} ), 'register' : ( usertools.register_user,", "), 'resend_activation_email' : ( usertools.resend_activation_email, ResendActivationUsernameOrEmailForm, 'olaf/resend_activation_email.html', {}, 'index', {", "'index' ) ) else: userdata = tk.user userdata.is_active = True", "is not None ): args [ 'game_board' ] = usertools.get_translated_game_board", "#view functions def login_user ( request ): if ( request.user.is_authenticated", "expired, try getting a new one\" return HttpResponseRedirect ( reverse", "import reverse from django.http import HttpResponseRedirect, HttpResponse from django.utils import", "[ 'message' ] = message fail_args [ 'form' ] =", "] = list ( [user.master.username, user.wins, user.loses, user.ties] for user", "game.id ), f ( game.creation_time )] for game in request.user.userdata.game_history.filter", "usertools.login_user, LoginForm, 'olaf/login.html', {}, 'index', { 'message' : \"You're logged", "'message' ] = message lst = [ (user.master.username, user.wins, user.loses,", "token ).first () if ( tk is None ): request.session", "not None ): args [ 'game_board' ] = usertools.get_translated_game_board (", "= [ (user.master.username, user.wins, user.loses, user.ties) for user in UserData.objects.filter", "), 'reset_password' : ( usertools.reset_password_action, PasswordChangeForm, 'olaf/reset_password.html', {}, 'olaf:login', {", "= True userdata.save () request.session 
[ 'message' ] = \"Your", "] = message lst = [ (user.master.username, user.wins, user.loses, user.ties)", ") ) def resend_activation_email ( request ): if ( request.user.is_authenticated", "import * from olaf.forms import * from olaf.utility import usertools", "'message' : \"An email containing the password reset link will", "= \"User not found\" return HttpResponseRedirect ( reverse ( 'olaf:scoreboard'", "fail_args, success_url, success_args = form_operation_dict [ oper ] if (", "( game.id ), f ( game.creation_time )] for game in", "LoginForm () args [ 'register_form' ] = RegisterForm () args", "'olaf:resend_activation_email' ) ) def resend_activation_email ( request ): if (", "== 'POST' ): form = FORM ( request.POST ) if", "( a.minute ) + \":\" + str ( a.second )", "+ str ( a.hour ) + \":\" + str (", "form_operation ( request, 'resend_activation_email' ) def logout_user ( request ):", ") def scoreboard ( request ): if ( request.method ==", "def password_reset_request ( request ): if ( request.user.is_authenticated ): return", ") user = User.objects.filter ( username = username ).first ()", "usertools.logout_user ( request ) request.session [ 'message' ] = \"Goodbye", "( request, 'olaf/scoreboard.html', args ) def move ( request ):", "reverse ( 'index' ) ) return form_operation ( request, 'password_reset_request'", "f ( game.creation_time )] for game in request.user.userdata.game_history.filter ( result", "account has been activated successfully\" return HttpResponseRedirect ( reverse (", "password_reset_request ( request ): if ( request.user.is_authenticated ): return HttpResponseRedirect", "fail_args ) #view functions def login_user ( request ): if", "will be sent to your email\"} ), 'reset_password' : (", ") f = lambda a : str ( a.date ()", "( request, 'register' ) def password_reset_request ( request ): if", ": \"An activation email has been sent to you\" }", "( message is not None ): fail_args [ 'message' ]", ": ( usertools.init_pass_reset_token, 
ForgotPasswordUsernameOrEmailForm, 'olaf/password_reset_request.html', {}, 'index', { 'message' :", "( request, 'resend_activation_email' ) def logout_user ( request ): usertools.logout_user", "request.POST.get ( 'game_id' ) is not None ): game_id =", "your email\" } ), } def form_operation ( request, oper,", "[ oper ] if ( request.method == 'POST' ): form", "= message if ( request.user.is_authenticated ): if ( request.method ==", "oper ] if ( request.method == 'POST' ): form =", "'form' ] = form return render ( request, fail_template, fail_args", "usertools.new_game ( request ) request.session [ 'game_id' ] = game_id", "= True return render ( request, 'olaf/scoreboard.html', args ) def", "\" + str ( a.hour ) + \":\" + str", "request, 'olaf/index_not_logged_in.html', args ) form_operation_dict = { 'login' : (", "( tk is None ): request.session [ 'message' ] =", "usertools.reset_password_action, PasswordChangeForm, 'olaf/reset_password.html', {}, 'olaf:login', { 'message' : \"Password successfully", "<= tk.expiration_time ): return form_operation ( request, 'reset_password', token )", "= game_id else: request.session.pop ( 'game_id', default = None )", "def register_user ( request ): if ( request.user.is_authenticated ): return", "'olaf:scoreboard' ) ) else: return HttpResponseRedirect ( reverse ( 'olaf:user_profile',", ") + \" - \" + str ( a.hour )", "User.objects.filter ( username = username ).first () if ( user", "'lst' ] = lst if ( request.user.is_authenticated ): args [", "LoginForm, 'olaf/login.html', {}, 'index', { 'message' : \"You're logged in.", "= username ).first () if ( user is None ):", "[ 'message' ] = \"Account already active\" return HttpResponseRedirect (", "), 'register' : ( usertools.register_user, RegisterForm, 'olaf/register.html', {}, 'index', {", "[ 'message' ] = \"Link expired, try getting a new", "() <= tk.expiration_time ): if ( tk.user.is_active ): request.session [", "( reverse ( 'olaf:scoreboard' ) ) else: return HttpResponseRedirect 
(", "else: args = {} message = request.session.pop ( 'message', default", "fail_args [ 'message' ] = message fail_args [ 'form' ]", "usertools.resend_activation_email, ResendActivationUsernameOrEmailForm, 'olaf/resend_activation_email.html', {}, 'index', { 'message' : \"Activation email", "a.minute ) + \":\" + str ( a.second ) args", "True ) ] args [ 'lst' ] = lst if", "game_id else: request.session.pop ( 'game_id', default = None ) f", "( 'index' ) ) def scoreboard ( request ): if", "[ 'form' ] = form return render ( request, fail_template,", "'olaf/reset_password.html', {}, 'olaf:login', { 'message' : \"Password successfully changed, you", "( form.is_valid () ): func ( request, form, *args )", "if ( message is not None ): fail_args [ 'message'", "'olaf:user_profile', args = (username, ) ) ) else: args =", "] = \"Account already active\" return HttpResponseRedirect ( reverse (", "timezone from olaf.models import * from olaf.forms import * from", "), f ( game.creation_time )] for game in request.user.userdata.game_history.filter (", "from django.http import HttpResponseRedirect, HttpResponse from django.utils import timezone from", "( '-creation_time' ) ) if ( request.session.get ( 'game_id' )", "= usertools.get_translated_game_board ( request ) else: args [ 'game_board' ]", "'message' : \"An activation email has been sent to you\"", "= None ) f = lambda a : str (", "def index ( request ): args = {} message =", ") if ( message is not None ): args [", "= list ([str ( game.id ), f ( game.creation_time )]", "'-creation_time' ) ) if ( request.session.get ( 'game_id' ) is", "def form_operation ( request, oper, *args ): func, FORM, fail_template,", "else: form = FORM () message = request.session.pop ( 'message',", "if ( request.user.is_authenticated ): args [ 'logged_in' ] = True", "request, token ): if ( request.user.is_authenticated ): return HttpResponseRedirect (", "request.user.userdata.game_history.filter ( result = 0 ).order_by ( '-creation_time' ) )", "'game_id' 
) is not None ): args [ 'game_board' ]", "'index' ) ) return form_operation ( request, 'register' ) def", "= True ) ] args [ 'lst' ] = lst", "a : str ( a.date () ) + \" -", "'olaf:login' ) ) else: request.session [ 'message' ] = \"Link", "success_args = form_operation_dict [ oper ] if ( request.method ==", "list ([str ( game.id ), f ( game.creation_time )] for", "tk.user userdata.is_active = True userdata.save () request.session [ 'message' ]", "link will be sent to your email\"} ), 'reset_password' :", "( reverse ( 'index' ) ) return form_operation ( request,", "): username = request.POST.get ( 'username' ) user = User.objects.filter", "from django.urls import reverse from django.http import HttpResponseRedirect, HttpResponse from", "link\" return HttpResponseRedirect ( reverse ( 'index' ) ) else:", "+ \":\" + str ( a.minute ) + \":\" +", "is not None ): args [ 'message' ] = message", "HttpResponse from django.utils import timezone from olaf.models import * from", "user.wins, user.loses, user.ties) for user in UserData.objects.filter ( is_active =", ": str ( a.date () ) + \" - \"", "changed, you can login now\" } ), 'resend_activation_email' : (", "request.POST.get ( 'username' ) user = User.objects.filter ( username =", "you can login now\" } ), 'resend_activation_email' : ( usertools.resend_activation_email,", ").first () if ( tk is None ): request.session [", "): return HttpResponseRedirect ( reverse ( 'index' ) ) return", "'register_form' ] = RegisterForm () args [ 'score' ] =", "your email\"} ), 'reset_password' : ( usertools.reset_password_action, PasswordChangeForm, 'olaf/reset_password.html', {},", "HttpResponseRedirect ( reverse ( 'index' ) ) def scoreboard (", "[ 'score' ] = list ( [user.master.username, user.wins, user.loses, user.ties]", "fail_args [ 'form' ] = form return render ( request,", ") form_operation_dict = { 'login' : ( usertools.login_user, LoginForm, 'olaf/login.html',", "] = \"User not found\" return HttpResponseRedirect ( reverse (", 
"successfully changed, you can login now\" } ), 'resend_activation_email' :", "'logged_in' ] = True return render ( request, 'olaf/scoreboard.html', args", "for key in success_args: request.session [ key ] = success_args", "'olaf/resend_activation_email.html', {}, 'index', { 'message' : \"Activation email successfully sent", "[ 'message' ] = \"Goodbye :)\" return HttpResponseRedirect ( reverse", "== 'POST' ): if ( request.POST.get ( 'game_id' ) is", "game.creation_time )] for game in request.user.userdata.game_history.filter ( result = 0", "reverse ( success_url ) ) else: form = FORM ()", "} ), 'password_reset_request' : ( usertools.init_pass_reset_token, ForgotPasswordUsernameOrEmailForm, 'olaf/password_reset_request.html', {}, 'index',", "), } def form_operation ( request, oper, *args ): func,", "() ): func ( request, form, *args ) for key", "args [ 'game_board' ] = None return render ( request,", "'index', { 'message' : \"You're logged in. :)\"} ), 'register'", "email\" } ), } def form_operation ( request, oper, *args", "def scoreboard ( request ): if ( request.method == 'POST'", "request ) request.session [ 'message' ] = \"Goodbye :)\" return", "] = \"Link expired, try getting a new one\" return", "( a.hour ) + \":\" + str ( a.minute )", "user.ties) for user in UserData.objects.filter ( is_active = True )", "* from olaf.utility import usertools from olaf.chess.controller import proccess_move def", "): args [ 'message' ] = message lst = [", "'message' ] = \"User not found\" return HttpResponseRedirect ( reverse", "== '-1' ): game_id = usertools.new_game ( request ) request.session", "( reverse ( 'index' ) ) else: userdata = tk.user", ") request.session [ 'game_id' ] = game_id else: request.session.pop (", "( usertools.resend_activation_email, ResendActivationUsernameOrEmailForm, 'olaf/resend_activation_email.html', {}, 'index', { 'message' : \"Activation", "( usertools.reset_password_action, PasswordChangeForm, 'olaf/reset_password.html', {}, 'olaf:login', { 
'message' : \"Password", "] = True return render ( request, 'olaf/scoreboard.html', args )", "been sent to you\" } ), 'password_reset_request' : ( usertools.init_pass_reset_token,", ": \"Password successfully changed, you can login now\" } ),", "return HttpResponseRedirect ( reverse ( success_url ) ) else: form", "user.loses, user.ties] for user in UserData.objects.filter ( is_active = True", "( timezone.now () <= tk.expiration_time ): return form_operation ( request,", "request.session [ 'game_id' ] = game_id else: request.session.pop ( 'game_id',", "args [ 'score' ] = list ( [user.master.username, user.wins, user.loses,", "result = 0 ).order_by ( '-creation_time' ) ) if (", "( request ) return HttpResponseRedirect ( reverse ( 'index' )", "user = User.objects.filter ( username = username ).first () if", "return render ( request, fail_template, fail_args ) #view functions def", "user is None ): request.session [ 'message' ] = \"User", "( request ) request.session [ 'game_id' ] = game_id else:", "( 'message', default = None ) if ( message is", "str ( a.date () ) + \" - \" +", "success_args [ key ] return HttpResponseRedirect ( reverse ( success_url", "request.session.get ( 'game_id' ) is not None ): args [", "the password reset link will be sent to your email\"}", "default = None ) f = lambda a : str", ") def logout_user ( request ): usertools.logout_user ( request )", "= list ( [user.master.username, user.wins, user.loses, user.ties] for user in", "{ 'message' : \"Password successfully changed, you can login now\"", "None return render ( request, 'olaf/index_logged_in.html', args ) else: args", "else: request.session [ 'message' ] = \"Link expired, try getting", "( 'index' ) ) tk = ExpirableTokenField.objects.filter ( token =", "= token ).first () if ( tk is None ):", "django.contrib.auth.decorators import login_required from django.contrib.auth.models import User from django.shortcuts import", "request, form, *args ) for key in success_args: request.session [", 
"PasswordChangeForm, 'olaf/reset_password.html', {}, 'olaf:login', { 'message' : \"Password successfully changed,", "[ 'game_board' ] = usertools.get_translated_game_board ( request ) else: args", "0 ).order_by ( '-creation_time' ) ) if ( request.session.get (", "( request.method == 'POST' ): form = FORM ( request.POST", "( request.user.is_authenticated ): return HttpResponseRedirect ( reverse ( 'index' )", "( request, token ): if ( request.user.is_authenticated ): return HttpResponseRedirect", "one\" return HttpResponseRedirect ( reverse ( 'olaf:resend_activation_email' ) ) def", ") ) return form_operation ( request, 'login' ) def register_user", "success_url, success_args = form_operation_dict [ oper ] if ( request.method", "): game_id = usertools.new_game ( request ) request.session [ 'game_id'", "in request.user.userdata.game_history.filter ( result = 0 ).order_by ( '-creation_time' )", "list ( [user.master.username, user.wins, user.loses, user.ties] for user in UserData.objects.filter", ": ( usertools.register_user, RegisterForm, 'olaf/register.html', {}, 'index', { 'message' :", "one\" return HttpResponseRedirect ( reverse ( 'olaf:reset_password' ) ) def", "( 'game_id', default = None ) f = lambda a", "username = username ).first () if ( user is None", "timezone.now () <= tk.expiration_time ): return form_operation ( request, 'reset_password',", "form = FORM ( request.POST ) if ( form.is_valid ()", "'game_id' ) is not None ): game_id = request.POST.get (", "RegisterForm () args [ 'score' ] = list ( [user.master.username,", "HttpResponseRedirect ( reverse ( 'index' ) ) else: userdata =", "HttpResponseRedirect ( reverse ( 'olaf:user_profile', args = (username, ) )", "None ) if ( message is not None ): args", "tk.expiration_time ): if ( tk.user.is_active ): request.session [ 'message' ]", "is not None ): game_id = request.POST.get ( 'game_id' )", "reverse ( 'index' ) ) def scoreboard ( request ):", "= 0 ).order_by ( '-creation_time' ) ) if ( 
request.session.get", "] = \"Your account has been activated successfully\" return HttpResponseRedirect", "return form_operation ( request, 'register' ) def password_reset_request ( request", "is_active = True ) ) return render ( request, 'olaf/index_not_logged_in.html',", "() args [ 'register_form' ] = RegisterForm () args [", "game_id = request.POST.get ( 'game_id' ) if ( game_id ==", "= LoginForm () args [ 'register_form' ] = RegisterForm ()", "() request.session [ 'message' ] = \"Your account has been", "reverse ( 'index' ) ) else: userdata = tk.user userdata.is_active", "[ 'message' ] = message lst = [ (user.master.username, user.wins,", "a.date () ) + \" - \" + str (", ") + \":\" + str ( a.second ) args [", ": ( usertools.resend_activation_email, ResendActivationUsernameOrEmailForm, 'olaf/resend_activation_email.html', {}, 'index', { 'message' :", "'olaf/index_not_logged_in.html', args ) form_operation_dict = { 'login' : ( usertools.login_user,", "'index', { 'message' : \"An email containing the password reset", "} def form_operation ( request, oper, *args ): func, FORM,", "( 'olaf:user_profile', args = (username, ) ) ) else: args", "reverse ( 'olaf:reset_password' ) ) def activate_account ( request, token", "return render ( request, 'olaf/index_logged_in.html', args ) else: args [", "request, oper, *args ): func, FORM, fail_template, fail_args, success_url, success_args", "form_operation ( request, 'login' ) def register_user ( request ):", "None ) f = lambda a : str ( a.date", "lst if ( request.user.is_authenticated ): args [ 'logged_in' ] =", ") else: if ( timezone.now () <= tk.expiration_time ): return", "'POST' ): username = request.POST.get ( 'username' ) user =", "token ): if ( request.user.is_authenticated ): return HttpResponseRedirect ( reverse", "'olaf/register.html', {}, 'index', { 'message' : \"An activation email has", "'olaf/login.html', {}, 'index', { 'message' : \"You're logged in. 
:)\"}", "\"Goodbye :)\" return HttpResponseRedirect ( reverse ( 'index' ) )", "= message fail_args [ 'form' ] = form return render", "not found\" return HttpResponseRedirect ( reverse ( 'olaf:scoreboard' ) )", "not None ): game_id = request.POST.get ( 'game_id' ) if", "None ): game_id = request.POST.get ( 'game_id' ) if (", "return HttpResponseRedirect ( reverse ( 'olaf:reset_password' ) ) def activate_account", "= \"Link expired, try getting a new one\" return HttpResponseRedirect", "'login_form' ] = LoginForm () args [ 'register_form' ] =", "( reverse ( 'index' ) ) tk = ExpirableTokenField.objects.filter (", "] = lst if ( request.user.is_authenticated ): args [ 'logged_in'", "None ): args [ 'game_board' ] = usertools.get_translated_game_board ( request", "for game in request.user.userdata.game_history.filter ( result = 0 ).order_by (", "request ): if ( request.user.is_authenticated ): return HttpResponseRedirect ( reverse", "( user is None ): request.session [ 'message' ] =", "[ 'message' ] = \"User not found\" return HttpResponseRedirect (", "tk = ExpirableTokenField.objects.filter ( token = token ).first () if", "request.session [ 'message' ] = \"Your account has been activated", "args [ 'lst' ] = lst if ( request.user.is_authenticated ):", "str ( a.second ) args [ 'game_list' ] = list", "args [ 'game_list' ] = list ([str ( game.id ),", ": \"An email containing the password reset link will be", "reverse ( 'index' ) ) else: if ( timezone.now ()", ") ) else: userdata = tk.user userdata.is_active = True userdata.save", "] = game_id else: request.session.pop ( 'game_id', default = None", "message = request.session.pop ( 'message', default = None ) if", "'index', { 'message' : \"Activation email successfully sent to your", "None ): args [ 'message' ] = message lst =", "return HttpResponseRedirect ( reverse ( 'index' ) ) else: userdata", "proccess_move ( request ) return HttpResponseRedirect ( reverse ( 'index'", "activate_account ( request, token ): if ( 
request.user.is_authenticated ): return", "in UserData.objects.filter ( is_active = True ) ) return render", "args [ 'game_board' ] = usertools.get_translated_game_board ( request ) else:", "'olaf:login', { 'message' : \"Password successfully changed, you can login", "\"Link expired, try getting a new one\" return HttpResponseRedirect (", "'reset_password' : ( usertools.reset_password_action, PasswordChangeForm, 'olaf/reset_password.html', {}, 'olaf:login', { 'message'", "= \"Account already active\" return HttpResponseRedirect ( reverse ( 'index'", "( result = 0 ).order_by ( '-creation_time' ) ) if", "if ( timezone.now () <= tk.expiration_time ): if ( tk.user.is_active", "if ( game_id == '-1' ): game_id = usertools.new_game (", "usertools.get_translated_game_board ( request ) else: args [ 'game_board' ] =", "True return render ( request, 'olaf/scoreboard.html', args ) def move", "( success_url ) ) else: form = FORM () message", "reverse ( 'index' ) ) tk = ExpirableTokenField.objects.filter ( token", "'POST' ): if ( request.POST.get ( 'game_id' ) is not", "if ( request.user.is_authenticated ): if ( request.method == 'POST' ):", "{ 'message' : \"An email containing the password reset link", "user.ties] for user in UserData.objects.filter ( is_active = True )", "( 'game_id' ) if ( game_id == '-1' ): game_id", "can login now\" } ), 'resend_activation_email' : ( usertools.resend_activation_email, ResendActivationUsernameOrEmailForm,", "'index' ) ) return form_operation ( request, 'resend_activation_email' ) def", "'game_id', default = None ) f = lambda a :", "import render from django.urls import reverse from django.http import HttpResponseRedirect,", "'olaf:reset_password' ) ) def activate_account ( request, token ): if", ") ) return form_operation ( request, 'resend_activation_email' ) def logout_user", "olaf.forms import * from olaf.utility import usertools from olaf.chess.controller import", "if ( request.POST.get ( 'game_id' ) is not None ):", "if ( request.method 
== 'POST' ): if ( request.POST.get (", "= { 'login' : ( usertools.login_user, LoginForm, 'olaf/login.html', {}, 'index',", "logged in. :)\"} ), 'register' : ( usertools.register_user, RegisterForm, 'olaf/register.html',", "args [ 'message' ] = message if ( request.user.is_authenticated ):", "django.http import HttpResponseRedirect, HttpResponse from django.utils import timezone from olaf.models", "\"Activation email successfully sent to your email\" } ), }", "] = message if ( request.user.is_authenticated ): if ( request.method", "request.method == 'POST' ): form = FORM ( request.POST )", "form_operation_dict [ oper ] if ( request.method == 'POST' ):", "= None return render ( request, 'olaf/index_logged_in.html', args ) else:", ") ) else: request.session [ 'message' ] = \"Link expired,", "func, FORM, fail_template, fail_args, success_url, success_args = form_operation_dict [ oper", "form.is_valid () ): func ( request, form, *args ) for", "args = (username, ) ) ) else: args = {}", "'login' : ( usertools.login_user, LoginForm, 'olaf/login.html', {}, 'index', { 'message'", "logout_user ( request ): usertools.logout_user ( request ) request.session [", "request, 'olaf/index_logged_in.html', args ) else: args [ 'login_form' ] =", "return render ( request, 'olaf/scoreboard.html', args ) def move (", "= {} message = request.session.pop ( 'message', default = None", ") else: args [ 'game_board' ] = None return render", "userdata = tk.user userdata.is_active = True userdata.save () request.session [", "True userdata.save () request.session [ 'message' ] = \"Your account", "ResendActivationUsernameOrEmailForm, 'olaf/resend_activation_email.html', {}, 'index', { 'message' : \"Activation email successfully", "reverse ( 'index' ) ) return form_operation ( request, 'login'", "{ 'message' : \"You're logged in. 
:)\"} ), 'register' :", "'register' : ( usertools.register_user, RegisterForm, 'olaf/register.html', {}, 'index', { 'message'", "'message' : \"Activation email successfully sent to your email\" }", "usertools.init_pass_reset_token, ForgotPasswordUsernameOrEmailForm, 'olaf/password_reset_request.html', {}, 'index', { 'message' : \"An email", "key ] return HttpResponseRedirect ( reverse ( success_url ) )", ") is not None ): game_id = request.POST.get ( 'game_id'", "\"An email containing the password reset link will be sent", ") ) tk = ExpirableTokenField.objects.filter ( token = token ).first", "import usertools from olaf.chess.controller import proccess_move def index ( request", "return HttpResponseRedirect ( reverse ( 'index' ) ) else: if", ") ) def scoreboard ( request ): if ( request.method", ") def register_user ( request ): if ( request.user.is_authenticated ):", "if ( form.is_valid () ): func ( request, form, *args", "has been sent to you\" } ), 'password_reset_request' : (", "] args [ 'lst' ] = lst if ( request.user.is_authenticated", ") ) else: args = {} message = request.session.pop (", "django.shortcuts import render from django.urls import reverse from django.http import", "'olaf/password_reset_request.html', {}, 'index', { 'message' : \"An email containing the", ") if ( request.session.get ( 'game_id' ) is not None", "[user.master.username, user.wins, user.loses, user.ties] for user in UserData.objects.filter ( is_active", "'game_board' ] = None return render ( request, 'olaf/index_logged_in.html', args", "HttpResponseRedirect ( reverse ( 'olaf:scoreboard' ) ) else: return HttpResponseRedirect", "( request.method == 'POST' ): username = request.POST.get ( 'username'", "active\" return HttpResponseRedirect ( reverse ( 'index' ) ) else:", ") else: return HttpResponseRedirect ( reverse ( 'olaf:user_profile', args =", "default = None ) if ( message is not None", "else: request.session.pop ( 'game_id', default = None ) f =", "is not None ): fail_args [ 
'message' ] = message", "'message', default = None ) if ( message is not", "for user in UserData.objects.filter ( is_active = True ) )", "reverse ( 'olaf:resend_activation_email' ) ) def resend_activation_email ( request ):", "= request.session.pop ( 'message', default = None ) if (", "tk.user.is_active ): request.session [ 'message' ] = \"Account already active\"", "timezone.now () <= tk.expiration_time ): if ( tk.user.is_active ): request.session", "functions def login_user ( request ): if ( request.user.is_authenticated ):", "HttpResponseRedirect ( reverse ( 'olaf:login' ) ) else: request.session [", "user in UserData.objects.filter ( is_active = True ) ) return", ") for key in success_args: request.session [ key ] =", "a new one\" return HttpResponseRedirect ( reverse ( 'olaf:reset_password' )", "'message' ] = \"Goodbye :)\" return HttpResponseRedirect ( reverse (", "[ 'message' ] = message if ( request.user.is_authenticated ): if", "else: if ( timezone.now () <= tk.expiration_time ): if (", "( reverse ( 'olaf:user_profile', args = (username, ) ) )", "args ) else: args [ 'login_form' ] = LoginForm ()", "'message' ] = \"Your account has been activated successfully\" return", "( request ): proccess_move ( request ) return HttpResponseRedirect (", ") else: if ( timezone.now () <= tk.expiration_time ): if", "user in UserData.objects.filter ( is_active = True ) ] args", "request ): args = {} message = request.session.pop ( 'message',", "( 'username' ) user = User.objects.filter ( username = username", "'index' ) ) def scoreboard ( request ): if (", "'message' ] = \"Broken link\" return HttpResponseRedirect ( reverse (", "'register' ) def password_reset_request ( request ): if ( request.user.is_authenticated", "django.contrib.auth.models import User from django.shortcuts import render from django.urls import", "args [ 'message' ] = message lst = [ (user.master.username,", "return render ( request, 'olaf/index_not_logged_in.html', args ) form_operation_dict = {", 
"sent to you\" } ), 'password_reset_request' : ( usertools.init_pass_reset_token, ForgotPasswordUsernameOrEmailForm,", "( token = token ).first () if ( tk is", "= ExpirableTokenField.objects.filter ( token = token ).first () if (", "'index' ) ) tk = ExpirableTokenField.objects.filter ( token = token", ") ) if ( request.session.get ( 'game_id' ) is not", "new one\" return HttpResponseRedirect ( reverse ( 'olaf:resend_activation_email' ) )", "): request.session [ 'message' ] = \"User not found\" return", "( reverse ( 'olaf:reset_password' ) ) def activate_account ( request,", "( 'game_id' ) is not None ): args [ 'game_board'", "else: if ( timezone.now () <= tk.expiration_time ): return form_operation", "= None ) if ( message is not None ):", "django.urls import reverse from django.http import HttpResponseRedirect, HttpResponse from django.utils", "fail_template, fail_args, success_url, success_args = form_operation_dict [ oper ] if", "] return HttpResponseRedirect ( reverse ( success_url ) ) else:", "ForgotPasswordUsernameOrEmailForm, 'olaf/password_reset_request.html', {}, 'index', { 'message' : \"An email containing", "request, 'olaf/scoreboard.html', args ) def move ( request ): proccess_move", ") def resend_activation_email ( request ): if ( request.user.is_authenticated ):", "'game_id' ] = game_id else: request.session.pop ( 'game_id', default =", "( request, 'reset_password', token ) else: request.session [ 'message' ]", "form_operation_dict = { 'login' : ( usertools.login_user, LoginForm, 'olaf/login.html', {},", "reverse ( 'olaf:login' ) ) else: request.session [ 'message' ]", "request.session.pop ( 'game_id', default = None ) f = lambda", "'POST' ): form = FORM ( request.POST ) if (", "): if ( request.method == 'POST' ): username = request.POST.get", "HttpResponseRedirect ( reverse ( 'olaf:reset_password' ) ) def activate_account (", "HttpResponseRedirect ( reverse ( 'index' ) ) else: if (" ]
[ "import nn, optim from torch.nn import functional as F from", "Example') parser.add_argument('--batch-size', type=int, default=128, metavar='N', help='input batch size for training", "= torch.device(\"cuda\" if args.cuda else \"cpu\") writer = SummaryWriter() kwargs", "transforms from torchvision.utils import save_image from torch.utils.tensorboard import SummaryWriter from", "= not args.no_cuda and torch.cuda.is_available() torch.manual_seed(args.seed) device = torch.device(\"cuda\" if", "transform=transforms.ToTensor()), batch_size=args.batch_size ) test_sampler = SamplerDatasetWithReplacement( dataset=datasets.MNIST('../data', train=False, transform=transforms.ToTensor()), batch_size=args.batch_size", "import matplotlib.pyplot as plt from torch import nn, optim from", "parser.add_argument('--log-interval', type=int, default=10, metavar='N', help='how many batches to wait before", "dataset=datasets.MNIST('../data', train=False, transform=transforms.ToTensor()), batch_size=args.batch_size * 10 ) cevae = MinVae(", "input_size=28 * 28, output_size=10, latent_dim=2, hidden_sizes_dec=[5], device=device ).to(device) trainer =", "type=int, default=10, metavar='N', help='how many batches to wait before logging", "training (default: 128)') parser.add_argument('--epochs', type=int, default=10, metavar='N', help='number of epochs", "type=int, default=128, metavar='N', help='input batch size for training (default: 128)')", "metavar='N', help='how many batches to wait before logging training status')", "writer = SummaryWriter() kwargs = {'num_workers': 1, 'pin_memory': True} if", "import torch import torch.utils.data import matplotlib.pyplot as plt from torch", "from torchvision import datasets, transforms from torchvision.utils import save_image from", "download=True, transform=transforms.ToTensor()), batch_size=args.batch_size ) test_sampler = SamplerDatasetWithReplacement( dataset=datasets.MNIST('../data', train=False, transform=transforms.ToTensor()),", "type=int, 
default=10, metavar='N', help='number of epochs to train (default: 10)')", "seed (default: 1)') parser.add_argument('--log-interval', type=int, default=10, metavar='N', help='how many batches", "128)') parser.add_argument('--epochs', type=int, default=10, metavar='N', help='number of epochs to train", "device = torch.device(\"cuda\" if args.cuda else \"cpu\") writer = SummaryWriter()", "1, 'pin_memory': True} if args.cuda else {} train_sampler = SamplerDatasetWithReplacement(", "as plt from torch import nn, optim from torch.nn import", "metavar='N', help='number of epochs to train (default: 10)') parser.add_argument('--no-cuda', action='store_true',", "from ce_vae_test.trainer.ce_trainer import CeVaeTrainer from ce_vae_test.sampler.dataset_sampler import SamplerDatasetWithReplacement parser =", "type=int, default=1, metavar='S', help='random seed (default: 1)') parser.add_argument('--log-interval', type=int, default=10,", "torchvision.utils import save_image from torch.utils.tensorboard import SummaryWriter from ce_vae_test.networks.min_vae import", "hidden_sizes_dec=[5], device=device ).to(device) trainer = CeVaeTrainer( vae=cevae, num_epochs=300, train_loader=train_sampler, test_loader=test_sampler,", "torch.nn import functional as F from torchvision import datasets, transforms", "MNIST Example') parser.add_argument('--batch-size', type=int, default=128, metavar='N', help='input batch size for", "parser = argparse.ArgumentParser(description='VAE MNIST Example') parser.add_argument('--batch-size', type=int, default=128, metavar='N', help='input", "= {'num_workers': 1, 'pin_memory': True} if args.cuda else {} train_sampler", "train (default: 10)') parser.add_argument('--no-cuda', action='store_true', default=False, help='enables CUDA training') parser.add_argument('--seed',", "= SummaryWriter() kwargs = {'num_workers': 1, 'pin_memory': True} if args.cuda", "help='how many batches to wait before logging training status') args", "metavar='N', help='input batch size 
for training (default: 128)') parser.add_argument('--epochs', type=int,", "torch.utils.data import matplotlib.pyplot as plt from torch import nn, optim", "True} if args.cuda else {} train_sampler = SamplerDatasetWithReplacement( dataset=datasets.MNIST('../data', train=True,", "transform=transforms.ToTensor()), batch_size=args.batch_size * 10 ) cevae = MinVae( input_size=28 *", "batch_size=args.batch_size ) test_sampler = SamplerDatasetWithReplacement( dataset=datasets.MNIST('../data', train=False, transform=transforms.ToTensor()), batch_size=args.batch_size *", "SummaryWriter() kwargs = {'num_workers': 1, 'pin_memory': True} if args.cuda else", "dataset=datasets.MNIST('../data', train=True, download=True, transform=transforms.ToTensor()), batch_size=args.batch_size ) test_sampler = SamplerDatasetWithReplacement( dataset=datasets.MNIST('../data',", "default=1, metavar='S', help='random seed (default: 1)') parser.add_argument('--log-interval', type=int, default=10, metavar='N',", "torch.device(\"cuda\" if args.cuda else \"cpu\") writer = SummaryWriter() kwargs =", "torch import nn, optim from torch.nn import functional as F", "args = parser.parse_args() args.cuda = not args.no_cuda and torch.cuda.is_available() torch.manual_seed(args.seed)", "F from torchvision import datasets, transforms from torchvision.utils import save_image", "from torchvision.utils import save_image from torch.utils.tensorboard import SummaryWriter from ce_vae_test.networks.min_vae", "from torch.utils.tensorboard import SummaryWriter from ce_vae_test.networks.min_vae import MinVae from ce_vae_test.trainer.ce_trainer", "args.no_cuda and torch.cuda.is_available() torch.manual_seed(args.seed) device = torch.device(\"cuda\" if args.cuda else", "else \"cpu\") writer = SummaryWriter() kwargs = {'num_workers': 1, 'pin_memory':", "argparse.ArgumentParser(description='VAE MNIST Example') parser.add_argument('--batch-size', type=int, default=128, metavar='N', help='input batch size", "default=10, 
metavar='N', help='number of epochs to train (default: 10)') parser.add_argument('--no-cuda',", "device=device ).to(device) trainer = CeVaeTrainer( vae=cevae, num_epochs=300, train_loader=train_sampler, test_loader=test_sampler, writer=writer,", "28, output_size=10, latent_dim=2, hidden_sizes_dec=[5], device=device ).to(device) trainer = CeVaeTrainer( vae=cevae,", "datasets, transforms from torchvision.utils import save_image from torch.utils.tensorboard import SummaryWriter", "if args.cuda else \"cpu\") writer = SummaryWriter() kwargs = {'num_workers':", "train=True, download=True, transform=transforms.ToTensor()), batch_size=args.batch_size ) test_sampler = SamplerDatasetWithReplacement( dataset=datasets.MNIST('../data', train=False,", "CeVaeTrainer( vae=cevae, num_epochs=300, train_loader=train_sampler, test_loader=test_sampler, writer=writer, device=device, alpha=0.90, lamda=0.22 )", "to wait before logging training status') args = parser.parse_args() args.cuda", "{'num_workers': 1, 'pin_memory': True} if args.cuda else {} train_sampler =", "plt from torch import nn, optim from torch.nn import functional", "many batches to wait before logging training status') args =", "= argparse.ArgumentParser(description='VAE MNIST Example') parser.add_argument('--batch-size', type=int, default=128, metavar='N', help='input batch", "not args.no_cuda and torch.cuda.is_available() torch.manual_seed(args.seed) device = torch.device(\"cuda\" if args.cuda", "latent_dim=2, hidden_sizes_dec=[5], device=device ).to(device) trainer = CeVaeTrainer( vae=cevae, num_epochs=300, train_loader=train_sampler,", "args.cuda else \"cpu\") writer = SummaryWriter() kwargs = {'num_workers': 1,", "before logging training status') args = parser.parse_args() args.cuda = not", "{} train_sampler = SamplerDatasetWithReplacement( dataset=datasets.MNIST('../data', train=True, download=True, transform=transforms.ToTensor()), batch_size=args.batch_size )", "ce_vae_test.sampler.dataset_sampler import 
SamplerDatasetWithReplacement parser = argparse.ArgumentParser(description='VAE MNIST Example') parser.add_argument('--batch-size', type=int,", "* 10 ) cevae = MinVae( input_size=28 * 28, output_size=10,", "import functional as F from torchvision import datasets, transforms from", "import save_image from torch.utils.tensorboard import SummaryWriter from ce_vae_test.networks.min_vae import MinVae", "metavar='S', help='random seed (default: 1)') parser.add_argument('--log-interval', type=int, default=10, metavar='N', help='how", "(default: 1)') parser.add_argument('--log-interval', type=int, default=10, metavar='N', help='how many batches to", "= MinVae( input_size=28 * 28, output_size=10, latent_dim=2, hidden_sizes_dec=[5], device=device ).to(device)", "from __future__ import print_function import argparse import torch import torch.utils.data", "matplotlib.pyplot as plt from torch import nn, optim from torch.nn", "parser.add_argument('--seed', type=int, default=1, metavar='S', help='random seed (default: 1)') parser.add_argument('--log-interval', type=int,", "if args.cuda else {} train_sampler = SamplerDatasetWithReplacement( dataset=datasets.MNIST('../data', train=True, download=True,", "parser.add_argument('--batch-size', type=int, default=128, metavar='N', help='input batch size for training (default:", "torch.manual_seed(args.seed) device = torch.device(\"cuda\" if args.cuda else \"cpu\") writer =", "* 28, output_size=10, latent_dim=2, hidden_sizes_dec=[5], device=device ).to(device) trainer = CeVaeTrainer(", "else {} train_sampler = SamplerDatasetWithReplacement( dataset=datasets.MNIST('../data', train=True, download=True, transform=transforms.ToTensor()), batch_size=args.batch_size", "default=128, metavar='N', help='input batch size for training (default: 128)') parser.add_argument('--epochs',", "import argparse import torch import torch.utils.data import matplotlib.pyplot as plt", "import SamplerDatasetWithReplacement parser = 
argparse.ArgumentParser(description='VAE MNIST Example') parser.add_argument('--batch-size', type=int, default=128,", "<reponame>fgitmichael/SelfSupevisedSkillDiscovery<gh_stars>0 from __future__ import print_function import argparse import torch import", "from torch.nn import functional as F from torchvision import datasets,", "import print_function import argparse import torch import torch.utils.data import matplotlib.pyplot", "= SamplerDatasetWithReplacement( dataset=datasets.MNIST('../data', train=False, transform=transforms.ToTensor()), batch_size=args.batch_size * 10 ) cevae", "SummaryWriter from ce_vae_test.networks.min_vae import MinVae from ce_vae_test.trainer.ce_trainer import CeVaeTrainer from", "= SamplerDatasetWithReplacement( dataset=datasets.MNIST('../data', train=True, download=True, transform=transforms.ToTensor()), batch_size=args.batch_size ) test_sampler =", "torch import torch.utils.data import matplotlib.pyplot as plt from torch import", "size for training (default: 128)') parser.add_argument('--epochs', type=int, default=10, metavar='N', help='number", "for training (default: 128)') parser.add_argument('--epochs', type=int, default=10, metavar='N', help='number of", "argparse import torch import torch.utils.data import matplotlib.pyplot as plt from", "save_image from torch.utils.tensorboard import SummaryWriter from ce_vae_test.networks.min_vae import MinVae from", "SamplerDatasetWithReplacement( dataset=datasets.MNIST('../data', train=True, download=True, transform=transforms.ToTensor()), batch_size=args.batch_size ) test_sampler = SamplerDatasetWithReplacement(", "SamplerDatasetWithReplacement( dataset=datasets.MNIST('../data', train=False, transform=transforms.ToTensor()), batch_size=args.batch_size * 10 ) cevae =", "functional as F from torchvision import datasets, transforms from torchvision.utils", "import SummaryWriter from ce_vae_test.networks.min_vae import MinVae from ce_vae_test.trainer.ce_trainer import CeVaeTrainer", "10)') 
parser.add_argument('--no-cuda', action='store_true', default=False, help='enables CUDA training') parser.add_argument('--seed', type=int, default=1,", "as F from torchvision import datasets, transforms from torchvision.utils import", "torch.utils.tensorboard import SummaryWriter from ce_vae_test.networks.min_vae import MinVae from ce_vae_test.trainer.ce_trainer import", "torch.cuda.is_available() torch.manual_seed(args.seed) device = torch.device(\"cuda\" if args.cuda else \"cpu\") writer", "10 ) cevae = MinVae( input_size=28 * 28, output_size=10, latent_dim=2,", "MinVae( input_size=28 * 28, output_size=10, latent_dim=2, hidden_sizes_dec=[5], device=device ).to(device) trainer", "args.cuda = not args.no_cuda and torch.cuda.is_available() torch.manual_seed(args.seed) device = torch.device(\"cuda\"", "torchvision import datasets, transforms from torchvision.utils import save_image from torch.utils.tensorboard", "(default: 128)') parser.add_argument('--epochs', type=int, default=10, metavar='N', help='number of epochs to", "kwargs = {'num_workers': 1, 'pin_memory': True} if args.cuda else {}", "print_function import argparse import torch import torch.utils.data import matplotlib.pyplot as", "cevae = MinVae( input_size=28 * 28, output_size=10, latent_dim=2, hidden_sizes_dec=[5], device=device", "optim from torch.nn import functional as F from torchvision import", ") cevae = MinVae( input_size=28 * 28, output_size=10, latent_dim=2, hidden_sizes_dec=[5],", "trainer = CeVaeTrainer( vae=cevae, num_epochs=300, train_loader=train_sampler, test_loader=test_sampler, writer=writer, device=device, alpha=0.90,", "CeVaeTrainer from ce_vae_test.sampler.dataset_sampler import SamplerDatasetWithReplacement parser = argparse.ArgumentParser(description='VAE MNIST Example')", "train=False, transform=transforms.ToTensor()), batch_size=args.batch_size * 10 ) cevae = MinVae( input_size=28", "SamplerDatasetWithReplacement parser = argparse.ArgumentParser(description='VAE MNIST Example') 
parser.add_argument('--batch-size', type=int, default=128, metavar='N',", "status') args = parser.parse_args() args.cuda = not args.no_cuda and torch.cuda.is_available()", "to train (default: 10)') parser.add_argument('--no-cuda', action='store_true', default=False, help='enables CUDA training')", "default=False, help='enables CUDA training') parser.add_argument('--seed', type=int, default=1, metavar='S', help='random seed", "epochs to train (default: 10)') parser.add_argument('--no-cuda', action='store_true', default=False, help='enables CUDA", "and torch.cuda.is_available() torch.manual_seed(args.seed) device = torch.device(\"cuda\" if args.cuda else \"cpu\")", "output_size=10, latent_dim=2, hidden_sizes_dec=[5], device=device ).to(device) trainer = CeVaeTrainer( vae=cevae, num_epochs=300,", "= CeVaeTrainer( vae=cevae, num_epochs=300, train_loader=train_sampler, test_loader=test_sampler, writer=writer, device=device, alpha=0.90, lamda=0.22", "parser.add_argument('--epochs', type=int, default=10, metavar='N', help='number of epochs to train (default:", "import CeVaeTrainer from ce_vae_test.sampler.dataset_sampler import SamplerDatasetWithReplacement parser = argparse.ArgumentParser(description='VAE MNIST", "import MinVae from ce_vae_test.trainer.ce_trainer import CeVaeTrainer from ce_vae_test.sampler.dataset_sampler import SamplerDatasetWithReplacement", ").to(device) trainer = CeVaeTrainer( vae=cevae, num_epochs=300, train_loader=train_sampler, test_loader=test_sampler, writer=writer, device=device,", "= parser.parse_args() args.cuda = not args.no_cuda and torch.cuda.is_available() torch.manual_seed(args.seed) device", "args.cuda else {} train_sampler = SamplerDatasetWithReplacement( dataset=datasets.MNIST('../data', train=True, download=True, transform=transforms.ToTensor()),", "training status') args = parser.parse_args() args.cuda = not args.no_cuda and", "train_sampler = SamplerDatasetWithReplacement( dataset=datasets.MNIST('../data', train=True, 
download=True, transform=transforms.ToTensor()), batch_size=args.batch_size ) test_sampler", "from ce_vae_test.networks.min_vae import MinVae from ce_vae_test.trainer.ce_trainer import CeVaeTrainer from ce_vae_test.sampler.dataset_sampler", "batch_size=args.batch_size * 10 ) cevae = MinVae( input_size=28 * 28,", "help='input batch size for training (default: 128)') parser.add_argument('--epochs', type=int, default=10,", "action='store_true', default=False, help='enables CUDA training') parser.add_argument('--seed', type=int, default=1, metavar='S', help='random", "test_sampler = SamplerDatasetWithReplacement( dataset=datasets.MNIST('../data', train=False, transform=transforms.ToTensor()), batch_size=args.batch_size * 10 )", "help='enables CUDA training') parser.add_argument('--seed', type=int, default=1, metavar='S', help='random seed (default:", "from torch import nn, optim from torch.nn import functional as", "vae=cevae, num_epochs=300, train_loader=train_sampler, test_loader=test_sampler, writer=writer, device=device, alpha=0.90, lamda=0.22 ) trainer.run()", "parser.parse_args() args.cuda = not args.no_cuda and torch.cuda.is_available() torch.manual_seed(args.seed) device =", "logging training status') args = parser.parse_args() args.cuda = not args.no_cuda", "nn, optim from torch.nn import functional as F from torchvision", "'pin_memory': True} if args.cuda else {} train_sampler = SamplerDatasetWithReplacement( dataset=datasets.MNIST('../data',", "batch size for training (default: 128)') parser.add_argument('--epochs', type=int, default=10, metavar='N',", ") test_sampler = SamplerDatasetWithReplacement( dataset=datasets.MNIST('../data', train=False, transform=transforms.ToTensor()), batch_size=args.batch_size * 10", "import torch.utils.data import matplotlib.pyplot as plt from torch import nn,", "wait before logging training status') args = parser.parse_args() args.cuda =", "batches to wait before logging training status') args = parser.parse_args()", 
"(default: 10)') parser.add_argument('--no-cuda', action='store_true', default=False, help='enables CUDA training') parser.add_argument('--seed', type=int,", "CUDA training') parser.add_argument('--seed', type=int, default=1, metavar='S', help='random seed (default: 1)')", "training') parser.add_argument('--seed', type=int, default=1, metavar='S', help='random seed (default: 1)') parser.add_argument('--log-interval',", "MinVae from ce_vae_test.trainer.ce_trainer import CeVaeTrainer from ce_vae_test.sampler.dataset_sampler import SamplerDatasetWithReplacement parser", "of epochs to train (default: 10)') parser.add_argument('--no-cuda', action='store_true', default=False, help='enables", "__future__ import print_function import argparse import torch import torch.utils.data import", "from ce_vae_test.sampler.dataset_sampler import SamplerDatasetWithReplacement parser = argparse.ArgumentParser(description='VAE MNIST Example') parser.add_argument('--batch-size',", "ce_vae_test.trainer.ce_trainer import CeVaeTrainer from ce_vae_test.sampler.dataset_sampler import SamplerDatasetWithReplacement parser = argparse.ArgumentParser(description='VAE", "\"cpu\") writer = SummaryWriter() kwargs = {'num_workers': 1, 'pin_memory': True}", "parser.add_argument('--no-cuda', action='store_true', default=False, help='enables CUDA training') parser.add_argument('--seed', type=int, default=1, metavar='S',", "1)') parser.add_argument('--log-interval', type=int, default=10, metavar='N', help='how many batches to wait", "ce_vae_test.networks.min_vae import MinVae from ce_vae_test.trainer.ce_trainer import CeVaeTrainer from ce_vae_test.sampler.dataset_sampler import", "help='number of epochs to train (default: 10)') parser.add_argument('--no-cuda', action='store_true', default=False,", "help='random seed (default: 1)') parser.add_argument('--log-interval', type=int, default=10, metavar='N', help='how many", "import datasets, transforms from torchvision.utils import save_image from 
torch.utils.tensorboard import", "default=10, metavar='N', help='how many batches to wait before logging training" ]
[ "name = 'logout' help_message = \"logout\" def __init__(self, options): super(LogoutCmd,", "self.package = \"%s/%s\" % (namespace, pname) elif namespace: self.package =", "creds only logout for them\") def _call(self): client = self.RegistryClient(self.registry_host)", "% self.registry_host def _render_dict(self): return {\"status\": self.status, 'host': self.registry_host, \"scope\":", "@classmethod def _add_arguments(cls, parser): cls._add_registryhost_option(parser) parser.add_argument('registry', nargs='?', default=None, action=PackageSplit, help=\"registry", "self.registry_host, \"scope\": self.package} def _render_console(self): return \" >>> %s\" %", "% (namespace, pname) elif namespace: self.package = namespace @classmethod def", "quay.io[/namespace][/repo]\\n\" + \"If namespace and/or repo are passed, creds only", "self.package_parts.get('package', None) namespace = self.package_parts.get('namespace', None) self.package = None if", "namespace = self.package_parts.get('namespace', None) self.package = None if pname: self.package", "options.package_parts pname = self.package_parts.get('package', None) namespace = self.package_parts.get('namespace', None) self.package", "__future__ import absolute_import, division, print_function from appr.auth import ApprAuth from", "= options.package_parts pname = self.package_parts.get('package', None) namespace = self.package_parts.get('namespace', None)", "complete\" if self.registry_host != '*': self.status += \" from %s\"", "namespace: self.package = namespace @classmethod def _add_arguments(cls, parser): cls._add_registryhost_option(parser) parser.add_argument('registry',", "self).__init__(options) self.status = None self.registry_host = options.registry_host self.package_parts = options.package_parts", "parser.add_argument('registry', nargs='?', default=None, action=PackageSplit, help=\"registry url: quay.io[/namespace][/repo]\\n\" + \"If namespace", "passed, creds only logout for them\") def _call(self): client 
=", "+ \"If namespace and/or repo are passed, creds only logout", "self.status = \"Logout complete\" if self.registry_host != '*': self.status +=", "_call(self): client = self.RegistryClient(self.registry_host) ApprAuth().delete_token(client.host, scope=self.package) self.status = \"Logout complete\"", "self.status, 'host': self.registry_host, \"scope\": self.package} def _render_console(self): return \" >>>", "self.status += \" from %s\" % self.registry_host def _render_dict(self): return", "_render_dict(self): return {\"status\": self.status, 'host': self.registry_host, \"scope\": self.package} def _render_console(self):", "super(LogoutCmd, self).__init__(options) self.status = None self.registry_host = options.registry_host self.package_parts =", "__init__(self, options): super(LogoutCmd, self).__init__(options) self.status = None self.registry_host = options.registry_host", "url: quay.io[/namespace][/repo]\\n\" + \"If namespace and/or repo are passed, creds", "= 'logout' help_message = \"logout\" def __init__(self, options): super(LogoutCmd, self).__init__(options)", "def __init__(self, options): super(LogoutCmd, self).__init__(options) self.status = None self.registry_host =", "= \"Logout complete\" if self.registry_host != '*': self.status += \"", "are passed, creds only logout for them\") def _call(self): client", "for them\") def _call(self): client = self.RegistryClient(self.registry_host) ApprAuth().delete_token(client.host, scope=self.package) self.status", "from appr.commands.command_base import CommandBase, PackageSplit class LogoutCmd(CommandBase): name = 'logout'", "print_function from appr.auth import ApprAuth from appr.commands.command_base import CommandBase, PackageSplit", "pname = self.package_parts.get('package', None) namespace = self.package_parts.get('namespace', None) self.package =", "self.package = None if pname: self.package = \"%s/%s\" % (namespace,", "namespace and/or repo are passed, creds only logout for them\")", 
"action=PackageSplit, help=\"registry url: quay.io[/namespace][/repo]\\n\" + \"If namespace and/or repo are", "CommandBase, PackageSplit class LogoutCmd(CommandBase): name = 'logout' help_message = \"logout\"", "help_message = \"logout\" def __init__(self, options): super(LogoutCmd, self).__init__(options) self.status =", "help=\"registry url: quay.io[/namespace][/repo]\\n\" + \"If namespace and/or repo are passed,", "PackageSplit class LogoutCmd(CommandBase): name = 'logout' help_message = \"logout\" def", "cls._add_registryhost_option(parser) parser.add_argument('registry', nargs='?', default=None, action=PackageSplit, help=\"registry url: quay.io[/namespace][/repo]\\n\" + \"If", "_add_arguments(cls, parser): cls._add_registryhost_option(parser) parser.add_argument('registry', nargs='?', default=None, action=PackageSplit, help=\"registry url: quay.io[/namespace][/repo]\\n\"", "= None self.registry_host = options.registry_host self.package_parts = options.package_parts pname =", "namespace @classmethod def _add_arguments(cls, parser): cls._add_registryhost_option(parser) parser.add_argument('registry', nargs='?', default=None, action=PackageSplit,", "ApprAuth from appr.commands.command_base import CommandBase, PackageSplit class LogoutCmd(CommandBase): name =", "None) namespace = self.package_parts.get('namespace', None) self.package = None if pname:", "ApprAuth().delete_token(client.host, scope=self.package) self.status = \"Logout complete\" if self.registry_host != '*':", "None if pname: self.package = \"%s/%s\" % (namespace, pname) elif", "self.registry_host != '*': self.status += \" from %s\" % self.registry_host", "appr.auth import ApprAuth from appr.commands.command_base import CommandBase, PackageSplit class LogoutCmd(CommandBase):", "logout for them\") def _call(self): client = self.RegistryClient(self.registry_host) ApprAuth().delete_token(client.host, scope=self.package)", "+= \" from %s\" % self.registry_host def _render_dict(self): return 
{\"status\":", "pname: self.package = \"%s/%s\" % (namespace, pname) elif namespace: self.package", "return {\"status\": self.status, 'host': self.registry_host, \"scope\": self.package} def _render_console(self): return", "import absolute_import, division, print_function from appr.auth import ApprAuth from appr.commands.command_base", "appr.commands.command_base import CommandBase, PackageSplit class LogoutCmd(CommandBase): name = 'logout' help_message", "class LogoutCmd(CommandBase): name = 'logout' help_message = \"logout\" def __init__(self,", "if self.registry_host != '*': self.status += \" from %s\" %", "None) self.package = None if pname: self.package = \"%s/%s\" %", "self.status = None self.registry_host = options.registry_host self.package_parts = options.package_parts pname", "scope=self.package) self.status = \"Logout complete\" if self.registry_host != '*': self.status", "client = self.RegistryClient(self.registry_host) ApprAuth().delete_token(client.host, scope=self.package) self.status = \"Logout complete\" if", "!= '*': self.status += \" from %s\" % self.registry_host def", "options): super(LogoutCmd, self).__init__(options) self.status = None self.registry_host = options.registry_host self.package_parts", "absolute_import, division, print_function from appr.auth import ApprAuth from appr.commands.command_base import", "<filename>appr/commands/logout.py from __future__ import absolute_import, division, print_function from appr.auth import", "self.registry_host def _render_dict(self): return {\"status\": self.status, 'host': self.registry_host, \"scope\": self.package}", "'logout' help_message = \"logout\" def __init__(self, options): super(LogoutCmd, self).__init__(options) self.status", "= options.registry_host self.package_parts = options.package_parts pname = self.package_parts.get('package', None) namespace", "if pname: self.package = \"%s/%s\" % (namespace, pname) elif namespace:", "= self.package_parts.get('namespace', None) self.package = None 
if pname: self.package =", "{\"status\": self.status, 'host': self.registry_host, \"scope\": self.package} def _render_console(self): return \"", "= namespace @classmethod def _add_arguments(cls, parser): cls._add_registryhost_option(parser) parser.add_argument('registry', nargs='?', default=None,", "self.registry_host = options.registry_host self.package_parts = options.package_parts pname = self.package_parts.get('package', None)", "from appr.auth import ApprAuth from appr.commands.command_base import CommandBase, PackageSplit class", "= \"logout\" def __init__(self, options): super(LogoutCmd, self).__init__(options) self.status = None", "self.package_parts = options.package_parts pname = self.package_parts.get('package', None) namespace = self.package_parts.get('namespace',", "= self.RegistryClient(self.registry_host) ApprAuth().delete_token(client.host, scope=self.package) self.status = \"Logout complete\" if self.registry_host", "\"If namespace and/or repo are passed, creds only logout for", "only logout for them\") def _call(self): client = self.RegistryClient(self.registry_host) ApprAuth().delete_token(client.host,", "elif namespace: self.package = namespace @classmethod def _add_arguments(cls, parser): cls._add_registryhost_option(parser)", "None self.registry_host = options.registry_host self.package_parts = options.package_parts pname = self.package_parts.get('package',", "default=None, action=PackageSplit, help=\"registry url: quay.io[/namespace][/repo]\\n\" + \"If namespace and/or repo", "and/or repo are passed, creds only logout for them\") def", "\"scope\": self.package} def _render_console(self): return \" >>> %s\" % self.status", "def _render_dict(self): return {\"status\": self.status, 'host': self.registry_host, \"scope\": self.package} def", "nargs='?', default=None, action=PackageSplit, help=\"registry url: quay.io[/namespace][/repo]\\n\" + \"If namespace and/or", "= None if pname: self.package = \"%s/%s\" % (namespace, pname)", "\"Logout 
complete\" if self.registry_host != '*': self.status += \" from", "'*': self.status += \" from %s\" % self.registry_host def _render_dict(self):", "from __future__ import absolute_import, division, print_function from appr.auth import ApprAuth", "\"%s/%s\" % (namespace, pname) elif namespace: self.package = namespace @classmethod", "import ApprAuth from appr.commands.command_base import CommandBase, PackageSplit class LogoutCmd(CommandBase): name", "pname) elif namespace: self.package = namespace @classmethod def _add_arguments(cls, parser):", "import CommandBase, PackageSplit class LogoutCmd(CommandBase): name = 'logout' help_message =", "self.package_parts.get('namespace', None) self.package = None if pname: self.package = \"%s/%s\"", "def _add_arguments(cls, parser): cls._add_registryhost_option(parser) parser.add_argument('registry', nargs='?', default=None, action=PackageSplit, help=\"registry url:", "division, print_function from appr.auth import ApprAuth from appr.commands.command_base import CommandBase,", "def _call(self): client = self.RegistryClient(self.registry_host) ApprAuth().delete_token(client.host, scope=self.package) self.status = \"Logout", "= self.package_parts.get('package', None) namespace = self.package_parts.get('namespace', None) self.package = None", "LogoutCmd(CommandBase): name = 'logout' help_message = \"logout\" def __init__(self, options):", "them\") def _call(self): client = self.RegistryClient(self.registry_host) ApprAuth().delete_token(client.host, scope=self.package) self.status =", "%s\" % self.registry_host def _render_dict(self): return {\"status\": self.status, 'host': self.registry_host,", "\" from %s\" % self.registry_host def _render_dict(self): return {\"status\": self.status,", "repo are passed, creds only logout for them\") def _call(self):", "'host': self.registry_host, \"scope\": self.package} def _render_console(self): return \" >>> %s\"", "options.registry_host self.package_parts = options.package_parts pname = 
self.package_parts.get('package', None) namespace =", "self.package = namespace @classmethod def _add_arguments(cls, parser): cls._add_registryhost_option(parser) parser.add_argument('registry', nargs='?',", "(namespace, pname) elif namespace: self.package = namespace @classmethod def _add_arguments(cls,", "from %s\" % self.registry_host def _render_dict(self): return {\"status\": self.status, 'host':", "parser): cls._add_registryhost_option(parser) parser.add_argument('registry', nargs='?', default=None, action=PackageSplit, help=\"registry url: quay.io[/namespace][/repo]\\n\" +", "= \"%s/%s\" % (namespace, pname) elif namespace: self.package = namespace", "\"logout\" def __init__(self, options): super(LogoutCmd, self).__init__(options) self.status = None self.registry_host", "self.RegistryClient(self.registry_host) ApprAuth().delete_token(client.host, scope=self.package) self.status = \"Logout complete\" if self.registry_host !=" ]
[ "<reponame>webnowone/albumMusical<gh_stars>1-10 from django.apps import AppConfig class MusicaConfig(AppConfig): name = 'musica'" ]
[ "f: featureCollection = json.loads(f.read().decode(\"utf-8\")) return featureCollection def feature_collection_polygons(featureCollection): \"\"\"Turn a", "and pulling values/files this_dir, this_filename = os.path.split(__file__) BASE_DIR = os.path.dirname(this_dir)", "this_dir, this_filename = os.path.split(__file__) BASE_DIR = os.path.dirname(this_dir) class tzwhere(object): SHORTCUT_DEGREES_LATITUDE", "- time zone computation from latitude/longitude. Ordinarily this is loaded", "possibleTimezones: if len(possibleTimezones) == 1: return possibleTimezones.pop() else: for tzname", "tzwhere(object): SHORTCUT_DEGREES_LATITUDE = 1.0 SHORTCUT_DEGREES_LONGITUDE = 1.0 # By default,", "# numpy arrays rather than tuples, if numpy is installed.", "this during initialization arleady ''' featureCollection = read_tzworld(tzwhere.DEFAULT_POLYGONS) pgen =", "/ self.SHORTCUT_DEGREES_LONGITUDE) * self.SHORTCUT_DEGREES_LONGITUDE) )] lngSet = set(lngTzOptions.keys()) possibleTimezones =", "def __forceTZ__(self, possibleTimezones, latTzOptions, lngTzOptions, queryPoint): distances = [] if", "pgen: self.timezoneNamesToPolygons[tzname].append(poly) for tzname, polys in self.timezoneNamesToPolygons.items(): self.timezoneNamesToPolygons[tzname] = WRAP(polys)", "polyIndex in polyIndices: poly = self.unprepTimezoneNamesToPolygons[ tzname][polyIndex] d = poly.distance(queryPoint)", "shortcut_lat) degree = minLng while degree <= maxLng: if degree", "this_filename = os.path.split(__file__) BASE_DIR = os.path.dirname(this_dir) class tzwhere(object): SHORTCUT_DEGREES_LATITUDE =", "= read_json return reader(path) def read_json(path): with gzip.open(path, \"rb\") as", "for looking up polygons. 
Much faster than using an r-tree", "in self.timezoneLongitudeShortcuts.keys(): for tzname in self.timezoneLongitudeShortcuts[degree].keys(): self.timezoneLongitudeShortcuts[degree][tzname] = \\ tuple(self.timezoneLongitudeShortcuts[degree][tzname])", "in timezoneLongitudeShortcuts: timezoneLongitudeShortcuts[degree] =\\ collections.defaultdict(list) timezoneLongitudeShortcuts[degree][tzname].append(polyIndex) degree = degree +", "self.timezoneLongitudeShortcuts.keys(): for tzname in self.timezoneLongitudeShortcuts[degree].keys(): self.timezoneLongitudeShortcuts[degree][tzname] = \\ tuple(self.timezoneLongitudeShortcuts[degree][tzname]) def", "tzLats = [] for polyIndex, poly in enumerate(timezoneNamesToPolygons[tzname]): lngs =", "forceTZ=False): ''' Initializes the tzwhere class. @forceTZ: If you want", "numpy WRAP = numpy.asarray COLLECTION_TYPE = numpy.ndarray except ImportError: WRAP", "set(latTzOptions.keys()) lngTzOptions = self.timezoneLongitudeShortcuts[str( (math.floor(longitude / self.SHORTCUT_DEGREES_LONGITUDE) * self.SHORTCUT_DEGREES_LONGITUDE) )]", "try: import numpy WRAP = numpy.asarray COLLECTION_TYPE = numpy.ndarray except", "you need to specify this during initialization arleady ''' featureCollection", "= \\ tuple(self.timezoneLatitudeShortcuts[degree][tzname]) for degree in self.timezoneLongitudeShortcuts.keys(): for tzname in", "map(lambda p: prepared.prep( geometry.Polygon(p[0], p[1]) ), self.timezoneNamesToPolygons[tzname])) polyIndices = set(latTzOptions[tzname]).intersection(set(", "json.dump( (timezoneLongitudeShortcuts, timezoneLatitudeShortcuts), f) @staticmethod def construct_shortcuts(timezoneNamesToPolygons, shortcut_long, shortcut_lat): '''", "list of floats. 
\"\"\" for feature in featureCollection['features']: tzname =", "degree in self.timezoneLongitudeShortcuts.keys(): for tzname in self.timezoneLongitudeShortcuts[degree].keys(): self.timezoneLongitudeShortcuts[degree][tzname] = \\", "feature['properties']['TZID'] if feature['geometry']['type'] == 'Polygon': exterior = feature['geometry']['coordinates'][0] interior =", "timezoneLatitudeShortcuts = self.construct_shortcuts( tzNamesToPolygons, tzwhere.SHORTCUT_DEGREES_LONGITUDE, tzwhere.SHORTCUT_DEGREES_LATITUDE) with open(DEFAULT_SHORTCUTS, 'w') as", "isinstance(p, prepared.PreparedGeometry) else geometry.Polygon(p[0], p[1]), self.timezoneNamesToPolygons[tzname])) polyIndices = set(latTzOptions[tzname]).intersection( set(lngTzOptions[tzname]))", "= minLat while degree <= maxLat: if degree not in", "(math.floor(min(ls) / gridSize) * gridSize) maxLs = (math.floor(max(ls) / gridSize)", "timezoneLatitudeShortcuts[degree][tzname].append(polyIndex) degree = degree + shortcut_lat return timezoneLongitudeShortcuts, timezoneLatitudeShortcuts def", "maxLs = (math.floor(max(ls) / gridSize) * gridSize) return minLs, maxLs", "\\ tuple(self.timezoneLatitudeShortcuts[degree][tzname]) for degree in self.timezoneLongitudeShortcuts.keys(): for tzname in self.timezoneLongitudeShortcuts[degree].keys():", "distances = [] if possibleTimezones: if len(possibleTimezones) == 1: return", "can save about 222MB of RAM by turning our polygon", "in timezoneLatitudeShortcuts: timezoneLatitudeShortcuts[degree] =\\ collections.defaultdict(list) timezoneLatitudeShortcuts[degree][tzname].append(polyIndex) degree = degree +", "def feature_collection_polygons(featureCollection): \"\"\"Turn a feature collection into an iterator over", "\\ WRAP(tzNamesToPolygons[tzname]) timezoneLongitudeShortcuts,\\ timezoneLatitudeShortcuts = self.construct_shortcuts( tzNamesToPolygons, tzwhere.SHORTCUT_DEGREES_LONGITUDE, tzwhere.SHORTCUT_DEGREES_LATITUDE) with", "= \\ 
tuple(self.timezoneLongitudeShortcuts[degree][tzname]) def tzNameAt(self, latitude, longitude, forceTZ=False): ''' Let's", "tzname][polyIndex] d = poly.distance(queryPoint) distances.append((d, tzname)) if len(distances) > 0:", "else geometry.Polygon(p[0], p[1]), self.timezoneNamesToPolygons[tzname])) polyIndices = set(latTzOptions[tzname]).intersection( set(lngTzOptions[tzname])) for polyIndex", "for polyIndex in polyIndices: poly = self.unprepTimezoneNamesToPolygons[ tzname][polyIndex] d =", "from the json input, unpack it to an iterator which", "self.timezoneLatitudeShortcuts[degree].keys(): self.timezoneLatitudeShortcuts[degree][tzname] = \\ tuple(self.timezoneLatitudeShortcuts[degree][tzname]) for degree in self.timezoneLongitudeShortcuts.keys(): for", "can't find a valid timezone return the closest timezone you", "lngSet.intersection(latSet) queryPoint = geometry.Point(longitude, latitude) if possibleTimezones: for tzname in", "collections.defaultdict(list) timezoneLatitudeShortcuts[degree][tzname].append(polyIndex) degree = degree + shortcut_lat return timezoneLongitudeShortcuts, timezoneLatitudeShortcuts", "time zone computation from latitude/longitude. Ordinarily this is loaded as", "timezoneLongitudeShortcuts[degree] =\\ collections.defaultdict(list) timezoneLongitudeShortcuts[degree][tzname].append(polyIndex) degree = degree + shortcut_long degree", "in polyIndices: poly = self.unprepTimezoneNamesToPolygons[ tzname][polyIndex] d = poly.distance(queryPoint) distances.append((d,", "= minLng while degree <= maxLng: if degree not in", "degree + shortcut_long degree = minLat while degree <= maxLat:", "lists into # numpy arrays rather than tuples, if numpy", "for every polygon in the featureCollection. 
Here tzname is a", "gzip.open(path, \"rb\") as f: featureCollection = json.loads(f.read().decode(\"utf-8\")) return featureCollection def", "), self.timezoneNamesToPolygons[tzname])) polyIndices = set(latTzOptions[tzname]).intersection(set( lngTzOptions[tzname] )) for polyIndex in", "self.timezoneNamesToPolygons[tzname])) polyIndices = set(latTzOptions[tzname]).intersection( set(lngTzOptions[tzname])) for polyIndex in polyIndices: poly", "instantiated and queried directly ''' import collections try: import ujson", "'''tzwhere.py - time zone computation from latitude/longitude. Ordinarily this is", "closest timezone you can find instead. Only works if the", "isinstance(self.unprepTimezoneNamesToPolygons[tzname], COLLECTION_TYPE): self.unprepTimezoneNamesToPolygons[tzname] = list( map(lambda p: p.context if isinstance(p,", "''' Let's you lookup for a given latitude and longitude", "for tzname, polys in self.timezoneNamesToPolygons.items(): self.timezoneNamesToPolygons[tzname] = WRAP(polys) if forceTZ:", "collection into an iterator over polygons. Given a featureCollection of", "it's bounds, you need to specify this during initialization arleady", "= json.load(f) self.forceTZ = forceTZ for tzname in self.timezoneNamesToPolygons: #", "1: return possibleTimezones.pop() else: for tzname in possibleTimezones: if isinstance(self.unprepTimezoneNamesToPolygons[tzname],", "with open(DEFAULT_SHORTCUTS, 'w') as f: json.dump( (timezoneLongitudeShortcuts, timezoneLatitudeShortcuts), f) @staticmethod", "= os.path.dirname(this_dir) class tzwhere(object): SHORTCUT_DEGREES_LATITUDE = 1.0 SHORTCUT_DEGREES_LONGITUDE = 1.0", "to a return a timezone even if the point you", "WRAP(polys) if forceTZ: self.unprepTimezoneNamesToPolygons[tzname] = WRAP(polys) with open(tzwhere.DEFAULT_SHORTCUTS, 'r') as", "shortcut_lat): ''' Construct our shortcuts for looking up polygons. 
Much", "os.path.join(os.path.dirname(__file__), 'tz_world.json.gz') featureCollection = read_tzworld(DEFAULT_POLYGONS) pgen = feature_collection_polygons(featureCollection) tzNamesToPolygons =", "gzip import os import shapely.geometry as geometry import shapely.prepared as", "= tuple # for navigation and pulling values/files this_dir, this_filename", "= degree + shortcut_long degree = minLat while degree <=", "# Convert things to tuples to save memory for degree", "polyIndices = set(latTzOptions[tzname]).intersection( set(lngTzOptions[tzname])) for polyIndex in polyIndices: poly =", "degree = minLng while degree <= maxLng: if degree not", "up is slightly outside it's bounds, you need to specify", "in our package directory DEFAULT_SHORTCUTS = os.path.join(os.path.dirname(__file__), 'tz_world_shortcuts.json') DEFAULT_POLYGONS =", "using an r-tree ''' def find_min_max(ls, gridSize): minLs = (math.floor(min(ls)", "{} for tzname in timezoneNamesToPolygons: tzLngs = [] tzLats =", "timezoneNamesToPolygons: tzLngs = [] tzLats = [] for polyIndex, poly", "for a given latitude and longitude the appropriate timezone. @latitude:", "true and you can't find a valid timezone return the", "the point has the same integer value for its degree", "in possibleTimezones: if isinstance(self.unprepTimezoneNamesToPolygons[tzname], COLLECTION_TYPE): self.unprepTimezoneNamesToPolygons[tzname] = list( map(lambda p:", "distances.append((d, tzname)) if len(distances) > 0: return sorted(distances, key=lambda x:", "gridSize): minLs = (math.floor(min(ls) / gridSize) * gridSize) maxLs =", "specify this during initialization arleady ''' featureCollection = read_tzworld(tzwhere.DEFAULT_POLYGONS) pgen", "forceTZ' latTzOptions = self.timezoneLatitudeShortcuts[str( (math.floor(latitude / self.SHORTCUT_DEGREES_LATITUDE) * self.SHORTCUT_DEGREES_LATITUDE) )]", "polygons. 
Much faster than using an r-tree ''' def find_min_max(ls,", "= [] for polyIndex, poly in enumerate(timezoneNamesToPolygons[tzname]): lngs = [x[0]", "read_json return reader(path) def read_json(path): with gzip.open(path, \"rb\") as f:", "lngSet = set(lngTzOptions.keys()) possibleTimezones = lngSet.intersection(latSet) queryPoint = geometry.Point(longitude, latitude)", "forceTZ for tzname in self.timezoneNamesToPolygons: # Convert things to tuples", "Only works if the point has the same integer value", "want to force the lookup method to a return a", "for tzname, poly in pgen: self.timezoneNamesToPolygons[tzname].append(poly) for tzname, polys in", "f: self.timezoneLongitudeShortcuts, self.timezoneLatitudeShortcuts = json.load(f) self.forceTZ = forceTZ for tzname", "numpy.ndarray except ImportError: WRAP = tuple COLLECTION_TYPE = tuple #", "self.unprepTimezoneNamesToPolygons[tzname] = WRAP(polys) with open(tzwhere.DEFAULT_SHORTCUTS, 'r') as f: self.timezoneLongitudeShortcuts, self.timezoneLatitudeShortcuts", "lngs, shortcut_long) minLat, maxLat = find_min_max( lats, shortcut_lat) degree =", "shortcuts for looking up polygons. 
Much faster than using an", "= set(lngTzOptions.keys()) possibleTimezones = lngSet.intersection(latSet) queryPoint = geometry.Point(longitude, latitude) if", "self.timezoneLongitudeShortcuts, self.timezoneLatitudeShortcuts = json.load(f) self.forceTZ = forceTZ for tzname in", "= read_tzworld(DEFAULT_POLYGONS) pgen = feature_collection_polygons(featureCollection) tzNamesToPolygons = collections.defaultdict(list) for tzname,", "for degree in self.timezoneLongitudeShortcuts.keys(): for tzname in self.timezoneLongitudeShortcuts[degree].keys(): self.timezoneLongitudeShortcuts[degree][tzname] =", "feature_collection_polygons(featureCollection): \"\"\"Turn a feature collection into an iterator over polygons.", "our package directory DEFAULT_SHORTCUTS = os.path.join(os.path.dirname(__file__), 'tz_world_shortcuts.json') DEFAULT_POLYGONS = os.path.join(os.path.dirname(__file__),", "def tzNameAt(self, latitude, longitude, forceTZ=False): ''' Let's you lookup for", "* self.SHORTCUT_DEGREES_LONGITUDE) )] lngSet = set(lngTzOptions.keys()) possibleTimezones = lngSet.intersection(latSet) queryPoint", "for tzname in possibleTimezones: if isinstance(self.timezoneNamesToPolygons[tzname], COLLECTION_TYPE): self.timezoneNamesToPolygons[tzname] = list(", ")] latSet = set(latTzOptions.keys()) lngTzOptions = self.timezoneLongitudeShortcuts[str( (math.floor(longitude / self.SHORTCUT_DEGREES_LONGITUDE)", "input, unpack it to an iterator which produces a series", "slightly outside it's bounds, you need to specify this during", "'tz_world.json.gz') def __init__(self, forceTZ=False): ''' Initializes the tzwhere class. 
@forceTZ:", "collections try: import ujson as json # loads 2 seconds", "maxLng: if degree not in timezoneLongitudeShortcuts: timezoneLongitudeShortcuts[degree] =\\ collections.defaultdict(list) timezoneLongitudeShortcuts[degree][tzname].append(polyIndex)", "collections.defaultdict(list) self.unprepTimezoneNamesToPolygons = collections.defaultdict(list) for tzname, poly in pgen: self.timezoneNamesToPolygons[tzname].append(poly)", "pgen = feature_collection_polygons(featureCollection) tzNamesToPolygons = collections.defaultdict(list) for tzname, poly in", "ujson as json # loads 2 seconds faster than normal", "of floats. \"\"\" for feature in featureCollection['features']: tzname = feature['properties']['TZID']", "the tzwhere class. @forceTZ: If you want to force the", "featureCollection['features']: tzname = feature['properties']['TZID'] if feature['geometry']['type'] == 'Polygon': exterior =", "in self.timezoneLatitudeShortcuts: for tzname in self.timezoneLatitudeShortcuts[degree].keys(): self.timezoneLatitudeShortcuts[degree][tzname] = \\ tuple(self.timezoneLatitudeShortcuts[degree][tzname])", "computation from latitude/longitude. Ordinarily this is loaded as a module", "try: import json except ImportError: import simplejson as json import", "polygons. 
Given a featureCollection of the kind loaded from the", "poly = self.timezoneNamesToPolygons[tzname][polyIndex] if poly.contains_properly(queryPoint): return tzname if forceTZ: return", "self.timezoneNamesToPolygons.items(): self.timezoneNamesToPolygons[tzname] = WRAP(polys) if forceTZ: self.unprepTimezoneNamesToPolygons[tzname] = WRAP(polys) with", "json import math import gzip import os import shapely.geometry as", "poly.contains_properly(queryPoint): return tzname if forceTZ: return self.__forceTZ__(possibleTimezones, latTzOptions, lngTzOptions, queryPoint)", "We can save about 222MB of RAM by turning our", "pgen: tzNamesToPolygons[tzname].append(poly) for tzname, polys in tzNamesToPolygons.items(): tzNamesToPolygons[tzname] = \\", "featureCollection = read_tzworld(tzwhere.DEFAULT_POLYGONS) pgen = feature_collection_polygons(featureCollection) self.timezoneNamesToPolygons = collections.defaultdict(list) self.unprepTimezoneNamesToPolygons", "list( map(lambda p: p.context if isinstance(p, prepared.PreparedGeometry) else geometry.Polygon(p[0], p[1]),", "len(distances) > 0: return sorted(distances, key=lambda x: x[0])[0][1] class prepareMap(object):", "= 1.0 SHORTCUT_DEGREES_LONGITUDE = 1.0 # By default, use the", "len(possibleTimezones) == 1: return possibleTimezones.pop() else: for tzname in possibleTimezones:", "about 222MB of RAM by turning our polygon lists into", "import simplejson as json import math import gzip import os", "By default, use the data file in our package directory", "__init__(self): DEFAULT_SHORTCUTS = os.path.join(os.path.dirname(__file__), 'tz_world_shortcuts.json') DEFAULT_POLYGONS = os.path.join(os.path.dirname(__file__), 'tz_world.json.gz') featureCollection", "a series of (tzname, polygon) pairs, one for every polygon", "feature['geometry']['coordinates'][1:] yield (tzname, (exterior, interior)) if __name__ == \"__main__\": prepareMap()", "if poly.contains_properly(queryPoint): return tzname if forceTZ: return 
self.__forceTZ__(possibleTimezones, latTzOptions, lngTzOptions,", "read_json(path): with gzip.open(path, \"rb\") as f: featureCollection = json.loads(f.read().decode(\"utf-8\")) return", "iterator which produces a series of (tzname, polygon) pairs, one", "can find instead. Only works if the point has the", "import collections try: import ujson as json # loads 2", "poly[0]] tzLngs.extend(lngs) tzLats.extend(lats) minLng, maxLng = find_min_max( lngs, shortcut_long) minLat,", "tzLngs = [] tzLats = [] for polyIndex, poly in", "self.timezoneNamesToPolygons = collections.defaultdict(list) self.unprepTimezoneNamesToPolygons = collections.defaultdict(list) for tzname, poly in", "if feature['geometry']['type'] == 'Polygon': exterior = feature['geometry']['coordinates'][0] interior = feature['geometry']['coordinates'][1:]", "in self.timezoneLongitudeShortcuts[degree].keys(): self.timezoneLongitudeShortcuts[degree][tzname] = \\ tuple(self.timezoneLongitudeShortcuts[degree][tzname]) def tzNameAt(self, latitude, longitude,", "even if the point you are looking up is slightly", "given latitude and longitude the appropriate timezone. @latitude: latitude @longitude:", "tzNameAt(self, latitude, longitude, forceTZ=False): ''' Let's you lookup for a", "= forceTZ for tzname in self.timezoneNamesToPolygons: # Convert things to", "the timezeone ''' if forceTZ: assert self.forceTZ, 'You need to", "a featureCollection of the kind loaded from the json input,", "def find_min_max(ls, gridSize): minLs = (math.floor(min(ls) / gridSize) * gridSize)", "not in timezoneLongitudeShortcuts: timezoneLongitudeShortcuts[degree] =\\ collections.defaultdict(list) timezoneLongitudeShortcuts[degree][tzname].append(polyIndex) degree = degree", "a timezone even if the point you are looking up", "the appropriate timezone. 
@latitude: latitude @longitude: longitude @forceTZ: If forceTZ", "0: return sorted(distances, key=lambda x: x[0])[0][1] class prepareMap(object): def __init__(self):", "enumerate(timezoneNamesToPolygons[tzname]): lngs = [x[0] for x in poly[0]] lats =", "to an iterator which produces a series of (tzname, polygon)", "for tzname in self.timezoneLongitudeShortcuts[degree].keys(): self.timezoneLongitudeShortcuts[degree][tzname] = \\ tuple(self.timezoneLongitudeShortcuts[degree][tzname]) def tzNameAt(self,", "tzname, poly in pgen: tzNamesToPolygons[tzname].append(poly) for tzname, polys in tzNamesToPolygons.items():", "minLat while degree <= maxLat: if degree not in timezoneLatitudeShortcuts:", "@latitude: latitude @longitude: longitude @forceTZ: If forceTZ is true and", "which produces a series of (tzname, polygon) pairs, one for", "exterior = feature['geometry']['coordinates'][0] interior = feature['geometry']['coordinates'][1:] yield (tzname, (exterior, interior))", "gridSize) maxLs = (math.floor(max(ls) / gridSize) * gridSize) return minLs,", "collections.defaultdict(list) timezoneLongitudeShortcuts[degree][tzname].append(polyIndex) degree = degree + shortcut_long degree = minLat", "Here tzname is a string and polygon is a list", "import json except ImportError: import simplejson as json import math", "minLng while degree <= maxLng: if degree not in timezoneLongitudeShortcuts:", "the same integer value for its degree than the timezeone", "point has the same integer value for its degree than", "in tzNamesToPolygons.items(): tzNamesToPolygons[tzname] = \\ WRAP(tzNamesToPolygons[tzname]) timezoneLongitudeShortcuts,\\ timezoneLatitudeShortcuts = self.construct_shortcuts(", "'w') as f: json.dump( (timezoneLongitudeShortcuts, timezoneLatitudeShortcuts), f) @staticmethod def construct_shortcuts(timezoneNamesToPolygons,", "self.timezoneNamesToPolygons[tzname].append(poly) for tzname, polys in self.timezoneNamesToPolygons.items(): self.timezoneNamesToPolygons[tzname] = 
WRAP(polys) if", "return minLs, maxLs timezoneLongitudeShortcuts = {} timezoneLatitudeShortcuts = {} for", "timezoneLongitudeShortcuts: timezoneLongitudeShortcuts[degree] =\\ collections.defaultdict(list) timezoneLongitudeShortcuts[degree][tzname].append(polyIndex) degree = degree + shortcut_long", "os.path.join(os.path.dirname(__file__), 'tz_world_shortcuts.json') DEFAULT_POLYGONS = os.path.join(os.path.dirname(__file__), 'tz_world.json.gz') def __init__(self, forceTZ=False): '''", "set(latTzOptions[tzname]).intersection(set( lngTzOptions[tzname] )) for polyIndex in polyIndices: poly = self.timezoneNamesToPolygons[tzname][polyIndex]", "= read_tzworld(tzwhere.DEFAULT_POLYGONS) pgen = feature_collection_polygons(featureCollection) self.timezoneNamesToPolygons = collections.defaultdict(list) self.unprepTimezoneNamesToPolygons =", "installed. try: import numpy WRAP = numpy.asarray COLLECTION_TYPE = numpy.ndarray", "directory DEFAULT_SHORTCUTS = os.path.join(os.path.dirname(__file__), 'tz_world_shortcuts.json') DEFAULT_POLYGONS = os.path.join(os.path.dirname(__file__), 'tz_world.json.gz') def", "= {} timezoneLatitudeShortcuts = {} for tzname in timezoneNamesToPolygons: tzLngs", "''' def find_min_max(ls, gridSize): minLs = (math.floor(min(ls) / gridSize) *", "self.timezoneNamesToPolygons[tzname] = WRAP(polys) if forceTZ: self.unprepTimezoneNamesToPolygons[tzname] = WRAP(polys) with open(tzwhere.DEFAULT_SHORTCUTS,", "you lookup for a given latitude and longitude the appropriate", "valid timezone return the closest timezone you can find instead.", "memory for degree in self.timezoneLatitudeShortcuts: for tzname in self.timezoneLatitudeShortcuts[degree].keys(): self.timezoneLatitudeShortcuts[degree][tzname]", "DEFAULT_POLYGONS = os.path.join(os.path.dirname(__file__), 'tz_world.json.gz') featureCollection = read_tzworld(DEFAULT_POLYGONS) pgen = feature_collection_polygons(featureCollection)", "set(latTzOptions[tzname]).intersection( set(lngTzOptions[tzname])) for polyIndex 
in polyIndices: poly = self.unprepTimezoneNamesToPolygons[ tzname][polyIndex]", "poly[0]] lats = [x[1] for x in poly[0]] tzLngs.extend(lngs) tzLats.extend(lats)", "1.0 SHORTCUT_DEGREES_LONGITUDE = 1.0 # By default, use the data", "numpy is installed. try: import numpy WRAP = numpy.asarray COLLECTION_TYPE", "tzLngs.extend(lngs) tzLats.extend(lats) minLng, maxLng = find_min_max( lngs, shortcut_long) minLat, maxLat", "self.timezoneLatitudeShortcuts = json.load(f) self.forceTZ = forceTZ for tzname in self.timezoneNamesToPolygons:", "open(tzwhere.DEFAULT_SHORTCUTS, 'r') as f: self.timezoneLongitudeShortcuts, self.timezoneLatitudeShortcuts = json.load(f) self.forceTZ =", "tuples to save memory for degree in self.timezoneLatitudeShortcuts: for tzname", "minLng, maxLng = find_min_max( lngs, shortcut_long) minLat, maxLat = find_min_max(", "to specify this during initialization arleady ''' featureCollection = read_tzworld(tzwhere.DEFAULT_POLYGONS)", "prepareMap(object): def __init__(self): DEFAULT_SHORTCUTS = os.path.join(os.path.dirname(__file__), 'tz_world_shortcuts.json') DEFAULT_POLYGONS = os.path.join(os.path.dirname(__file__),", "self.SHORTCUT_DEGREES_LATITUDE) * self.SHORTCUT_DEGREES_LATITUDE) )] latSet = set(latTzOptions.keys()) lngTzOptions = self.timezoneLongitudeShortcuts[str(", "its degree than the timezeone ''' if forceTZ: assert self.forceTZ,", "timezone. @latitude: latitude @longitude: longitude @forceTZ: If forceTZ is true", "is a list of floats. 
\"\"\" for feature in featureCollection['features']:", "# for navigation and pulling values/files this_dir, this_filename = os.path.split(__file__)", "of the tzwhere class are instantiated and queried directly '''", "os.path.dirname(this_dir) class tzwhere(object): SHORTCUT_DEGREES_LATITUDE = 1.0 SHORTCUT_DEGREES_LONGITUDE = 1.0 #", "queried directly ''' import collections try: import ujson as json", "ImportError: import simplejson as json import math import gzip import", "open(DEFAULT_SHORTCUTS, 'w') as f: json.dump( (timezoneLongitudeShortcuts, timezoneLatitudeShortcuts), f) @staticmethod def", "return self.__forceTZ__(possibleTimezones, latTzOptions, lngTzOptions, queryPoint) def __forceTZ__(self, possibleTimezones, latTzOptions, lngTzOptions,", "self.timezoneLongitudeShortcuts[str( (math.floor(longitude / self.SHORTCUT_DEGREES_LONGITUDE) * self.SHORTCUT_DEGREES_LONGITUDE) )] lngSet = set(lngTzOptions.keys())", "d = poly.distance(queryPoint) distances.append((d, tzname)) if len(distances) > 0: return", "tzwhere with forceTZ' latTzOptions = self.timezoneLatitudeShortcuts[str( (math.floor(latitude / self.SHORTCUT_DEGREES_LATITUDE) *", "if len(distances) > 0: return sorted(distances, key=lambda x: x[0])[0][1] class", "lngTzOptions, queryPoint): distances = [] if possibleTimezones: if len(possibleTimezones) ==", "tzname, poly in pgen: self.timezoneNamesToPolygons[tzname].append(poly) for tzname, polys in self.timezoneNamesToPolygons.items():", "ImportError: WRAP = tuple COLLECTION_TYPE = tuple # for navigation", "p[1]), self.timezoneNamesToPolygons[tzname])) polyIndices = set(latTzOptions[tzname]).intersection( set(lngTzOptions[tzname])) for polyIndex in polyIndices:", ")) for polyIndex in polyIndices: poly = self.timezoneNamesToPolygons[tzname][polyIndex] if poly.contains_properly(queryPoint):", "is true and you can't find a valid timezone return", "* gridSize) return minLs, maxLs timezoneLongitudeShortcuts = {} timezoneLatitudeShortcuts =", "of RAM by turning our 
polygon lists into # numpy", "for degree in self.timezoneLatitudeShortcuts: for tzname in self.timezoneLatitudeShortcuts[degree].keys(): self.timezoneLatitudeShortcuts[degree][tzname] =", "prepared # We can save about 222MB of RAM by", "self.SHORTCUT_DEGREES_LONGITUDE) * self.SHORTCUT_DEGREES_LONGITUDE) )] lngSet = set(lngTzOptions.keys()) possibleTimezones = lngSet.intersection(latSet)", "tzNamesToPolygons.items(): tzNamesToPolygons[tzname] = \\ WRAP(tzNamesToPolygons[tzname]) timezoneLongitudeShortcuts,\\ timezoneLatitudeShortcuts = self.construct_shortcuts( tzNamesToPolygons,", "self.timezoneNamesToPolygons: # Convert things to tuples to save memory for", "\"\"\"Turn a feature collection into an iterator over polygons. Given", "key=lambda x: x[0])[0][1] class prepareMap(object): def __init__(self): DEFAULT_SHORTCUTS = os.path.join(os.path.dirname(__file__),", "in the featureCollection. Here tzname is a string and polygon", "== 'Polygon': exterior = feature['geometry']['coordinates'][0] interior = feature['geometry']['coordinates'][1:] yield (tzname,", "class prepareMap(object): def __init__(self): DEFAULT_SHORTCUTS = os.path.join(os.path.dirname(__file__), 'tz_world_shortcuts.json') DEFAULT_POLYGONS =", "point you are looking up is slightly outside it's bounds,", "if isinstance(self.unprepTimezoneNamesToPolygons[tzname], COLLECTION_TYPE): self.unprepTimezoneNamesToPolygons[tzname] = list( map(lambda p: p.context if", "map(lambda p: p.context if isinstance(p, prepared.PreparedGeometry) else geometry.Polygon(p[0], p[1]), self.timezoneNamesToPolygons[tzname]))", "geometry.Polygon(p[0], p[1]), self.timezoneNamesToPolygons[tzname])) polyIndices = set(latTzOptions[tzname]).intersection( set(lngTzOptions[tzname])) for polyIndex in", "tzNamesToPolygons = collections.defaultdict(list) for tzname, poly in pgen: tzNamesToPolygons[tzname].append(poly) for", "instances of the tzwhere class are instantiated and queried directly", "timezone even if the point you are looking up 
is", "is slightly outside it's bounds, you need to specify this", "a return a timezone even if the point you are", "import shapely.prepared as prepared # We can save about 222MB", "return the closest timezone you can find instead. Only works", "of (tzname, polygon) pairs, one for every polygon in the", "file in our package directory DEFAULT_SHORTCUTS = os.path.join(os.path.dirname(__file__), 'tz_world_shortcuts.json') DEFAULT_POLYGONS", "in polyIndices: poly = self.timezoneNamesToPolygons[tzname][polyIndex] if poly.contains_properly(queryPoint): return tzname if", "> 0: return sorted(distances, key=lambda x: x[0])[0][1] class prepareMap(object): def", "'tz_world.json.gz') featureCollection = read_tzworld(DEFAULT_POLYGONS) pgen = feature_collection_polygons(featureCollection) tzNamesToPolygons = collections.defaultdict(list)", "as f: self.timezoneLongitudeShortcuts, self.timezoneLatitudeShortcuts = json.load(f) self.forceTZ = forceTZ for", "feature['geometry']['coordinates'][0] interior = feature['geometry']['coordinates'][1:] yield (tzname, (exterior, interior)) if __name__", "try: import ujson as json # loads 2 seconds faster", "rather than tuples, if numpy is installed. try: import numpy", "tzname, polys in self.timezoneNamesToPolygons.items(): self.timezoneNamesToPolygons[tzname] = WRAP(polys) if forceTZ: self.unprepTimezoneNamesToPolygons[tzname]", "= feature['geometry']['coordinates'][1:] yield (tzname, (exterior, interior)) if __name__ == \"__main__\":", "you can find instead. 
Only works if the point has", "import gzip import os import shapely.geometry as geometry import shapely.prepared", "polyIndices: poly = self.unprepTimezoneNamesToPolygons[ tzname][polyIndex] d = poly.distance(queryPoint) distances.append((d, tzname))", "tzname in self.timezoneLatitudeShortcuts[degree].keys(): self.timezoneLatitudeShortcuts[degree][tzname] = \\ tuple(self.timezoneLatitudeShortcuts[degree][tzname]) for degree in", "latitude) if possibleTimezones: for tzname in possibleTimezones: if isinstance(self.timezoneNamesToPolygons[tzname], COLLECTION_TYPE):", "polys in tzNamesToPolygons.items(): tzNamesToPolygons[tzname] = \\ WRAP(tzNamesToPolygons[tzname]) timezoneLongitudeShortcuts,\\ timezoneLatitudeShortcuts =", "prepared.prep( geometry.Polygon(p[0], p[1]) ), self.timezoneNamesToPolygons[tzname])) polyIndices = set(latTzOptions[tzname]).intersection(set( lngTzOptions[tzname] ))", "possibleTimezones: if isinstance(self.timezoneNamesToPolygons[tzname], COLLECTION_TYPE): self.timezoneNamesToPolygons[tzname] = list( map(lambda p: prepared.prep(", "@staticmethod def construct_shortcuts(timezoneNamesToPolygons, shortcut_long, shortcut_lat): ''' Construct our shortcuts for", "Convert things to tuples to save memory for degree in", "If you want to force the lookup method to a", "values/files this_dir, this_filename = os.path.split(__file__) BASE_DIR = os.path.dirname(this_dir) class tzwhere(object):", "construct_shortcuts(timezoneNamesToPolygons, shortcut_long, shortcut_lat): ''' Construct our shortcuts for looking up", "degree <= maxLat: if degree not in timezoneLatitudeShortcuts: timezoneLatitudeShortcuts[degree] =\\", "initialization arleady ''' featureCollection = read_tzworld(tzwhere.DEFAULT_POLYGONS) pgen = feature_collection_polygons(featureCollection) self.timezoneNamesToPolygons", "by turning our polygon lists into # numpy arrays rather", "than the timezeone ''' if forceTZ: assert self.forceTZ, 'You need", "''' Initializes the tzwhere class. 
@forceTZ: If you want to", "gridSize) return minLs, maxLs timezoneLongitudeShortcuts = {} timezoneLatitudeShortcuts = {}", "a module and instances of the tzwhere class are instantiated", "things to tuples to save memory for degree in self.timezoneLatitudeShortcuts:", "timezoneLatitudeShortcuts = {} for tzname in timezoneNamesToPolygons: tzLngs = []", "x[0])[0][1] class prepareMap(object): def __init__(self): DEFAULT_SHORTCUTS = os.path.join(os.path.dirname(__file__), 'tz_world_shortcuts.json') DEFAULT_POLYGONS", "shortcut_long degree = minLat while degree <= maxLat: if degree", "json.loads(f.read().decode(\"utf-8\")) return featureCollection def feature_collection_polygons(featureCollection): \"\"\"Turn a feature collection into", "/ gridSize) * gridSize) return minLs, maxLs timezoneLongitudeShortcuts = {}", "tzname in possibleTimezones: if isinstance(self.timezoneNamesToPolygons[tzname], COLLECTION_TYPE): self.timezoneNamesToPolygons[tzname] = list( map(lambda", "module and instances of the tzwhere class are instantiated and", "isinstance(self.timezoneNamesToPolygons[tzname], COLLECTION_TYPE): self.timezoneNamesToPolygons[tzname] = list( map(lambda p: prepared.prep( geometry.Polygon(p[0], p[1])", "forceTZ: return self.__forceTZ__(possibleTimezones, latTzOptions, lngTzOptions, queryPoint) def __forceTZ__(self, possibleTimezones, latTzOptions,", "= WRAP(polys) if forceTZ: self.unprepTimezoneNamesToPolygons[tzname] = WRAP(polys) with open(tzwhere.DEFAULT_SHORTCUTS, 'r')", "= set(latTzOptions[tzname]).intersection( set(lngTzOptions[tzname])) for polyIndex in polyIndices: poly = self.unprepTimezoneNamesToPolygons[", "and queried directly ''' import collections try: import ujson as", "tuple(self.timezoneLongitudeShortcuts[degree][tzname]) def tzNameAt(self, latitude, longitude, forceTZ=False): ''' Let's you lookup", "maxLat = find_min_max( lats, shortcut_lat) degree = minLng while degree", "method to a return a timezone even if the point", "degree not in 
timezoneLatitudeShortcuts: timezoneLatitudeShortcuts[degree] =\\ collections.defaultdict(list) timezoneLatitudeShortcuts[degree][tzname].append(polyIndex) degree =", "queryPoint) def __forceTZ__(self, possibleTimezones, latTzOptions, lngTzOptions, queryPoint): distances = []", "= collections.defaultdict(list) for tzname, poly in pgen: tzNamesToPolygons[tzname].append(poly) for tzname,", "p[1]) ), self.timezoneNamesToPolygons[tzname])) polyIndices = set(latTzOptions[tzname]).intersection(set( lngTzOptions[tzname] )) for polyIndex", "''' Construct our shortcuts for looking up polygons. Much faster", "@forceTZ: If forceTZ is true and you can't find a", "shortcut_long) minLat, maxLat = find_min_max( lats, shortcut_lat) degree = minLng", "and longitude the appropriate timezone. @latitude: latitude @longitude: longitude @forceTZ:", "set(lngTzOptions.keys()) possibleTimezones = lngSet.intersection(latSet) queryPoint = geometry.Point(longitude, latitude) if possibleTimezones:", "<= maxLng: if degree not in timezoneLongitudeShortcuts: timezoneLongitudeShortcuts[degree] =\\ collections.defaultdict(list)", "1.0 # By default, use the data file in our", "in self.timezoneLatitudeShortcuts[degree].keys(): self.timezoneLatitudeShortcuts[degree][tzname] = \\ tuple(self.timezoneLatitudeShortcuts[degree][tzname]) for degree in self.timezoneLongitudeShortcuts.keys():", "numpy arrays rather than tuples, if numpy is installed. 
try:", "tzwhere.SHORTCUT_DEGREES_LATITUDE) with open(DEFAULT_SHORTCUTS, 'w') as f: json.dump( (timezoneLongitudeShortcuts, timezoneLatitudeShortcuts), f)", "# loads 2 seconds faster than normal json except: try:", "WRAP = tuple COLLECTION_TYPE = tuple # for navigation and", ")] lngSet = set(lngTzOptions.keys()) possibleTimezones = lngSet.intersection(latSet) queryPoint = geometry.Point(longitude,", "for feature in featureCollection['features']: tzname = feature['properties']['TZID'] if feature['geometry']['type'] ==", "produces a series of (tzname, polygon) pairs, one for every", "feature['geometry']['type'] == 'Polygon': exterior = feature['geometry']['coordinates'][0] interior = feature['geometry']['coordinates'][1:] yield", "works if the point has the same integer value for", "polys in self.timezoneNamesToPolygons.items(): self.timezoneNamesToPolygons[tzname] = WRAP(polys) if forceTZ: self.unprepTimezoneNamesToPolygons[tzname] =", "= geometry.Point(longitude, latitude) if possibleTimezones: for tzname in possibleTimezones: if", "class. 
@forceTZ: If you want to force the lookup method", "longitude @forceTZ: If forceTZ is true and you can't find", "=\\ collections.defaultdict(list) timezoneLongitudeShortcuts[degree][tzname].append(polyIndex) degree = degree + shortcut_long degree =", "{} timezoneLatitudeShortcuts = {} for tzname in timezoneNamesToPolygons: tzLngs =", "def __init__(self): DEFAULT_SHORTCUTS = os.path.join(os.path.dirname(__file__), 'tz_world_shortcuts.json') DEFAULT_POLYGONS = os.path.join(os.path.dirname(__file__), 'tz_world.json.gz')", "tzname, polys in tzNamesToPolygons.items(): tzNamesToPolygons[tzname] = \\ WRAP(tzNamesToPolygons[tzname]) timezoneLongitudeShortcuts,\\ timezoneLatitudeShortcuts", "value for its degree than the timezeone ''' if forceTZ:", "tuple COLLECTION_TYPE = tuple # for navigation and pulling values/files", "(math.floor(longitude / self.SHORTCUT_DEGREES_LONGITUDE) * self.SHORTCUT_DEGREES_LONGITUDE) )] lngSet = set(lngTzOptions.keys()) possibleTimezones", "f) @staticmethod def construct_shortcuts(timezoneNamesToPolygons, shortcut_long, shortcut_lat): ''' Construct our shortcuts", "lats, shortcut_lat) degree = minLng while degree <= maxLng: if", "while degree <= maxLng: if degree not in timezoneLongitudeShortcuts: timezoneLongitudeShortcuts[degree]", "as geometry import shapely.prepared as prepared # We can save", "If forceTZ is true and you can't find a valid", "'tz_world_shortcuts.json') DEFAULT_POLYGONS = os.path.join(os.path.dirname(__file__), 'tz_world.json.gz') def __init__(self, forceTZ=False): ''' Initializes", "series of (tzname, polygon) pairs, one for every polygon in", "def read_json(path): with gzip.open(path, \"rb\") as f: featureCollection = json.loads(f.read().decode(\"utf-8\"))", "# By default, use the data file in our package", "if forceTZ: assert self.forceTZ, 'You need to initialize tzwhere with", "json except: try: import json except ImportError: import simplejson as", "= degree + shortcut_lat return timezoneLongitudeShortcuts, 
timezoneLatitudeShortcuts def read_tzworld(path): reader", "the json input, unpack it to an iterator which produces", "and you can't find a valid timezone return the closest", "= 1.0 # By default, use the data file in", "floats. \"\"\" for feature in featureCollection['features']: tzname = feature['properties']['TZID'] if", "turning our polygon lists into # numpy arrays rather than", "polygon lists into # numpy arrays rather than tuples, if", "Construct our shortcuts for looking up polygons. Much faster than", "one for every polygon in the featureCollection. Here tzname is", "for x in poly[0]] tzLngs.extend(lngs) tzLats.extend(lats) minLng, maxLng = find_min_max(", "self.forceTZ, 'You need to initialize tzwhere with forceTZ' latTzOptions =", "tzname in timezoneNamesToPolygons: tzLngs = [] tzLats = [] for", "[x[0] for x in poly[0]] lats = [x[1] for x", "featureCollection of the kind loaded from the json input, unpack", "to save memory for degree in self.timezoneLatitudeShortcuts: for tzname in", "latSet = set(latTzOptions.keys()) lngTzOptions = self.timezoneLongitudeShortcuts[str( (math.floor(longitude / self.SHORTCUT_DEGREES_LONGITUDE) *", "return timezoneLongitudeShortcuts, timezoneLatitudeShortcuts def read_tzworld(path): reader = read_json return reader(path)", "lngTzOptions = self.timezoneLongitudeShortcuts[str( (math.floor(longitude / self.SHORTCUT_DEGREES_LONGITUDE) * self.SHORTCUT_DEGREES_LONGITUDE) )] lngSet", "json # loads 2 seconds faster than normal json except:", "save memory for degree in self.timezoneLatitudeShortcuts: for tzname in self.timezoneLatitudeShortcuts[degree].keys():", "class tzwhere(object): SHORTCUT_DEGREES_LATITUDE = 1.0 SHORTCUT_DEGREES_LONGITUDE = 1.0 # By", "COLLECTION_TYPE = tuple # for navigation and pulling values/files this_dir,", "directly ''' import collections try: import ujson as json #", "tzname)) if len(distances) > 0: return sorted(distances, key=lambda x: x[0])[0][1]", "\\ 
tuple(self.timezoneLongitudeShortcuts[degree][tzname]) def tzNameAt(self, latitude, longitude, forceTZ=False): ''' Let's you", "self.timezoneLatitudeShortcuts: for tzname in self.timezoneLatitudeShortcuts[degree].keys(): self.timezoneLatitudeShortcuts[degree][tzname] = \\ tuple(self.timezoneLatitudeShortcuts[degree][tzname]) for", "it to an iterator which produces a series of (tzname,", "SHORTCUT_DEGREES_LATITUDE = 1.0 SHORTCUT_DEGREES_LONGITUDE = 1.0 # By default, use", "over polygons. Given a featureCollection of the kind loaded from", "unpack it to an iterator which produces a series of", "timezoneLatitudeShortcuts[degree] =\\ collections.defaultdict(list) timezoneLatitudeShortcuts[degree][tzname].append(polyIndex) degree = degree + shortcut_lat return", "+ shortcut_lat return timezoneLongitudeShortcuts, timezoneLatitudeShortcuts def read_tzworld(path): reader = read_json", "than normal json except: try: import json except ImportError: import", "outside it's bounds, you need to specify this during initialization", "to tuples to save memory for degree in self.timezoneLatitudeShortcuts: for", "polyIndex, poly in enumerate(timezoneNamesToPolygons[tzname]): lngs = [x[0] for x in", "''' import collections try: import ujson as json # loads", "= \\ WRAP(tzNamesToPolygons[tzname]) timezoneLongitudeShortcuts,\\ timezoneLatitudeShortcuts = self.construct_shortcuts( tzNamesToPolygons, tzwhere.SHORTCUT_DEGREES_LONGITUDE, tzwhere.SHORTCUT_DEGREES_LATITUDE)", "in timezoneNamesToPolygons: tzLngs = [] tzLats = [] for polyIndex,", "a list of floats. 
\"\"\" for feature in featureCollection['features']: tzname", "as f: featureCollection = json.loads(f.read().decode(\"utf-8\")) return featureCollection def feature_collection_polygons(featureCollection): \"\"\"Turn", "featureCollection def feature_collection_polygons(featureCollection): \"\"\"Turn a feature collection into an iterator", "[] for polyIndex, poly in enumerate(timezoneNamesToPolygons[tzname]): lngs = [x[0] for", "latTzOptions = self.timezoneLatitudeShortcuts[str( (math.floor(latitude / self.SHORTCUT_DEGREES_LATITUDE) * self.SHORTCUT_DEGREES_LATITUDE) )] latSet", "maxLat: if degree not in timezoneLatitudeShortcuts: timezoneLatitudeShortcuts[degree] =\\ collections.defaultdict(list) timezoneLatitudeShortcuts[degree][tzname].append(polyIndex)", "polygon is a list of floats. \"\"\" for feature in", "= feature['properties']['TZID'] if feature['geometry']['type'] == 'Polygon': exterior = feature['geometry']['coordinates'][0] interior", "self.timezoneNamesToPolygons[tzname][polyIndex] if poly.contains_properly(queryPoint): return tzname if forceTZ: return self.__forceTZ__(possibleTimezones, latTzOptions,", "return reader(path) def read_json(path): with gzip.open(path, \"rb\") as f: featureCollection", "p: prepared.prep( geometry.Polygon(p[0], p[1]) ), self.timezoneNamesToPolygons[tzname])) polyIndices = set(latTzOptions[tzname]).intersection(set( lngTzOptions[tzname]", "tuples, if numpy is installed. 
try: import numpy WRAP =", "longitude, forceTZ=False): ''' Let's you lookup for a given latitude", "self.timezoneLongitudeShortcuts[degree].keys(): self.timezoneLongitudeShortcuts[degree][tzname] = \\ tuple(self.timezoneLongitudeShortcuts[degree][tzname]) def tzNameAt(self, latitude, longitude, forceTZ=False):", "read_tzworld(DEFAULT_POLYGONS) pgen = feature_collection_polygons(featureCollection) tzNamesToPolygons = collections.defaultdict(list) for tzname, poly", "if forceTZ: return self.__forceTZ__(possibleTimezones, latTzOptions, lngTzOptions, queryPoint) def __forceTZ__(self, possibleTimezones,", "poly in enumerate(timezoneNamesToPolygons[tzname]): lngs = [x[0] for x in poly[0]]", "[x[1] for x in poly[0]] tzLngs.extend(lngs) tzLats.extend(lats) minLng, maxLng =", "polyIndex in polyIndices: poly = self.timezoneNamesToPolygons[tzname][polyIndex] if poly.contains_properly(queryPoint): return tzname", "faster than normal json except: try: import json except ImportError:", "return a timezone even if the point you are looking", "reader = read_json return reader(path) def read_json(path): with gzip.open(path, \"rb\")", "a feature collection into an iterator over polygons. 
Given a", "feature_collection_polygons(featureCollection) tzNamesToPolygons = collections.defaultdict(list) for tzname, poly in pgen: tzNamesToPolygons[tzname].append(poly)", "__forceTZ__(self, possibleTimezones, latTzOptions, lngTzOptions, queryPoint): distances = [] if possibleTimezones:", "else: for tzname in possibleTimezones: if isinstance(self.unprepTimezoneNamesToPolygons[tzname], COLLECTION_TYPE): self.unprepTimezoneNamesToPolygons[tzname] =", "return featureCollection def feature_collection_polygons(featureCollection): \"\"\"Turn a feature collection into an", "except ImportError: import simplejson as json import math import gzip", "integer value for its degree than the timezeone ''' if", "forceTZ: assert self.forceTZ, 'You need to initialize tzwhere with forceTZ'", "# We can save about 222MB of RAM by turning", "minLat, maxLat = find_min_max( lats, shortcut_lat) degree = minLng while", "gridSize) * gridSize) maxLs = (math.floor(max(ls) / gridSize) * gridSize)", "lats = [x[1] for x in poly[0]] tzLngs.extend(lngs) tzLats.extend(lats) minLng,", "== 1: return possibleTimezones.pop() else: for tzname in possibleTimezones: if", "tzNamesToPolygons, tzwhere.SHORTCUT_DEGREES_LONGITUDE, tzwhere.SHORTCUT_DEGREES_LATITUDE) with open(DEFAULT_SHORTCUTS, 'w') as f: json.dump( (timezoneLongitudeShortcuts,", "if degree not in timezoneLatitudeShortcuts: timezoneLatitudeShortcuts[degree] =\\ collections.defaultdict(list) timezoneLatitudeShortcuts[degree][tzname].append(polyIndex) degree", "sorted(distances, key=lambda x: x[0])[0][1] class prepareMap(object): def __init__(self): DEFAULT_SHORTCUTS =", "reader(path) def read_json(path): with gzip.open(path, \"rb\") as f: featureCollection =", "in self.timezoneNamesToPolygons: # Convert things to tuples to save memory", "tzwhere class are instantiated and queried directly ''' import collections", "self.timezoneNamesToPolygons[tzname] = list( map(lambda p: prepared.prep( geometry.Polygon(p[0], p[1]) ), 
self.timezoneNamesToPolygons[tzname]))", "Let's you lookup for a given latitude and longitude the", "self.timezoneLatitudeShortcuts[str( (math.floor(latitude / self.SHORTCUT_DEGREES_LATITUDE) * self.SHORTCUT_DEGREES_LATITUDE) )] latSet = set(latTzOptions.keys())", "our polygon lists into # numpy arrays rather than tuples,", "queryPoint = geometry.Point(longitude, latitude) if possibleTimezones: for tzname in possibleTimezones:", "WRAP(tzNamesToPolygons[tzname]) timezoneLongitudeShortcuts,\\ timezoneLatitudeShortcuts = self.construct_shortcuts( tzNamesToPolygons, tzwhere.SHORTCUT_DEGREES_LONGITUDE, tzwhere.SHORTCUT_DEGREES_LATITUDE) with open(DEFAULT_SHORTCUTS,", "(tzname, polygon) pairs, one for every polygon in the featureCollection.", "if forceTZ: self.unprepTimezoneNamesToPolygons[tzname] = WRAP(polys) with open(tzwhere.DEFAULT_SHORTCUTS, 'r') as f:", "= WRAP(polys) with open(tzwhere.DEFAULT_SHORTCUTS, 'r') as f: self.timezoneLongitudeShortcuts, self.timezoneLatitudeShortcuts =", "maxLng = find_min_max( lngs, shortcut_long) minLat, maxLat = find_min_max( lats,", "same integer value for its degree than the timezeone '''", "timezoneLatitudeShortcuts def read_tzworld(path): reader = read_json return reader(path) def read_json(path):", "than using an r-tree ''' def find_min_max(ls, gridSize): minLs =", "COLLECTION_TYPE): self.timezoneNamesToPolygons[tzname] = list( map(lambda p: prepared.prep( geometry.Polygon(p[0], p[1]) ),", "p.context if isinstance(p, prepared.PreparedGeometry) else geometry.Polygon(p[0], p[1]), self.timezoneNamesToPolygons[tzname])) polyIndices =", "import shapely.geometry as geometry import shapely.prepared as prepared # We", "force the lookup method to a return a timezone even", "gridSize) * gridSize) return minLs, maxLs timezoneLongitudeShortcuts = {} timezoneLatitudeShortcuts", "degree = degree + shortcut_lat return timezoneLongitudeShortcuts, timezoneLatitudeShortcuts def read_tzworld(path):", "= tuple COLLECTION_TYPE = tuple # for 
navigation and pulling", "python '''tzwhere.py - time zone computation from latitude/longitude. Ordinarily this", "DEFAULT_SHORTCUTS = os.path.join(os.path.dirname(__file__), 'tz_world_shortcuts.json') DEFAULT_POLYGONS = os.path.join(os.path.dirname(__file__), 'tz_world.json.gz') def __init__(self,", "in possibleTimezones: if isinstance(self.timezoneNamesToPolygons[tzname], COLLECTION_TYPE): self.timezoneNamesToPolygons[tzname] = list( map(lambda p:", "of the kind loaded from the json input, unpack it", "bounds, you need to specify this during initialization arleady '''", "import os import shapely.geometry as geometry import shapely.prepared as prepared", "shapely.prepared as prepared # We can save about 222MB of", "degree not in timezoneLongitudeShortcuts: timezoneLongitudeShortcuts[degree] =\\ collections.defaultdict(list) timezoneLongitudeShortcuts[degree][tzname].append(polyIndex) degree =", "iterator over polygons. Given a featureCollection of the kind loaded", "+ shortcut_long degree = minLat while degree <= maxLat: if", "if isinstance(p, prepared.PreparedGeometry) else geometry.Polygon(p[0], p[1]), self.timezoneNamesToPolygons[tzname])) polyIndices = set(latTzOptions[tzname]).intersection(", "self.unprepTimezoneNamesToPolygons[tzname] = list( map(lambda p: p.context if isinstance(p, prepared.PreparedGeometry) else", "WRAP(polys) with open(tzwhere.DEFAULT_SHORTCUTS, 'r') as f: self.timezoneLongitudeShortcuts, self.timezoneLatitudeShortcuts = json.load(f)", "json except ImportError: import simplejson as json import math import", "numpy.asarray COLLECTION_TYPE = numpy.ndarray except ImportError: WRAP = tuple COLLECTION_TYPE", "json.load(f) self.forceTZ = forceTZ for tzname in self.timezoneNamesToPolygons: # Convert", "tzwhere.SHORTCUT_DEGREES_LONGITUDE, tzwhere.SHORTCUT_DEGREES_LATITUDE) with open(DEFAULT_SHORTCUTS, 'w') as f: json.dump( (timezoneLongitudeShortcuts, timezoneLatitudeShortcuts),", "= self.timezoneLatitudeShortcuts[str( (math.floor(latitude / 
self.SHORTCUT_DEGREES_LATITUDE) * self.SHORTCUT_DEGREES_LATITUDE) )] latSet =", "for its degree than the timezeone ''' if forceTZ: assert", "= [x[0] for x in poly[0]] lats = [x[1] for", "if len(possibleTimezones) == 1: return possibleTimezones.pop() else: for tzname in", "RAM by turning our polygon lists into # numpy arrays", "up polygons. Much faster than using an r-tree ''' def", "= (math.floor(min(ls) / gridSize) * gridSize) maxLs = (math.floor(max(ls) /", "minLs, maxLs timezoneLongitudeShortcuts = {} timezoneLatitudeShortcuts = {} for tzname", "the featureCollection. Here tzname is a string and polygon is", "lookup method to a return a timezone even if the", "degree in self.timezoneLatitudeShortcuts: for tzname in self.timezoneLatitudeShortcuts[degree].keys(): self.timezoneLatitudeShortcuts[degree][tzname] = \\", "to force the lookup method to a return a timezone", "as prepared # We can save about 222MB of RAM", "kind loaded from the json input, unpack it to an", "timezeone ''' if forceTZ: assert self.forceTZ, 'You need to initialize", "an iterator which produces a series of (tzname, polygon) pairs,", "in featureCollection['features']: tzname = feature['properties']['TZID'] if feature['geometry']['type'] == 'Polygon': exterior", "polygon) pairs, one for every polygon in the featureCollection. 
Here", "pgen = feature_collection_polygons(featureCollection) self.timezoneNamesToPolygons = collections.defaultdict(list) self.unprepTimezoneNamesToPolygons = collections.defaultdict(list) for", "lookup for a given latitude and longitude the appropriate timezone.", "DEFAULT_SHORTCUTS = os.path.join(os.path.dirname(__file__), 'tz_world_shortcuts.json') DEFAULT_POLYGONS = os.path.join(os.path.dirname(__file__), 'tz_world.json.gz') featureCollection =", "(timezoneLongitudeShortcuts, timezoneLatitudeShortcuts), f) @staticmethod def construct_shortcuts(timezoneNamesToPolygons, shortcut_long, shortcut_lat): ''' Construct", "find_min_max( lats, shortcut_lat) degree = minLng while degree <= maxLng:", "Ordinarily this is loaded as a module and instances of", "read_tzworld(path): reader = read_json return reader(path) def read_json(path): with gzip.open(path,", "not in timezoneLatitudeShortcuts: timezoneLatitudeShortcuts[degree] =\\ collections.defaultdict(list) timezoneLatitudeShortcuts[degree][tzname].append(polyIndex) degree = degree", "\"rb\") as f: featureCollection = json.loads(f.read().decode(\"utf-8\")) return featureCollection def feature_collection_polygons(featureCollection):", "if possibleTimezones: if len(possibleTimezones) == 1: return possibleTimezones.pop() else: for", "= [] tzLats = [] for polyIndex, poly in enumerate(timezoneNamesToPolygons[tzname]):", "= collections.defaultdict(list) for tzname, poly in pgen: self.timezoneNamesToPolygons[tzname].append(poly) for tzname,", "assert self.forceTZ, 'You need to initialize tzwhere with forceTZ' latTzOptions", "__init__(self, forceTZ=False): ''' Initializes the tzwhere class. 
@forceTZ: If you", "self.construct_shortcuts( tzNamesToPolygons, tzwhere.SHORTCUT_DEGREES_LONGITUDE, tzwhere.SHORTCUT_DEGREES_LATITUDE) with open(DEFAULT_SHORTCUTS, 'w') as f: json.dump(", "self.unprepTimezoneNamesToPolygons = collections.defaultdict(list) for tzname, poly in pgen: self.timezoneNamesToPolygons[tzname].append(poly) for", "import math import gzip import os import shapely.geometry as geometry", "navigation and pulling values/files this_dir, this_filename = os.path.split(__file__) BASE_DIR =", "you want to force the lookup method to a return", "forceTZ=False): ''' Let's you lookup for a given latitude and", "string and polygon is a list of floats. \"\"\" for", "DEFAULT_POLYGONS = os.path.join(os.path.dirname(__file__), 'tz_world.json.gz') def __init__(self, forceTZ=False): ''' Initializes the", "return tzname if forceTZ: return self.__forceTZ__(possibleTimezones, latTzOptions, lngTzOptions, queryPoint) def", "f: json.dump( (timezoneLongitudeShortcuts, timezoneLatitudeShortcuts), f) @staticmethod def construct_shortcuts(timezoneNamesToPolygons, shortcut_long, shortcut_lat):", "= lngSet.intersection(latSet) queryPoint = geometry.Point(longitude, latitude) if possibleTimezones: for tzname", "as a module and instances of the tzwhere class are", "tzname in self.timezoneLongitudeShortcuts[degree].keys(): self.timezoneLongitudeShortcuts[degree][tzname] = \\ tuple(self.timezoneLongitudeShortcuts[degree][tzname]) def tzNameAt(self, latitude,", "self.unprepTimezoneNamesToPolygons[ tzname][polyIndex] d = poly.distance(queryPoint) distances.append((d, tzname)) if len(distances) >", "in pgen: tzNamesToPolygons[tzname].append(poly) for tzname, polys in tzNamesToPolygons.items(): tzNamesToPolygons[tzname] =", "from latitude/longitude. 
Ordinarily this is loaded as a module and", "= find_min_max( lngs, shortcut_long) minLat, maxLat = find_min_max( lats, shortcut_lat)", "need to specify this during initialization arleady ''' featureCollection =", "polyIndices = set(latTzOptions[tzname]).intersection(set( lngTzOptions[tzname] )) for polyIndex in polyIndices: poly", "return possibleTimezones.pop() else: for tzname in possibleTimezones: if isinstance(self.unprepTimezoneNamesToPolygons[tzname], COLLECTION_TYPE):", "as json import math import gzip import os import shapely.geometry", "r-tree ''' def find_min_max(ls, gridSize): minLs = (math.floor(min(ls) / gridSize)", "for navigation and pulling values/files this_dir, this_filename = os.path.split(__file__) BASE_DIR", "x in poly[0]] tzLngs.extend(lngs) tzLats.extend(lats) minLng, maxLng = find_min_max( lngs,", "SHORTCUT_DEGREES_LONGITUDE = 1.0 # By default, use the data file", "= list( map(lambda p: p.context if isinstance(p, prepared.PreparedGeometry) else geometry.Polygon(p[0],", "appropriate timezone. 
@latitude: latitude @longitude: longitude @forceTZ: If forceTZ is", "in poly[0]] lats = [x[1] for x in poly[0]] tzLngs.extend(lngs)", "= json.loads(f.read().decode(\"utf-8\")) return featureCollection def feature_collection_polygons(featureCollection): \"\"\"Turn a feature collection", "latTzOptions, lngTzOptions, queryPoint) def __forceTZ__(self, possibleTimezones, latTzOptions, lngTzOptions, queryPoint): distances", "json input, unpack it to an iterator which produces a", "* self.SHORTCUT_DEGREES_LATITUDE) )] latSet = set(latTzOptions.keys()) lngTzOptions = self.timezoneLongitudeShortcuts[str( (math.floor(longitude", "latitude, longitude, forceTZ=False): ''' Let's you lookup for a given", "2 seconds faster than normal json except: try: import json", "poly = self.unprepTimezoneNamesToPolygons[ tzname][polyIndex] d = poly.distance(queryPoint) distances.append((d, tzname)) if", "geometry import shapely.prepared as prepared # We can save about", "forceTZ: self.unprepTimezoneNamesToPolygons[tzname] = WRAP(polys) with open(tzwhere.DEFAULT_SHORTCUTS, 'r') as f: self.timezoneLongitudeShortcuts,", "the data file in our package directory DEFAULT_SHORTCUTS = os.path.join(os.path.dirname(__file__),", "* gridSize) maxLs = (math.floor(max(ls) / gridSize) * gridSize) return", "= collections.defaultdict(list) self.unprepTimezoneNamesToPolygons = collections.defaultdict(list) for tzname, poly in pgen:", "possibleTimezones, latTzOptions, lngTzOptions, queryPoint): distances = [] if possibleTimezones: if", "= (math.floor(max(ls) / gridSize) * gridSize) return minLs, maxLs timezoneLongitudeShortcuts", "the closest timezone you can find instead. Only works if", "= [] if possibleTimezones: if len(possibleTimezones) == 1: return possibleTimezones.pop()", "into an iterator over polygons. 
Given a featureCollection of the", "math import gzip import os import shapely.geometry as geometry import", "collections.defaultdict(list) for tzname, poly in pgen: tzNamesToPolygons[tzname].append(poly) for tzname, polys", "tzname is a string and polygon is a list of", "latitude/longitude. Ordinarily this is loaded as a module and instances", "self.timezoneNamesToPolygons[tzname])) polyIndices = set(latTzOptions[tzname]).intersection(set( lngTzOptions[tzname] )) for polyIndex in polyIndices:", "in pgen: self.timezoneNamesToPolygons[tzname].append(poly) for tzname, polys in self.timezoneNamesToPolygons.items(): self.timezoneNamesToPolygons[tzname] =", "= self.unprepTimezoneNamesToPolygons[ tzname][polyIndex] d = poly.distance(queryPoint) distances.append((d, tzname)) if len(distances)", "= set(latTzOptions[tzname]).intersection(set( lngTzOptions[tzname] )) for polyIndex in polyIndices: poly =", "list( map(lambda p: prepared.prep( geometry.Polygon(p[0], p[1]) ), self.timezoneNamesToPolygons[tzname])) polyIndices =", "set(lngTzOptions[tzname])) for polyIndex in polyIndices: poly = self.unprepTimezoneNamesToPolygons[ tzname][polyIndex] d", "feature collection into an iterator over polygons. Given a featureCollection", "with forceTZ' latTzOptions = self.timezoneLatitudeShortcuts[str( (math.floor(latitude / self.SHORTCUT_DEGREES_LATITUDE) * self.SHORTCUT_DEGREES_LATITUDE)", "are instantiated and queried directly ''' import collections try: import", "zone computation from latitude/longitude. Ordinarily this is loaded as a", "self.SHORTCUT_DEGREES_LONGITUDE) )] lngSet = set(lngTzOptions.keys()) possibleTimezones = lngSet.intersection(latSet) queryPoint =", "'You need to initialize tzwhere with forceTZ' latTzOptions = self.timezoneLatitudeShortcuts[str(", "tzLats.extend(lats) minLng, maxLng = find_min_max( lngs, shortcut_long) minLat, maxLat =", "than tuples, if numpy is installed. 
try: import numpy WRAP", "you are looking up is slightly outside it's bounds, you", "loaded from the json input, unpack it to an iterator", "''' if forceTZ: assert self.forceTZ, 'You need to initialize tzwhere", "an iterator over polygons. Given a featureCollection of the kind", "os.path.join(os.path.dirname(__file__), 'tz_world.json.gz') def __init__(self, forceTZ=False): ''' Initializes the tzwhere class.", "tuple(self.timezoneLatitudeShortcuts[degree][tzname]) for degree in self.timezoneLongitudeShortcuts.keys(): for tzname in self.timezoneLongitudeShortcuts[degree].keys(): self.timezoneLongitudeShortcuts[degree][tzname]", "are looking up is slightly outside it's bounds, you need", "longitude the appropriate timezone. @latitude: latitude @longitude: longitude @forceTZ: If", "if isinstance(self.timezoneNamesToPolygons[tzname], COLLECTION_TYPE): self.timezoneNamesToPolygons[tzname] = list( map(lambda p: prepared.prep( geometry.Polygon(p[0],", "= find_min_max( lats, shortcut_lat) degree = minLng while degree <=", "if possibleTimezones: for tzname in possibleTimezones: if isinstance(self.timezoneNamesToPolygons[tzname], COLLECTION_TYPE): self.timezoneNamesToPolygons[tzname]", "arrays rather than tuples, if numpy is installed. try: import", "polygon in the featureCollection. 
Here tzname is a string and", "lngTzOptions[tzname] )) for polyIndex in polyIndices: poly = self.timezoneNamesToPolygons[tzname][polyIndex] if", "self.SHORTCUT_DEGREES_LATITUDE) )] latSet = set(latTzOptions.keys()) lngTzOptions = self.timezoneLongitudeShortcuts[str( (math.floor(longitude /", "default, use the data file in our package directory DEFAULT_SHORTCUTS", "= feature['geometry']['coordinates'][0] interior = feature['geometry']['coordinates'][1:] yield (tzname, (exterior, interior)) if", "as json # loads 2 seconds faster than normal json", "except: try: import json except ImportError: import simplejson as json", "os.path.join(os.path.dirname(__file__), 'tz_world_shortcuts.json') DEFAULT_POLYGONS = os.path.join(os.path.dirname(__file__), 'tz_world.json.gz') featureCollection = read_tzworld(DEFAULT_POLYGONS) pgen", "= numpy.asarray COLLECTION_TYPE = numpy.ndarray except ImportError: WRAP = tuple", "is a string and polygon is a list of floats.", "def construct_shortcuts(timezoneNamesToPolygons, shortcut_long, shortcut_lat): ''' Construct our shortcuts for looking", "import ujson as json # loads 2 seconds faster than", "'Polygon': exterior = feature['geometry']['coordinates'][0] interior = feature['geometry']['coordinates'][1:] yield (tzname, (exterior,", "= os.path.split(__file__) BASE_DIR = os.path.dirname(this_dir) class tzwhere(object): SHORTCUT_DEGREES_LATITUDE = 1.0", "a valid timezone return the closest timezone you can find", "possibleTimezones.pop() else: for tzname in possibleTimezones: if isinstance(self.unprepTimezoneNamesToPolygons[tzname], COLLECTION_TYPE): self.unprepTimezoneNamesToPolygons[tzname]", "the lookup method to a return a timezone even if", "for tzname in possibleTimezones: if isinstance(self.unprepTimezoneNamesToPolygons[tzname], COLLECTION_TYPE): self.unprepTimezoneNamesToPolygons[tzname] = list(", "p: p.context if isinstance(p, prepared.PreparedGeometry) else geometry.Polygon(p[0], p[1]), self.timezoneNamesToPolygons[tzname])) 
polyIndices", "= list( map(lambda p: prepared.prep( geometry.Polygon(p[0], p[1]) ), self.timezoneNamesToPolygons[tzname])) polyIndices", "geometry.Polygon(p[0], p[1]) ), self.timezoneNamesToPolygons[tzname])) polyIndices = set(latTzOptions[tzname]).intersection(set( lngTzOptions[tzname] )) for", "COLLECTION_TYPE): self.unprepTimezoneNamesToPolygons[tzname] = list( map(lambda p: p.context if isinstance(p, prepared.PreparedGeometry)", "class are instantiated and queried directly ''' import collections try:", "if numpy is installed. try: import numpy WRAP = numpy.asarray", "faster than using an r-tree ''' def find_min_max(ls, gridSize): minLs", "instead. Only works if the point has the same integer", "os.path.split(__file__) BASE_DIR = os.path.dirname(this_dir) class tzwhere(object): SHORTCUT_DEGREES_LATITUDE = 1.0 SHORTCUT_DEGREES_LONGITUDE", "queryPoint): distances = [] if possibleTimezones: if len(possibleTimezones) == 1:", "''' featureCollection = read_tzworld(tzwhere.DEFAULT_POLYGONS) pgen = feature_collection_polygons(featureCollection) self.timezoneNamesToPolygons = collections.defaultdict(list)", "= numpy.ndarray except ImportError: WRAP = tuple COLLECTION_TYPE = tuple", "during initialization arleady ''' featureCollection = read_tzworld(tzwhere.DEFAULT_POLYGONS) pgen = feature_collection_polygons(featureCollection)", "interior = feature['geometry']['coordinates'][1:] yield (tzname, (exterior, interior)) if __name__ ==", "this is loaded as a module and instances of the", "package directory DEFAULT_SHORTCUTS = os.path.join(os.path.dirname(__file__), 'tz_world_shortcuts.json') DEFAULT_POLYGONS = os.path.join(os.path.dirname(__file__), 'tz_world.json.gz')", "@forceTZ: If you want to force the lookup method to", "WRAP = numpy.asarray COLLECTION_TYPE = numpy.ndarray except ImportError: WRAP =", "self.timezoneLongitudeShortcuts[degree][tzname] = \\ tuple(self.timezoneLongitudeShortcuts[degree][tzname]) def tzNameAt(self, latitude, longitude, forceTZ=False): '''", 
"timezone return the closest timezone you can find instead. Only", "= self.timezoneNamesToPolygons[tzname][polyIndex] if poly.contains_properly(queryPoint): return tzname if forceTZ: return self.__forceTZ__(possibleTimezones,", "= poly.distance(queryPoint) distances.append((d, tzname)) if len(distances) > 0: return sorted(distances,", "= self.construct_shortcuts( tzNamesToPolygons, tzwhere.SHORTCUT_DEGREES_LONGITUDE, tzwhere.SHORTCUT_DEGREES_LATITUDE) with open(DEFAULT_SHORTCUTS, 'w') as f:", "timezoneLongitudeShortcuts[degree][tzname].append(polyIndex) degree = degree + shortcut_long degree = minLat while", "pulling values/files this_dir, this_filename = os.path.split(__file__) BASE_DIR = os.path.dirname(this_dir) class", "for polyIndex in polyIndices: poly = self.timezoneNamesToPolygons[tzname][polyIndex] if poly.contains_properly(queryPoint): return", "prepared.PreparedGeometry) else geometry.Polygon(p[0], p[1]), self.timezoneNamesToPolygons[tzname])) polyIndices = set(latTzOptions[tzname]).intersection( set(lngTzOptions[tzname])) for", "our shortcuts for looking up polygons. 
Much faster than using", "featureCollection = json.loads(f.read().decode(\"utf-8\")) return featureCollection def feature_collection_polygons(featureCollection): \"\"\"Turn a feature", "= os.path.join(os.path.dirname(__file__), 'tz_world_shortcuts.json') DEFAULT_POLYGONS = os.path.join(os.path.dirname(__file__), 'tz_world.json.gz') featureCollection = read_tzworld(DEFAULT_POLYGONS)", "lngs = [x[0] for x in poly[0]] lats = [x[1]", "COLLECTION_TYPE = numpy.ndarray except ImportError: WRAP = tuple COLLECTION_TYPE =", "= feature_collection_polygons(featureCollection) self.timezoneNamesToPolygons = collections.defaultdict(list) self.unprepTimezoneNamesToPolygons = collections.defaultdict(list) for tzname,", "looking up is slightly outside it's bounds, you need to", "/ gridSize) * gridSize) maxLs = (math.floor(max(ls) / gridSize) *", "an r-tree ''' def find_min_max(ls, gridSize): minLs = (math.floor(min(ls) /", "if the point you are looking up is slightly outside", "is installed. try: import numpy WRAP = numpy.asarray COLLECTION_TYPE =", "lngTzOptions, queryPoint) def __forceTZ__(self, possibleTimezones, latTzOptions, lngTzOptions, queryPoint): distances =", "self.__forceTZ__(possibleTimezones, latTzOptions, lngTzOptions, queryPoint) def __forceTZ__(self, possibleTimezones, latTzOptions, lngTzOptions, queryPoint):", "feature in featureCollection['features']: tzname = feature['properties']['TZID'] if feature['geometry']['type'] == 'Polygon':", "222MB of RAM by turning our polygon lists into #", "= os.path.join(os.path.dirname(__file__), 'tz_world.json.gz') featureCollection = read_tzworld(DEFAULT_POLYGONS) pgen = feature_collection_polygons(featureCollection) tzNamesToPolygons", "you can't find a valid timezone return the closest timezone", "os import shapely.geometry as geometry import shapely.prepared as prepared #", "looking up polygons. 
Much faster than using an r-tree '''", "possibleTimezones = lngSet.intersection(latSet) queryPoint = geometry.Point(longitude, latitude) if possibleTimezones: for", "'tz_world_shortcuts.json') DEFAULT_POLYGONS = os.path.join(os.path.dirname(__file__), 'tz_world.json.gz') featureCollection = read_tzworld(DEFAULT_POLYGONS) pgen =", "x: x[0])[0][1] class prepareMap(object): def __init__(self): DEFAULT_SHORTCUTS = os.path.join(os.path.dirname(__file__), 'tz_world_shortcuts.json')", "def read_tzworld(path): reader = read_json return reader(path) def read_json(path): with", "self.forceTZ = forceTZ for tzname in self.timezoneNamesToPolygons: # Convert things", "initialize tzwhere with forceTZ' latTzOptions = self.timezoneLatitudeShortcuts[str( (math.floor(latitude / self.SHORTCUT_DEGREES_LATITUDE)", "tzwhere class. @forceTZ: If you want to force the lookup", "has the same integer value for its degree than the", "timezoneLatitudeShortcuts: timezoneLatitudeShortcuts[degree] =\\ collections.defaultdict(list) timezoneLatitudeShortcuts[degree][tzname].append(polyIndex) degree = degree + shortcut_lat", "loaded as a module and instances of the tzwhere class", "latitude and longitude the appropriate timezone. @latitude: latitude @longitude: longitude", "and instances of the tzwhere class are instantiated and queried", "normal json except: try: import json except ImportError: import simplejson", "as f: json.dump( (timezoneLongitudeShortcuts, timezoneLatitudeShortcuts), f) @staticmethod def construct_shortcuts(timezoneNamesToPolygons, shortcut_long,", "= self.timezoneLongitudeShortcuts[str( (math.floor(longitude / self.SHORTCUT_DEGREES_LONGITUDE) * self.SHORTCUT_DEGREES_LONGITUDE) )] lngSet =", "geometry.Point(longitude, latitude) if possibleTimezones: for tzname in possibleTimezones: if isinstance(self.timezoneNamesToPolygons[tzname],", "def __init__(self, forceTZ=False): ''' Initializes the tzwhere class. 
@forceTZ: If", "tzname in possibleTimezones: if isinstance(self.unprepTimezoneNamesToPolygons[tzname], COLLECTION_TYPE): self.unprepTimezoneNamesToPolygons[tzname] = list( map(lambda", "timezoneLongitudeShortcuts,\\ timezoneLatitudeShortcuts = self.construct_shortcuts( tzNamesToPolygons, tzwhere.SHORTCUT_DEGREES_LONGITUDE, tzwhere.SHORTCUT_DEGREES_LATITUDE) with open(DEFAULT_SHORTCUTS, 'w')", "in self.timezoneNamesToPolygons.items(): self.timezoneNamesToPolygons[tzname] = WRAP(polys) if forceTZ: self.unprepTimezoneNamesToPolygons[tzname] = WRAP(polys)", "timezone you can find instead. Only works if the point", "tzNamesToPolygons[tzname] = \\ WRAP(tzNamesToPolygons[tzname]) timezoneLongitudeShortcuts,\\ timezoneLatitudeShortcuts = self.construct_shortcuts( tzNamesToPolygons, tzwhere.SHORTCUT_DEGREES_LONGITUDE,", "in enumerate(timezoneNamesToPolygons[tzname]): lngs = [x[0] for x in poly[0]] lats", "forceTZ is true and you can't find a valid timezone", "and polygon is a list of floats. \"\"\" for feature", "for tzname, poly in pgen: tzNamesToPolygons[tzname].append(poly) for tzname, polys in", "= [x[1] for x in poly[0]] tzLngs.extend(lngs) tzLats.extend(lats) minLng, maxLng", "poly in pgen: self.timezoneNamesToPolygons[tzname].append(poly) for tzname, polys in self.timezoneNamesToPolygons.items(): self.timezoneNamesToPolygons[tzname]", "simplejson as json import math import gzip import os import", "tzname in self.timezoneNamesToPolygons: # Convert things to tuples to save", "'r') as f: self.timezoneLongitudeShortcuts, self.timezoneLatitudeShortcuts = json.load(f) self.forceTZ = forceTZ", "need to initialize tzwhere with forceTZ' latTzOptions = self.timezoneLatitudeShortcuts[str( (math.floor(latitude", "tzNamesToPolygons[tzname].append(poly) for tzname, polys in tzNamesToPolygons.items(): tzNamesToPolygons[tzname] = \\ WRAP(tzNamesToPolygons[tzname])", "<= maxLat: if degree not in timezoneLatitudeShortcuts: timezoneLatitudeShortcuts[degree] =\\ 
collections.defaultdict(list)", "degree = degree + shortcut_long degree = minLat while degree", "loads 2 seconds faster than normal json except: try: import", "@longitude: longitude @forceTZ: If forceTZ is true and you can't", "with open(tzwhere.DEFAULT_SHORTCUTS, 'r') as f: self.timezoneLongitudeShortcuts, self.timezoneLatitudeShortcuts = json.load(f) self.forceTZ", "timezoneLongitudeShortcuts, timezoneLatitudeShortcuts def read_tzworld(path): reader = read_json return reader(path) def", "maxLs timezoneLongitudeShortcuts = {} timezoneLatitudeShortcuts = {} for tzname in", "every polygon in the featureCollection. Here tzname is a string", "=\\ collections.defaultdict(list) timezoneLatitudeShortcuts[degree][tzname].append(polyIndex) degree = degree + shortcut_lat return timezoneLongitudeShortcuts,", "minLs = (math.floor(min(ls) / gridSize) * gridSize) maxLs = (math.floor(max(ls)", "timezoneLatitudeShortcuts), f) @staticmethod def construct_shortcuts(timezoneNamesToPolygons, shortcut_long, shortcut_lat): ''' Construct our", "for x in poly[0]] lats = [x[1] for x in", "= os.path.join(os.path.dirname(__file__), 'tz_world.json.gz') def __init__(self, forceTZ=False): ''' Initializes the tzwhere", "arleady ''' featureCollection = read_tzworld(tzwhere.DEFAULT_POLYGONS) pgen = feature_collection_polygons(featureCollection) self.timezoneNamesToPolygons =", "into # numpy arrays rather than tuples, if numpy is", "for tzname in self.timezoneLatitudeShortcuts[degree].keys(): self.timezoneLatitudeShortcuts[degree][tzname] = \\ tuple(self.timezoneLatitudeShortcuts[degree][tzname]) for degree", "Initializes the tzwhere class. @forceTZ: If you want to force", "a string and polygon is a list of floats. 
\"\"\"", "Given a featureCollection of the kind loaded from the json", "read_tzworld(tzwhere.DEFAULT_POLYGONS) pgen = feature_collection_polygons(featureCollection) self.timezoneNamesToPolygons = collections.defaultdict(list) self.unprepTimezoneNamesToPolygons = collections.defaultdict(list)", "degree than the timezeone ''' if forceTZ: assert self.forceTZ, 'You", "possibleTimezones: for tzname in possibleTimezones: if isinstance(self.timezoneNamesToPolygons[tzname], COLLECTION_TYPE): self.timezoneNamesToPolygons[tzname] =", "\"\"\" for feature in featureCollection['features']: tzname = feature['properties']['TZID'] if feature['geometry']['type']", "pairs, one for every polygon in the featureCollection. Here tzname", "= {} for tzname in timezoneNamesToPolygons: tzLngs = [] tzLats", "featureCollection = read_tzworld(DEFAULT_POLYGONS) pgen = feature_collection_polygons(featureCollection) tzNamesToPolygons = collections.defaultdict(list) for", "the point you are looking up is slightly outside it's", "except ImportError: WRAP = tuple COLLECTION_TYPE = tuple # for", "for tzname in timezoneNamesToPolygons: tzLngs = [] tzLats = []", "degree = minLat while degree <= maxLat: if degree not", "to initialize tzwhere with forceTZ' latTzOptions = self.timezoneLatitudeShortcuts[str( (math.floor(latitude /", "if degree not in timezoneLongitudeShortcuts: timezoneLongitudeShortcuts[degree] =\\ collections.defaultdict(list) timezoneLongitudeShortcuts[degree][tzname].append(polyIndex) degree", "#!/usr/bin/env python '''tzwhere.py - time zone computation from latitude/longitude. 
Ordinarily", "use the data file in our package directory DEFAULT_SHORTCUTS =", "degree <= maxLng: if degree not in timezoneLongitudeShortcuts: timezoneLongitudeShortcuts[degree] =\\", "shapely.geometry as geometry import shapely.prepared as prepared # We can", "(math.floor(latitude / self.SHORTCUT_DEGREES_LATITUDE) * self.SHORTCUT_DEGREES_LATITUDE) )] latSet = set(latTzOptions.keys()) lngTzOptions", "latTzOptions, lngTzOptions, queryPoint): distances = [] if possibleTimezones: if len(possibleTimezones)", "for polyIndex, poly in enumerate(timezoneNamesToPolygons[tzname]): lngs = [x[0] for x", "tzname if forceTZ: return self.__forceTZ__(possibleTimezones, latTzOptions, lngTzOptions, queryPoint) def __forceTZ__(self,", "[] tzLats = [] for polyIndex, poly in enumerate(timezoneNamesToPolygons[tzname]): lngs", "= set(latTzOptions.keys()) lngTzOptions = self.timezoneLongitudeShortcuts[str( (math.floor(longitude / self.SHORTCUT_DEGREES_LONGITUDE) * self.SHORTCUT_DEGREES_LONGITUDE)", "import numpy WRAP = numpy.asarray COLLECTION_TYPE = numpy.ndarray except ImportError:", "polyIndices: poly = self.timezoneNamesToPolygons[tzname][polyIndex] if poly.contains_properly(queryPoint): return tzname if forceTZ:", "while degree <= maxLat: if degree not in timezoneLatitudeShortcuts: timezoneLatitudeShortcuts[degree]", "a given latitude and longitude the appropriate timezone. @latitude: latitude", "shortcut_lat return timezoneLongitudeShortcuts, timezoneLatitudeShortcuts def read_tzworld(path): reader = read_json return", "with gzip.open(path, \"rb\") as f: featureCollection = json.loads(f.read().decode(\"utf-8\")) return featureCollection", "find instead. 
Only works if the point has the same", "BASE_DIR = os.path.dirname(this_dir) class tzwhere(object): SHORTCUT_DEGREES_LATITUDE = 1.0 SHORTCUT_DEGREES_LONGITUDE =", "self.timezoneLatitudeShortcuts[degree][tzname] = \\ tuple(self.timezoneLatitudeShortcuts[degree][tzname]) for degree in self.timezoneLongitudeShortcuts.keys(): for tzname", "if the point has the same integer value for its", "[] if possibleTimezones: if len(possibleTimezones) == 1: return possibleTimezones.pop() else:", "save about 222MB of RAM by turning our polygon lists", "for tzname, polys in tzNamesToPolygons.items(): tzNamesToPolygons[tzname] = \\ WRAP(tzNamesToPolygons[tzname]) timezoneLongitudeShortcuts,\\", "in poly[0]] tzLngs.extend(lngs) tzLats.extend(lats) minLng, maxLng = find_min_max( lngs, shortcut_long)", "possibleTimezones: if isinstance(self.unprepTimezoneNamesToPolygons[tzname], COLLECTION_TYPE): self.unprepTimezoneNamesToPolygons[tzname] = list( map(lambda p: p.context", "tzname = feature['properties']['TZID'] if feature['geometry']['type'] == 'Polygon': exterior = feature['geometry']['coordinates'][0]", "poly.distance(queryPoint) distances.append((d, tzname)) if len(distances) > 0: return sorted(distances, key=lambda", "data file in our package directory DEFAULT_SHORTCUTS = os.path.join(os.path.dirname(__file__), 'tz_world_shortcuts.json')", "poly in pgen: tzNamesToPolygons[tzname].append(poly) for tzname, polys in tzNamesToPolygons.items(): tzNamesToPolygons[tzname]", "seconds faster than normal json except: try: import json except", "latitude @longitude: longitude @forceTZ: If forceTZ is true and you", "find a valid timezone return the closest timezone you can", "<reponame>tuxiqae/pytzwhere #!/usr/bin/env python '''tzwhere.py - time zone computation from latitude/longitude.", "= os.path.join(os.path.dirname(__file__), 'tz_world_shortcuts.json') DEFAULT_POLYGONS = os.path.join(os.path.dirname(__file__), 'tz_world.json.gz') def __init__(self, forceTZ=False):", "for tzname in 
self.timezoneNamesToPolygons: # Convert things to tuples to", "timezoneLongitudeShortcuts = {} timezoneLatitudeShortcuts = {} for tzname in timezoneNamesToPolygons:", "collections.defaultdict(list) for tzname, poly in pgen: self.timezoneNamesToPolygons[tzname].append(poly) for tzname, polys", "the kind loaded from the json input, unpack it to", "return sorted(distances, key=lambda x: x[0])[0][1] class prepareMap(object): def __init__(self): DEFAULT_SHORTCUTS", "the tzwhere class are instantiated and queried directly ''' import", "shortcut_long, shortcut_lat): ''' Construct our shortcuts for looking up polygons.", "x in poly[0]] lats = [x[1] for x in poly[0]]", "find_min_max( lngs, shortcut_long) minLat, maxLat = find_min_max( lats, shortcut_lat) degree", "featureCollection. Here tzname is a string and polygon is a", "Much faster than using an r-tree ''' def find_min_max(ls, gridSize):", "/ self.SHORTCUT_DEGREES_LATITUDE) * self.SHORTCUT_DEGREES_LATITUDE) )] latSet = set(latTzOptions.keys()) lngTzOptions =", "find_min_max(ls, gridSize): minLs = (math.floor(min(ls) / gridSize) * gridSize) maxLs", "feature_collection_polygons(featureCollection) self.timezoneNamesToPolygons = collections.defaultdict(list) self.unprepTimezoneNamesToPolygons = collections.defaultdict(list) for tzname, poly", "tuple # for navigation and pulling values/files this_dir, this_filename =", "= feature_collection_polygons(featureCollection) tzNamesToPolygons = collections.defaultdict(list) for tzname, poly in pgen:", "is loaded as a module and instances of the tzwhere", "degree + shortcut_lat return timezoneLongitudeShortcuts, timezoneLatitudeShortcuts def read_tzworld(path): reader =", "(math.floor(max(ls) / gridSize) * gridSize) return minLs, maxLs timezoneLongitudeShortcuts =" ]
[ "query feature that gets the average length of normalized tokens", "query and returns the average normalized token length \"\"\" #", "gets the average length of normalized tokens in the query„", "Returns: (function) A feature extraction function that takes a query", "feature extraction function that takes a query and returns the", "in the query„ Returns: (function) A feature extraction function that", "tokens in the query„ Returns: (function) A feature extraction function", "takes a query and returns the average normalized token length", "def extract_average_token_length(**args): \"\"\" Example query feature that gets the average", "returns the average normalized token length \"\"\" # pylint: disable=locally-disabled,unused-argument", "the average length of normalized tokens in the query„ Returns:", "(function) A feature extraction function that takes a query and", "= sum([len(t) for t in tokens]) / len(tokens) return {'average_token_length':", "from mindmeld.models.helpers import register_query_feature @register_query_feature(feature_name='average-token-length') def extract_average_token_length(**args): \"\"\" Example query", "extraction function that takes a query and returns the average", "function that takes a query and returns the average normalized", "token length \"\"\" # pylint: disable=locally-disabled,unused-argument def _extractor(query, resources): tokens", "average_token_length = sum([len(t) for t in tokens]) / len(tokens) return", "resources): tokens = query.normalized_tokens average_token_length = sum([len(t) for t in", "that takes a query and returns the average normalized token", "normalized tokens in the query„ Returns: (function) A feature extraction", "a query and returns the average normalized token length \"\"\"", "# pylint: disable=locally-disabled,unused-argument def _extractor(query, resources): tokens = query.normalized_tokens average_token_length", "feature that gets the average length of normalized tokens in", "length \"\"\" # pylint: 
disable=locally-disabled,unused-argument def _extractor(query, resources): tokens =", "tokens = query.normalized_tokens average_token_length = sum([len(t) for t in tokens])", "average normalized token length \"\"\" # pylint: disable=locally-disabled,unused-argument def _extractor(query,", "that gets the average length of normalized tokens in the", "query„ Returns: (function) A feature extraction function that takes a", "average length of normalized tokens in the query„ Returns: (function)", "= query.normalized_tokens average_token_length = sum([len(t) for t in tokens]) /", "for t in tokens]) / len(tokens) return {'average_token_length': average_token_length} return", "the average normalized token length \"\"\" # pylint: disable=locally-disabled,unused-argument def", "Example query feature that gets the average length of normalized", "t in tokens]) / len(tokens) return {'average_token_length': average_token_length} return _extractor", "register_query_feature @register_query_feature(feature_name='average-token-length') def extract_average_token_length(**args): \"\"\" Example query feature that gets", "length of normalized tokens in the query„ Returns: (function) A", "of normalized tokens in the query„ Returns: (function) A feature", "extract_average_token_length(**args): \"\"\" Example query feature that gets the average length", "normalized token length \"\"\" # pylint: disable=locally-disabled,unused-argument def _extractor(query, resources):", "query.normalized_tokens average_token_length = sum([len(t) for t in tokens]) / len(tokens)", "_extractor(query, resources): tokens = query.normalized_tokens average_token_length = sum([len(t) for t", "\"\"\" # pylint: disable=locally-disabled,unused-argument def _extractor(query, resources): tokens = query.normalized_tokens", "<filename>tests/home_assistant/custom_features.py<gh_stars>1-10 from mindmeld.models.helpers import register_query_feature @register_query_feature(feature_name='average-token-length') def 
extract_average_token_length(**args): \"\"\" Example", "and returns the average normalized token length \"\"\" # pylint:", "pylint: disable=locally-disabled,unused-argument def _extractor(query, resources): tokens = query.normalized_tokens average_token_length =", "disable=locally-disabled,unused-argument def _extractor(query, resources): tokens = query.normalized_tokens average_token_length = sum([len(t)", "import register_query_feature @register_query_feature(feature_name='average-token-length') def extract_average_token_length(**args): \"\"\" Example query feature that", "def _extractor(query, resources): tokens = query.normalized_tokens average_token_length = sum([len(t) for", "the query„ Returns: (function) A feature extraction function that takes", "sum([len(t) for t in tokens]) / len(tokens) return {'average_token_length': average_token_length}", "@register_query_feature(feature_name='average-token-length') def extract_average_token_length(**args): \"\"\" Example query feature that gets the", "mindmeld.models.helpers import register_query_feature @register_query_feature(feature_name='average-token-length') def extract_average_token_length(**args): \"\"\" Example query feature", "A feature extraction function that takes a query and returns", "\"\"\" Example query feature that gets the average length of" ]
[ "elif code == \"3001\": value = \"Customer is invalid.\" elif", "\"1004\": value = \"There was an error with the database.\"", "Application registration key does not match.\" elif code == \"1112\":", "elif code == \"2513\": value = \"Serial number is not", "inventory failed.\" elif code == \"2203\": value = \"Can not", "value = \"This Integrated Application registration key does not match.\"", "== \"3001\": value = \"Customer is invalid.\" elif code ==", "value = \"There was an error load PO {0}.\" elif", "\"Was not able to find the Tag number {0}.\" elif", "elif code == \"2601\": value = \"Invalid location.\" elif code", "PO {0}.\" elif code == \"4001\": value = \"Unknow status", "elif code == \"1110\": value = \"A new Integrated Application", "request function.\" elif code == \"1100\": value = \"Unknown login", "number is not valid.\" elif code == \"2600\": value =", "have been logged off the server by an administrator.\" elif", "== \"1004\": value = \"There was an error with the", "is not valid.\" elif code == \"2510\": value = \"Serial", "\"2513\": value = \"Serial number is not valid.\" elif code", "code == \"1000\": value = \"Success!\" elif code == \"1001\":", "been logged off the server by an administrator.\" elif code", "Tracking is not valid.\" elif code == \"2510\": value =", "data was of the wrong type.\" elif code == \"2000\":", "not been approved by the Fishbowl Inventory Administrator.\" elif code", "value = \"Invalid location.\" elif code == \"2602\": value =", "== \"1130\": value = \"Invalid Ticket passed to Fishbowl Inventory", "\"1010\": value = \"You have been logged off the server", "Server has been shut down.\" elif code == \"1010\": value", "in Tagnumber {0}.\" elif code == \"2305\": value = \"Tag", "found.\" elif code == \"2601\": value = \"Invalid location.\" elif", "now isn't that helpful...\" elif code == \"1004\": value =", "was an error load PO {0}.\" elif code == \"4001\":", "a location.\" elif code == \"2400\": value = \"Invalid 
UOM.\"", "= \"Was not able to find the Part {0}.\" elif", "value = \"There was an error with the database.\" elif", "value = \"Not enough available inventory in Tagnumber {0}.\" elif", "\"1140\": value = \"Initialization token is not correct type.\" elif", "the Tag number {0}.\" elif code == \"2301\": value =", "been reached for the server's key.\" elif code == \"1200\":", "contact your Fishbowl Inventory Administrator to approve this Integrated Application.\"", "= \"Initialization token is not correct type.\" elif code ==", "= \"Commit failed.\" elif code == \"2202\": value = \"Add", "\"File could not be written to.\" elif code == \"1505\":", "== \"2513\": value = \"Serial number is not valid.\" elif", "was of the wrong type.\" elif code == \"2000\": value", "{0} not found.\" elif code == \"3001\": value = \"Customer", "reached for the server's key.\" elif code == \"1200\": value", "== \"1009\": value = \"Fishbowl Server has been shut down.\"", "Inventory Administrator.\" elif code == \"1120\": value = \"Invalid Username", "\"1150\": value = \"Request was invalid\" elif code == \"1160\":", "\"2000\": value = \"Was not able to find the Part", "been added to Fishbowl Inventory. 
Please contact your Fishbowl Inventory", "elif code == \"1002\": value = \"Connection to Fishbowl Server", "\"2303\": value = \"Was not able to save Tag number", "value = \"Success!\" elif code == \"1001\": value = \"Unknown", "\"Can not adjust committed inventory.\" elif code == \"2300\": value", "by the Fishbowl Inventory Administrator.\" elif code == \"1120\": value", "is invalid.\" elif code == \"3100\": value = \"Vendor {0}", "== \"2201\": value = \"Commit failed.\" elif code == \"2202\":", "not able to find the Tag number {0}.\" elif code", "value = \"Invalid Username or Password.\" elif code == \"1130\":", "\"2300\": value = \"Was not able to find the Tag", "to find the Part {0}.\" elif code == \"2001\": value", "code == \"2510\": value = \"Serial number is missing.\" elif", "product was invalid.\" elif code == \"2200\": value = \"The", "== \"1000\": value = \"Success!\" elif code == \"1001\": value", "the wrong type.\" elif code == \"2000\": value = \"Was", "code == \"2000\": value = \"Was not able to find", "database.\" elif code == \"1009\": value = \"Fishbowl Server has", "code == \"1001\": value = \"Unknown Message Received\" elif code", "quantity.\" elif code == \"2500\": value = \"The Tracking is", "code == \"2201\": value = \"Commit failed.\" elif code ==", "found.\" elif code == \"3001\": value = \"Customer is invalid.\"", "== \"1131\": value = \"Invalid Key value.\" elif code ==", "= \"Custom Field is invalid.\" elif code == \"1500\": value", "option in the purchase order module options.\" else: value =", "== \"3101\": value = \"Vendor is invalid.\" elif code ==", "written to.\" elif code == \"1505\": value = \"The import", "added to Fishbowl Inventory. 
Please contact your Fishbowl Inventory Administrator", "\"Serial number is null.\" elif code == \"2512\": value =", "\"2512\": value = \"Serial number is duplicate.\" elif code ==", "code == \"1112\": value = \"This Integrated Application has not", "that helpful...\" elif code == \"1004\": value = \"There was", "number. Please turn on the auto-assign PO number option in", "value = \"Some Requests had errors -- now isn't that", "was invalid.\" elif code == \"2200\": value = \"The yield", "== \"2401\": value = \"UOM {0} not found.\" elif code", "{0} is a location.\" elif code == \"2400\": value =", "code == \"3101\": value = \"Vendor is invalid.\" elif code", "\"2301\": value = \"The tag is invalid.\" elif code ==", "committed inventory.\" elif code == \"2300\": value = \"Was not", "to save Tag number {0}.\" elif code == \"2304\": value", "\"Add initial inventory failed.\" elif code == \"2203\": value =", "value = \"You have been logged off the server by", "value = \"Was not able to find the Product {0}.\"", "Application.\" elif code == \"1111\": value = \"This Integrated Application", "\"Success!\" elif code == \"1001\": value = \"Unknown Message Received\"", "# -*- coding: utf-8 -*- def getstatus(code): if code ==", "= \"Response was invalid.\" elif code == \"1162\": value =", "to approve this Integrated Application.\" elif code == \"1111\": value", "a PO number. 
Please turn on the auto-assign PO number", "able to find the Tag number {0}.\" elif code ==", "the purchase order module options.\" else: value = 'Unknown status'", "code == \"1502\": value = \"File not found.\" elif code", "code == \"1004\": value = \"There was an error with", "elif code == \"2305\": value = \"Tag number {0} is", "== \"1010\": value = \"You have been logged off the", "number is null.\" elif code == \"2512\": value = \"Serial", "value = \"Serial number is missing.\" elif code == \"2511\":", "is null.\" elif code == \"2512\": value = \"Serial number", "= \"Location not found.\" elif code == \"2601\": value =", "elif code == \"1100\": value = \"Unknown login error occurred.\"", "occurred.\" elif code == \"1110\": value = \"A new Integrated", "value = \"Was not able to find the Part {0}.\"", "location.\" elif code == \"2400\": value = \"Invalid UOM.\" elif", "key does not match.\" elif code == \"1112\": value =", "= \"Vendor {0} not found.\" elif code == \"3101\": value", "class {0}.\" elif code == \"4004\": value = \"PO does", "== \"2100\": value = \"Was not able to find the", "the Fishbowl Inventory Administrator.\" elif code == \"1120\": value =", "elif code == \"2200\": value = \"The yield failed.\" elif", "code == \"1500\": value = \"The import was not properly", "== \"1502\": value = \"File not found.\" elif code ==", "the auto-assign PO number option in the purchase order module", "load PO {0}.\" elif code == \"4001\": value = \"Unknow", "{0} not found.\" elif code == \"2402\": value = \"Integer", "\"The import was not properly formed.\" elif code == \"1501\":", "is not correct type.\" elif code == \"1150\": value =", "not found.\" elif code == \"3001\": value = \"Customer is", "number {0}.\" elif code == \"2301\": value = \"The tag", "code == \"1504\": value = \"File could not be written", "\"3001\": value = \"Customer is invalid.\" elif code == \"3100\":", "\"Customer is invalid.\" elif code == \"3100\": value = \"Vendor", "== \"2600\": value 
= \"Location not found.\" elif code ==", "elif code == \"4003\": value = \"Unknown QuickBooks class {0}.\"", "= \"The tag move failed.\" elif code == \"2303\": value", "{0} not found.\" elif code == \"3101\": value = \"Vendor", "\"That import type is not supported\" elif code == \"1502\":", "== \"2203\": value = \"Can not adjust committed inventory.\" elif", "status {0}.\" elif code == \"4002\": value = \"Unknown carrier", "= \"Connection to Fishbowl Server was lost\" elif code ==", "passed to Fishbowl Inventory Server.\" elif code == \"1131\": value", "\"The Tracking is not valid.\" elif code == \"2510\": value", "\"Invalid Key value.\" elif code == \"1140\": value = \"Initialization", "failed.\" elif code == \"2303\": value = \"Was not able", "== \"1003\": value = \"Some Requests had errors -- now", "Key value.\" elif code == \"1140\": value = \"Initialization token", "Please contact your Fishbowl Inventory Administrator to approve this Integrated", "\"1503\": value = \"That export type is not supported.\" elif", "\"Unknown login error occurred.\" elif code == \"1110\": value =", "number is missing.\" elif code == \"2511\": value = \"Serial", "\"A new Integrated Application has been added to Fishbowl Inventory.", "supported\" elif code == \"1502\": value = \"File not found.\"", "code == \"1130\": value = \"Invalid Ticket passed to Fishbowl", "value = \"The Tracking is not valid.\" elif code ==", "= \"Invalid Username or Password.\" elif code == \"1130\": value", "= \"Unknown QuickBooks class {0}.\" elif code == \"4004\": value", "this Integrated Application.\" elif code == \"1111\": value = \"This", "== \"1505\": value = \"The import data was of the", "Tagnumber {0}.\" elif code == \"2305\": value = \"Tag number", "login error occurred.\" elif code == \"1110\": value = \"A", "{0}.\" elif code == \"2001\": value = \"The part was", "elif code == \"4004\": value = \"PO does not have", "code == \"1010\": value = \"You have been logged off", "error with the 
database.\" elif code == \"1009\": value =", "Administrator to approve this Integrated Application.\" elif code == \"1111\":", "code == \"2200\": value = \"The yield failed.\" elif code", "yield failed.\" elif code == \"2201\": value = \"Commit failed.\"", "inventory in Tagnumber {0}.\" elif code == \"2305\": value =", "code == \"1110\": value = \"A new Integrated Application has", "-*- def getstatus(code): if code == \"1000\": value = \"Success!\"", "= \"Unknown Message Received\" elif code == \"1002\": value =", "inventory.\" elif code == \"2300\": value = \"Was not able", "== \"1100\": value = \"Unknown login error occurred.\" elif code", "== \"2512\": value = \"Serial number is duplicate.\" elif code", "Field is invalid.\" elif code == \"1500\": value = \"The", "elif code == \"1162\": value = \"The login limit has", "elif code == \"2302\": value = \"The tag move failed.\"", "not correct type.\" elif code == \"1150\": value = \"Request", "= \"Success!\" elif code == \"1001\": value = \"Unknown Message", "= \"Invalid UOM.\" elif code == \"2401\": value = \"UOM", "== \"3000\": value = \"Customer {0} not found.\" elif code", "location.\" elif code == \"2602\": value = \"Location Group {0}", "\"You have been logged off the server by an administrator.\"", "number {0}.\" elif code == \"2304\": value = \"Not enough", "\"2401\": value = \"UOM {0} not found.\" elif code ==", "in the purchase order module options.\" else: value = 'Unknown", "\"This Integrated Application has not been approved by the Fishbowl", "value = \"The import data was of the wrong type.\"", "value = \"Request was invalid\" elif code == \"1160\": value", "elif code == \"1111\": value = \"This Integrated Application registration", "able to save Tag number {0}.\" elif code == \"2304\":", "\"1003\": value = \"Some Requests had errors -- now isn't", "error occurred.\" elif code == \"1110\": value = \"A new", "\"2400\": value = \"Invalid UOM.\" elif code == \"2401\": value", "= \"Serial number is 
duplicate.\" elif code == \"2513\": value", "import was not properly formed.\" elif code == \"1501\": value", "= \"The import was not properly formed.\" elif code ==", "to find the Product {0}.\" elif code == \"2101\": value", "code == \"2304\": value = \"Not enough available inventory in", "\"That export type is not supported.\" elif code == \"1504\":", "\"Fishbowl Server has been shut down.\" elif code == \"1010\":", "able to find the Product {0}.\" elif code == \"2101\":", "code == \"2305\": value = \"Tag number {0} is a", "token is not correct type.\" elif code == \"1150\": value", "value.\" elif code == \"1140\": value = \"Initialization token is", "elif code == \"2304\": value = \"Not enough available inventory", "not supported\" elif code == \"1502\": value = \"File not", "== \"4000\": value = \"There was an error load PO", "\"4001\": value = \"Unknow status {0}.\" elif code == \"4002\":", "= \"The Tracking is not valid.\" elif code == \"2510\":", "value = \"Unknown Message Received\" elif code == \"1002\": value", "value = \"Response was invalid.\" elif code == \"1162\": value", "elif code == \"1501\": value = \"That import type is", "== \"2400\": value = \"Invalid UOM.\" elif code == \"2401\":", "= \"This Integrated Application has not been approved by the", "limit has been reached for the server's key.\" elif code", "valid.\" elif code == \"2600\": value = \"Location not found.\"", "code == \"2500\": value = \"The Tracking is not valid.\"", "down.\" elif code == \"1010\": value = \"You have been", "not able to save Tag number {0}.\" elif code ==", "== \"2101\": value = \"The product was invalid.\" elif code", "\"3100\": value = \"Vendor {0} not found.\" elif code ==", "Fishbowl Inventory Administrator.\" elif code == \"1120\": value = \"Invalid", "invalid\" elif code == \"1160\": value = \"Response was invalid.\"", "== \"2000\": value = \"Was not able to find the", "code == \"4002\": value = \"Unknown carrier {0}.\" elif code", "not have a PO number. 
Please turn on the auto-assign", "has been added to Fishbowl Inventory. Please contact your Fishbowl", "elif code == \"4000\": value = \"There was an error", "\"The login limit has been reached for the server's key.\"", "= \"Serial number is null.\" elif code == \"2512\": value", "does not match.\" elif code == \"1112\": value = \"This", "invalid.\" elif code == \"3100\": value = \"Vendor {0} not", "elif code == \"1505\": value = \"The import data was", "code == \"3001\": value = \"Customer is invalid.\" elif code", "= \"Not enough available inventory in Tagnumber {0}.\" elif code", "= \"Vendor is invalid.\" elif code == \"4000\": value =", "an administrator.\" elif code == \"1012\": value = \"Unknown request", "errors -- now isn't that helpful...\" elif code == \"1004\":", "{0}.\" elif code == \"4001\": value = \"Unknow status {0}.\"", "= \"Serial number is missing.\" elif code == \"2511\": value", "\"1012\": value = \"Unknown request function.\" elif code == \"1100\":", "Inventory Server.\" elif code == \"1131\": value = \"Invalid Key", "wrong type.\" elif code == \"2000\": value = \"Was not", "\"1000\": value = \"Success!\" elif code == \"1001\": value =", "login limit has been reached for the server's key.\" elif", "\"The part was invalid.\" elif code == \"2100\": value =", "invalid.\" elif code == \"2100\": value = \"Was not able", "{0}.\" elif code == \"4002\": value = \"Unknown carrier {0}.\"", "= \"File could not be written to.\" elif code ==", "is not valid.\" elif code == \"2600\": value = \"Location", "value = \"The part was invalid.\" elif code == \"2100\":", "= \"Unknown request function.\" elif code == \"1100\": value =", "= \"The product was invalid.\" elif code == \"2200\": value", "elif code == \"4002\": value = \"Unknown carrier {0}.\" elif", "administrator.\" elif code == \"1012\": value = \"Unknown request function.\"", "Application has been added to Fishbowl Inventory. 
Please contact your", "code == \"2301\": value = \"The tag is invalid.\" elif", "value = \"Fishbowl Server has been shut down.\" elif code", "\"File not found.\" elif code == \"1503\": value = \"That", "\"Not enough available inventory in Tagnumber {0}.\" elif code ==", "found.\" elif code == \"3101\": value = \"Vendor is invalid.\"", "= \"Some Requests had errors -- now isn't that helpful...\"", "= \"Invalid Ticket passed to Fishbowl Inventory Server.\" elif code", "elif code == \"3101\": value = \"Vendor is invalid.\" elif", "code == \"1200\": value = \"Custom Field is invalid.\" elif", "\"4004\": value = \"PO does not have a PO number.", "\"1001\": value = \"Unknown Message Received\" elif code == \"1002\":", "getstatus(code): if code == \"1000\": value = \"Success!\" elif code", "type is not supported\" elif code == \"1502\": value =", "== \"1504\": value = \"File could not be written to.\"", "PO number option in the purchase order module options.\" else:", "\"2203\": value = \"Can not adjust committed inventory.\" elif code", "elif code == \"2300\": value = \"Was not able to", "code == \"1012\": value = \"Unknown request function.\" elif code", "not found.\" elif code == \"2402\": value = \"Integer UOM", "found.\" elif code == \"2402\": value = \"Integer UOM {0}", "save Tag number {0}.\" elif code == \"2304\": value =", "value = \"That export type is not supported.\" elif code", "elif code == \"1200\": value = \"Custom Field is invalid.\"", "= \"The tag is invalid.\" elif code == \"2302\": value", "\"Invalid location.\" elif code == \"2602\": value = \"Location Group", "= \"Location Group {0} not found.\" elif code == \"3000\":", "of the wrong type.\" elif code == \"2000\": value =", "value = \"The yield failed.\" elif code == \"2201\": value", "= \"Tag number {0} is a location.\" elif code ==", "\"Invalid Ticket passed to Fishbowl Inventory Server.\" elif code ==", "== \"1150\": value = \"Request was invalid\" elif code ==", "elif code == \"1502\": value = 
\"File not found.\" elif", "not able to find the Part {0}.\" elif code ==", "\"Response was invalid.\" elif code == \"1162\": value = \"The", "found.\" elif code == \"3000\": value = \"Customer {0} not", "\"1160\": value = \"Response was invalid.\" elif code == \"1162\":", "not match.\" elif code == \"1112\": value = \"This Integrated", "value = \"Customer is invalid.\" elif code == \"3100\": value", "== \"2500\": value = \"The Tracking is not valid.\" elif", "value = \"Unknown login error occurred.\" elif code == \"1110\":", "not supported.\" elif code == \"1504\": value = \"File could", "was invalid.\" elif code == \"1162\": value = \"The login", "off the server by an administrator.\" elif code == \"1012\":", "code == \"2300\": value = \"Was not able to find", "move failed.\" elif code == \"2303\": value = \"Was not", "your Fishbowl Inventory Administrator to approve this Integrated Application.\" elif", "value = \"Initialization token is not correct type.\" elif code", "\"Initialization token is not correct type.\" elif code == \"1150\":", "Fishbowl Inventory Server.\" elif code == \"1131\": value = \"Invalid", "== \"1120\": value = \"Invalid Username or Password.\" elif code", "\"Serial number is duplicate.\" elif code == \"2513\": value =", "\"2302\": value = \"The tag move failed.\" elif code ==", "\"2511\": value = \"Serial number is null.\" elif code ==", "elif code == \"2201\": value = \"Commit failed.\" elif code", "code == \"1501\": value = \"That import type is not", "approve this Integrated Application.\" elif code == \"1111\": value =", "== \"2402\": value = \"Integer UOM {0} cannot have non-integer", "been approved by the Fishbowl Inventory Administrator.\" elif code ==", "\"Invalid Username or Password.\" elif code == \"1130\": value =", "Part {0}.\" elif code == \"2001\": value = \"The part", "\"Was not able to find the Product {0}.\" elif code", "approved by the Fishbowl Inventory Administrator.\" elif code == \"1120\":", "== \"2305\": value = 
\"Tag number {0} is a location.\"", "import data was of the wrong type.\" elif code ==", "{0}.\" elif code == \"4004\": value = \"PO does not", "code == \"2202\": value = \"Add initial inventory failed.\" elif", "\"Vendor is invalid.\" elif code == \"4000\": value = \"There", "Requests had errors -- now isn't that helpful...\" elif code", "elif code == \"1001\": value = \"Unknown Message Received\" elif", "valid.\" elif code == \"2510\": value = \"Serial number is", "value = \"Unknow status {0}.\" elif code == \"4002\": value", "\"Was not able to save Tag number {0}.\" elif code", "elif code == \"2301\": value = \"The tag is invalid.\"", "coding: utf-8 -*- def getstatus(code): if code == \"1000\": value", "code == \"2302\": value = \"The tag move failed.\" elif", "\"2602\": value = \"Location Group {0} not found.\" elif code", "code == \"2100\": value = \"Was not able to find", "code == \"2602\": value = \"Location Group {0} not found.\"", "\"Location Group {0} not found.\" elif code == \"3000\": value", "adjust committed inventory.\" elif code == \"2300\": value = \"Was", "initial inventory failed.\" elif code == \"2203\": value = \"Can", "== \"1162\": value = \"The login limit has been reached", "elif code == \"2400\": value = \"Invalid UOM.\" elif code", "= \"A new Integrated Application has been added to Fishbowl", "== \"1140\": value = \"Initialization token is not correct type.\"", "Fishbowl Inventory. 
Please contact your Fishbowl Inventory Administrator to approve", "\"Was not able to find the Part {0}.\" elif code", "code == \"4001\": value = \"Unknow status {0}.\" elif code", "could not be written to.\" elif code == \"1505\": value", "invalid.\" elif code == \"1500\": value = \"The import was", "value = \"Invalid Ticket passed to Fishbowl Inventory Server.\" elif", "elif code == \"1012\": value = \"Unknown request function.\" elif", "= \"You have been logged off the server by an", "Administrator.\" elif code == \"1120\": value = \"Invalid Username or", "== \"4002\": value = \"Unknown carrier {0}.\" elif code ==", "elif code == \"2512\": value = \"Serial number is duplicate.\"", "for the server's key.\" elif code == \"1200\": value =", "elif code == \"1130\": value = \"Invalid Ticket passed to", "\"Location not found.\" elif code == \"2601\": value = \"Invalid", "the Product {0}.\" elif code == \"2101\": value = \"The", "= \"Invalid Key value.\" elif code == \"1140\": value =", "\"Unknown Message Received\" elif code == \"1002\": value = \"Connection", "value = \"The import was not properly formed.\" elif code", "is not supported.\" elif code == \"1504\": value = \"File", "= \"File not found.\" elif code == \"1503\": value =", "match.\" elif code == \"1112\": value = \"This Integrated Application", "== \"4004\": value = \"PO does not have a PO", "tag move failed.\" elif code == \"2303\": value = \"Was", "= \"Can not adjust committed inventory.\" elif code == \"2300\":", "= \"The yield failed.\" elif code == \"2201\": value =", "\"2201\": value = \"Commit failed.\" elif code == \"2202\": value", "with the database.\" elif code == \"1009\": value = \"Fishbowl", "\"PO does not have a PO number. 
Please turn on", "was invalid\" elif code == \"1160\": value = \"Response was", "\"2402\": value = \"Integer UOM {0} cannot have non-integer quantity.\"", "Integrated Application has not been approved by the Fishbowl Inventory", "to find the Tag number {0}.\" elif code == \"2301\":", "= \"Integer UOM {0} cannot have non-integer quantity.\" elif code", "Application has not been approved by the Fishbowl Inventory Administrator.\"", "value = \"Customer {0} not found.\" elif code == \"3001\":", "{0}.\" elif code == \"4003\": value = \"Unknown QuickBooks class", "\"Unknown request function.\" elif code == \"1100\": value = \"Unknown", "value = \"A new Integrated Application has been added to", "value = \"The product was invalid.\" elif code == \"2200\":", "{0}.\" elif code == \"2305\": value = \"Tag number {0}", "elif code == \"2602\": value = \"Location Group {0} not", "order module options.\" else: value = 'Unknown status' return value", "code == \"1100\": value = \"Unknown login error occurred.\" elif", "available inventory in Tagnumber {0}.\" elif code == \"2305\": value", "the server by an administrator.\" elif code == \"1012\": value", "elif code == \"1504\": value = \"File could not be", "\"Connection to Fishbowl Server was lost\" elif code == \"1003\":", "Integrated Application registration key does not match.\" elif code ==", "== \"2302\": value = \"The tag move failed.\" elif code", "Fishbowl Inventory Administrator to approve this Integrated Application.\" elif code", "== \"1012\": value = \"Unknown request function.\" elif code ==", "= \"The part was invalid.\" elif code == \"2100\": value", "{0} cannot have non-integer quantity.\" elif code == \"2500\": value", "not valid.\" elif code == \"2510\": value = \"Serial number", "Group {0} not found.\" elif code == \"3000\": value =", "value = \"Unknown request function.\" elif code == \"1100\": value", "the server's key.\" elif code == \"1200\": value = \"Custom", "\"2101\": value = \"The product was invalid.\" 
elif code ==", "Received\" elif code == \"1002\": value = \"Connection to Fishbowl", "#!/usr/bin/python # -*- coding: utf-8 -*- def getstatus(code): if code", "elif code == \"1500\": value = \"The import was not", "== \"2001\": value = \"The part was invalid.\" elif code", "cannot have non-integer quantity.\" elif code == \"2500\": value =", "= \"There was an error load PO {0}.\" elif code", "was invalid.\" elif code == \"2100\": value = \"Was not", "an error load PO {0}.\" elif code == \"4001\": value", "\"2500\": value = \"The Tracking is not valid.\" elif code", "\"1100\": value = \"Unknown login error occurred.\" elif code ==", "\"Customer {0} not found.\" elif code == \"3001\": value =", "be written to.\" elif code == \"1505\": value = \"The", "== \"1110\": value = \"A new Integrated Application has been", "have a PO number. Please turn on the auto-assign PO", "not found.\" elif code == \"3000\": value = \"Customer {0}", "turn on the auto-assign PO number option in the purchase", "\"The import data was of the wrong type.\" elif code", "= \"PO does not have a PO number. 
Please turn", "\"UOM {0} not found.\" elif code == \"2402\": value =", "value = \"Unknown carrier {0}.\" elif code == \"4003\": value", "\"The tag is invalid.\" elif code == \"2302\": value =", "= \"Serial number is not valid.\" elif code == \"2600\":", "elif code == \"2510\": value = \"Serial number is missing.\"", "correct type.\" elif code == \"1150\": value = \"Request was", "Integrated Application.\" elif code == \"1111\": value = \"This Integrated", "\"1120\": value = \"Invalid Username or Password.\" elif code ==", "\"The tag move failed.\" elif code == \"2303\": value =", "elif code == \"2000\": value = \"Was not able to", "import type is not supported\" elif code == \"1502\": value", "\"1009\": value = \"Fishbowl Server has been shut down.\" elif", "== \"1500\": value = \"The import was not properly formed.\"", "was an error with the database.\" elif code == \"1009\":", "\"Some Requests had errors -- now isn't that helpful...\" elif", "elif code == \"2203\": value = \"Can not adjust committed", "value = \"Was not able to find the Tag number", "not adjust committed inventory.\" elif code == \"2300\": value =", "= \"Was not able to find the Product {0}.\" elif", "code == \"2600\": value = \"Location not found.\" elif code", "\"Invalid UOM.\" elif code == \"2401\": value = \"UOM {0}", "is missing.\" elif code == \"2511\": value = \"Serial number", "code == \"4003\": value = \"Unknown QuickBooks class {0}.\" elif", "elif code == \"2401\": value = \"UOM {0} not found.\"", "== \"2202\": value = \"Add initial inventory failed.\" elif code", "Ticket passed to Fishbowl Inventory Server.\" elif code == \"1131\":", "found.\" elif code == \"1503\": value = \"That export type", "code == \"1503\": value = \"That export type is not", "elif code == \"2101\": value = \"The product was invalid.\"", "code == \"1120\": value = \"Invalid Username or Password.\" elif", "value = \"Invalid UOM.\" elif code == \"2401\": value =", "Please turn on the auto-assign PO number option in 
the", "value = \"Add initial inventory failed.\" elif code == \"2203\":", "\"1500\": value = \"The import was not properly formed.\" elif", "-*- coding: utf-8 -*- def getstatus(code): if code == \"1000\":", "code == \"2402\": value = \"Integer UOM {0} cannot have", "code == \"2513\": value = \"Serial number is not valid.\"", "== \"4001\": value = \"Unknow status {0}.\" elif code ==", "value = \"Tag number {0} is a location.\" elif code", "= \"Was not able to save Tag number {0}.\" elif", "elif code == \"1009\": value = \"Fishbowl Server has been", "code == \"4000\": value = \"There was an error load", "\"Unknown carrier {0}.\" elif code == \"4003\": value = \"Unknown", "== \"2301\": value = \"The tag is invalid.\" elif code", "== \"2304\": value = \"Not enough available inventory in Tagnumber", "= \"Customer {0} not found.\" elif code == \"3001\": value", "the Part {0}.\" elif code == \"2001\": value = \"The", "supported.\" elif code == \"1504\": value = \"File could not", "== \"2511\": value = \"Serial number is null.\" elif code", "code == \"3100\": value = \"Vendor {0} not found.\" elif", "value = \"Location not found.\" elif code == \"2601\": value", "\"Serial number is not valid.\" elif code == \"2600\": value", "code == \"2601\": value = \"Invalid location.\" elif code ==", "have non-integer quantity.\" elif code == \"2500\": value = \"The", "\"2001\": value = \"The part was invalid.\" elif code ==", "is invalid.\" elif code == \"2302\": value = \"The tag", "\"1131\": value = \"Invalid Key value.\" elif code == \"1140\":", "\"2200\": value = \"The yield failed.\" elif code == \"2201\":", "missing.\" elif code == \"2511\": value = \"Serial number is", "is a location.\" elif code == \"2400\": value = \"Invalid", "value = \"The tag is invalid.\" elif code == \"2302\":", "elif code == \"1503\": value = \"That export type is", "duplicate.\" elif code == \"2513\": value = \"Serial number is", "= \"Invalid location.\" elif code == \"2602\": value = \"Location", 
"\"Vendor {0} not found.\" elif code == \"3101\": value =", "elif code == \"2202\": value = \"Add initial inventory failed.\"", "elif code == \"1140\": value = \"Initialization token is not", "\"Request was invalid\" elif code == \"1160\": value = \"Response", "\"2305\": value = \"Tag number {0} is a location.\" elif", "elif code == \"3000\": value = \"Customer {0} not found.\"", "error load PO {0}.\" elif code == \"4001\": value =", "code == \"2400\": value = \"Invalid UOM.\" elif code ==", "new Integrated Application has been added to Fishbowl Inventory. Please", "\"There was an error with the database.\" elif code ==", "code == \"1140\": value = \"Initialization token is not correct", "PO number. Please turn on the auto-assign PO number option", "\"1504\": value = \"File could not be written to.\" elif", "value = \"This Integrated Application has not been approved by", "Server was lost\" elif code == \"1003\": value = \"Some", "number option in the purchase order module options.\" else: value", "= \"Unknown carrier {0}.\" elif code == \"4003\": value =", "was not properly formed.\" elif code == \"1501\": value =", "Message Received\" elif code == \"1002\": value = \"Connection to", "value = \"Location Group {0} not found.\" elif code ==", "value = \"That import type is not supported\" elif code", "purchase order module options.\" else: value = 'Unknown status' return", "to Fishbowl Server was lost\" elif code == \"1003\": value", "server's key.\" elif code == \"1200\": value = \"Custom Field", "has not been approved by the Fishbowl Inventory Administrator.\" elif", "code == \"1160\": value = \"Response was invalid.\" elif code", "\"1162\": value = \"The login limit has been reached for", "was lost\" elif code == \"1003\": value = \"Some Requests", "value = \"Serial number is null.\" elif code == \"2512\":", "Fishbowl Server was lost\" elif code == \"1003\": value =", "== \"2602\": value = \"Location Group {0} not found.\" elif", "\"Unknown QuickBooks class 
{0}.\" elif code == \"4004\": value =", "type is not supported.\" elif code == \"1504\": value =", "== \"2510\": value = \"Serial number is missing.\" elif code", "elif code == \"1003\": value = \"Some Requests had errors", "utf-8 -*- def getstatus(code): if code == \"1000\": value =", "not found.\" elif code == \"2601\": value = \"Invalid location.\"", "Password.\" elif code == \"1130\": value = \"Invalid Ticket passed", "been shut down.\" elif code == \"1010\": value = \"You", "shut down.\" elif code == \"1010\": value = \"You have", "find the Part {0}.\" elif code == \"2001\": value =", "\"2510\": value = \"Serial number is missing.\" elif code ==", "is duplicate.\" elif code == \"2513\": value = \"Serial number", "is invalid.\" elif code == \"1500\": value = \"The import", "elif code == \"1112\": value = \"This Integrated Application has", "or Password.\" elif code == \"1130\": value = \"Invalid Ticket", "export type is not supported.\" elif code == \"1504\": value", "code == \"1505\": value = \"The import data was of", "== \"1503\": value = \"That export type is not supported.\"", "= \"Request was invalid\" elif code == \"1160\": value =", "UOM.\" elif code == \"2401\": value = \"UOM {0} not", "\"1111\": value = \"This Integrated Application registration key does not", "null.\" elif code == \"2512\": value = \"Serial number is", "has been reached for the server's key.\" elif code ==", "is not supported\" elif code == \"1502\": value = \"File", "Inventory Administrator to approve this Integrated Application.\" elif code ==", "auto-assign PO number option in the purchase order module options.\"", "\"2100\": value = \"Was not able to find the Product", "elif code == \"2402\": value = \"Integer UOM {0} cannot", "tag is invalid.\" elif code == \"2302\": value = \"The", "number is duplicate.\" elif code == \"2513\": value = \"Serial", "elif code == \"1120\": value = \"Invalid Username or Password.\"", "value = \"Was not able to save Tag number {0}.\"", "= \"This 
Integrated Application registration key does not match.\" elif", "failed.\" elif code == \"2203\": value = \"Can not adjust", "Tag number {0}.\" elif code == \"2304\": value = \"Not", "QuickBooks class {0}.\" elif code == \"4004\": value = \"PO", "= \"The login limit has been reached for the server's", "number {0} is a location.\" elif code == \"2400\": value", "not valid.\" elif code == \"2600\": value = \"Location not", "{0}.\" elif code == \"2301\": value = \"The tag is", "value = \"Vendor is invalid.\" elif code == \"4000\": value", "\"1505\": value = \"The import data was of the wrong", "Username or Password.\" elif code == \"1130\": value = \"Invalid", "to Fishbowl Inventory Server.\" elif code == \"1131\": value =", "type.\" elif code == \"2000\": value = \"Was not able", "isn't that helpful...\" elif code == \"1004\": value = \"There", "value = \"PO does not have a PO number. Please", "elif code == \"1010\": value = \"You have been logged", "= \"Was not able to find the Tag number {0}.\"", "enough available inventory in Tagnumber {0}.\" elif code == \"2305\":", "elif code == \"2303\": value = \"Was not able to", "value = \"The login limit has been reached for the", "not be written to.\" elif code == \"1505\": value =", "elif code == \"1004\": value = \"There was an error", "had errors -- now isn't that helpful...\" elif code ==", "find the Tag number {0}.\" elif code == \"2301\": value", "code == \"2512\": value = \"Serial number is duplicate.\" elif", "{0}.\" elif code == \"2101\": value = \"The product was", "Server.\" elif code == \"1131\": value = \"Invalid Key value.\"", "key.\" elif code == \"1200\": value = \"Custom Field is", "== \"3100\": value = \"Vendor {0} not found.\" elif code", "logged off the server by an administrator.\" elif code ==", "value = \"Can not adjust committed inventory.\" elif code ==", "\"This Integrated Application registration key does not match.\" elif code", "elif code == \"2001\": value = \"The part was invalid.\"", "UOM 
{0} cannot have non-integer quantity.\" elif code == \"2500\":", "code == \"4004\": value = \"PO does not have a", "on the auto-assign PO number option in the purchase order", "== \"1501\": value = \"That import type is not supported\"", "to.\" elif code == \"1505\": value = \"The import data", "invalid.\" elif code == \"2200\": value = \"The yield failed.\"", "failed.\" elif code == \"2201\": value = \"Commit failed.\" elif", "= \"That import type is not supported\" elif code ==", "value = \"Connection to Fishbowl Server was lost\" elif code", "value = \"File not found.\" elif code == \"1503\": value", "an error with the database.\" elif code == \"1009\": value", "value = \"UOM {0} not found.\" elif code == \"2402\":", "non-integer quantity.\" elif code == \"2500\": value = \"The Tracking", "= \"Add initial inventory failed.\" elif code == \"2203\": value", "== \"1111\": value = \"This Integrated Application registration key does", "code == \"1131\": value = \"Invalid Key value.\" elif code", "find the Product {0}.\" elif code == \"2101\": value =", "code == \"1002\": value = \"Connection to Fishbowl Server was", "\"Commit failed.\" elif code == \"2202\": value = \"Add initial", "\"1130\": value = \"Invalid Ticket passed to Fishbowl Inventory Server.\"", "elif code == \"2511\": value = \"Serial number is null.\"", "code == \"1162\": value = \"The login limit has been", "code == \"2401\": value = \"UOM {0} not found.\" elif", "formed.\" elif code == \"1501\": value = \"That import type", "failed.\" elif code == \"2202\": value = \"Add initial inventory", "\"1112\": value = \"This Integrated Application has not been approved", "\"1002\": value = \"Connection to Fishbowl Server was lost\" elif", "value = \"The tag move failed.\" elif code == \"2303\":", "\"Integer UOM {0} cannot have non-integer quantity.\" elif code ==", "{0} not found.\" elif code == \"3000\": value = \"Customer", "elif code == \"2100\": value = \"Was not able to", "by an administrator.\" elif code 
== \"1012\": value = \"Unknown", "== \"2300\": value = \"Was not able to find the", "\"3000\": value = \"Customer {0} not found.\" elif code ==", "= \"Customer is invalid.\" elif code == \"3100\": value =", "\"2202\": value = \"Add initial inventory failed.\" elif code ==", "elif code == \"2600\": value = \"Location not found.\" elif", "== \"1001\": value = \"Unknown Message Received\" elif code ==", "able to find the Part {0}.\" elif code == \"2001\":", "invalid.\" elif code == \"4000\": value = \"There was an", "value = \"Custom Field is invalid.\" elif code == \"1500\":", "elif code == \"4001\": value = \"Unknow status {0}.\" elif", "value = \"Commit failed.\" elif code == \"2202\": value =", "value = \"Serial number is duplicate.\" elif code == \"2513\":", "== \"1160\": value = \"Response was invalid.\" elif code ==", "elif code == \"1150\": value = \"Request was invalid\" elif", "\"Tag number {0} is a location.\" elif code == \"2400\":", "elif code == \"3100\": value = \"Vendor {0} not found.\"", "has been shut down.\" elif code == \"1010\": value =", "not found.\" elif code == \"1503\": value = \"That export", "part was invalid.\" elif code == \"2100\": value = \"Was", "\"4000\": value = \"There was an error load PO {0}.\"", "properly formed.\" elif code == \"1501\": value = \"That import", "code == \"2101\": value = \"The product was invalid.\" elif", "code == \"1009\": value = \"Fishbowl Server has been shut", "helpful...\" elif code == \"1004\": value = \"There was an", "value = \"File could not be written to.\" elif code", "def getstatus(code): if code == \"1000\": value = \"Success!\" elif", "server by an administrator.\" elif code == \"1012\": value =", "\"Unknow status {0}.\" elif code == \"4002\": value = \"Unknown", "\"4002\": value = \"Unknown carrier {0}.\" elif code == \"4003\":", "code == \"1111\": value = \"This Integrated Application registration key", "elif code == \"2500\": value = \"The Tracking is not", "= \"Fishbowl Server has been shut 
down.\" elif code ==", "= \"Unknow status {0}.\" elif code == \"4002\": value =", "== \"1200\": value = \"Custom Field is invalid.\" elif code", "not properly formed.\" elif code == \"1501\": value = \"That", "invalid.\" elif code == \"1162\": value = \"The login limit", "\"The yield failed.\" elif code == \"2201\": value = \"Commit", "Tag number {0}.\" elif code == \"2301\": value = \"The", "\"Serial number is missing.\" elif code == \"2511\": value =", "code == \"3000\": value = \"Customer {0} not found.\" elif", "code == \"2203\": value = \"Can not adjust committed inventory.\"", "invalid.\" elif code == \"2302\": value = \"The tag move", "value = \"Integer UOM {0} cannot have non-integer quantity.\" elif", "\"1501\": value = \"That import type is not supported\" elif", "\"3101\": value = \"Vendor is invalid.\" elif code == \"4000\":", "Inventory. Please contact your Fishbowl Inventory Administrator to approve this", "registration key does not match.\" elif code == \"1112\": value", "is invalid.\" elif code == \"4000\": value = \"There was", "\"2600\": value = \"Location not found.\" elif code == \"2601\":", "value = \"Invalid Key value.\" elif code == \"1140\": value", "== \"1002\": value = \"Connection to Fishbowl Server was lost\"", "to Fishbowl Inventory. 
Please contact your Fishbowl Inventory Administrator to", "= \"UOM {0} not found.\" elif code == \"2402\": value", "not able to find the Product {0}.\" elif code ==", "Product {0}.\" elif code == \"2101\": value = \"The product", "not found.\" elif code == \"3101\": value = \"Vendor is", "\"2601\": value = \"Invalid location.\" elif code == \"2602\": value", "{0}.\" elif code == \"2304\": value = \"Not enough available", "code == \"1003\": value = \"Some Requests had errors --", "lost\" elif code == \"1003\": value = \"Some Requests had", "\"2304\": value = \"Not enough available inventory in Tagnumber {0}.\"", "code == \"2511\": value = \"Serial number is null.\" elif", "\"Custom Field is invalid.\" elif code == \"1500\": value =", "== \"2303\": value = \"Was not able to save Tag", "value = \"Vendor {0} not found.\" elif code == \"3101\":", "\"1110\": value = \"A new Integrated Application has been added", "function.\" elif code == \"1100\": value = \"Unknown login error", "== \"4003\": value = \"Unknown QuickBooks class {0}.\" elif code", "= \"That export type is not supported.\" elif code ==", "value = \"Serial number is not valid.\" elif code ==", "code == \"1150\": value = \"Request was invalid\" elif code", "-- now isn't that helpful...\" elif code == \"1004\": value", "the database.\" elif code == \"1009\": value = \"Fishbowl Server", "elif code == \"1160\": value = \"Response was invalid.\" elif", "\"1200\": value = \"Custom Field is invalid.\" elif code ==", "value = \"Unknown QuickBooks class {0}.\" elif code == \"4004\":", "carrier {0}.\" elif code == \"4003\": value = \"Unknown QuickBooks", "= \"The import data was of the wrong type.\" elif", "does not have a PO number. Please turn on the", "= \"Unknown login error occurred.\" elif code == \"1110\": value", "code == \"2001\": value = \"The part was invalid.\" elif", "Integrated Application has been added to Fishbowl Inventory. 
Please contact", "== \"2601\": value = \"Invalid location.\" elif code == \"2602\":", "\"The product was invalid.\" elif code == \"2200\": value =", "if code == \"1000\": value = \"Success!\" elif code ==", "code == \"2303\": value = \"Was not able to save", "\"1502\": value = \"File not found.\" elif code == \"1503\":", "type.\" elif code == \"1150\": value = \"Request was invalid\"", "== \"1112\": value = \"This Integrated Application has not been", "\"There was an error load PO {0}.\" elif code ==", "= \"There was an error with the database.\" elif code", "\"4003\": value = \"Unknown QuickBooks class {0}.\" elif code ==", "== \"2200\": value = \"The yield failed.\" elif code ==", "elif code == \"1131\": value = \"Invalid Key value.\" elif" ]
[ "#!/usr/bin/python # -*- coding: utf-8 -*- # [Import start] from", "[Import start] from flask import Blueprint, jsonify # [Import end]", "[Import end] app = Blueprint( 'hoge', __name__, url_prefix='/hoge' ) @app.route('/test')", "start] from flask import Blueprint, jsonify # [Import end] app", "# [Import end] app = Blueprint( 'hoge', __name__, url_prefix='/hoge' )", "app = Blueprint( 'hoge', __name__, url_prefix='/hoge' ) @app.route('/test') def hoge():", "Blueprint, jsonify # [Import end] app = Blueprint( 'hoge', __name__,", "-*- # [Import start] from flask import Blueprint, jsonify #", "flask import Blueprint, jsonify # [Import end] app = Blueprint(", "utf-8 -*- # [Import start] from flask import Blueprint, jsonify", "import Blueprint, jsonify # [Import end] app = Blueprint( 'hoge',", "from flask import Blueprint, jsonify # [Import end] app =", "end] app = Blueprint( 'hoge', __name__, url_prefix='/hoge' ) @app.route('/test') def", "# [Import start] from flask import Blueprint, jsonify # [Import", "# -*- coding: utf-8 -*- # [Import start] from flask", "coding: utf-8 -*- # [Import start] from flask import Blueprint,", "= Blueprint( 'hoge', __name__, url_prefix='/hoge' ) @app.route('/test') def hoge(): return", "-*- coding: utf-8 -*- # [Import start] from flask import", "jsonify # [Import end] app = Blueprint( 'hoge', __name__, url_prefix='/hoge'", "Blueprint( 'hoge', __name__, url_prefix='/hoge' ) @app.route('/test') def hoge(): return \"\\nhogehoge\"" ]
[ "pkgname = i pkgver = None package_string = pkgname #", "if relationship_operator is not None: if relationship_operator in [\"<\", \">\"]:", "except KeyError: missing_packages += [package_string] continue # Get the list", "if (len(installed_pkg_versions) != 0) and (relationship_operator is None): continue #", "installed_version = i.current_ver.ver_str version_result = version_compare(installed_version, pkgver) if relation_operators[relationship_operator_formatted](version_result, 0):", "installed_pkg_versions += [pkg] for i in pkg.provides_list: parent_pkg = i[2].parent_pkg", "# Build the package relationship string for use by 'apt-get", "False for i in installed_pkg_versions: installed_version = i.current_ver.ver_str version_result =", "i pkgver = None package_string = pkgname # Check if", "j else: relationship_operator_formatted = j package = i.split(relationship_operator) pkgname =", "Build the package relationship string for use by 'apt-get satisfy'.", "else: pkgname = i pkgver = None package_string = pkgname", "\"=\"]: if j in i: relationship_operator = j break if", "the package is in the cache. try: pkg = cache[pkgname]", "None): continue # Otherwise, check all matching installed packages and", "an installed package was found and no relationship operators were", "matching installed packages and see if any of them fit", "j in i: relationship_operator = j break if relationship_operator is", "for relationship operators. relation_operators = {\"<<\": lt, \"<=\": le, \"=\":", "if j in i: relationship_operator = j break if relationship_operator", "apt_pkg import CURSTATE_INSTALLED, version_compare from operator import lt, le, eq,", "for i in pkg.provides_list: parent_pkg = i[2].parent_pkg if parent_pkg.current_state ==", "and provided packages that are currently installed. 
installed_pkg_versions = []", "in installed_pkg_versions: installed_version = i.current_ver.ver_str version_result = version_compare(installed_version, pkgver) if", "pkgver = None package_string = pkgname # Check if the", "in the cache. try: pkg = cache[pkgname] except KeyError: missing_packages", "found and no relationship operators were used, the dependency has", "and (relationship_operator is None): continue # Otherwise, check all matching", "if relationship_operator in [\"<\", \">\"]: relationship_operator_formatted = j + j", "is in the cache. try: pkg = cache[pkgname] except KeyError:", "are currently installed. installed_pkg_versions = [] if pkg.current_state == CURSTATE_INSTALLED:", "== CURSTATE_INSTALLED: installed_pkg_versions += [parent_pkg] # If an installed package", "operators were used, the dependency has been satisfied. if (len(installed_pkg_versions)", "# Function mappings for relationship operators. relation_operators = {\"<<\": lt,", "None for j in [\"<=\", \">=\", \"<\", \">\", \"=\"]: if", "is not None: if relationship_operator in [\"<\", \">\"]: relationship_operator_formatted =", "of them fit the specified relationship operator. matched_pkg = False", "if pkg.current_state == CURSTATE_INSTALLED: installed_pkg_versions += [pkg] for i in", "for use by 'apt-get satisfy'. relationship_operator = None for j", "= None package_string = pkgname # Check if the package", "see if any of them fit the specified relationship operator.", "missing_packages += [package_string] continue # Get the list of installed", "gt # Function mappings for relationship operators. relation_operators = {\"<<\":", "= cache[pkgname] except KeyError: missing_packages += [package_string] continue # Get", "If an installed package was found and no relationship operators", "\">>\": gt} # Set up APT cache. 
apt_pkg.init() cache =", "\">\"]: relationship_operator_formatted = j + j else: relationship_operator_formatted = j", "if parent_pkg.current_state == CURSTATE_INSTALLED: installed_pkg_versions += [parent_pkg] # If an", "check all matching installed packages and see if any of", "[] for i in sys.argv[1:]: # Build the package relationship", "= j package = i.split(relationship_operator) pkgname = package[0] pkgver =", "package was found and no relationship operators were used, the", "for i in sys.argv[1:]: # Build the package relationship string", "and no relationship operators were used, the dependency has been", "installed package was found and no relationship operators were used,", "try: pkg = cache[pkgname] except KeyError: missing_packages += [package_string] continue", "(len(installed_pkg_versions) != 0) and (relationship_operator is None): continue # Otherwise,", "le, eq, ge, gt # Function mappings for relationship operators.", "break if relationship_operator is not None: if relationship_operator in [\"<\",", "if any of them fit the specified relationship operator. matched_pkg", "i.current_ver.ver_str version_result = version_compare(installed_version, pkgver) if relation_operators[relationship_operator_formatted](version_result, 0): matched_pkg =", "installed and provided packages that are currently installed. installed_pkg_versions =", "string for use by 'apt-get satisfy'. relationship_operator = None for", "lt, le, eq, ge, gt # Function mappings for relationship", "'apt-get satisfy'. relationship_operator = None for j in [\"<=\", \">=\",", "cache. 
try: pkg = cache[pkgname] except KeyError: missing_packages += [package_string]", "[\"<\", \">\"]: relationship_operator_formatted = j + j else: relationship_operator_formatted =", "# Get the list of installed and provided packages that", "# If an installed package was found and no relationship", "from operator import lt, le, eq, ge, gt # Function", "apt_pkg import sys from apt_pkg import CURSTATE_INSTALLED, version_compare from operator", "package_string = f\"{pkgname} ({relationship_operator_formatted} {pkgver})\" else: pkgname = i pkgver", "continue # Get the list of installed and provided packages", "version_compare from operator import lt, le, eq, ge, gt #", "!= 0) and (relationship_operator is None): continue # Otherwise, check", "for i in installed_pkg_versions: installed_version = i.current_ver.ver_str version_result = version_compare(installed_version,", "import lt, le, eq, ge, gt # Function mappings for", "Get the list of installed and provided packages that are", "import sys from apt_pkg import CURSTATE_INSTALLED, version_compare from operator import", "CURSTATE_INSTALLED, version_compare from operator import lt, le, eq, ge, gt", "the dependency has been satisfied. if (len(installed_pkg_versions) != 0) and", "= True if not matched_pkg: missing_packages += [package_string] for i", "no relationship operators were used, the dependency has been satisfied.", "Set up APT cache. apt_pkg.init() cache = apt_pkg.Cache(None) missing_packages =", "parent_pkg.current_state == CURSTATE_INSTALLED: installed_pkg_versions += [parent_pkg] # If an installed", "dependency has been satisfied. if (len(installed_pkg_versions) != 0) and (relationship_operator", "not matched_pkg: missing_packages += [package_string] for i in missing_packages: print(i)", "the cache. try: pkg = cache[pkgname] except KeyError: missing_packages +=", "for j in [\"<=\", \">=\", \"<\", \">\", \"=\"]: if j", "has been satisfied. 
if (len(installed_pkg_versions) != 0) and (relationship_operator is", "i[2].parent_pkg if parent_pkg.current_state == CURSTATE_INSTALLED: installed_pkg_versions += [parent_pkg] # If", "\"=\": eq, \">=\": ge, \">>\": gt} # Set up APT", "pkg.current_state == CURSTATE_INSTALLED: installed_pkg_versions += [pkg] for i in pkg.provides_list:", "0) and (relationship_operator is None): continue # Otherwise, check all", "in sys.argv[1:]: # Build the package relationship string for use", "= [] if pkg.current_state == CURSTATE_INSTALLED: installed_pkg_versions += [pkg] for", "packages and see if any of them fit the specified", "j in [\"<=\", \">=\", \"<\", \">\", \"=\"]: if j in", "from apt_pkg import CURSTATE_INSTALLED, version_compare from operator import lt, le,", "apt_pkg.Cache(None) missing_packages = [] for i in sys.argv[1:]: # Build", "pkgname = package[0] pkgver = package[1] package_string = f\"{pkgname} ({relationship_operator_formatted}", "currently installed. installed_pkg_versions = [] if pkg.current_state == CURSTATE_INSTALLED: installed_pkg_versions", "relationship_operator = j break if relationship_operator is not None: if", "import CURSTATE_INSTALLED, version_compare from operator import lt, le, eq, ge,", "eq, \">=\": ge, \">>\": gt} # Set up APT cache.", "sys.argv[1:]: # Build the package relationship string for use by", "\"<\", \">\", \"=\"]: if j in i: relationship_operator = j", "= version_compare(installed_version, pkgver) if relation_operators[relationship_operator_formatted](version_result, 0): matched_pkg = True if", "installed_pkg_versions = [] if pkg.current_state == CURSTATE_INSTALLED: installed_pkg_versions += [pkg]", "relationship operators were used, the dependency has been satisfied. 
if", "relationship_operator = None for j in [\"<=\", \">=\", \"<\", \">\",", "in [\"<=\", \">=\", \"<\", \">\", \"=\"]: if j in i:", "python3 import apt_pkg import sys from apt_pkg import CURSTATE_INSTALLED, version_compare", "i in sys.argv[1:]: # Build the package relationship string for", "missing_packages = [] for i in sys.argv[1:]: # Build the", "CURSTATE_INSTALLED: installed_pkg_versions += [pkg] for i in pkg.provides_list: parent_pkg =", "been satisfied. if (len(installed_pkg_versions) != 0) and (relationship_operator is None):", "mappings for relationship operators. relation_operators = {\"<<\": lt, \"<=\": le,", "f\"{pkgname} ({relationship_operator_formatted} {pkgver})\" else: pkgname = i pkgver = None", "of installed and provided packages that are currently installed. installed_pkg_versions", "= f\"{pkgname} ({relationship_operator_formatted} {pkgver})\" else: pkgname = i pkgver =", "eq, ge, gt # Function mappings for relationship operators. relation_operators", "[pkg] for i in pkg.provides_list: parent_pkg = i[2].parent_pkg if parent_pkg.current_state", "installed_pkg_versions: installed_version = i.current_ver.ver_str version_result = version_compare(installed_version, pkgver) if relation_operators[relationship_operator_formatted](version_result,", "\">\", \"=\"]: if j in i: relationship_operator = j break", "the list of installed and provided packages that are currently", "and see if any of them fit the specified relationship", "relation_operators[relationship_operator_formatted](version_result, 0): matched_pkg = True if not matched_pkg: missing_packages +=", "package = i.split(relationship_operator) pkgname = package[0] pkgver = package[1] package_string", "import apt_pkg import sys from apt_pkg import CURSTATE_INSTALLED, version_compare from", "continue # Otherwise, check all matching installed packages and see", "lt, \"<=\": le, \"=\": eq, \">=\": ge, \">>\": gt} #", "j package = i.split(relationship_operator) pkgname = package[0] pkgver = 
package[1]", "pkgname # Check if the package is in the cache.", "{\"<<\": lt, \"<=\": le, \"=\": eq, \">=\": ge, \">>\": gt}", "j break if relationship_operator is not None: if relationship_operator in", "else: relationship_operator_formatted = j package = i.split(relationship_operator) pkgname = package[0]", "# Otherwise, check all matching installed packages and see if", "the specified relationship operator. matched_pkg = False for i in", "installed. installed_pkg_versions = [] if pkg.current_state == CURSTATE_INSTALLED: installed_pkg_versions +=", "was found and no relationship operators were used, the dependency", "parent_pkg = i[2].parent_pkg if parent_pkg.current_state == CURSTATE_INSTALLED: installed_pkg_versions += [parent_pkg]", "= package[1] package_string = f\"{pkgname} ({relationship_operator_formatted} {pkgver})\" else: pkgname =", "None package_string = pkgname # Check if the package is", "\">=\": ge, \">>\": gt} # Set up APT cache. apt_pkg.init()", "package is in the cache. try: pkg = cache[pkgname] except", "sys from apt_pkg import CURSTATE_INSTALLED, version_compare from operator import lt,", "= None for j in [\"<=\", \">=\", \"<\", \">\", \"=\"]:", "0): matched_pkg = True if not matched_pkg: missing_packages += [package_string]", "package relationship string for use by 'apt-get satisfy'. relationship_operator =", "= package[0] pkgver = package[1] package_string = f\"{pkgname} ({relationship_operator_formatted} {pkgver})\"", "= [] for i in sys.argv[1:]: # Build the package", "Check if the package is in the cache. try: pkg", "if the package is in the cache. 
try: pkg =", "version_compare(installed_version, pkgver) if relation_operators[relationship_operator_formatted](version_result, 0): matched_pkg = True if not", "= i.current_ver.ver_str version_result = version_compare(installed_version, pkgver) if relation_operators[relationship_operator_formatted](version_result, 0): matched_pkg", "i in installed_pkg_versions: installed_version = i.current_ver.ver_str version_result = version_compare(installed_version, pkgver)", "matched_pkg = True if not matched_pkg: missing_packages += [package_string] for", "in pkg.provides_list: parent_pkg = i[2].parent_pkg if parent_pkg.current_state == CURSTATE_INSTALLED: installed_pkg_versions", "not None: if relationship_operator in [\"<\", \">\"]: relationship_operator_formatted = j", "that are currently installed. installed_pkg_versions = [] if pkg.current_state ==", "#!/usr/bin/env python3 import apt_pkg import sys from apt_pkg import CURSTATE_INSTALLED,", "operators. relation_operators = {\"<<\": lt, \"<=\": le, \"=\": eq, \">=\":", "= i.split(relationship_operator) pkgname = package[0] pkgver = package[1] package_string =", "installed_pkg_versions += [parent_pkg] # If an installed package was found", "({relationship_operator_formatted} {pkgver})\" else: pkgname = i pkgver = None package_string", "pkg.provides_list: parent_pkg = i[2].parent_pkg if parent_pkg.current_state == CURSTATE_INSTALLED: installed_pkg_versions +=", "gt} # Set up APT cache. apt_pkg.init() cache = apt_pkg.Cache(None)", "i: relationship_operator = j break if relationship_operator is not None:", "operator import lt, le, eq, ge, gt # Function mappings", "CURSTATE_INSTALLED: installed_pkg_versions += [parent_pkg] # If an installed package was", "installed packages and see if any of them fit the", "fit the specified relationship operator. matched_pkg = False for i", "operator. 
matched_pkg = False for i in installed_pkg_versions: installed_version =", "+= [package_string] continue # Get the list of installed and", "pkg = cache[pkgname] except KeyError: missing_packages += [package_string] continue #", "i in pkg.provides_list: parent_pkg = i[2].parent_pkg if parent_pkg.current_state == CURSTATE_INSTALLED:", "if relation_operators[relationship_operator_formatted](version_result, 0): matched_pkg = True if not matched_pkg: missing_packages", "\">=\", \"<\", \">\", \"=\"]: if j in i: relationship_operator =", "pkgver = package[1] package_string = f\"{pkgname} ({relationship_operator_formatted} {pkgver})\" else: pkgname", "package[1] package_string = f\"{pkgname} ({relationship_operator_formatted} {pkgver})\" else: pkgname = i", "None: if relationship_operator in [\"<\", \">\"]: relationship_operator_formatted = j +", "relationship operator. matched_pkg = False for i in installed_pkg_versions: installed_version", "use by 'apt-get satisfy'. relationship_operator = None for j in", "pkgver) if relation_operators[relationship_operator_formatted](version_result, 0): matched_pkg = True if not matched_pkg:", "= {\"<<\": lt, \"<=\": le, \"=\": eq, \">=\": ge, \">>\":", "i.split(relationship_operator) pkgname = package[0] pkgver = package[1] package_string = f\"{pkgname}", "satisfy'. relationship_operator = None for j in [\"<=\", \">=\", \"<\",", "used, the dependency has been satisfied. if (len(installed_pkg_versions) != 0)", "\"<=\": le, \"=\": eq, \">=\": ge, \">>\": gt} # Set", "relationship_operator is not None: if relationship_operator in [\"<\", \">\"]: relationship_operator_formatted", "{pkgver})\" else: pkgname = i pkgver = None package_string =", "= i[2].parent_pkg if parent_pkg.current_state == CURSTATE_INSTALLED: installed_pkg_versions += [parent_pkg] #", "is None): continue # Otherwise, check all matching installed packages", "= j + j else: relationship_operator_formatted = j package =", "cache. 
apt_pkg.init() cache = apt_pkg.Cache(None) missing_packages = [] for i", "the package relationship string for use by 'apt-get satisfy'. relationship_operator", "[\"<=\", \">=\", \"<\", \">\", \"=\"]: if j in i: relationship_operator", "+= [pkg] for i in pkg.provides_list: parent_pkg = i[2].parent_pkg if", "all matching installed packages and see if any of them", "[package_string] continue # Get the list of installed and provided", "== CURSTATE_INSTALLED: installed_pkg_versions += [pkg] for i in pkg.provides_list: parent_pkg", "relationship operators. relation_operators = {\"<<\": lt, \"<=\": le, \"=\": eq,", "ge, \">>\": gt} # Set up APT cache. apt_pkg.init() cache", "packages that are currently installed. installed_pkg_versions = [] if pkg.current_state", "= False for i in installed_pkg_versions: installed_version = i.current_ver.ver_str version_result", "KeyError: missing_packages += [package_string] continue # Get the list of", "= j break if relationship_operator is not None: if relationship_operator", "+ j else: relationship_operator_formatted = j package = i.split(relationship_operator) pkgname", "relation_operators = {\"<<\": lt, \"<=\": le, \"=\": eq, \">=\": ge,", "relationship_operator in [\"<\", \">\"]: relationship_operator_formatted = j + j else:", "version_result = version_compare(installed_version, pkgver) if relation_operators[relationship_operator_formatted](version_result, 0): matched_pkg = True", "le, \"=\": eq, \">=\": ge, \">>\": gt} # Set up", "were used, the dependency has been satisfied. if (len(installed_pkg_versions) !=", "Otherwise, check all matching installed packages and see if any", "any of them fit the specified relationship operator. matched_pkg =", "Function mappings for relationship operators. 
relation_operators = {\"<<\": lt, \"<=\":", "+= [parent_pkg] # If an installed package was found and", "= i pkgver = None package_string = pkgname # Check", "[] if pkg.current_state == CURSTATE_INSTALLED: installed_pkg_versions += [pkg] for i", "relationship_operator_formatted = j + j else: relationship_operator_formatted = j package", "relationship_operator_formatted = j package = i.split(relationship_operator) pkgname = package[0] pkgver", "True if not matched_pkg: missing_packages += [package_string] for i in", "provided packages that are currently installed. installed_pkg_versions = [] if", "by 'apt-get satisfy'. relationship_operator = None for j in [\"<=\",", "satisfied. if (len(installed_pkg_versions) != 0) and (relationship_operator is None): continue", "ge, gt # Function mappings for relationship operators. relation_operators =", "j + j else: relationship_operator_formatted = j package = i.split(relationship_operator)", "[parent_pkg] # If an installed package was found and no", "in i: relationship_operator = j break if relationship_operator is not", "matched_pkg = False for i in installed_pkg_versions: installed_version = i.current_ver.ver_str", "apt_pkg.init() cache = apt_pkg.Cache(None) missing_packages = [] for i in", "package_string = pkgname # Check if the package is in", "them fit the specified relationship operator. matched_pkg = False for", "matched_pkg: missing_packages += [package_string] for i in missing_packages: print(i) exit(0)", "= apt_pkg.Cache(None) missing_packages = [] for i in sys.argv[1:]: #", "if not matched_pkg: missing_packages += [package_string] for i in missing_packages:", "up APT cache. 
apt_pkg.init() cache = apt_pkg.Cache(None) missing_packages = []", "list of installed and provided packages that are currently installed.", "(relationship_operator is None): continue # Otherwise, check all matching installed", "cache[pkgname] except KeyError: missing_packages += [package_string] continue # Get the", "in [\"<\", \">\"]: relationship_operator_formatted = j + j else: relationship_operator_formatted", "package[0] pkgver = package[1] package_string = f\"{pkgname} ({relationship_operator_formatted} {pkgver})\" else:", "APT cache. apt_pkg.init() cache = apt_pkg.Cache(None) missing_packages = [] for", "# Set up APT cache. apt_pkg.init() cache = apt_pkg.Cache(None) missing_packages", "= pkgname # Check if the package is in the", "specified relationship operator. matched_pkg = False for i in installed_pkg_versions:", "relationship string for use by 'apt-get satisfy'. relationship_operator = None", "cache = apt_pkg.Cache(None) missing_packages = [] for i in sys.argv[1:]:", "# Check if the package is in the cache. try:" ]
[ "name='Zn_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='urine_specific_gravity', field=models.FloatField(blank=True, null=True), ),", "model_name='rawdar', name='Zn', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Zn_IDL', field=models.FloatField(blank=True, null=True),", "name='Cr_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Cu', field=models.FloatField(blank=True, null=True), ),", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Sb', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "migrations.AlterField( model_name='rawdar', name='W', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='W_IDL', field=models.FloatField(blank=True,", "migrations.AlterField( model_name='rawdar', name='Mo_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Ni', field=models.FloatField(blank=True,", "null=True), ), migrations.AlterField( model_name='rawdar', name='U', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "class Migration(migrations.Migration): dependencies = [ ('datasets', '0008_auto_20200821_1427'), ] operations =", "model_name='rawdar', name='Ag_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Al', field=models.FloatField(blank=True, null=True),", "model_name='rawdar', name='W_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Zn', field=models.FloatField(blank=True, null=True),", "name='iAs', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='iAs_BDL', field=models.CharField(choices=[('1', 'below detection", 
"migrations.AlterField( model_name='rawdar', name='Fe', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Fe_IDL', field=models.FloatField(blank=True,", "name='Sb', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Sb_IDL', field=models.FloatField(blank=True, null=True), ),", "max_length=3), preserve_default=False, ), migrations.AddField( model_name='rawdar', name='AsB_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField(", "models class Migration(migrations.Migration): dependencies = [ ('datasets', '0008_auto_20200821_1427'), ] operations", "preserve_default=False, ), migrations.AddField( model_name='rawdar', name='iAs_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "migrations.AddField( model_name='rawdar', name='iAs_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Ag', field=models.FloatField(blank=True,", "model_name='rawdar', name='AsB', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='AsB_BDL', field=models.CharField(choices=[('1', 'below", "null=True), ), migrations.AlterField( model_name='rawdar', name='Zn_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "), migrations.AlterField( model_name='rawdar', name='Ag', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Ag_IDL',", "null=True), ), migrations.AlterField( model_name='rawdar', name='Hg', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Tl_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "[ ('datasets', '0008_auto_20200821_1427'), ] 
operations = [ migrations.AddField( model_name='rawdar', name='AsB',", "), migrations.AddField( model_name='rawdar', name='Sr', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='Sr_BDL',", "model_name='rawdar', name='Ba', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='Ba_BDL', field=models.CharField(choices=[('1', 'below", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Al_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "('0', 'above detection level'), ('nan', 'invalid')], default=0, max_length=3), preserve_default=False, ),", "null=True), ), migrations.AddField( model_name='rawdar', name='Ba_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0',", "migrations.AlterField( model_name='rawdar', name='Sn', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Sn_IDL', field=models.FloatField(blank=True,", "'invalid')], default=0, max_length=3), preserve_default=False, ), migrations.AddField( model_name='rawdar', name='Cs_IDL', field=models.FloatField(blank=True, null=True),", "migrations.AddField( model_name='rawdar', name='Ba', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='Ba_BDL', field=models.CharField(choices=[('1',", "null=True), ), migrations.AlterField( model_name='rawdar', name='Fe', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='Sr', field=models.FloatField(blank=True, null=True), ), migrations.AddField(", "null=True), ), migrations.AlterField( model_name='rawdar', name='Tl_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "migrations.AlterField( 
model_name='rawdar', name='Cr', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Cr_IDL', field=models.FloatField(blank=True,", "name='Ba', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='Ba_BDL', field=models.CharField(choices=[('1', 'below detection", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Be_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "migrations.AddField( model_name='rawdar', name='AsB', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='AsB_BDL', field=models.CharField(choices=[('1',", "), migrations.AlterField( model_name='rawdar', name='Hg', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Hg_IDL',", "model_name='rawdar', name='U', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='U_IDL', field=models.FloatField(blank=True, null=True),", "), migrations.AddField( model_name='rawdar', name='AsB_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above", "name='Se_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Sn', field=models.FloatField(blank=True, null=True), ),", "] operations = [ migrations.AddField( model_name='rawdar', name='AsB', field=models.FloatField(blank=True, null=True), ),", "), migrations.AlterField( model_name='rawdar', name='Zn', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Zn_IDL',", "model_name='rawdar', name='Ni_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Pb', field=models.FloatField(blank=True, null=True),", "migrations.AlterField( model_name='rawdar', name='Zn', 
field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Zn_IDL', field=models.FloatField(blank=True,", "name='AsB_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'), ('nan',", "name='Se', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Se_IDL', field=models.FloatField(blank=True, null=True), ),", "field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='Ba_BDL', field=models.CharField(choices=[('1', 'below detection level'),", "'invalid')], default=0, max_length=3), preserve_default=False, ), migrations.AddField( model_name='rawdar', name='DMA_IDL', field=models.FloatField(blank=True, null=True),", "), migrations.AddField( model_name='rawdar', name='Ba', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='Ba_BDL',", "migrations.AlterField( model_name='rawdar', name='Ag', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Ag_IDL', field=models.FloatField(blank=True,", "null=True), ), migrations.AlterField( model_name='rawdar', name='Ni', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Cr', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "model_name='rawdar', name='Cs', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='Cs_BDL', field=models.CharField(choices=[('1', 'below", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Tl', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "name='As', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', 
name='As_IDL', field=models.FloatField(blank=True, null=True), ),", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Co', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "model_name='rawdar', name='Sb', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Sb_IDL', field=models.FloatField(blank=True, null=True),", "name='Tl_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='U', field=models.FloatField(blank=True, null=True), ),", "), migrations.AlterField( model_name='rawdar', name='W_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Zn',", "field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='Ba', field=models.FloatField(blank=True, null=True), ), migrations.AddField(", "null=True), ), migrations.AlterField( model_name='rawdar', name='Cd_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "), migrations.AlterField( model_name='rawdar', name='Cr', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Cr_IDL',", "null=True), ), migrations.AlterField( model_name='rawdar', name='Fe_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "migrations.AlterField( model_name='rawdar', name='Cu_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Fe', field=models.FloatField(blank=True,", "null=True), ), migrations.AlterField( model_name='rawdar', name='Mn_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "migrations.AlterField( model_name='rawdar', name='Be', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( 
model_name='rawdar', name='Be_IDL', field=models.FloatField(blank=True,", "), migrations.AlterField( model_name='rawdar', name='Se_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Sn',", "model_name='rawdar', name='Cu_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Fe', field=models.FloatField(blank=True, null=True),", "migrations.AlterField( model_name='rawdar', name='Se', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Se_IDL', field=models.FloatField(blank=True,", "), migrations.AlterField( model_name='rawdar', name='Mn_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Mo',", "migrations.AlterField( model_name='rawdar', name='Cd_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Co', field=models.FloatField(blank=True,", "default=0, max_length=3), preserve_default=False, ), migrations.AddField( model_name='rawdar', name='MMA_IDL', field=models.FloatField(blank=True, null=True), ),", "), migrations.AddField( model_name='rawdar', name='Sr_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above", "null=True), ), migrations.AddField( model_name='rawdar', name='MMA_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0',", "preserve_default=False, ), migrations.AddField( model_name='rawdar', name='Ba_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar',", "name='Pb', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Pb_IDL', field=models.FloatField(blank=True, null=True), ),", "), migrations.AlterField( model_name='rawdar', name='Be_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Cd',", 
"model_name='rawdar', name='V_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='W', field=models.FloatField(blank=True, null=True),", "model_name='rawdar', name='AsB_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'),", "field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='MMA', field=models.FloatField(blank=True, null=True), ), migrations.AddField(", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Ni', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "model_name='rawdar', name='Tl', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Tl_IDL', field=models.FloatField(blank=True, null=True),", "'below detection level'), ('0', 'above detection level'), ('nan', 'invalid')], default=0,", "), migrations.AlterField( model_name='rawdar', name='Be', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Be_IDL',", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Sb_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "migrations.AddField( model_name='rawdar', name='Sr_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Ni_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "null=True), ), migrations.AlterField( model_name='rawdar', name='Sn_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "model_name='rawdar', name='Ba_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='Cs', 
field=models.FloatField(blank=True, null=True),", "), migrations.AlterField( model_name='rawdar', name='Sb_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Se',", "name='MMA', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='MMA_BDL', field=models.CharField(choices=[('1', 'below detection", "preserve_default=False, ), migrations.AddField( model_name='rawdar', name='DMA_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar',", "migrations.AlterField( model_name='rawdar', name='Mo', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Mo_IDL', field=models.FloatField(blank=True,", "default=0, max_length=3), preserve_default=False, ), migrations.AddField( model_name='rawdar', name='Ba_IDL', field=models.FloatField(blank=True, null=True), ),", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Mo_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "= [ ('datasets', '0008_auto_20200821_1427'), ] operations = [ migrations.AddField( model_name='rawdar',", "), migrations.AlterField( model_name='rawdar', name='Cd', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Cd_IDL',", "field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='DMA', field=models.FloatField(blank=True, null=True), ), migrations.AddField(", "name='Cr', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Cr_IDL', field=models.FloatField(blank=True, null=True), ),", "name='Sn', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Sn_IDL', field=models.FloatField(blank=True, null=True), ),", "model_name='rawdar', name='Fe_IDL', 
field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Hg', field=models.FloatField(blank=True, null=True),", "model_name='rawdar', name='Fe', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Fe_IDL', field=models.FloatField(blank=True, null=True),", "name='Cu_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Fe', field=models.FloatField(blank=True, null=True), ),", "), migrations.AlterField( model_name='rawdar', name='As', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='As_IDL',", "model_name='rawdar', name='Pb_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Sb', field=models.FloatField(blank=True, null=True),", "null=True), ), migrations.AddField( model_name='rawdar', name='MMA', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar',", "model_name='rawdar', name='U_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='V', field=models.FloatField(blank=True, null=True),", "), migrations.AlterField( model_name='rawdar', name='Mn', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Mn_IDL',", "model_name='rawdar', name='Cs_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'),", "name='DMA', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='DMA_BDL', field=models.CharField(choices=[('1', 'below detection", "), migrations.AddField( model_name='rawdar', name='MMA', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='MMA_BDL',", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', 
name='Zn', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "level'), ('nan', 'invalid')], default=0, max_length=3), preserve_default=False, ), migrations.AddField( model_name='rawdar', name='Ba_IDL',", "name='Co', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Co_IDL', field=models.FloatField(blank=True, null=True), ),", "migrations.AddField( model_name='rawdar', name='DMA', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='DMA_BDL', field=models.CharField(choices=[('1',", "'0008_auto_20200821_1427'), ] operations = [ migrations.AddField( model_name='rawdar', name='AsB', field=models.FloatField(blank=True, null=True),", "null=True), ), migrations.AlterField( model_name='rawdar', name='Se', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "), migrations.AddField( model_name='rawdar', name='MMA_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above", "model_name='rawdar', name='As_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Be', field=models.FloatField(blank=True, null=True),", "max_length=3), preserve_default=False, ), migrations.AddField( model_name='rawdar', name='Cs_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField(", "name='Al_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='As', field=models.FloatField(blank=True, null=True), ),", "model_name='rawdar', name='Se', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Se_IDL', field=models.FloatField(blank=True, null=True),", "model_name='rawdar', name='Sr_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='iAs', field=models.FloatField(blank=True, null=True),", 
"max_length=3), preserve_default=False, ), migrations.AddField( model_name='rawdar', name='iAs_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "operations = [ migrations.AddField( model_name='rawdar', name='AsB', field=models.FloatField(blank=True, null=True), ), migrations.AddField(", "migrations.AlterField( model_name='rawdar', name='Ni_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Pb', field=models.FloatField(blank=True,", "preserve_default=False, ), migrations.AddField( model_name='rawdar', name='MMA_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar',", "2020-08-24 06:17 from django.db import migrations, models class Migration(migrations.Migration): dependencies", "name='Sr', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='Sr_BDL', field=models.CharField(choices=[('1', 'below detection", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Cr_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "model_name='rawdar', name='DMA', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='DMA_BDL', field=models.CharField(choices=[('1', 'below", "name='Hg', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Hg_IDL', field=models.FloatField(blank=True, null=True), ),", "model_name='rawdar', name='Ag', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Ag_IDL', field=models.FloatField(blank=True, null=True),", "name='U_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='V', field=models.FloatField(blank=True, null=True), ),", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( 
model_name='rawdar', name='Sn_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "), migrations.AddField( model_name='rawdar', name='Cs', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='Cs_BDL',", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Hg_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "'above detection level'), ('nan', 'invalid')], default=0, max_length=3), preserve_default=False, ), migrations.AddField(", "max_length=3), preserve_default=False, ), migrations.AddField( model_name='rawdar', name='DMA_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField(", "name='Tl', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Tl_IDL', field=models.FloatField(blank=True, null=True), ),", "), migrations.AddField( model_name='rawdar', name='Cs_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above", "model_name='rawdar', name='Sn_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Tl', field=models.FloatField(blank=True, null=True),", "migrations.AddField( model_name='rawdar', name='Sr_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='iAs', field=models.FloatField(blank=True,", "name='Cs_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'), ('nan',", "), migrations.AddField( model_name='rawdar', name='DMA', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='DMA_BDL',", "), migrations.AddField( model_name='rawdar', name='MMA_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='Sr',", "null=True), ), migrations.AddField( model_name='rawdar', 
name='Sr', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar',", "), migrations.AlterField( model_name='rawdar', name='Sn', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Sn_IDL',", "field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'), ('nan', 'invalid')],", "model_name='rawdar', name='W', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='W_IDL', field=models.FloatField(blank=True, null=True),", "preserve_default=False, ), migrations.AddField( model_name='rawdar', name='Cs_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar',", "Django 3.0.7 on 2020-08-24 06:17 from django.db import migrations, models", "'invalid')], default=0, max_length=3), preserve_default=False, ), migrations.AddField( model_name='rawdar', name='MMA_IDL', field=models.FloatField(blank=True, null=True),", "), migrations.AlterField( model_name='rawdar', name='U', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='U_IDL',", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Al', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "model_name='rawdar', name='Mo', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Mo_IDL', field=models.FloatField(blank=True, null=True),", "model_name='rawdar', name='Tl_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='U', field=models.FloatField(blank=True, null=True),", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='W_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "level'), ('nan', 'invalid')], default=0, max_length=3), 
preserve_default=False, ), migrations.AddField( model_name='rawdar', name='MMA_IDL',", "), migrations.AlterField( model_name='rawdar', name='Al', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Al_IDL',", "max_length=3), preserve_default=False, ), migrations.AddField( model_name='rawdar', name='Ba_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField(", "model_name='rawdar', name='Pb', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Pb_IDL', field=models.FloatField(blank=True, null=True),", "preserve_default=False, ), migrations.AddField( model_name='rawdar', name='Sr_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar',", "migrations.AlterField( model_name='rawdar', name='Se_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Sn', field=models.FloatField(blank=True,", "name='Ni_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Pb', field=models.FloatField(blank=True, null=True), ),", "model_name='rawdar', name='Cu', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Cu_IDL', field=models.FloatField(blank=True, null=True),", "name='Cd_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Co', field=models.FloatField(blank=True, null=True), ),", "name='Ag_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Al', field=models.FloatField(blank=True, null=True), ),", "migrations.AlterField( model_name='rawdar', name='As', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='As_IDL', field=models.FloatField(blank=True,", "name='Fe', field=models.FloatField(blank=True, null=True), ), 
migrations.AlterField( model_name='rawdar', name='Fe_IDL', field=models.FloatField(blank=True, null=True), ),", "field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='Sr_BDL', field=models.CharField(choices=[('1', 'below detection level'),", "# Generated by Django 3.0.7 on 2020-08-24 06:17 from django.db", "), migrations.AlterField( model_name='rawdar', name='Cu', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Cu_IDL',", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='U', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "model_name='rawdar', name='Se_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Sn', field=models.FloatField(blank=True, null=True),", "name='V_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='W', field=models.FloatField(blank=True, null=True), ),", "level'), ('nan', 'invalid')], default=0, max_length=3), preserve_default=False, ), migrations.AddField( model_name='rawdar', name='Sr_IDL',", "field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='Cs', field=models.FloatField(blank=True, null=True), ), migrations.AddField(", "), migrations.AlterField( model_name='rawdar', name='V_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='W',", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Se_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "'invalid')], default=0, max_length=3), preserve_default=False, ), migrations.AddField( model_name='rawdar', name='iAs_IDL', field=models.FloatField(blank=True, null=True),", "null=True), ), migrations.AlterField( model_name='rawdar', name='Pb', 
field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "name='iAs_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Ag', field=models.FloatField(blank=True, null=True), ),", "migrations.AlterField( model_name='rawdar', name='Al', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Al_IDL', field=models.FloatField(blank=True,", "migrations.AddField( model_name='rawdar', name='Cs_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='DMA', field=models.FloatField(blank=True,", "migrations.AlterField( model_name='rawdar', name='Hg', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Hg_IDL', field=models.FloatField(blank=True,", "name='Cd', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Cd_IDL', field=models.FloatField(blank=True, null=True), ),", "model_name='rawdar', name='Hg_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Mn', field=models.FloatField(blank=True, null=True),", "model_name='rawdar', name='Cr', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Cr_IDL', field=models.FloatField(blank=True, null=True),", "null=True), ), migrations.AlterField( model_name='rawdar', name='Co_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "name='DMA_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'), ('nan',", "null=True), ), migrations.AlterField( model_name='rawdar', name='Cu', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( 
model_name='rawdar', name='Cu_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "migrations.AlterField( model_name='rawdar', name='Ag_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Al', field=models.FloatField(blank=True,", "name='Fe_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Hg', field=models.FloatField(blank=True, null=True), ),", "model_name='rawdar', name='Mn_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Mo', field=models.FloatField(blank=True, null=True),", "null=True), ), migrations.AlterField( model_name='rawdar', name='Mo', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "migrations.AlterField( model_name='rawdar', name='Co_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Cr', field=models.FloatField(blank=True,", "null=True), ), migrations.AlterField( model_name='rawdar', name='Sb', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "null=True), ), migrations.AlterField( model_name='rawdar', name='Mo_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "name='Hg_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Mn', field=models.FloatField(blank=True, null=True), ),", "name='Sr_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='iAs', field=models.FloatField(blank=True, null=True), ),", "null=True), ), migrations.AlterField( model_name='rawdar', name='U_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "'invalid')], default=0, max_length=3), preserve_default=False, ), 
migrations.AddField( model_name='rawdar', name='Sr_IDL', field=models.FloatField(blank=True, null=True),", "migrations.AddField( model_name='rawdar', name='AsB_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Pb', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "migrations.AddField( model_name='rawdar', name='iAs', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='iAs_BDL', field=models.CharField(choices=[('1',", "('nan', 'invalid')], default=0, max_length=3), preserve_default=False, ), migrations.AddField( model_name='rawdar', name='MMA_IDL', field=models.FloatField(blank=True,", "06:17 from django.db import migrations, models class Migration(migrations.Migration): dependencies =", "name='Co_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Cr', field=models.FloatField(blank=True, null=True), ),", "null=True), ), migrations.AlterField( model_name='rawdar', name='V', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "), migrations.AddField( model_name='rawdar', name='AsB_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='Ba',", "model_name='rawdar', name='Cd_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Co', field=models.FloatField(blank=True, null=True),", "), migrations.AlterField( model_name='rawdar', name='Pb', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Pb_IDL',", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='V', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "), migrations.AlterField( 
model_name='rawdar', name='Zn_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='urine_specific_gravity',", "null=True), ), migrations.AlterField( model_name='rawdar', name='Cr', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "name='Ba_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'), ('nan',", "name='W_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Zn', field=models.FloatField(blank=True, null=True), ),", "name='Mn', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Mn_IDL', field=models.FloatField(blank=True, null=True), ),", "name='Be_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Cd', field=models.FloatField(blank=True, null=True), ),", "null=True), ), migrations.AddField( model_name='rawdar', name='iAs_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0',", "migrations.AddField( model_name='rawdar', name='DMA_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection", "model_name='rawdar', name='MMA_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='Sr', field=models.FloatField(blank=True, null=True),", "default=0, max_length=3), preserve_default=False, ), migrations.AddField( model_name='rawdar', name='DMA_IDL', field=models.FloatField(blank=True, null=True), ),", "migrations.AlterField( model_name='rawdar', name='U_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='V', field=models.FloatField(blank=True,", "migrations.AddField( model_name='rawdar', name='DMA_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='MMA', 
field=models.FloatField(blank=True,", "migrations.AlterField( model_name='rawdar', name='V_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='W', field=models.FloatField(blank=True,", "migrations, models class Migration(migrations.Migration): dependencies = [ ('datasets', '0008_auto_20200821_1427'), ]", "migrations.AlterField( model_name='rawdar', name='Zn_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='urine_specific_gravity', field=models.FloatField(blank=True,", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Mo', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "), migrations.AlterField( model_name='rawdar', name='Ni_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Pb',", "), migrations.AlterField( model_name='rawdar', name='Se', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Se_IDL',", "('datasets', '0008_auto_20200821_1427'), ] operations = [ migrations.AddField( model_name='rawdar', name='AsB', field=models.FloatField(blank=True,", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='U_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='AsB_BDL', field=models.CharField(choices=[('1', 'below detection level'),", "migrations.AddField( model_name='rawdar', name='Ba_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='Cs', field=models.FloatField(blank=True,", "name='Ni', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Ni_IDL', field=models.FloatField(blank=True, null=True), ),", 
"null=True), ), migrations.AddField( model_name='rawdar', name='Sr_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0',", "by Django 3.0.7 on 2020-08-24 06:17 from django.db import migrations,", "name='Mo', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Mo_IDL', field=models.FloatField(blank=True, null=True), ),", "name='Mo_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Ni', field=models.FloatField(blank=True, null=True), ),", "name='Cu', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Cu_IDL', field=models.FloatField(blank=True, null=True), ),", "), migrations.AlterField( model_name='rawdar', name='Sb', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Sb_IDL',", "level'), ('0', 'above detection level'), ('nan', 'invalid')], default=0, max_length=3), preserve_default=False,", "), migrations.AlterField( model_name='rawdar', name='U_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='V',", "default=0, max_length=3), preserve_default=False, ), migrations.AddField( model_name='rawdar', name='iAs_IDL', field=models.FloatField(blank=True, null=True), ),", "), migrations.AlterField( model_name='rawdar', name='Mo_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Ni',", "model_name='rawdar', name='Mo_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Ni', field=models.FloatField(blank=True, null=True),", "migrations.AlterField( model_name='rawdar', name='U', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='U_IDL', field=models.FloatField(blank=True,", "), migrations.AlterField( model_name='rawdar', name='Hg_IDL', 
field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Mn',", "migrations.AddField( model_name='rawdar', name='MMA_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Cd_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='V_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "model_name='rawdar', name='Sb_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Se', field=models.FloatField(blank=True, null=True),", "), migrations.AlterField( model_name='rawdar', name='Cd_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Co',", "null=True), ), migrations.AddField( model_name='rawdar', name='Cs_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0',", "model_name='rawdar', name='Co_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Cr', field=models.FloatField(blank=True, null=True),", "name='Cs', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='Cs_BDL', field=models.CharField(choices=[('1', 'below detection", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='As', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "null=True), ), migrations.AlterField( model_name='rawdar', name='Tl', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "migrations.AlterField( model_name='rawdar', name='Cd', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( 
model_name='rawdar', name='Cd_IDL', field=models.FloatField(blank=True,", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Cu', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "model_name='rawdar', name='DMA_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='MMA', field=models.FloatField(blank=True, null=True),", "null=True), ), migrations.AlterField( model_name='rawdar', name='Cu_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Fe_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "null=True), ), migrations.AlterField( model_name='rawdar', name='W_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "null=True), ), migrations.AlterField( model_name='rawdar', name='Pb_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Cd', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "null=True), ), migrations.AlterField( model_name='rawdar', name='Al', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "name='AsB', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='AsB_BDL', field=models.CharField(choices=[('1', 'below detection", "), migrations.AlterField( model_name='rawdar', name='Cu_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Fe',", "), migrations.AddField( model_name='rawdar', name='iAs', field=models.FloatField(blank=True, null=True), ), migrations.AddField( 
model_name='rawdar', name='iAs_BDL',", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Mn', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "), migrations.AlterField( model_name='rawdar', name='Co_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Cr',", "null=True), ), migrations.AlterField( model_name='rawdar', name='Ag', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "migrations.AlterField( model_name='rawdar', name='W_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Zn', field=models.FloatField(blank=True,", "migrations.AddField( model_name='rawdar', name='Sr', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='Sr_BDL', field=models.CharField(choices=[('1',", "null=True), ), migrations.AlterField( model_name='rawdar', name='As_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "null=True), ), migrations.AddField( model_name='rawdar', name='DMA_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0',", "migrations.AddField( model_name='rawdar', name='Ba_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection", "model_name='rawdar', name='MMA_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'),", "model_name='rawdar', name='iAs', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='iAs_BDL', field=models.CharField(choices=[('1', 'below", "), migrations.AlterField( model_name='rawdar', name='Fe', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Fe_IDL',", "), migrations.AlterField( model_name='rawdar', name='Mo', 
field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Mo_IDL',", "model_name='rawdar', name='iAs_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'),", "), migrations.AddField( model_name='rawdar', name='Ba_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above", "model_name='rawdar', name='Be', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Be_IDL', field=models.FloatField(blank=True, null=True),", "), migrations.AlterField( model_name='rawdar', name='Co', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Co_IDL',", "), migrations.AddField( model_name='rawdar', name='iAs_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above", "), migrations.AddField( model_name='rawdar', name='Sr_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='iAs',", "name='iAs_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'), ('nan',", "import migrations, models class Migration(migrations.Migration): dependencies = [ ('datasets', '0008_auto_20200821_1427'),", "name='MMA_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'), ('nan',", "), migrations.AlterField( model_name='rawdar', name='Ag_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Al',", "default=0, max_length=3), preserve_default=False, ), migrations.AddField( model_name='rawdar', name='AsB_IDL', field=models.FloatField(blank=True, null=True), ),", "name='Al', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Al_IDL', field=models.FloatField(blank=True, null=True), ),", "), migrations.AddField( model_name='rawdar', 
name='Ba_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='Cs',", "model_name='rawdar', name='Sn', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Sn_IDL', field=models.FloatField(blank=True, null=True),", "migrations.AlterField( model_name='rawdar', name='Be_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Cd', field=models.FloatField(blank=True,", "detection level'), ('0', 'above detection level'), ('nan', 'invalid')], default=0, max_length=3),", "model_name='rawdar', name='MMA', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='MMA_BDL', field=models.CharField(choices=[('1', 'below", "name='MMA_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='Sr', field=models.FloatField(blank=True, null=True), ),", "), migrations.AlterField( model_name='rawdar', name='Tl_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='U',", "[ migrations.AddField( model_name='rawdar', name='AsB', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='AsB_BDL',", "model_name='rawdar', name='AsB_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='Ba', field=models.FloatField(blank=True, null=True),", "migrations.AlterField( model_name='rawdar', name='Pb_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Sb', field=models.FloatField(blank=True,", "model_name='rawdar', name='Be_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Cd', field=models.FloatField(blank=True, null=True),", "migrations.AlterField( model_name='rawdar', name='Mn', 
field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Mn_IDL', field=models.FloatField(blank=True,", "Migration(migrations.Migration): dependencies = [ ('datasets', '0008_auto_20200821_1427'), ] operations = [", "name='Sr_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'), ('nan',", "migrations.AddField( model_name='rawdar', name='MMA_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='Sr', field=models.FloatField(blank=True,", "name='Zn', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Zn_IDL', field=models.FloatField(blank=True, null=True), ),", "null=True), ), migrations.AlterField( model_name='rawdar', name='Mn', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "migrations.AlterField( model_name='rawdar', name='Co', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Co_IDL', field=models.FloatField(blank=True,", "model_name='rawdar', name='Sr_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'),", "field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='iAs_BDL', field=models.CharField(choices=[('1', 'below detection level'),", "from django.db import migrations, models class Migration(migrations.Migration): dependencies = [", "migrations.AlterField( model_name='rawdar', name='Ni', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Ni_IDL', field=models.FloatField(blank=True,", "('nan', 'invalid')], default=0, max_length=3), preserve_default=False, ), migrations.AddField( model_name='rawdar', name='AsB_IDL', field=models.FloatField(blank=True,", "migrations.AlterField( model_name='rawdar', name='Fe_IDL', 
field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Hg', field=models.FloatField(blank=True,", "model_name='rawdar', name='Hg', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Hg_IDL', field=models.FloatField(blank=True, null=True),", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='W', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "migrations.AlterField( model_name='rawdar', name='Mn_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Mo', field=models.FloatField(blank=True,", "migrations.AlterField( model_name='rawdar', name='Sb_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Se', field=models.FloatField(blank=True,", "), migrations.AlterField( model_name='rawdar', name='W', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='W_IDL',", "migrations.AddField( model_name='rawdar', name='iAs_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection", "field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='Cs_BDL', field=models.CharField(choices=[('1', 'below detection level'),", "null=True), ), migrations.AlterField( model_name='rawdar', name='Be_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "name='Mn_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Mo', field=models.FloatField(blank=True, null=True), ),", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Be', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "('nan', 'invalid')], default=0, max_length=3), 
preserve_default=False, ), migrations.AddField( model_name='rawdar', name='iAs_IDL', field=models.FloatField(blank=True,", "model_name='rawdar', name='Cr_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Cu', field=models.FloatField(blank=True, null=True),", "), migrations.AlterField( model_name='rawdar', name='As_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Be',", "null=True), ), migrations.AddField( model_name='rawdar', name='DMA', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar',", "name='AsB_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='Ba', field=models.FloatField(blank=True, null=True), ),", "null=True), ), migrations.AlterField( model_name='rawdar', name='Ni_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "preserve_default=False, ), migrations.AddField( model_name='rawdar', name='AsB_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar',", "null=True), ), migrations.AlterField( model_name='rawdar', name='Hg_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "level'), ('nan', 'invalid')], default=0, max_length=3), preserve_default=False, ), migrations.AddField( model_name='rawdar', name='iAs_IDL',", "model_name='rawdar', name='Mn', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Mn_IDL', field=models.FloatField(blank=True, null=True),", "), migrations.AlterField( model_name='rawdar', name='Sn_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Tl',", "field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='MMA_BDL', 
field=models.CharField(choices=[('1', 'below detection level'),", "model_name='rawdar', name='Ni', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Ni_IDL', field=models.FloatField(blank=True, null=True),", "'invalid')], default=0, max_length=3), preserve_default=False, ), migrations.AddField( model_name='rawdar', name='Ba_IDL', field=models.FloatField(blank=True, null=True),", "name='U', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='U_IDL', field=models.FloatField(blank=True, null=True), ),", "name='Ba_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='Cs', field=models.FloatField(blank=True, null=True), ),", "model_name='rawdar', name='Sr', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='Sr_BDL', field=models.CharField(choices=[('1', 'below", "null=True), ), migrations.AlterField( model_name='rawdar', name='Cd', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "null=True), ), migrations.AlterField( model_name='rawdar', name='Sb_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='As_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "model_name='rawdar', name='Ba_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'),", "null=True), ), migrations.AlterField( model_name='rawdar', name='V_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "migrations.AlterField( model_name='rawdar', name='Pb', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Pb_IDL', 
field=models.FloatField(blank=True,", "null=True), ), migrations.AlterField( model_name='rawdar', name='Ag_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "max_length=3), preserve_default=False, ), migrations.AddField( model_name='rawdar', name='MMA_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField(", "default=0, max_length=3), preserve_default=False, ), migrations.AddField( model_name='rawdar', name='Sr_IDL', field=models.FloatField(blank=True, null=True), ),", "migrations.AlterField( model_name='rawdar', name='Hg_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Mn', field=models.FloatField(blank=True,", "model_name='rawdar', name='Co', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Co_IDL', field=models.FloatField(blank=True, null=True),", "model_name='rawdar', name='As', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='As_IDL', field=models.FloatField(blank=True, null=True),", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Ag', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "), migrations.AlterField( model_name='rawdar', name='Al_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='As',", "model_name='rawdar', name='Zn_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='urine_specific_gravity', field=models.FloatField(blank=True, null=True),", "), migrations.AlterField( model_name='rawdar', name='Fe_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Hg',", "name='Pb_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Sb', 
field=models.FloatField(blank=True, null=True), ),", "null=True), ), migrations.AddField( model_name='rawdar', name='Ba', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar',", "model_name='rawdar', name='Al_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='As', field=models.FloatField(blank=True, null=True),", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Fe', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "null=True), ), migrations.AlterField( model_name='rawdar', name='W', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "Generated by Django 3.0.7 on 2020-08-24 06:17 from django.db import", "model_name='rawdar', name='Al', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Al_IDL', field=models.FloatField(blank=True, null=True),", "model_name='rawdar', name='iAs_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Ag', field=models.FloatField(blank=True, null=True),", "migrations.AlterField( model_name='rawdar', name='Cr_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Cu', field=models.FloatField(blank=True,", "field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='iAs', field=models.FloatField(blank=True, null=True), ), migrations.AddField(", "name='W', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='W_IDL', field=models.FloatField(blank=True, null=True), ),", "), migrations.AlterField( model_name='rawdar', name='Cr_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Cu',", "field=models.FloatField(blank=True, null=True), ), 
migrations.AlterField( model_name='rawdar', name='Hg', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Mn_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='urine_specific_gravity', field=models.FloatField(blank=True, null=True), ), ]", "migrations.AlterField( model_name='rawdar', name='Sb', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Sb_IDL', field=models.FloatField(blank=True,", "= [ migrations.AddField( model_name='rawdar', name='AsB', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar',", "migrations.AlterField( model_name='rawdar', name='As_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Be', field=models.FloatField(blank=True,", "), migrations.AlterField( model_name='rawdar', name='Ni', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Ni_IDL',", "('nan', 'invalid')], default=0, max_length=3), preserve_default=False, ), migrations.AddField( model_name='rawdar', name='Sr_IDL', field=models.FloatField(blank=True,", "null=True), ), migrations.AddField( model_name='rawdar', name='iAs', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar',", "), migrations.AddField( model_name='rawdar', name='DMA_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='MMA',", "null=True), ), migrations.AddField( model_name='rawdar', name='AsB_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0',", "migrations.AlterField( model_name='rawdar', name='Sn_IDL', field=models.FloatField(blank=True, null=True), ), 
migrations.AlterField( model_name='rawdar', name='Tl', field=models.FloatField(blank=True,", "name='Be', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Be_IDL', field=models.FloatField(blank=True, null=True), ),", "level'), ('nan', 'invalid')], default=0, max_length=3), preserve_default=False, ), migrations.AddField( model_name='rawdar', name='Cs_IDL',", "name='V', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='V_IDL', field=models.FloatField(blank=True, null=True), ),", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Pb_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "migrations.AddField( model_name='rawdar', name='Cs_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection", "model_name='rawdar', name='Cd', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Cd_IDL', field=models.FloatField(blank=True, null=True),", "level'), ('nan', 'invalid')], default=0, max_length=3), preserve_default=False, ), migrations.AddField( model_name='rawdar', name='DMA_IDL',", "null=True), ), migrations.AlterField( model_name='rawdar', name='Cr_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "null=True), ), migrations.AlterField( model_name='rawdar', name='Sn', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "model_name='rawdar', name='V', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='V_IDL', field=models.FloatField(blank=True, null=True),", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Ag_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "), migrations.AddField( 
model_name='rawdar', name='DMA_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above", "null=True), ), migrations.AlterField( model_name='rawdar', name='Al_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "model_name='rawdar', name='DMA_BDL', field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'),", "model_name='rawdar', name='Cs_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='DMA', field=models.FloatField(blank=True, null=True),", "detection level'), ('nan', 'invalid')], default=0, max_length=3), preserve_default=False, ), migrations.AddField( model_name='rawdar',", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Se', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "migrations.AddField( model_name='rawdar', name='AsB_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='Ba', field=models.FloatField(blank=True,", "null=True), ), migrations.AlterField( model_name='rawdar', name='Zn', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "migrations.AddField( model_name='rawdar', name='Cs', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='Cs_BDL', field=models.CharField(choices=[('1',", "name='Cs_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='DMA', field=models.FloatField(blank=True, null=True), ),", "field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='DMA_BDL', field=models.CharField(choices=[('1', 'below detection level'),", "), migrations.AlterField( model_name='rawdar', name='V', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( 
model_name='rawdar', name='V_IDL',", "null=True), ), migrations.AddField( model_name='rawdar', name='Cs', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar',", "migrations.AddField( model_name='rawdar', name='MMA', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='MMA_BDL', field=models.CharField(choices=[('1',", "django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('datasets',", "migrations.AlterField( model_name='rawdar', name='Tl', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Tl_IDL', field=models.FloatField(blank=True,", "dependencies = [ ('datasets', '0008_auto_20200821_1427'), ] operations = [ migrations.AddField(", "null=True), ), migrations.AlterField( model_name='rawdar', name='Se_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "null=True), ), migrations.AlterField( model_name='rawdar', name='Co', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "migrations.AlterField( model_name='rawdar', name='V', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='V_IDL', field=models.FloatField(blank=True,", "max_length=3), preserve_default=False, ), migrations.AddField( model_name='rawdar', name='Sr_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField(", "null=True), ), migrations.AlterField( model_name='rawdar', name='Be', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "migrations.AlterField( model_name='rawdar', name='Tl_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='U', field=models.FloatField(blank=True,", "name='Sb_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( 
model_name='rawdar', name='Se', field=models.FloatField(blank=True, null=True), ),", "on 2020-08-24 06:17 from django.db import migrations, models class Migration(migrations.Migration):", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Sn', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "default=0, max_length=3), preserve_default=False, ), migrations.AddField( model_name='rawdar', name='Cs_IDL', field=models.FloatField(blank=True, null=True), ),", "), migrations.AlterField( model_name='rawdar', name='Tl', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Tl_IDL',", "('nan', 'invalid')], default=0, max_length=3), preserve_default=False, ), migrations.AddField( model_name='rawdar', name='Cs_IDL', field=models.FloatField(blank=True,", "name='Sn_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Tl', field=models.FloatField(blank=True, null=True), ),", "), migrations.AddField( model_name='rawdar', name='iAs_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Ag',", "level'), ('nan', 'invalid')], default=0, max_length=3), preserve_default=False, ), migrations.AddField( model_name='rawdar', name='AsB_IDL',", "'invalid')], default=0, max_length=3), preserve_default=False, ), migrations.AddField( model_name='rawdar', name='AsB_IDL', field=models.FloatField(blank=True, null=True),", "name='Ag', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Ag_IDL', field=models.FloatField(blank=True, null=True), ),", "('nan', 'invalid')], default=0, max_length=3), preserve_default=False, ), migrations.AddField( model_name='rawdar', name='Ba_IDL', field=models.FloatField(blank=True,", "migrations.AlterField( model_name='rawdar', name='Cu', field=models.FloatField(blank=True, null=True), ), 
migrations.AlterField( model_name='rawdar', name='Cu_IDL', field=models.FloatField(blank=True,", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Co_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "), migrations.AddField( model_name='rawdar', name='Cs_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='DMA',", "migrations.AlterField( model_name='rawdar', name='Al_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='As', field=models.FloatField(blank=True,", "name='As_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Be', field=models.FloatField(blank=True, null=True), ),", "null=True), ), migrations.AlterField( model_name='rawdar', name='As', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar',", "3.0.7 on 2020-08-24 06:17 from django.db import migrations, models class", "field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Zn_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField(", "), migrations.AlterField( model_name='rawdar', name='Pb_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AlterField( model_name='rawdar', name='Sb',", "name='DMA_IDL', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='rawdar', name='MMA', field=models.FloatField(blank=True, null=True), ),", "('nan', 'invalid')], default=0, max_length=3), preserve_default=False, ), migrations.AddField( model_name='rawdar', name='DMA_IDL', field=models.FloatField(blank=True," ]
[ "print \"Trying my hand at Git!\" print \"Something else\" for", "\"Hello World!\" print \"Trying my hand at Git!\" print \"Something", "my hand at Git!\" print \"Something else\" for i in", "World!\" print \"Trying my hand at Git!\" print \"Something else\"", "Git!\" print \"Something else\" for i in range(10): print i", "<gh_stars>0 print \"Hello World!\" print \"Trying my hand at Git!\"", "hand at Git!\" print \"Something else\" for i in range(10):", "at Git!\" print \"Something else\" for i in range(10): print", "print \"Hello World!\" print \"Trying my hand at Git!\" print", "\"Trying my hand at Git!\" print \"Something else\" for i" ]
[ "django.contrib.auth.mixins import LoginRequiredMixin from django.views.generic.edit import CreateView, UpdateView, DeleteView from", "\"All\")] for item in Status.objects.all(): statuses.append((item.text, item.text)) return statuses class", "self.object.start_date, 'finish_date': self.object.finish_date, 'status': self.object.status, 'description': self.object.description, } context =", "== '---': tasks = tasks.filter(level=None) else: tasks = tasks.filter(level__project__name=project_name) if", "= tasks.filter(status__text=status_name) status_list = Status.objects.all() last_initial = { 'status': status_name,", "def _get_statuses(): statuses = [(\"All\", \"All\")] for item in Status.objects.all():", "TaskDetailView(DetailView): model = Task template_name = 'tasks/details.html' def get_object(self): obj", "TaskCreateView(LoginRequiredMixin, CreateView): login_url = '/users/register' model = Task form_class =", "tasks context['filter_form'] = form context['task_form'] = TaskForm return context class", "DeleteView from django.views.generic import DetailView, ListView from projects.models import Project", "{ 'name': self.object.name, 'start_date': self.object.start_date, 'finish_date': self.object.finish_date, 'status': self.object.status, 'description':", "def get_context_data(self, *args, **kwargs): try: project_name = self.request.GET['project'] except KeyError:", "'status': self.object.status, 'description': self.object.description, } context = super(TaskDetailView, self).get_context_data(*args, **kwargs)", "return super().form_valid(form) class TaskDeleteView(DeleteView): model = Task template_name = \"tasks/delete_task.html\"", "template_name = 'tasks/details.html' def get_object(self): obj = super().get_object() obj.check_expired() return", "model = Task template_name = 'tasks/details.html' def get_object(self): obj =", "self.request.GET['status'] except KeyError: status_name = '' if self.request.user.is_authenticated: tasks =", "def form_valid(self, form): 
self.object.check_expired() return super().form_valid(form) class TaskDeleteView(DeleteView): model =", "return super().form_valid(form) class TaskUpdateView(LoginRequiredMixin, UpdateView): login_url = '/users/register' model =", "status_list = Status.objects.all() last_initial = { 'status': status_name, 'project': project_name,", "<reponame>TheDim0n/ProjectManager<filename>tasks/views.py<gh_stars>0 from django.contrib.auth.mixins import LoginRequiredMixin from django.views.generic.edit import CreateView, UpdateView,", "'finish_date': self.object.finish_date, 'status': self.object.status, 'description': self.object.description, } context = super(TaskDetailView,", "obj def get_context_data(self, *args, **kwargs): initial_content = { 'name': self.object.name,", "= _get_statuses() context = super(TaskListView, self).get_context_data(*args, **kwargs) context['status_list'] = status_list", "ListView): login_url = '/users/register' model = Task context_object_name = 'tasks'", "TaskListView(LoginRequiredMixin, ListView): login_url = '/users/register' model = Task context_object_name =", "if self.request.user.is_authenticated: tasks = Task.objects.filter(created_by=self.request.user) if project_name and project_name !=", "from status.models import Status from .models import Task from .forms", "status_list context['tasks'] = tasks context['filter_form'] = form context['task_form'] = TaskForm", "project_name != \"All\": if project_name == '---': tasks = tasks.filter(level=None)", "'/users/register' model = Task form_class = TaskForm template_name = \"tasks/update_task.html\"", "projects.models import Project from status.models import Status from .models import", "django.views.generic.edit import CreateView, UpdateView, DeleteView from django.views.generic import DetailView, ListView", "Status.objects.all(): statuses.append((item.text, item.text)) return statuses class TaskListView(LoginRequiredMixin, ListView): login_url =", "return queryset def get_context_data(self, 
*args, **kwargs): try: project_name = self.request.GET['project']", "{ 'status': status_name, 'project': project_name, } form = FilterForm(initial=last_initial) form.fields['project'].choices", "'' if self.request.user.is_authenticated: tasks = Task.objects.filter(created_by=self.request.user) if project_name and project_name", "return obj def get_context_data(self, *args, **kwargs): initial_content = { 'name':", "tasks = tasks.filter(level__project__name=project_name) if status_name and status_name != \"All\": tasks", "= [(\"All\", \"All\"), ('---', '---')] for item in Project.objects.filter(created_by=user): projects.append((item.name,", "except KeyError: status_name = '' if self.request.user.is_authenticated: tasks = Task.objects.filter(created_by=self.request.user)", "= TaskForm return context class TaskDetailView(DetailView): model = Task template_name", "TaskForm template_name = 'tasks/index.html' def form_valid(self, form): form.instance.created_by = self.request.user", "UpdateView, DeleteView from django.views.generic import DetailView, ListView from projects.models import", "**kwargs) context['status_list'] = status_list context['tasks'] = tasks context['filter_form'] = form", "super(TaskDetailView, self).get_context_data(*args, **kwargs) context['task_form'] = TaskForm(initial=initial_content) return context class TaskCreateView(LoginRequiredMixin,", "if project_name and project_name != \"All\": if project_name == '---':", "FilterForm def _get_projects(user): projects = [(\"All\", \"All\"), ('---', '---')] for", "def get_object(self): obj = super().get_object() obj.check_expired() return obj def get_context_data(self,", "for item in Project.objects.filter(created_by=user): projects.append((item.name, item.name)) return projects def _get_statuses():", "form_valid(self, form): form.instance.created_by = self.request.user return super().form_valid(form) class TaskUpdateView(LoginRequiredMixin, UpdateView):", "*args, **kwargs): initial_content = { 'name': 
self.object.name, 'start_date': self.object.start_date, 'finish_date':", "= Task.objects.filter(created_by=self.request.user) if project_name and project_name != \"All\": if project_name", "projects.append((item.name, item.name)) return projects def _get_statuses(): statuses = [(\"All\", \"All\")]", "CreateView, UpdateView, DeleteView from django.views.generic import DetailView, ListView from projects.models", "queryset def get_context_data(self, *args, **kwargs): try: project_name = self.request.GET['project'] except", "status_name = '' if self.request.user.is_authenticated: tasks = Task.objects.filter(created_by=self.request.user) if project_name", "return projects def _get_statuses(): statuses = [(\"All\", \"All\")] for item", "\"All\"), ('---', '---')] for item in Project.objects.filter(created_by=user): projects.append((item.name, item.name)) return", "'name': self.object.name, 'start_date': self.object.start_date, 'finish_date': self.object.finish_date, 'status': self.object.status, 'description': self.object.description,", "in queryset: obj.check_expired() return queryset def get_context_data(self, *args, **kwargs): try:", "TaskForm(initial=initial_content) return context class TaskCreateView(LoginRequiredMixin, CreateView): login_url = '/users/register' model", "status.models import Status from .models import Task from .forms import", "context = super(TaskDetailView, self).get_context_data(*args, **kwargs) context['task_form'] = TaskForm(initial=initial_content) return context", "TaskForm template_name = \"tasks/update_task.html\" def form_valid(self, form): self.object.check_expired() return super().form_valid(form)", "model = Task form_class = TaskForm template_name = \"tasks/update_task.html\" def", "login_url = '/users/register' model = Task form_class = TaskForm template_name", "import TaskForm, FilterForm def _get_projects(user): projects = [(\"All\", \"All\"), ('---',", "= Task form_class = TaskForm template_name = 'tasks/index.html' def 
form_valid(self,", "TaskForm return context class TaskDetailView(DetailView): model = Task template_name =", "try: project_name = self.request.GET['project'] except KeyError: project_name = '' try:", "form context['task_form'] = TaskForm return context class TaskDetailView(DetailView): model =", "class TaskDetailView(DetailView): model = Task template_name = 'tasks/details.html' def get_object(self):", "in Project.objects.filter(created_by=user): projects.append((item.name, item.name)) return projects def _get_statuses(): statuses =", "= \"tasks/update_task.html\" def form_valid(self, form): self.object.check_expired() return super().form_valid(form) class TaskDeleteView(DeleteView):", "tasks = Task.objects.filter(created_by=self.request.user) if project_name and project_name != \"All\": if", "\"All\": if project_name == '---': tasks = tasks.filter(level=None) else: tasks", "ordering = ['finish_date'] def get_queryset(self): queryset = super().get_queryset() for obj", "= super(TaskListView, self).get_context_data(*args, **kwargs) context['status_list'] = status_list context['tasks'] = tasks", "**kwargs): initial_content = { 'name': self.object.name, 'start_date': self.object.start_date, 'finish_date': self.object.finish_date,", "KeyError: project_name = '' try: status_name = self.request.GET['status'] except KeyError:", "if project_name == '---': tasks = tasks.filter(level=None) else: tasks =", "form): self.object.check_expired() return super().form_valid(form) class TaskDeleteView(DeleteView): model = Task template_name", "= super().get_queryset() for obj in queryset: obj.check_expired() return queryset def", "and project_name != \"All\": if project_name == '---': tasks =", "!= \"All\": tasks = tasks.filter(status__text=status_name) status_list = Status.objects.all() last_initial =", "form): form.instance.created_by = self.request.user return super().form_valid(form) class TaskUpdateView(LoginRequiredMixin, UpdateView): login_url", "_get_statuses(): statuses = 
[(\"All\", \"All\")] for item in Status.objects.all(): statuses.append((item.text,", "= FilterForm(initial=last_initial) form.fields['project'].choices = _get_projects(user=self.request.user) form.fields['status'].choices = _get_statuses() context =", "template_name = 'tasks/index.html' def form_valid(self, form): form.instance.created_by = self.request.user return", "class TaskListView(LoginRequiredMixin, ListView): login_url = '/users/register' model = Task context_object_name", "= status_list context['tasks'] = tasks context['filter_form'] = form context['task_form'] =", "= { 'name': self.object.name, 'start_date': self.object.start_date, 'finish_date': self.object.finish_date, 'status': self.object.status,", "self.object.finish_date, 'status': self.object.status, 'description': self.object.description, } context = super(TaskDetailView, self).get_context_data(*args,", "= Status.objects.all() last_initial = { 'status': status_name, 'project': project_name, }", "for item in Status.objects.all(): statuses.append((item.text, item.text)) return statuses class TaskListView(LoginRequiredMixin,", "_get_statuses() context = super(TaskListView, self).get_context_data(*args, **kwargs) context['status_list'] = status_list context['tasks']", "form_valid(self, form): self.object.check_expired() return super().form_valid(form) class TaskDeleteView(DeleteView): model = Task", "class TaskUpdateView(LoginRequiredMixin, UpdateView): login_url = '/users/register' model = Task form_class", "status_name = self.request.GET['status'] except KeyError: status_name = '' if self.request.user.is_authenticated:", "_get_projects(user=self.request.user) form.fields['status'].choices = _get_statuses() context = super(TaskListView, self).get_context_data(*args, **kwargs) context['status_list']", "form = FilterForm(initial=last_initial) form.fields['project'].choices = _get_projects(user=self.request.user) form.fields['status'].choices = _get_statuses() context", "login_url = '/users/register' 
model = Task context_object_name = 'tasks' template_name", "**kwargs): try: project_name = self.request.GET['project'] except KeyError: project_name = ''", "= self.request.GET['project'] except KeyError: project_name = '' try: status_name =", "context['task_form'] = TaskForm(initial=initial_content) return context class TaskCreateView(LoginRequiredMixin, CreateView): login_url =", "self.request.GET['project'] except KeyError: project_name = '' try: status_name = self.request.GET['status']", "model = Task context_object_name = 'tasks' template_name = 'tasks/index.html' ordering", "context['tasks'] = tasks context['filter_form'] = form context['task_form'] = TaskForm return", "super(TaskListView, self).get_context_data(*args, **kwargs) context['status_list'] = status_list context['tasks'] = tasks context['filter_form']", "statuses = [(\"All\", \"All\")] for item in Status.objects.all(): statuses.append((item.text, item.text))", "self.object.description, } context = super(TaskDetailView, self).get_context_data(*args, **kwargs) context['task_form'] = TaskForm(initial=initial_content)", "import CreateView, UpdateView, DeleteView from django.views.generic import DetailView, ListView from", "tasks.filter(level__project__name=project_name) if status_name and status_name != \"All\": tasks = tasks.filter(status__text=status_name)", "Task.objects.filter(created_by=self.request.user) if project_name and project_name != \"All\": if project_name ==", "obj = super().get_object() obj.check_expired() return obj def get_context_data(self, *args, **kwargs):", "def get_context_data(self, *args, **kwargs): initial_content = { 'name': self.object.name, 'start_date':", "= '/users/register' model = Task context_object_name = 'tasks' template_name =", "!= \"All\": if project_name == '---': tasks = tasks.filter(level=None) else:", "get_context_data(self, *args, **kwargs): initial_content = { 'name': self.object.name, 'start_date': self.object.start_date,", "['finish_date'] def 
get_queryset(self): queryset = super().get_queryset() for obj in queryset:", "super().get_object() obj.check_expired() return obj def get_context_data(self, *args, **kwargs): initial_content =", "'project': project_name, } form = FilterForm(initial=last_initial) form.fields['project'].choices = _get_projects(user=self.request.user) form.fields['status'].choices", "obj in queryset: obj.check_expired() return queryset def get_context_data(self, *args, **kwargs):", "Status.objects.all() last_initial = { 'status': status_name, 'project': project_name, } form", "context['status_list'] = status_list context['tasks'] = tasks context['filter_form'] = form context['task_form']", "in Status.objects.all(): statuses.append((item.text, item.text)) return statuses class TaskListView(LoginRequiredMixin, ListView): login_url", "from django.views.generic.edit import CreateView, UpdateView, DeleteView from django.views.generic import DetailView,", "class TaskCreateView(LoginRequiredMixin, CreateView): login_url = '/users/register' model = Task form_class", "import Project from status.models import Status from .models import Task", "tasks.filter(status__text=status_name) status_list = Status.objects.all() last_initial = { 'status': status_name, 'project':", "UpdateView): login_url = '/users/register' model = Task form_class = TaskForm", "def get_queryset(self): queryset = super().get_queryset() for obj in queryset: obj.check_expired()", "= Task template_name = 'tasks/details.html' def get_object(self): obj = super().get_object()", "'tasks/index.html' ordering = ['finish_date'] def get_queryset(self): queryset = super().get_queryset() for", "'/users/register' model = Task context_object_name = 'tasks' template_name = 'tasks/index.html'", "project_name, } form = FilterForm(initial=last_initial) form.fields['project'].choices = _get_projects(user=self.request.user) form.fields['status'].choices =", "form_class = TaskForm template_name = 'tasks/index.html' def form_valid(self, form): 
form.instance.created_by", "initial_content = { 'name': self.object.name, 'start_date': self.object.start_date, 'finish_date': self.object.finish_date, 'status':", "= TaskForm template_name = \"tasks/update_task.html\" def form_valid(self, form): self.object.check_expired() return", "**kwargs) context['task_form'] = TaskForm(initial=initial_content) return context class TaskCreateView(LoginRequiredMixin, CreateView): login_url", "= [(\"All\", \"All\")] for item in Status.objects.all(): statuses.append((item.text, item.text)) return", "project_name and project_name != \"All\": if project_name == '---': tasks", "status_name, 'project': project_name, } form = FilterForm(initial=last_initial) form.fields['project'].choices = _get_projects(user=self.request.user)", "form.fields['status'].choices = _get_statuses() context = super(TaskListView, self).get_context_data(*args, **kwargs) context['status_list'] =", "self.object.status, 'description': self.object.description, } context = super(TaskDetailView, self).get_context_data(*args, **kwargs) context['task_form']", "= Task form_class = TaskForm template_name = \"tasks/update_task.html\" def form_valid(self,", "return statuses class TaskListView(LoginRequiredMixin, ListView): login_url = '/users/register' model =", "\"All\": tasks = tasks.filter(status__text=status_name) status_list = Status.objects.all() last_initial = {", "form_class = TaskForm template_name = \"tasks/update_task.html\" def form_valid(self, form): self.object.check_expired()", "= 'tasks' template_name = 'tasks/index.html' ordering = ['finish_date'] def get_queryset(self):", "Status from .models import Task from .forms import TaskForm, FilterForm", "Task from .forms import TaskForm, FilterForm def _get_projects(user): projects =", "queryset = super().get_queryset() for obj in queryset: obj.check_expired() return queryset", "get_context_data(self, *args, **kwargs): try: project_name = self.request.GET['project'] except KeyError: project_name", "import 
DetailView, ListView from projects.models import Project from status.models import", "'---')] for item in Project.objects.filter(created_by=user): projects.append((item.name, item.name)) return projects def", "= Task context_object_name = 'tasks' template_name = 'tasks/index.html' ordering =", "if status_name and status_name != \"All\": tasks = tasks.filter(status__text=status_name) status_list", "'tasks/details.html' def get_object(self): obj = super().get_object() obj.check_expired() return obj def", "DetailView, ListView from projects.models import Project from status.models import Status", "_get_projects(user): projects = [(\"All\", \"All\"), ('---', '---')] for item in", "Task form_class = TaskForm template_name = 'tasks/index.html' def form_valid(self, form):", "self.request.user return super().form_valid(form) class TaskUpdateView(LoginRequiredMixin, UpdateView): login_url = '/users/register' model", "} context = super(TaskDetailView, self).get_context_data(*args, **kwargs) context['task_form'] = TaskForm(initial=initial_content) return", "statuses class TaskListView(LoginRequiredMixin, ListView): login_url = '/users/register' model = Task", "context class TaskCreateView(LoginRequiredMixin, CreateView): login_url = '/users/register' model = Task", "super().get_queryset() for obj in queryset: obj.check_expired() return queryset def get_context_data(self,", "from .models import Task from .forms import TaskForm, FilterForm def", "return context class TaskDetailView(DetailView): model = Task template_name = 'tasks/details.html'", "context['task_form'] = TaskForm return context class TaskDetailView(DetailView): model = Task", "super().form_valid(form) class TaskUpdateView(LoginRequiredMixin, UpdateView): login_url = '/users/register' model = Task", "context = super(TaskListView, self).get_context_data(*args, **kwargs) context['status_list'] = status_list context['tasks'] =", "Task template_name = 'tasks/details.html' def get_object(self): obj = 
super().get_object() obj.check_expired()", "('---', '---')] for item in Project.objects.filter(created_by=user): projects.append((item.name, item.name)) return projects", "form.fields['project'].choices = _get_projects(user=self.request.user) form.fields['status'].choices = _get_statuses() context = super(TaskListView, self).get_context_data(*args,", "self.object.name, 'start_date': self.object.start_date, 'finish_date': self.object.finish_date, 'status': self.object.status, 'description': self.object.description, }", "= '' if self.request.user.is_authenticated: tasks = Task.objects.filter(created_by=self.request.user) if project_name and", "= super().get_object() obj.check_expired() return obj def get_context_data(self, *args, **kwargs): initial_content", "import LoginRequiredMixin from django.views.generic.edit import CreateView, UpdateView, DeleteView from django.views.generic", "= tasks.filter(level=None) else: tasks = tasks.filter(level__project__name=project_name) if status_name and status_name", "= 'tasks/index.html' ordering = ['finish_date'] def get_queryset(self): queryset = super().get_queryset()", "\"tasks/update_task.html\" def form_valid(self, form): self.object.check_expired() return super().form_valid(form) class TaskDeleteView(DeleteView): model", "'tasks' template_name = 'tasks/index.html' ordering = ['finish_date'] def get_queryset(self): queryset", "'---': tasks = tasks.filter(level=None) else: tasks = tasks.filter(level__project__name=project_name) if status_name", "tasks.filter(level=None) else: tasks = tasks.filter(level__project__name=project_name) if status_name and status_name !=", "obj.check_expired() return obj def get_context_data(self, *args, **kwargs): initial_content = {", "from .forms import TaskForm, FilterForm def _get_projects(user): projects = [(\"All\",", "import Task from .forms import TaskForm, FilterForm def _get_projects(user): projects", "try: status_name = self.request.GET['status'] except KeyError: status_name = '' if", 
".models import Task from .forms import TaskForm, FilterForm def _get_projects(user):", ".forms import TaskForm, FilterForm def _get_projects(user): projects = [(\"All\", \"All\"),", "= form context['task_form'] = TaskForm return context class TaskDetailView(DetailView): model", "self).get_context_data(*args, **kwargs) context['task_form'] = TaskForm(initial=initial_content) return context class TaskCreateView(LoginRequiredMixin, CreateView):", "obj.check_expired() return queryset def get_context_data(self, *args, **kwargs): try: project_name =", "KeyError: status_name = '' if self.request.user.is_authenticated: tasks = Task.objects.filter(created_by=self.request.user) if", "form.instance.created_by = self.request.user return super().form_valid(form) class TaskUpdateView(LoginRequiredMixin, UpdateView): login_url =", "= tasks.filter(level__project__name=project_name) if status_name and status_name != \"All\": tasks =", "= self.request.user return super().form_valid(form) class TaskUpdateView(LoginRequiredMixin, UpdateView): login_url = '/users/register'", "= '' try: status_name = self.request.GET['status'] except KeyError: status_name =", "CreateView): login_url = '/users/register' model = Task form_class = TaskForm", "= self.request.GET['status'] except KeyError: status_name = '' if self.request.user.is_authenticated: tasks", "= { 'status': status_name, 'project': project_name, } form = FilterForm(initial=last_initial)", "item.name)) return projects def _get_statuses(): statuses = [(\"All\", \"All\")] for", "queryset: obj.check_expired() return queryset def get_context_data(self, *args, **kwargs): try: project_name", "} form = FilterForm(initial=last_initial) form.fields['project'].choices = _get_projects(user=self.request.user) form.fields['status'].choices = _get_statuses()", "= super(TaskDetailView, self).get_context_data(*args, **kwargs) context['task_form'] = TaskForm(initial=initial_content) return context class", "= TaskForm template_name = 
'tasks/index.html' def form_valid(self, form): form.instance.created_by =", "item in Project.objects.filter(created_by=user): projects.append((item.name, item.name)) return projects def _get_statuses(): statuses", "last_initial = { 'status': status_name, 'project': project_name, } form =", "Task context_object_name = 'tasks' template_name = 'tasks/index.html' ordering = ['finish_date']", "self.object.check_expired() return super().form_valid(form) class TaskDeleteView(DeleteView): model = Task template_name =", "item.text)) return statuses class TaskListView(LoginRequiredMixin, ListView): login_url = '/users/register' model", "'status': status_name, 'project': project_name, } form = FilterForm(initial=last_initial) form.fields['project'].choices =", "Task form_class = TaskForm template_name = \"tasks/update_task.html\" def form_valid(self, form):", "'/users/register' model = Task form_class = TaskForm template_name = 'tasks/index.html'", "projects def _get_statuses(): statuses = [(\"All\", \"All\")] for item in", "FilterForm(initial=last_initial) form.fields['project'].choices = _get_projects(user=self.request.user) form.fields['status'].choices = _get_statuses() context = super(TaskListView,", "model = Task form_class = TaskForm template_name = 'tasks/index.html' def", "from projects.models import Project from status.models import Status from .models", "from django.contrib.auth.mixins import LoginRequiredMixin from django.views.generic.edit import CreateView, UpdateView, DeleteView", "TaskForm, FilterForm def _get_projects(user): projects = [(\"All\", \"All\"), ('---', '---')]", "tasks = tasks.filter(status__text=status_name) status_list = Status.objects.all() last_initial = { 'status':", "get_object(self): obj = super().get_object() obj.check_expired() return obj def get_context_data(self, *args,", "import Status from .models import Task from .forms import TaskForm,", "project_name = self.request.GET['project'] except KeyError: project_name = '' try: 
status_name", "item in Status.objects.all(): statuses.append((item.text, item.text)) return statuses class TaskListView(LoginRequiredMixin, ListView):", "except KeyError: project_name = '' try: status_name = self.request.GET['status'] except", "'tasks/index.html' def form_valid(self, form): form.instance.created_by = self.request.user return super().form_valid(form) class", "project_name == '---': tasks = tasks.filter(level=None) else: tasks = tasks.filter(level__project__name=project_name)", "[(\"All\", \"All\"), ('---', '---')] for item in Project.objects.filter(created_by=user): projects.append((item.name, item.name))", "Project.objects.filter(created_by=user): projects.append((item.name, item.name)) return projects def _get_statuses(): statuses = [(\"All\",", "from django.views.generic import DetailView, ListView from projects.models import Project from", "for obj in queryset: obj.check_expired() return queryset def get_context_data(self, *args,", "project_name = '' try: status_name = self.request.GET['status'] except KeyError: status_name", "'' try: status_name = self.request.GET['status'] except KeyError: status_name = ''", "return context class TaskCreateView(LoginRequiredMixin, CreateView): login_url = '/users/register' model =", "template_name = 'tasks/index.html' ordering = ['finish_date'] def get_queryset(self): queryset =", "context['filter_form'] = form context['task_form'] = TaskForm return context class TaskDetailView(DetailView):", "and status_name != \"All\": tasks = tasks.filter(status__text=status_name) status_list = Status.objects.all()", "TaskUpdateView(LoginRequiredMixin, UpdateView): login_url = '/users/register' model = Task form_class =", "tasks = tasks.filter(level=None) else: tasks = tasks.filter(level__project__name=project_name) if status_name and", "= _get_projects(user=self.request.user) form.fields['status'].choices = _get_statuses() context = super(TaskListView, self).get_context_data(*args, **kwargs)", "template_name = 
\"tasks/update_task.html\" def form_valid(self, form): self.object.check_expired() return super().form_valid(form) class", "Project from status.models import Status from .models import Task from", "*args, **kwargs): try: project_name = self.request.GET['project'] except KeyError: project_name =", "self).get_context_data(*args, **kwargs) context['status_list'] = status_list context['tasks'] = tasks context['filter_form'] =", "'start_date': self.object.start_date, 'finish_date': self.object.finish_date, 'status': self.object.status, 'description': self.object.description, } context", "def _get_projects(user): projects = [(\"All\", \"All\"), ('---', '---')] for item", "projects = [(\"All\", \"All\"), ('---', '---')] for item in Project.objects.filter(created_by=user):", "status_name != \"All\": tasks = tasks.filter(status__text=status_name) status_list = Status.objects.all() last_initial", "= 'tasks/index.html' def form_valid(self, form): form.instance.created_by = self.request.user return super().form_valid(form)", "[(\"All\", \"All\")] for item in Status.objects.all(): statuses.append((item.text, item.text)) return statuses", "status_name and status_name != \"All\": tasks = tasks.filter(status__text=status_name) status_list =", "get_queryset(self): queryset = super().get_queryset() for obj in queryset: obj.check_expired() return", "self.request.user.is_authenticated: tasks = Task.objects.filter(created_by=self.request.user) if project_name and project_name != \"All\":", "else: tasks = tasks.filter(level__project__name=project_name) if status_name and status_name != \"All\":", "= tasks context['filter_form'] = form context['task_form'] = TaskForm return context", "'description': self.object.description, } context = super(TaskDetailView, self).get_context_data(*args, **kwargs) context['task_form'] =", "def form_valid(self, form): form.instance.created_by = self.request.user return super().form_valid(form) class TaskUpdateView(LoginRequiredMixin,", "context class 
TaskDetailView(DetailView): model = Task template_name = 'tasks/details.html' def", "ListView from projects.models import Project from status.models import Status from", "context_object_name = 'tasks' template_name = 'tasks/index.html' ordering = ['finish_date'] def", "= ['finish_date'] def get_queryset(self): queryset = super().get_queryset() for obj in", "= '/users/register' model = Task form_class = TaskForm template_name =", "django.views.generic import DetailView, ListView from projects.models import Project from status.models", "= 'tasks/details.html' def get_object(self): obj = super().get_object() obj.check_expired() return obj", "= TaskForm(initial=initial_content) return context class TaskCreateView(LoginRequiredMixin, CreateView): login_url = '/users/register'", "statuses.append((item.text, item.text)) return statuses class TaskListView(LoginRequiredMixin, ListView): login_url = '/users/register'", "LoginRequiredMixin from django.views.generic.edit import CreateView, UpdateView, DeleteView from django.views.generic import" ]
[ "as auto: if file.endswith(\".shp\"): try: filename = file.replace(\".shp\",\"\") shape=shapefile.Reader(source_shape_file_path+filename+\"/\"+file) for", "is later than end time, we'll assume the earlier time", "start epoch_end_round = round(max(epoch_s,epoch_e) / threshold) * threshold epoch_start_round =", "time, we'll assume the earlier time is start epoch_end_round =", "* ((min(epoch_s,epoch_e) + 1800) // 3600)) times.append([start_time,end_time,epoch_start_round,epoch_end_round,epoch_start_round_dt,epoch_end_round_dt]) break except: logger.error('failed", "= dt.datetime.timestamp(dt.datetime.strptime(r[1], '%Y%j %H%M')) epoch_e = dt.datetime.timestamp(dt.datetime.strptime(r[2], '%Y%j %H%M')) #", "for root,dirs,files in os.walk(source_shape_file_path): for file in files: with open(os.path.join(root,file),\"r\")", "+ 1800) // 3600)) times.append([start_time,end_time,epoch_start_round,epoch_end_round,epoch_start_round_dt,epoch_end_round_dt]) break except: logger.error('failed to parse", "pd import logging logger = logging.getLogger(__name__) source_shape_file_path = \"C:/temp/2018/\" threshold", "geojson import MultiLineString from shapely import geometry import shapefile import", "logging.getLogger(__name__) source_shape_file_path = \"C:/temp/2018/\" threshold = 60*60 cols = ['start',", "in shape.iterRecords(): start_time = dt.datetime.strptime(r[1], '%Y%j %H%M') end_time = dt.datetime.strptime(r[2],", "((max(epoch_s,epoch_e) + 1800) // 3600)) epoch_start_round_dt = dt.datetime.utcfromtimestamp(3600 * ((min(epoch_s,epoch_e)", "import math import time import geohash import geojson from geojson", "import shapefile import numpy import datetime as dt import pandas", "end time, we'll assume the earlier time is start epoch_end_round", "with open(os.path.join(root,file),\"r\") as auto: if file.endswith(\".shp\"): try: filename = file.replace(\".shp\",\"\")", "logger.error('failed to parse file:'+source_shape_file_path+filename+\"/\") continue df = 
import os
import math
import time
import geohash
import geojson
from geojson import MultiLineString
from shapely import geometry
import shapefile
import numpy
import datetime as dt
import pandas as pd
import logging

logger = logging.getLogger(__name__)

# Root folder holding one sub-folder per shapefile, i.e.
# C:/temp/2018/<name>/<name>.shp  -- TODO confirm layout against the data drop.
source_shape_file_path = "C:/temp/2018/"
# Rounding bucket in seconds (one hour).
threshold = 60 * 60
cols = ['start', 'end', 'start_epoch_round', 'end_epoch_round',
        'start_epoch_round_dt', 'end_epoch_round_dt']

times = []
for root, dirs, files in os.walk(source_shape_file_path):
    for file in files:
        # Only .shp files carry the records we need; skip everything else.
        # (The original opened EVERY file in text mode via `with open(...)`
        # and never used the handle -- removed.)
        if not file.endswith(".shp"):
            continue
        # Assign before the try so the error message below can always
        # reference it (the original could hit NameError inside `except`).
        filename = file.replace(".shp", "")
        try:
            shape = shapefile.Reader(source_shape_file_path + filename + "/" + file)
            for r in shape.iterRecords():
                # Fields 1 and 2 are 'YYYYDDD HHMM' strings (year + day-of-year).
                start_time = dt.datetime.strptime(r[1], '%Y%j %H%M')
                end_time = dt.datetime.strptime(r[2], '%Y%j %H%M')
                # Reuse the parsed datetimes instead of re-parsing the strings.
                epoch_s = dt.datetime.timestamp(start_time)
                epoch_e = dt.datetime.timestamp(end_time)
                # sometimes start is later than end time, we'll assume the
                # earlier time is start
                epoch_end_round = round(max(epoch_s, epoch_e) / threshold) * threshold
                epoch_start_round = round(min(epoch_s, epoch_e) / threshold) * threshold
                # Round to the nearest hour (+1800 s then floor-divide by 3600).
                epoch_end_round_dt = dt.datetime.utcfromtimestamp(
                    3600 * ((max(epoch_s, epoch_e) + 1800) // 3600))
                epoch_start_round_dt = dt.datetime.utcfromtimestamp(
                    3600 * ((min(epoch_s, epoch_e) + 1800) // 3600))
                times.append([start_time, end_time, epoch_start_round,
                              epoch_end_round, epoch_start_round_dt,
                              epoch_end_round_dt])
                # Only the first record of each shapefile is used -- presumably
                # all records share the same time window; verify if that changes.
                break
        except Exception:
            # Was a bare `except:`; keep the best-effort skip but log the
            # traceback so parse failures are diagnosable.
            logger.exception('failed to parse file:' + source_shape_file_path + filename + "/")
            continue

df = pd.DataFrame(times, columns=cols)
df.to_csv('noaa_times.csv')
def save_state(balls):
    """Persist the game state: one space-separated record per ball in state.txt.

    Record layout matches load_state(): imageFile speedx speedy centerx centery.
    """
    records = []
    for ball in balls:
        record = "{} {} {} {} {}".format(
            ball.imageFile, ball.speedx, ball.speedy,
            ball.rect.centerx, ball.rect.centery)
        records.append(record + '\n')
    with open("state.txt", "w") as out:
        out.write("".join(records))
def load_state():
    """Restore balls from state.txt (written by save_state).

    Each line is 'imageFile speedx speedy centerx centery'. Returns a list of
    Ball objects, or None when the file is missing or unreadable -- the caller
    treats None as "start with no balls".
    """
    try:
        objects = []
        with open("state.txt", "r") as state_file:
            # BUG FIX: the original did `for line in f.read():`, which iterates
            # the file CHARACTER by character, and `objects += Ball(...)`,
            # which tries to iterate the Ball. Iterate lines and append. Also
            # stop shadowing the file handle with the first field.
            for line in state_file:
                image, sx, sy, x, y = line.split()
                objects.append(Ball(image, [int(sx), int(sy)], [int(x), int(y)]))
        return objects
    except Exception:
        # Preserve the original contract: any failure (missing file, corrupt
        # line, image load error) means "no saved state".
        return None
def delete_state():
    """Remove the saved-state file; silently do nothing if it is absent."""
    try:
        os.unlink("state.txt")
    except FileNotFoundError:
        pass
def main():
    """Run the bouncing-balls demo.

    Saves and restores game state across Android background/foreground
    transitions via the pygame_sdl2 APP_WILLENTERBACKGROUND /
    APP_DIDENTERFOREGROUND events.
    """
    pygame.init()
    clock = pygame.time.Clock()
    infoObject = pygame.display.Info()
    # Use the full display resolution reported by the driver.
    width = infoObject.current_w
    height = infoObject.current_h
    size = width, height
    bgColor = r, g, b = 0, 0, 0
    screen = pygame.display.set_mode(size)
    # NOTE(review): this second no-arg set_mode() looks redundant -- confirm
    # whether it is needed on Android before removing it.
    pygame.display.set_mode()
    # A previous run may have left a saved state behind; restore it once,
    # then discard the file.
    balls = load_state()
    delete_state()
    if balls is None:  # FIX: was `balls == None`; identity check is the idiom
        balls = []
    ballTimer = 0
    ballTimerMax = .75 * 60  # spawn a new ball every ~0.75 s at 60 fps
    done = False
    sleeping = False
    font = pygame.font.Font("DejaVuSans.ttf", 124)
    text = font.render("Start", True, (255, 255, 255, 255))
    textRect = text.get_rect(center=(width / 2, height / 2))
    while not done:
        for event in pygame.event.get():
            # Show the numeric id of the most recent event on screen.
            text = font.render(str(event.type), True, (255, 255, 255, 255))
            if event.type == pygame.QUIT:
                done = True
            elif event.type == pygame.KEYDOWN and event.key == pygame.K_AC_BACK:
                done = True
            elif event.type == pygame.APP_WILLENTERBACKGROUND:
                # The app is about to go to sleep. It should save state, cancel
                # any timers, and stop drawing the screen until an
                # APP_DIDENTERFOREGROUND event shows up.
                save_state(balls)
                sleeping = True
            elif event.type == pygame.APP_DIDENTERFOREGROUND:
                # The app woke back up. Delete the saved state (we don't need
                # it), restore any timers, and start drawing the screen again.
                delete_state()
                sleeping = False
                # For now, we have to re-open the window when entering the
                # foreground.
                screen = pygame.display.set_mode((1280, 720))
        if not sleeping:
            ballTimer += 1
            if ballTimer >= ballTimerMax:
                ballTimer = 0
                ballSpeed = [random.randint(-5, 5), random.randint(-5, 5)]
                ballPos = [random.randint(100, width - 100),
                           random.randint(100, height - 100)]
                balls += [Ball("ball.png", ballSpeed, ballPos)]
                # Keep the on-disk state current so a sudden kill loses little.
                save_state(balls)
            for ball in balls:
                ball.move()
                ball.collideScreen(size)
            # Naive O(n^2) pairwise collision pass.
            for first in balls:
                for second in balls:
                    if first != second:
                        first.collideBall(second)
            bgColor = r, g, b
            screen.fill(bgColor)
            for ball in balls:
                screen.blit(ball.image, ball.rect)
            screen.blit(text, textRect)
            pygame.display.flip()
            clock.tick(60)
        if done:
            break


if __name__ == "__main__":
    main()
delete_state() sleeping = False # For now,", "first.collideBall(second) bgColor = r,g,b screen.fill(bgColor) for ball in balls: screen.blit(ball.image,", "\"w\") as f: for ball in balls: stateString += \"{}", "need it), # restore any times, and start drawing the", "int(y)]) return objects except: return None def delete_state(): if os.path.exists(\"state.txt\"):", "Saves the game state. \"\"\" stateString = \"\" with open(\"state.txt\",", "import math from Ball import Ball def save_state(balls): \"\"\" Saves", "elif event.type == pygame.APP_WILLENTERBACKGROUND: # The app is about to", "pygame.display.set_mode((1280, 720)) if not sleeping: ballTimer += 1 if ballTimer", "'\\n' f.write(stateString) def load_state(): try: objects = [] with open(\"state.txt\",", "{}\".format(ball.imageFile, ball.speedx, ball.speedy, ball.rect.centerx, ball.rect.centery) stateString += '\\n' f.write(stateString) def", "== pygame.APP_WILLENTERBACKGROUND: # The app is about to go to", "= pygame.time.Clock() infoObject = pygame.display.Info() #print infoObject.current_w width = infoObject.current_w", "5), random.randint(-5, 5)] ballPos = [random.randint(100, width-100), random.randint(100, height-100)] balls", "= load_state() delete_state() if balls == None: balls = []", "done = True elif event.type == pygame.APP_WILLENTERBACKGROUND: # The app", "to re-open the window when entering the # foreground. screen", "pygame.K_AC_BACK: done = True elif event.type == pygame.APP_WILLENTERBACKGROUND: # The", "event.type == pygame.KEYDOWN and event.key == pygame.K_AC_BACK: done = True", "drawing the screen again. delete_state() sleeping = False # For", "None: balls = [] ballTimer = 0 ballTimerMax = .75", "True elif event.type == pygame.APP_DIDENTERFOREGROUND: # The app woke back" ]
[ "of the test has probability less than p. \"\"\" avg_elo", "elo0=elo0, elo1=elo1, elo_model=elo_model) s.set_state(results) a = s.analytics(p) print(\"Design parameters\") print(\"=================\")", "self.b ret[\"elo\"] = self.lower_cb(0.5) ret[\"ci\"] = [self.lower_cb(p / 2), self.lower_cb(1", ") def lower_cb(self, p): \"\"\" Maximal elo value such that", "duplication with LLRcalc if len(results) == 5: self.sigma_pg = (2", "< 1.03 * self.a: self.clamped = True if self.llr <", "results): N, self.pdf = LLRcalc.results_to_pdf(results) if self.elo_model == \"normalized\": mu,", "pass changed into a fail). \"\"\" s = LLRcalc.L_(elo) mu_LLR,", "self.llr return ret if __name__ == \"__main__\": parser = argparse.ArgumentParser()", "elif len(results) == 3: self.sigma_pg = var ** 0.5 else:", "our current elo_model. \"\"\" if self.elo_model == \"normalized\": nt =", "+ 0.5 else: return LLRcalc.L_(elo) def lelo_to_elo(self, lelo): \"\"\" For", "help=\"probability of a false positve\", type=float, default=0.05 ) parser.add_argument( \"--beta\",", "3 and len(results) != 5: parser.error(\"argument --results: expected 3 or", "\"\"\" if self.elo_model == \"normalized\": nt = elo / LLRcalc.nelo_divided_by_nt", ": %.2f\" % a[\"elo\"]) print( \"Confidence interval : [%.2f,%.2f] (%4.2f%%)\"", "\"\"\" For external use. \"elo\" is expressed in our current", "sigma=sigma_LLR).outcome_cdf( T=self.T, y=self.llr ) def lower_cb(self, p): \"\"\" Maximal elo", "math, copy import argparse from brownian import Brownian import scipy", "return nt * LLRcalc.nelo_divided_by_nt def set_state(self, results): N, self.pdf =", "self.a / slope self.llr = self.a elif self.llr > self.b:", "> 1.03 * self.b or self.llr < 1.03 * self.a:", "type=int, required=True, ) args = parser.parse_args() results = args.results if", "a pass changed into a fail). 
\"\"\" s = LLRcalc.L_(elo)", "beta = args.beta elo0 = args.elo0 elo1 = args.elo1 elo_model", "** 0.5 else: assert False self.s0, self.s1 = [self.elo_to_score(elo) for", "elif self.llr > self.b: self.T = self.b / slope self.llr", "\"\"\" \"elo\" is expressed in our current elo_model. \"\"\" if", "args.elo_model p = 1 - args.level s = sprt(alpha=alpha, beta=beta,", "parser = argparse.ArgumentParser() parser.add_argument( \"--alpha\", help=\"probability of a false positve\",", "= parser.parse_args() results = args.results if len(results) != 3 and", "% (a[\"ci\"][0], a[\"ci\"][1], 100 * (1 - p)) ) print(\"LOS", "max(avg_elo - N * delta, -1000) elo1 = min(avg_elo +", "1.03 * self.a: self.clamped = True if self.llr < self.a:", ") parser.add_argument( \"--elo0\", help=\"H0 (expressed in LogisticElo)\", type=float, default=0.0 )", "args.elo0 elo1 = args.elo1 elo_model = args.elo_model p = 1", "- N * delta, -1000) elo1 = min(avg_elo + N", "\"__main__\": parser = argparse.ArgumentParser() parser.add_argument( \"--alpha\", help=\"probability of a false", "The probability of a test with the given elo with", "== 5: self.sigma_pg = (2 * var) ** 0.5 elif", "elo0 = max(avg_elo - N * delta, -1000) elo1 =", "self.s1 = [self.elo_to_score(elo) for elo in (self.elo0, self.elo1)] mu_LLR, var_LLR", "else: return LLRcalc.L_(elo) def lelo_to_elo(self, lelo): \"\"\" For external use.", "res.converged break return sol def analytics(self, p=0.05): ret = {}", "0.5 else: return LLRcalc.L_(elo) def lelo_to_elo(self, lelo): \"\"\" For external", "a[\"LOS\"],)) print(\"Context\") print(\"=======\") print( \"LLR [u,l] : %.2f %s [%.2f,%.2f]\"", "test with the given elo with worse outcome (faster fail,", "elo: self.outcome_prob(elo) - (1 - p), elo0, elo1, full_output=True, disp=False,", "> 0: return elo1 else: return elo0 assert res.converged break", "is expressed in our current elo_model. \"lelo\" is logistic. 
\"\"\"", "outcome (faster fail, slower pass or a pass changed into", "self.outcome_prob(elo0) - (1 - p) > 0: return elo1 else:", "self.elo0 = elo0 self.elo1 = elo1 self.clamped = False self.LLR_drift_variance", ": %4.2f%%\" % (100 * a[\"LOS\"],)) print(\"Context\") print(\"=======\") print( \"LLR", "if self.elo_model == \"normalized\": nt = elo / LLRcalc.nelo_divided_by_nt return", "2), self.lower_cb(1 - p / 2)] ret[\"LOS\"] = self.outcome_prob(0) ret[\"LLR\"]", "alpha=0.05, beta=0.05, elo0=0, elo1=5, elo_model=\"logistic\"): assert elo_model in (\"logistic\", \"normalized\")", "self.T = self.b / slope self.llr = self.b def outcome_prob(self,", "error conditions must be handled better here! while True: elo0", "LLRcalc.L_(elo) def lelo_to_elo(self, lelo): \"\"\" For external use. \"elo\" is", "{} ret[\"clamped\"] = self.clamped ret[\"a\"] = self.a ret[\"b\"] = self.b", "expected 3 or 5 arguments\") alpha = args.alpha beta =", "mu_LLR self.T = N # now normalize llr (if llr", "%s\" % elo_model) print(\"Estimates\") print(\"=========\") print(\"Elo : %.2f\" % a[\"elo\"])", "= self.elo1 - self.elo0 N = 30 # Various error", "self.s0, self.s1, s) sigma_LLR = math.sqrt(var_LLR) return Brownian(a=self.a, b=self.b, mu=mu_LLR,", "llr is not legal then the implications # of this", "is not legal then the implications # of this are", "(self.elo0, self.elo1)] mu_LLR, var_LLR = self.LLR_drift_variance(self.pdf, self.s0, self.s1, None) #", "use. \"elo\" is expressed in our current elo_model. 
\"lelo\" is", "frequencies, low to high\", nargs=\"*\", type=int, required=True, ) args =", "(1 - p)) ) print(\"LOS : %4.2f%%\" % (100 *", "type=float, default=5.0 ) parser.add_argument(\"--level\", help=\"confidence level\", type=float, default=0.95) parser.add_argument( \"--elo-model\",", "default='logistic', ) parser.add_argument( \"--results\", help=\"trinomial of pentanomial frequencies, low to", "alpha,)) print(\"False negatives : %4.2f%%\" % (100 * beta,)) print(\"[Elo0,Elo1]", "[%.2f,%.2f]\" % (elo0, elo1)) print(\"Confidence level : %4.2f%%\" % (100", "= 30 # Various error conditions must be handled better", "(score - 0.5) / self.sigma_pg return nt * LLRcalc.nelo_divided_by_nt def", "self.s1, None) # llr estimate self.llr = N * mu_LLR", "low to high\", nargs=\"*\", type=int, required=True, ) args = parser.parse_args()", "%4.2f%%\" % (100 * (1 - p),)) print(\"Elo model :", "Maximal elo value such that the observed outcome of the", "% (a[\"LLR\"], \"(clamped)\" if a[\"clamped\"] else \"\", a[\"a\"], a[\"b\"]) )", "in (\"logistic\", \"normalized\") self.elo_model = elo_model self.a = math.log(beta /", "expressed in our current elo_model. 
\"\"\" if self.elo_model == \"normalized\":", "self.llr / N if self.llr > 1.03 * self.b or", "worse outcome (faster fail, slower pass or a pass changed", "\"--elo-model\", help=\"logistic or normalized\", choices=['logistic', 'normalized'], default='logistic', ) parser.add_argument( \"--results\",", "!= 5: parser.error(\"argument --results: expected 3 or 5 arguments\") alpha", "beta=0.05, elo0=0, elo1=5, elo_model=\"logistic\"): assert elo_model in (\"logistic\", \"normalized\") self.elo_model", "[self.lower_cb(p / 2), self.lower_cb(1 - p / 2)] ret[\"LOS\"] =", "(2 * var) ** 0.5 elif len(results) == 3: self.sigma_pg", "ret if __name__ == \"__main__\": parser = argparse.ArgumentParser() parser.add_argument( \"--alpha\",", "print(\"Estimates\") print(\"=========\") print(\"Elo : %.2f\" % a[\"elo\"]) print( \"Confidence interval", "%.2f\" % a[\"elo\"]) print( \"Confidence interval : [%.2f,%.2f] (%4.2f%%)\" %", "external use. \"elo\" is expressed in our current elo_model. \"lelo\"", ": %4.2f%%\" % (100 * alpha,)) print(\"False negatives : %4.2f%%\"", "class sprt: def __init__(self, alpha=0.05, beta=0.05, elo0=0, elo1=5, elo_model=\"logistic\"): assert", "+ self.elo1) / 2 delta = self.elo1 - self.elo0 N", "elo0, elo1, full_output=True, disp=False, ) except ValueError: if elo0 >", "or a pass changed into a fail). 
\"\"\" s =", "elo1 < 1000: N *= 2 continue else: if self.outcome_prob(elo0)", "if self.llr > 1.03 * self.b or self.llr < 1.03", "and len(results) != 5: parser.error(\"argument --results: expected 3 or 5", "self.LLR_drift_variance = LLRcalc.LLR_drift_variance_alt2 def elo_to_score(self, elo): \"\"\" \"elo\" is expressed", "-1000) elo1 = min(avg_elo + N * delta, 1000) try:", "0: return elo1 else: return elo0 assert res.converged break return", "while True: elo0 = max(avg_elo - N * delta, -1000)", "required=True, ) args = parser.parse_args() results = args.results if len(results)", "a[\"ci\"][1], 100 * (1 - p)) ) print(\"LOS : %4.2f%%\"", "var_LLR = self.LLR_drift_variance(self.pdf, self.s0, self.s1, s) sigma_LLR = math.sqrt(var_LLR) return", "p) > 0: return elo1 else: return elo0 assert res.converged", "mu=mu_LLR, sigma=sigma_LLR).outcome_cdf( T=self.T, y=self.llr ) def lower_cb(self, p): \"\"\" Maximal", "[self.elo_to_score(elo) for elo in (self.elo0, self.elo1)] mu_LLR, var_LLR = self.LLR_drift_variance(self.pdf,", "now normalize llr (if llr is not legal then the", "= self.llr return ret if __name__ == \"__main__\": parser =", "(a[\"ci\"][0], a[\"ci\"][1], 100 * (1 - p)) ) print(\"LOS :", "= self.LLR_drift_variance(self.pdf, self.s0, self.s1, None) # llr estimate self.llr =", "self.llr < 1.03 * self.a: self.clamped = True if self.llr", "p): \"\"\" Maximal elo value such that the observed outcome", "try: sol, res = scipy.optimize.brentq( lambda elo: self.outcome_prob(elo) - (1", "self.pdf = LLRcalc.results_to_pdf(results) if self.elo_model == \"normalized\": mu, var =", "self.elo1 - self.elo0 N = 30 # Various error conditions", "if elo0 > -1000 or elo1 < 1000: N *=", "LLRcalc.results_to_pdf(results) if self.elo_model == \"normalized\": mu, var = LLRcalc.stats(self.pdf) #", "self.b = math.log((1 - beta) / alpha) self.elo0 = elo0", "p=0.05): ret = {} ret[\"clamped\"] = self.clamped ret[\"a\"] = self.a", "elo1, full_output=True, disp=False, ) except ValueError: if elo0 
> -1000", "self.elo_model = elo_model self.a = math.log(beta / (1 - alpha))", "\"\"\" if self.elo_model == \"logistic\": return lelo score = LLRcalc.L_(lelo)", "args.level s = sprt(alpha=alpha, beta=beta, elo0=elo0, elo1=elo1, elo_model=elo_model) s.set_state(results) a", "self.b or self.llr < 1.03 * self.a: self.clamped = True", "% elo_model) print(\"Estimates\") print(\"=========\") print(\"Elo : %.2f\" % a[\"elo\"]) print(", "parameters\") print(\"=================\") print(\"False positives : %4.2f%%\" % (100 * alpha,))", "test has probability less than p. \"\"\" avg_elo = (self.elo0", "print(\"Design parameters\") print(\"=================\") print(\"False positives : %4.2f%%\" % (100 *", "[%.2f,%.2f]\" % (a[\"LLR\"], \"(clamped)\" if a[\"clamped\"] else \"\", a[\"a\"], a[\"b\"])", "= args.elo1 elo_model = args.elo_model p = 1 - args.level", "ret = {} ret[\"clamped\"] = self.clamped ret[\"a\"] = self.a ret[\"b\"]", "lelo): \"\"\" For external use. \"elo\" is expressed in our", "min(avg_elo + N * delta, 1000) try: sol, res =", "% (100 * beta,)) print(\"[Elo0,Elo1] : [%.2f,%.2f]\" % (elo0, elo1))", "scipy import LLRcalc class sprt: def __init__(self, alpha=0.05, beta=0.05, elo0=0,", "(%4.2f%%)\" % (a[\"ci\"][0], a[\"ci\"][1], 100 * (1 - p)) )", "import scipy import LLRcalc class sprt: def __init__(self, alpha=0.05, beta=0.05,", "* LLRcalc.nelo_divided_by_nt def set_state(self, results): N, self.pdf = LLRcalc.results_to_pdf(results) if", "== 3: self.sigma_pg = var ** 0.5 else: assert False", ") except ValueError: if elo0 > -1000 or elo1 <", "self.lower_cb(1 - p / 2)] ret[\"LOS\"] = self.outcome_prob(0) ret[\"LLR\"] =", "outcome_prob(self, elo): \"\"\" The probability of a test with the", "high\", nargs=\"*\", type=int, required=True, ) args = parser.parse_args() results =", "beta) / alpha) self.elo0 = elo0 self.elo1 = elo1 self.clamped", "(100 * beta,)) print(\"[Elo0,Elo1] : [%.2f,%.2f]\" % (elo0, elo1)) print(\"Confidence", "+ N * delta, 1000) try: sol, res = 
scipy.optimize.brentq(", "p),)) print(\"Elo model : %s\" % elo_model) print(\"Estimates\") print(\"=========\") print(\"Elo", "handled better here! while True: elo0 = max(avg_elo - N", "nt * self.sigma_pg + 0.5 else: return LLRcalc.L_(elo) def lelo_to_elo(self,", "self.lower_cb(0.5) ret[\"ci\"] = [self.lower_cb(p / 2), self.lower_cb(1 - p /", "= self.llr / N if self.llr > 1.03 * self.b", "- beta) / alpha) self.elo0 = elo0 self.elo1 = elo1", "self.b: self.T = self.b / slope self.llr = self.b def", "assert elo_model in (\"logistic\", \"normalized\") self.elo_model = elo_model self.a =", "= LLRcalc.L_(elo) mu_LLR, var_LLR = self.LLR_drift_variance(self.pdf, self.s0, self.s1, s) sigma_LLR", "help=\"confidence level\", type=float, default=0.95) parser.add_argument( \"--elo-model\", help=\"logistic or normalized\", choices=['logistic',", "print(\"Elo model : %s\" % elo_model) print(\"Estimates\") print(\"=========\") print(\"Elo :", "self.elo_model == \"logistic\": return lelo score = LLRcalc.L_(lelo) nt =", "T=self.T, y=self.llr ) def lower_cb(self, p): \"\"\" Maximal elo value", "or elo1 < 1000: N *= 2 continue else: if", "* alpha,)) print(\"False negatives : %4.2f%%\" % (100 * beta,))", "# of this are unclear) slope = self.llr / N", "else: if self.outcome_prob(elo0) - (1 - p) > 0: return", "print(\"False negatives : %4.2f%%\" % (100 * beta,)) print(\"[Elo0,Elo1] :", "- p),)) print(\"Elo model : %s\" % elo_model) print(\"Estimates\") print(\"=========\")", "of this are unclear) slope = self.llr / N if", "0.5) / self.sigma_pg return nt * LLRcalc.nelo_divided_by_nt def set_state(self, results):", "# now normalize llr (if llr is not legal then", "conditions must be handled better here! 
while True: elo0 =", "p), elo0, elo1, full_output=True, disp=False, ) except ValueError: if elo0", "'normalized'], default='logistic', ) parser.add_argument( \"--results\", help=\"trinomial of pentanomial frequencies, low", "False self.s0, self.s1 = [self.elo_to_score(elo) for elo in (self.elo0, self.elo1)]", "be handled better here! while True: elo0 = max(avg_elo -", "to high\", nargs=\"*\", type=int, required=True, ) args = parser.parse_args() results", "elo with worse outcome (faster fail, slower pass or a", "(\"logistic\", \"normalized\") self.elo_model = elo_model self.a = math.log(beta / (1", "import argparse from brownian import Brownian import scipy import LLRcalc", "self.elo_model == \"normalized\": nt = elo / LLRcalc.nelo_divided_by_nt return nt", "\"\"\" avg_elo = (self.elo0 + self.elo1) / 2 delta =", "self.elo1 = elo1 self.clamped = False self.LLR_drift_variance = LLRcalc.LLR_drift_variance_alt2 def", "\"normalized\": mu, var = LLRcalc.stats(self.pdf) # code duplication with LLRcalc", "/ 2 delta = self.elo1 - self.elo0 N = 30", "full_output=True, disp=False, ) except ValueError: if elo0 > -1000 or", "= [self.elo_to_score(elo) for elo in (self.elo0, self.elo1)] mu_LLR, var_LLR =", "elo): \"\"\" The probability of a test with the given", "2 delta = self.elo1 - self.elo0 N = 30 #", "lambda elo: self.outcome_prob(elo) - (1 - p), elo0, elo1, full_output=True,", "level : %4.2f%%\" % (100 * (1 - p),)) print(\"Elo", "%.2f %s [%.2f,%.2f]\" % (a[\"LLR\"], \"(clamped)\" if a[\"clamped\"] else \"\",", ": %4.2f%%\" % (100 * beta,)) print(\"[Elo0,Elo1] : [%.2f,%.2f]\" %", "of a test with the given elo with worse outcome", "with worse outcome (faster fail, slower pass or a pass", "LLRcalc class sprt: def __init__(self, alpha=0.05, beta=0.05, elo0=0, elo1=5, elo_model=\"logistic\"):", "sol def analytics(self, p=0.05): ret = {} ret[\"clamped\"] = self.clamped", "print(\"Context\") print(\"=======\") print( \"LLR [u,l] : %.2f %s [%.2f,%.2f]\" %", "def elo_to_score(self, 
elo): \"\"\" \"elo\" is expressed in our current", ") parser.add_argument( \"--elo1\", help=\"H1 (expressed in LogisticElo)\", type=float, default=5.0 )", "parser.add_argument( \"--elo1\", help=\"H1 (expressed in LogisticElo)\", type=float, default=5.0 ) parser.add_argument(\"--level\",", "must be handled better here! while True: elo0 = max(avg_elo", ") parser.add_argument( \"--beta\", help=\"probability of a false negative\", type=float, default=0.05", "in LogisticElo)\", type=float, default=0.0 ) parser.add_argument( \"--elo1\", help=\"H1 (expressed in", "for elo in (self.elo0, self.elo1)] mu_LLR, var_LLR = self.LLR_drift_variance(self.pdf, self.s0,", "results = args.results if len(results) != 3 and len(results) !=", "scipy.optimize.brentq( lambda elo: self.outcome_prob(elo) - (1 - p), elo0, elo1,", "if len(results) != 3 and len(results) != 5: parser.error(\"argument --results:", "5: parser.error(\"argument --results: expected 3 or 5 arguments\") alpha =", "disp=False, ) except ValueError: if elo0 > -1000 or elo1", "model : %s\" % elo_model) print(\"Estimates\") print(\"=========\") print(\"Elo : %.2f\"", "return elo0 assert res.converged break return sol def analytics(self, p=0.05):", "- self.elo0 N = 30 # Various error conditions must", "3 or 5 arguments\") alpha = args.alpha beta = args.beta", "(if llr is not legal then the implications # of", "True if self.llr < self.a: self.T = self.a / slope", "N # now normalize llr (if llr is not legal", "self.llr = N * mu_LLR self.T = N # now", "\"--alpha\", help=\"probability of a false positve\", type=float, default=0.05 ) parser.add_argument(", "# llr estimate self.llr = N * mu_LLR self.T =", "less than p. 
\"\"\" avg_elo = (self.elo0 + self.elo1) /", "import Brownian import scipy import LLRcalc class sprt: def __init__(self,", "mu_LLR, var_LLR = self.LLR_drift_variance(self.pdf, self.s0, self.s1, s) sigma_LLR = math.sqrt(var_LLR)", "a false positve\", type=float, default=0.05 ) parser.add_argument( \"--beta\", help=\"probability of", "def lower_cb(self, p): \"\"\" Maximal elo value such that the", "nargs=\"*\", type=int, required=True, ) args = parser.parse_args() results = args.results", "p = 1 - args.level s = sprt(alpha=alpha, beta=beta, elo0=elo0,", "in our current elo_model. \"\"\" if self.elo_model == \"normalized\": nt", "mu, var = LLRcalc.stats(self.pdf) # code duplication with LLRcalc if", "N, self.pdf = LLRcalc.results_to_pdf(results) if self.elo_model == \"normalized\": mu, var", "= LLRcalc.L_(lelo) nt = (score - 0.5) / self.sigma_pg return", "self.LLR_drift_variance(self.pdf, self.s0, self.s1, None) # llr estimate self.llr = N", "parser.add_argument( \"--alpha\", help=\"probability of a false positve\", type=float, default=0.05 )", "LLRcalc.stats(self.pdf) # code duplication with LLRcalc if len(results) == 5:", "= args.elo_model p = 1 - args.level s = sprt(alpha=alpha,", "var) ** 0.5 elif len(results) == 3: self.sigma_pg = var", "= self.b ret[\"elo\"] = self.lower_cb(0.5) ret[\"ci\"] = [self.lower_cb(p / 2),", "self.outcome_prob(0) ret[\"LLR\"] = self.llr return ret if __name__ == \"__main__\":", "self.b def outcome_prob(self, elo): \"\"\" The probability of a test", "*= 2 continue else: if self.outcome_prob(elo0) - (1 - p)", "elo_model=\"logistic\"): assert elo_model in (\"logistic\", \"normalized\") self.elo_model = elo_model self.a", "normalize llr (if llr is not legal then the implications", "s.analytics(p) print(\"Design parameters\") print(\"=================\") print(\"False positives : %4.2f%%\" % (100", "print(\"LOS : %4.2f%%\" % (100 * a[\"LOS\"],)) print(\"Context\") print(\"=======\") print(", "= var ** 0.5 else: assert False self.s0, self.s1 
=", "** 0.5 elif len(results) == 3: self.sigma_pg = var **", "self.s0, self.s1, None) # llr estimate self.llr = N *", "* self.a: self.clamped = True if self.llr < self.a: self.T", "self.sigma_pg return nt * LLRcalc.nelo_divided_by_nt def set_state(self, results): N, self.pdf", "% a[\"elo\"]) print( \"Confidence interval : [%.2f,%.2f] (%4.2f%%)\" % (a[\"ci\"][0],", "else: return elo0 assert res.converged break return sol def analytics(self,", "has probability less than p. \"\"\" avg_elo = (self.elo0 +", "parser.add_argument( \"--elo-model\", help=\"logistic or normalized\", choices=['logistic', 'normalized'], default='logistic', ) parser.add_argument(", "def __init__(self, alpha=0.05, beta=0.05, elo0=0, elo1=5, elo_model=\"logistic\"): assert elo_model in", "- p), elo0, elo1, full_output=True, disp=False, ) except ValueError: if", "LogisticElo)\", type=float, default=0.0 ) parser.add_argument( \"--elo1\", help=\"H1 (expressed in LogisticElo)\",", "args.beta elo0 = args.elo0 elo1 = args.elo1 elo_model = args.elo_model", "with LLRcalc if len(results) == 5: self.sigma_pg = (2 *", "Various error conditions must be handled better here! while True:", "= math.log((1 - beta) / alpha) self.elo0 = elo0 self.elo1", "elo_to_score(self, elo): \"\"\" \"elo\" is expressed in our current elo_model.", "/ self.sigma_pg return nt * LLRcalc.nelo_divided_by_nt def set_state(self, results): N,", "if self.llr < self.a: self.T = self.a / slope self.llr", "sprt(alpha=alpha, beta=beta, elo0=elo0, elo1=elo1, elo_model=elo_model) s.set_state(results) a = s.analytics(p) print(\"Design", "parser.error(\"argument --results: expected 3 or 5 arguments\") alpha = args.alpha", "is logistic. \"\"\" if self.elo_model == \"logistic\": return lelo score", "== \"logistic\": return lelo score = LLRcalc.L_(lelo) nt = (score", "analytics(self, p=0.05): ret = {} ret[\"clamped\"] = self.clamped ret[\"a\"] =", "is expressed in our current elo_model. 
\"\"\" if self.elo_model ==", "a[\"elo\"]) print( \"Confidence interval : [%.2f,%.2f] (%4.2f%%)\" % (a[\"ci\"][0], a[\"ci\"][1],", "normalized\", choices=['logistic', 'normalized'], default='logistic', ) parser.add_argument( \"--results\", help=\"trinomial of pentanomial", "elo1)) print(\"Confidence level : %4.2f%%\" % (100 * (1 -", "the implications # of this are unclear) slope = self.llr", "/ 2)] ret[\"LOS\"] = self.outcome_prob(0) ret[\"LLR\"] = self.llr return ret", "def lelo_to_elo(self, lelo): \"\"\" For external use. \"elo\" is expressed", "p. \"\"\" avg_elo = (self.elo0 + self.elo1) / 2 delta", "\"Confidence interval : [%.2f,%.2f] (%4.2f%%)\" % (a[\"ci\"][0], a[\"ci\"][1], 100 *", "# Various error conditions must be handled better here! while", "= elo / LLRcalc.nelo_divided_by_nt return nt * self.sigma_pg + 0.5", "elo): \"\"\" \"elo\" is expressed in our current elo_model. \"\"\"", "LLRcalc.L_(lelo) nt = (score - 0.5) / self.sigma_pg return nt", "(elo0, elo1)) print(\"Confidence level : %4.2f%%\" % (100 * (1", "\"elo\" is expressed in our current elo_model. \"lelo\" is logistic.", "value such that the observed outcome of the test has", "fail). \"\"\" s = LLRcalc.L_(elo) mu_LLR, var_LLR = self.LLR_drift_variance(self.pdf, self.s0,", "self.LLR_drift_variance(self.pdf, self.s0, self.s1, s) sigma_LLR = math.sqrt(var_LLR) return Brownian(a=self.a, b=self.b,", "if self.elo_model == \"logistic\": return lelo score = LLRcalc.L_(lelo) nt", "self.a: self.T = self.a / slope self.llr = self.a elif", "elo_model. \"\"\" if self.elo_model == \"normalized\": nt = elo /", "better here! 
if __name__ == "__main__":
    # CLI driver: parse design parameters and observed results, then print
    # the SPRT analytics.
    parser = argparse.ArgumentParser()
    parser.add_argument(
        # BUGFIX: help text said "positve".
        "--alpha", help="probability of a false positive", type=float, default=0.05
    )
    parser.add_argument(
        "--beta", help="probability of a false negative", type=float, default=0.05
    )
    parser.add_argument(
        "--elo0", help="H0 (expressed in LogisticElo)", type=float, default=0.0
    )
    parser.add_argument(
        "--elo1", help="H1 (expressed in LogisticElo)", type=float, default=5.0
    )
    parser.add_argument("--level", help="confidence level", type=float, default=0.95)
    parser.add_argument(
        "--elo-model",
        help="logistic or normalized",
        choices=['logistic', 'normalized'],
        default='logistic',
    )
    parser.add_argument(
        # BUGFIX: help text said "trinomial of pentanomial".
        "--results",
        help="trinomial or pentanomial frequencies, low to high",
        nargs="*",
        type=int,
        required=True,
    )
    args = parser.parse_args()
    results = args.results
    # nargs="*" cannot enforce the count, so validate it by hand.
    if len(results) != 3 and len(results) != 5:
        parser.error("argument --results: expected 3 or 5 arguments")
    alpha = args.alpha
    beta = args.beta
    elo0 = args.elo0
    elo1 = args.elo1
    elo_model = args.elo_model
    p = 1 - args.level
    s = sprt(alpha=alpha, beta=beta, elo0=elo0, elo1=elo1, elo_model=elo_model)
    s.set_state(results)
    a = s.analytics(p)
    print("Design parameters")
    print("=================")
    print("False positives : %4.2f%%" % (100 * alpha,))
    print("False negatives : %4.2f%%" % (100 * beta,))
    print("[Elo0,Elo1] : [%.2f,%.2f]" % (elo0, elo1))
    print("Confidence level : %4.2f%%" % (100 * (1 - p),))
    print("Elo model : %s" % elo_model)
    print("Estimates")
    print("=========")
    print("Elo : %.2f" % a["elo"])
    print(
        "Confidence interval : [%.2f,%.2f] (%4.2f%%)"
        % (a["ci"][0], a["ci"][1], 100 * (1 - p))
    )
    print("LOS : %4.2f%%" % (100 * a["LOS"],))
    print("Context")
    print("=======")
    print(
        "LLR [u,l] : %.2f %s [%.2f,%.2f]"
        % (a["LLR"], "(clamped)" if a["clamped"] else "", a["a"], a["b"])
    )
\"\"\" s", "elo0 self.elo1 = elo1 self.clamped = False self.LLR_drift_variance = LLRcalc.LLR_drift_variance_alt2", "interval : [%.2f,%.2f] (%4.2f%%)\" % (a[\"ci\"][0], a[\"ci\"][1], 100 * (1", "- alpha)) self.b = math.log((1 - beta) / alpha) self.elo0", "LogisticElo)\", type=float, default=5.0 ) parser.add_argument(\"--level\", help=\"confidence level\", type=float, default=0.95) parser.add_argument(", "y=self.llr ) def lower_cb(self, p): \"\"\" Maximal elo value such", "slope self.llr = self.a elif self.llr > self.b: self.T =", "import math, copy import argparse from brownian import Brownian import", "def analytics(self, p=0.05): ret = {} ret[\"clamped\"] = self.clamped ret[\"a\"]", "with the given elo with worse outcome (faster fail, slower", "positives : %4.2f%%\" % (100 * alpha,)) print(\"False negatives :", "%4.2f%%\" % (100 * a[\"LOS\"],)) print(\"Context\") print(\"=======\") print( \"LLR [u,l]", "self.s1, s) sigma_LLR = math.sqrt(var_LLR) return Brownian(a=self.a, b=self.b, mu=mu_LLR, sigma=sigma_LLR).outcome_cdf(", "s) sigma_LLR = math.sqrt(var_LLR) return Brownian(a=self.a, b=self.b, mu=mu_LLR, sigma=sigma_LLR).outcome_cdf( T=self.T,", "__name__ == \"__main__\": parser = argparse.ArgumentParser() parser.add_argument( \"--alpha\", help=\"probability of", "def set_state(self, results): N, self.pdf = LLRcalc.results_to_pdf(results) if self.elo_model ==", "-1000 or elo1 < 1000: N *= 2 continue else:", "lelo_to_elo(self, lelo): \"\"\" For external use. 
\"elo\" is expressed in", "N *= 2 continue else: if self.outcome_prob(elo0) - (1 -", "delta = self.elo1 - self.elo0 N = 30 # Various", "mu_LLR, var_LLR = self.LLR_drift_variance(self.pdf, self.s0, self.s1, None) # llr estimate", "= elo1 self.clamped = False self.LLR_drift_variance = LLRcalc.LLR_drift_variance_alt2 def elo_to_score(self,", "or 5 arguments\") alpha = args.alpha beta = args.beta elo0", "type=float, default=0.95) parser.add_argument( \"--elo-model\", help=\"logistic or normalized\", choices=['logistic', 'normalized'], default='logistic',", "self.llr < self.a: self.T = self.a / slope self.llr =", "--results: expected 3 or 5 arguments\") alpha = args.alpha beta", "type=float, default=0.05 ) parser.add_argument( \"--beta\", help=\"probability of a false negative\",", "\"\"\" Maximal elo value such that the observed outcome of", "- (1 - p) > 0: return elo1 else: return", "self.T = self.a / slope self.llr = self.a elif self.llr", "LLRcalc.nelo_divided_by_nt return nt * self.sigma_pg + 0.5 else: return LLRcalc.L_(elo)", "sprt: def __init__(self, alpha=0.05, beta=0.05, elo0=0, elo1=5, elo_model=\"logistic\"): assert elo_model", "%s [%.2f,%.2f]\" % (a[\"LLR\"], \"(clamped)\" if a[\"clamped\"] else \"\", a[\"a\"],", "0.5 elif len(results) == 3: self.sigma_pg = var ** 0.5", "= (score - 0.5) / self.sigma_pg return nt * LLRcalc.nelo_divided_by_nt", "- p) > 0: return elo1 else: return elo0 assert", "len(results) == 3: self.sigma_pg = var ** 0.5 else: assert", "[%.2f,%.2f] (%4.2f%%)\" % (a[\"ci\"][0], a[\"ci\"][1], 100 * (1 - p))", "= max(avg_elo - N * delta, -1000) elo1 = min(avg_elo", "self.sigma_pg = var ** 0.5 else: assert False self.s0, self.s1", "parser.add_argument(\"--level\", help=\"confidence level\", type=float, default=0.95) parser.add_argument( \"--elo-model\", help=\"logistic or normalized\",", "llr (if llr is not legal then the implications #", "slower pass or a pass changed into a fail). 
\"\"\"", "if __name__ == \"__main__\": parser = argparse.ArgumentParser() parser.add_argument( \"--alpha\", help=\"probability", "\"--elo1\", help=\"H1 (expressed in LogisticElo)\", type=float, default=5.0 ) parser.add_argument(\"--level\", help=\"confidence", "1000: N *= 2 continue else: if self.outcome_prob(elo0) - (1", "b=self.b, mu=mu_LLR, sigma=sigma_LLR).outcome_cdf( T=self.T, y=self.llr ) def lower_cb(self, p): \"\"\"", "LLRcalc.L_(elo) mu_LLR, var_LLR = self.LLR_drift_variance(self.pdf, self.s0, self.s1, s) sigma_LLR =", "= math.log(beta / (1 - alpha)) self.b = math.log((1 -", "elo1 = args.elo1 elo_model = args.elo_model p = 1 -", "elo0 assert res.converged break return sol def analytics(self, p=0.05): ret", "elo1 = min(avg_elo + N * delta, 1000) try: sol,", "= False self.LLR_drift_variance = LLRcalc.LLR_drift_variance_alt2 def elo_to_score(self, elo): \"\"\" \"elo\"", "assert res.converged break return sol def analytics(self, p=0.05): ret =", "= args.results if len(results) != 3 and len(results) != 5:", "import division import math, copy import argparse from brownian import", "changed into a fail). \"\"\" s = LLRcalc.L_(elo) mu_LLR, var_LLR", "N = 30 # Various error conditions must be handled", "self.outcome_prob(elo) - (1 - p), elo0, elo1, full_output=True, disp=False, )", "args.elo1 elo_model = args.elo_model p = 1 - args.level s", "% (100 * alpha,)) print(\"False negatives : %4.2f%%\" % (100", "false negative\", type=float, default=0.05 ) parser.add_argument( \"--elo0\", help=\"H0 (expressed in", "%4.2f%%\" % (100 * beta,)) print(\"[Elo0,Elo1] : [%.2f,%.2f]\" % (elo0,", "- p / 2)] ret[\"LOS\"] = self.outcome_prob(0) ret[\"LLR\"] = self.llr", "score = LLRcalc.L_(lelo) nt = (score - 0.5) / self.sigma_pg", "ret[\"LOS\"] = self.outcome_prob(0) ret[\"LLR\"] = self.llr return ret if __name__", "LLRcalc if len(results) == 5: self.sigma_pg = (2 * var)", "current elo_model. 
\"\"\" if self.elo_model == \"normalized\": nt = elo", "set_state(self, results): N, self.pdf = LLRcalc.results_to_pdf(results) if self.elo_model == \"normalized\":", "default=0.05 ) parser.add_argument( \"--elo0\", help=\"H0 (expressed in LogisticElo)\", type=float, default=0.0", "nt = elo / LLRcalc.nelo_divided_by_nt return nt * self.sigma_pg +", "nt * LLRcalc.nelo_divided_by_nt def set_state(self, results): N, self.pdf = LLRcalc.results_to_pdf(results)", "(1 - alpha)) self.b = math.log((1 - beta) / alpha)", "ret[\"LLR\"] = self.llr return ret if __name__ == \"__main__\": parser", "__future__ import division import math, copy import argparse from brownian", "= (2 * var) ** 0.5 elif len(results) == 3:", "= LLRcalc.LLR_drift_variance_alt2 def elo_to_score(self, elo): \"\"\" \"elo\" is expressed in", "* self.sigma_pg + 0.5 else: return LLRcalc.L_(elo) def lelo_to_elo(self, lelo):", "the given elo with worse outcome (faster fail, slower pass", ": %.2f %s [%.2f,%.2f]\" % (a[\"LLR\"], \"(clamped)\" if a[\"clamped\"] else", "/ N if self.llr > 1.03 * self.b or self.llr", "a fail). 
\"\"\" s = LLRcalc.L_(elo) mu_LLR, var_LLR = self.LLR_drift_variance(self.pdf,", "%4.2f%%\" % (100 * alpha,)) print(\"False negatives : %4.2f%%\" %", "args = parser.parse_args() results = args.results if len(results) != 3", "= N # now normalize llr (if llr is not", "/ LLRcalc.nelo_divided_by_nt return nt * self.sigma_pg + 0.5 else: return", "self.a ret[\"b\"] = self.b ret[\"elo\"] = self.lower_cb(0.5) ret[\"ci\"] = [self.lower_cb(p", "help=\"H1 (expressed in LogisticElo)\", type=float, default=5.0 ) parser.add_argument(\"--level\", help=\"confidence level\",", "self.elo0 N = 30 # Various error conditions must be", "__init__(self, alpha=0.05, beta=0.05, elo0=0, elo1=5, elo_model=\"logistic\"): assert elo_model in (\"logistic\",", "if len(results) == 5: self.sigma_pg = (2 * var) **", "elo1=elo1, elo_model=elo_model) s.set_state(results) a = s.analytics(p) print(\"Design parameters\") print(\"=================\") print(\"False", "elo_model in (\"logistic\", \"normalized\") self.elo_model = elo_model self.a = math.log(beta", "or normalized\", choices=['logistic', 'normalized'], default='logistic', ) parser.add_argument( \"--results\", help=\"trinomial of", "parser.parse_args() results = args.results if len(results) != 3 and len(results)", "1000) try: sol, res = scipy.optimize.brentq( lambda elo: self.outcome_prob(elo) -", "into a fail). 
\"\"\" s = LLRcalc.L_(elo) mu_LLR, var_LLR =", "print(\"=========\") print(\"Elo : %.2f\" % a[\"elo\"]) print( \"Confidence interval :", "elo1=5, elo_model=\"logistic\"): assert elo_model in (\"logistic\", \"normalized\") self.elo_model = elo_model", "elo1 self.clamped = False self.LLR_drift_variance = LLRcalc.LLR_drift_variance_alt2 def elo_to_score(self, elo):", "self.llr > self.b: self.T = self.b / slope self.llr =", "elo1 else: return elo0 assert res.converged break return sol def", "of a false positve\", type=float, default=0.05 ) parser.add_argument( \"--beta\", help=\"probability", "self.T = N # now normalize llr (if llr is", "return elo1 else: return elo0 assert res.converged break return sol", "var_LLR = self.LLR_drift_variance(self.pdf, self.s0, self.s1, None) # llr estimate self.llr", "= N * mu_LLR self.T = N # now normalize", "here! while True: elo0 = max(avg_elo - N * delta,", "= (self.elo0 + self.elo1) / 2 delta = self.elo1 -", ") print(\"LOS : %4.2f%%\" % (100 * a[\"LOS\"],)) print(\"Context\") print(\"=======\")", "/ alpha) self.elo0 = elo0 self.elo1 = elo1 self.clamped =", "else: assert False self.s0, self.s1 = [self.elo_to_score(elo) for elo in", "== \"__main__\": parser = argparse.ArgumentParser() parser.add_argument( \"--alpha\", help=\"probability of a", "print( \"LLR [u,l] : %.2f %s [%.2f,%.2f]\" % (a[\"LLR\"], \"(clamped)\"", "len(results) != 5: parser.error(\"argument --results: expected 3 or 5 arguments\")", "default=5.0 ) parser.add_argument(\"--level\", help=\"confidence level\", type=float, default=0.95) parser.add_argument( \"--elo-model\", help=\"logistic", "beta,)) print(\"[Elo0,Elo1] : [%.2f,%.2f]\" % (elo0, elo1)) print(\"Confidence level :", "self.a: self.clamped = True if self.llr < self.a: self.T =", "probability less than p. 
\"\"\" avg_elo = (self.elo0 + self.elo1)", "p / 2)] ret[\"LOS\"] = self.outcome_prob(0) ret[\"LLR\"] = self.llr return", "\"normalized\") self.elo_model = elo_model self.a = math.log(beta / (1 -", "default=0.05 ) parser.add_argument( \"--beta\", help=\"probability of a false negative\", type=float,", "parser.add_argument( \"--beta\", help=\"probability of a false negative\", type=float, default=0.05 )", "logistic. \"\"\" if self.elo_model == \"logistic\": return lelo score =", "unclear) slope = self.llr / N if self.llr > 1.03", "== \"normalized\": mu, var = LLRcalc.stats(self.pdf) # code duplication with", "s = LLRcalc.L_(elo) mu_LLR, var_LLR = self.LLR_drift_variance(self.pdf, self.s0, self.s1, s)", "- args.level s = sprt(alpha=alpha, beta=beta, elo0=elo0, elo1=elo1, elo_model=elo_model) s.set_state(results)", "elo0 > -1000 or elo1 < 1000: N *= 2", "help=\"logistic or normalized\", choices=['logistic', 'normalized'], default='logistic', ) parser.add_argument( \"--results\", help=\"trinomial", "alpha = args.alpha beta = args.beta elo0 = args.elo0 elo1", "type=float, default=0.05 ) parser.add_argument( \"--elo0\", help=\"H0 (expressed in LogisticElo)\", type=float,", "!= 3 and len(results) != 5: parser.error(\"argument --results: expected 3", "% (100 * (1 - p),)) print(\"Elo model : %s\"", "self.elo1) / 2 delta = self.elo1 - self.elo0 N =", "30 # Various error conditions must be handled better here!", "\"LLR [u,l] : %.2f %s [%.2f,%.2f]\" % (a[\"LLR\"], \"(clamped)\" if", "elo_model=elo_model) s.set_state(results) a = s.analytics(p) print(\"Design parameters\") print(\"=================\") print(\"False positives", "\"--results\", help=\"trinomial of pentanomial frequencies, low to high\", nargs=\"*\", type=int,", "== \"normalized\": nt = elo / LLRcalc.nelo_divided_by_nt return nt *", "= self.lower_cb(0.5) ret[\"ci\"] = [self.lower_cb(p / 2), self.lower_cb(1 - p", "false positve\", type=float, default=0.05 ) parser.add_argument( \"--beta\", help=\"probability of a", 
"self.clamped = True if self.llr < self.a: self.T = self.a", "* delta, 1000) try: sol, res = scipy.optimize.brentq( lambda elo:", "2 continue else: if self.outcome_prob(elo0) - (1 - p) >", "= self.a elif self.llr > self.b: self.T = self.b /", "ret[\"ci\"] = [self.lower_cb(p / 2), self.lower_cb(1 - p / 2)]", "positve\", type=float, default=0.05 ) parser.add_argument( \"--beta\", help=\"probability of a false", "lower_cb(self, p): \"\"\" Maximal elo value such that the observed", "0.5 else: assert False self.s0, self.s1 = [self.elo_to_score(elo) for elo", "self.clamped = False self.LLR_drift_variance = LLRcalc.LLR_drift_variance_alt2 def elo_to_score(self, elo): \"\"\"", "\"normalized\": nt = elo / LLRcalc.nelo_divided_by_nt return nt * self.sigma_pg", "the observed outcome of the test has probability less than", "* mu_LLR self.T = N # now normalize llr (if", "1 - args.level s = sprt(alpha=alpha, beta=beta, elo0=elo0, elo1=elo1, elo_model=elo_model)", "this are unclear) slope = self.llr / N if self.llr", "s.set_state(results) a = s.analytics(p) print(\"Design parameters\") print(\"=================\") print(\"False positives :", "argparse from brownian import Brownian import scipy import LLRcalc class", "break return sol def analytics(self, p=0.05): ret = {} ret[\"clamped\"]", "len(results) != 3 and len(results) != 5: parser.error(\"argument --results: expected", "fail, slower pass or a pass changed into a fail).", "1.03 * self.b or self.llr < 1.03 * self.a: self.clamped", "help=\"H0 (expressed in LogisticElo)\", type=float, default=0.0 ) parser.add_argument( \"--elo1\", help=\"H1", ": [%.2f,%.2f] (%4.2f%%)\" % (a[\"ci\"][0], a[\"ci\"][1], 100 * (1 -", "# code duplication with LLRcalc if len(results) == 5: self.sigma_pg", "LLRcalc.nelo_divided_by_nt def set_state(self, results): N, self.pdf = LLRcalc.results_to_pdf(results) if self.elo_model", "var ** 0.5 else: assert False self.s0, self.s1 = [self.elo_to_score(elo)", "False self.LLR_drift_variance = 
LLRcalc.LLR_drift_variance_alt2 def elo_to_score(self, elo): \"\"\" \"elo\" is", "= sprt(alpha=alpha, beta=beta, elo0=elo0, elo1=elo1, elo_model=elo_model) s.set_state(results) a = s.analytics(p)", "* (1 - p),)) print(\"Elo model : %s\" % elo_model)", "elo0=0, elo1=5, elo_model=\"logistic\"): assert elo_model in (\"logistic\", \"normalized\") self.elo_model =", "assert False self.s0, self.s1 = [self.elo_to_score(elo) for elo in (self.elo0,", "division import math, copy import argparse from brownian import Brownian", "except ValueError: if elo0 > -1000 or elo1 < 1000:", "ValueError: if elo0 > -1000 or elo1 < 1000: N", "expressed in our current elo_model. \"lelo\" is logistic. \"\"\" if", "5 arguments\") alpha = args.alpha beta = args.beta elo0 =", "implications # of this are unclear) slope = self.llr /", "or self.llr < 1.03 * self.a: self.clamped = True if", "LLRcalc.LLR_drift_variance_alt2 def elo_to_score(self, elo): \"\"\" \"elo\" is expressed in our", "print(\"[Elo0,Elo1] : [%.2f,%.2f]\" % (elo0, elo1)) print(\"Confidence level : %4.2f%%\"", "print(\"Confidence level : %4.2f%%\" % (100 * (1 - p),))", "= args.elo0 elo1 = args.elo1 elo_model = args.elo_model p =", "if self.outcome_prob(elo0) - (1 - p) > 0: return elo1", "return Brownian(a=self.a, b=self.b, mu=mu_LLR, sigma=sigma_LLR).outcome_cdf( T=self.T, y=self.llr ) def lower_cb(self,", "self.llr > 1.03 * self.b or self.llr < 1.03 *", "nt = (score - 0.5) / self.sigma_pg return nt *", "args.results if len(results) != 3 and len(results) != 5: parser.error(\"argument", "None) # llr estimate self.llr = N * mu_LLR self.T", "(1 - p), elo0, elo1, full_output=True, disp=False, ) except ValueError:", "from brownian import Brownian import scipy import LLRcalc class sprt:", "- (1 - p), elo0, elo1, full_output=True, disp=False, ) except", "(faster fail, slower pass or a pass changed into a", "* beta,)) print(\"[Elo0,Elo1] : [%.2f,%.2f]\" % (elo0, elo1)) print(\"Confidence level", "var = LLRcalc.stats(self.pdf) # code 
duplication with LLRcalc if len(results)", "< self.a: self.T = self.a / slope self.llr = self.a", "(100 * (1 - p),)) print(\"Elo model : %s\" %", "= {} ret[\"clamped\"] = self.clamped ret[\"a\"] = self.a ret[\"b\"] =", ") parser.add_argument(\"--level\", help=\"confidence level\", type=float, default=0.95) parser.add_argument( \"--elo-model\", help=\"logistic or", "\"--elo0\", help=\"H0 (expressed in LogisticElo)\", type=float, default=0.0 ) parser.add_argument( \"--elo1\",", "self.llr = self.b def outcome_prob(self, elo): \"\"\" The probability of", "math.log((1 - beta) / alpha) self.elo0 = elo0 self.elo1 =", "* a[\"LOS\"],)) print(\"Context\") print(\"=======\") print( \"LLR [u,l] : %.2f %s", "* var) ** 0.5 elif len(results) == 3: self.sigma_pg =", "> self.b: self.T = self.b / slope self.llr = self.b", "help=\"probability of a false negative\", type=float, default=0.05 ) parser.add_argument( \"--elo0\",", "(self.elo0 + self.elo1) / 2 delta = self.elo1 - self.elo0", "parser.add_argument( \"--elo0\", help=\"H0 (expressed in LogisticElo)\", type=float, default=0.0 ) parser.add_argument(", ") parser.add_argument( \"--results\", help=\"trinomial of pentanomial frequencies, low to high\",", "= self.outcome_prob(0) ret[\"LLR\"] = self.llr return ret if __name__ ==", "probability of a test with the given elo with worse", "/ (1 - alpha)) self.b = math.log((1 - beta) /", "such that the observed outcome of the test has probability", "that the observed outcome of the test has probability less", "/ 2), self.lower_cb(1 - p / 2)] ret[\"LOS\"] = self.outcome_prob(0)", "return lelo score = LLRcalc.L_(lelo) nt = (score - 0.5)", "N * mu_LLR self.T = N # now normalize llr", "negative\", type=float, default=0.05 ) parser.add_argument( \"--elo0\", help=\"H0 (expressed in LogisticElo)\",", "= s.analytics(p) print(\"Design parameters\") print(\"=================\") print(\"False positives : %4.2f%%\" %", "= self.a ret[\"b\"] = self.b ret[\"elo\"] = self.lower_cb(0.5) ret[\"ci\"] =", ": 
%s\" % elo_model) print(\"Estimates\") print(\"=========\") print(\"Elo : %.2f\" %", "elo / LLRcalc.nelo_divided_by_nt return nt * self.sigma_pg + 0.5 else:", "legal then the implications # of this are unclear) slope", "* self.b or self.llr < 1.03 * self.a: self.clamped =", "True: elo0 = max(avg_elo - N * delta, -1000) elo1", "res = scipy.optimize.brentq( lambda elo: self.outcome_prob(elo) - (1 - p),", "= elo_model self.a = math.log(beta / (1 - alpha)) self.b", "sigma_LLR = math.sqrt(var_LLR) return Brownian(a=self.a, b=self.b, mu=mu_LLR, sigma=sigma_LLR).outcome_cdf( T=self.T, y=self.llr", "(1 - p) > 0: return elo1 else: return elo0", "= elo0 self.elo1 = elo1 self.clamped = False self.LLR_drift_variance =", "= True if self.llr < self.a: self.T = self.a /", "print(\"Elo : %.2f\" % a[\"elo\"]) print( \"Confidence interval : [%.2f,%.2f]", "return nt * self.sigma_pg + 0.5 else: return LLRcalc.L_(elo) def", "self.sigma_pg + 0.5 else: return LLRcalc.L_(elo) def lelo_to_elo(self, lelo): \"\"\"", "= scipy.optimize.brentq( lambda elo: self.outcome_prob(elo) - (1 - p), elo0,", "3: self.sigma_pg = var ** 0.5 else: assert False self.s0,", "print( \"Confidence interval : [%.2f,%.2f] (%4.2f%%)\" % (a[\"ci\"][0], a[\"ci\"][1], 100", "print(\"False positives : %4.2f%%\" % (100 * alpha,)) print(\"False negatives", "of a false negative\", type=float, default=0.05 ) parser.add_argument( \"--elo0\", help=\"H0", "return sol def analytics(self, p=0.05): ret = {} ret[\"clamped\"] =", "copy import argparse from brownian import Brownian import scipy import", "100 * (1 - p)) ) print(\"LOS : %4.2f%%\" %", "current elo_model. \"lelo\" is logistic. 
\"\"\" if self.elo_model == \"logistic\":", "delta, -1000) elo1 = min(avg_elo + N * delta, 1000)", "ret[\"a\"] = self.a ret[\"b\"] = self.b ret[\"elo\"] = self.lower_cb(0.5) ret[\"ci\"]", "self.s0, self.s1 = [self.elo_to_score(elo) for elo in (self.elo0, self.elo1)] mu_LLR,", "(expressed in LogisticElo)\", type=float, default=0.0 ) parser.add_argument( \"--elo1\", help=\"H1 (expressed", "elo_model) print(\"Estimates\") print(\"=========\") print(\"Elo : %.2f\" % a[\"elo\"]) print( \"Confidence", "math.log(beta / (1 - alpha)) self.b = math.log((1 - beta)", "self.elo1)] mu_LLR, var_LLR = self.LLR_drift_variance(self.pdf, self.s0, self.s1, None) # llr", "/ slope self.llr = self.b def outcome_prob(self, elo): \"\"\" The", "\"\"\" s = LLRcalc.L_(elo) mu_LLR, var_LLR = self.LLR_drift_variance(self.pdf, self.s0, self.s1,", "\"elo\" is expressed in our current elo_model. \"\"\" if self.elo_model", "math.sqrt(var_LLR) return Brownian(a=self.a, b=self.b, mu=mu_LLR, sigma=sigma_LLR).outcome_cdf( T=self.T, y=self.llr ) def", "lelo score = LLRcalc.L_(lelo) nt = (score - 0.5) /", "code duplication with LLRcalc if len(results) == 5: self.sigma_pg =", "default=0.0 ) parser.add_argument( \"--elo1\", help=\"H1 (expressed in LogisticElo)\", type=float, default=5.0", "(100 * alpha,)) print(\"False negatives : %4.2f%%\" % (100 *", "llr estimate self.llr = N * mu_LLR self.T = N", "= argparse.ArgumentParser() parser.add_argument( \"--alpha\", help=\"probability of a false positve\", type=float,", "Brownian(a=self.a, b=self.b, mu=mu_LLR, sigma=sigma_LLR).outcome_cdf( T=self.T, y=self.llr ) def lower_cb(self, p):", "Brownian import scipy import LLRcalc class sprt: def __init__(self, alpha=0.05,", "self.elo_model == \"normalized\": mu, var = LLRcalc.stats(self.pdf) # code duplication", "return LLRcalc.L_(elo) def lelo_to_elo(self, lelo): \"\"\" For external use. \"elo\"", "For external use. 
\"elo\" is expressed in our current elo_model.", "= self.clamped ret[\"a\"] = self.a ret[\"b\"] = self.b ret[\"elo\"] =", "< 1000: N *= 2 continue else: if self.outcome_prob(elo0) -", "print(\"=======\") print( \"LLR [u,l] : %.2f %s [%.2f,%.2f]\" % (a[\"LLR\"],", "alpha)) self.b = math.log((1 - beta) / alpha) self.elo0 =", "in (self.elo0, self.elo1)] mu_LLR, var_LLR = self.LLR_drift_variance(self.pdf, self.s0, self.s1, None)", "ret[\"clamped\"] = self.clamped ret[\"a\"] = self.a ret[\"b\"] = self.b ret[\"elo\"]" ]
[ "= 0x0a HCI_SUBEV_CODE_LE_DATA_LEN_CHANGE = 0x07 HCI_SUBEV_CODE_LE_PHY_UPDATE_CMP = 0x0c HCI_SUBEV_CODE_LE_CHAN_SEL_ALG =", "= 0x0005 OCF_LE_SET_ADVERTISING_PARAMETERS = 0x0006 OCF_LE_SET_ADVERTISE_ENABLE = 0x000a OCF_LE_SET_SCAN_PARAMETERS =", "packet_len=0, recv_data=bytearray(256)): super().set(packet_type) self.ev_code = ev_code self.packet_len = packet_len self.recv_data", "local_resolvable_private_address='00:00:00:00:00:00', peer_resolvable_private_address='00:00:00:00:00:00', connection_interval=0, peripheral_latency=0, supervision_timeout=0, central_clock_accuracy=0): super().set(subevent_code) self.status = status", "num_of_packets_to_send = None events_list = [] bdaddr = '00:00:00:00:00:00' static_addr", "Suggested_Dflt_Data_Length(): status: int suggested_max_tx_octets: int suggested_max_tx_time: int def __init__(self): self.set()", "ev_code=0, packet_len=0, recv_data=bytearray(256)): super().set(packet_type) self.ev_code = ev_code self.packet_len = packet_len", "2.0 (the # \"License\"); you may not use this file", "max_data_len = None phy = None ev_num_comp_pkts = None num_of_completed_packets_cnt", "self.set() def set(self, packet_type=0, connection_handle=0, pb_flag=0, bc_flag=0, total_data_len=0, data=b''): super().set(packet_type)", "= num_handles self.connection_handle = connection_handle self.num_completed_packets = num_completed_packets class HCI_Ev_LE_Chan_Sel_Alg(HCI_Ev_LE_Meta):", "self.total_num_le_acl_data_packets = total_num_le_acl_data_packets self.iso_data_packet_len = iso_data_packet_len self.total_num_iso_data_packets = total_num_iso_data_packets @dataclass", "__init__(self): self.set() def set(self, status = 0, num_hci_cmd_packets=0, opcode=0): self.status", "ba_addr_to_str(addr_ba: bytearray): addr_str = addr_ba.hex().upper() return ':'.join(addr_str[i:i+2] for i in", "############ # FUNCTIONS ############ def get_opcode(ogf: int, ocf: int): return", "scanning_filter_policy)) @dataclass class HCI_Connect: le_scan_interval: int 
le_scan_window: int initiator_filter_policy: int", "= status self.le_acl_data_packet_length = le_acl_data_packet_length self.total_num_le_acl_data_packets = total_num_le_acl_data_packets self.iso_data_packet_len =", "max_ce_length self.ba_full_message = bytearray(struct.pack('<HHBBBHHHHHH', le_scan_interval, le_scan_window, initiator_filter_policy, peer_address_type, own_address_type, connection_interval_min,", "############ # DEFINES ############ AF_BLUETOOTH = 31 HCI_CHANNEL_USER = 1", "= connection_handle self.pb_flag = pb_flag self.bc_flag = bc_flag self.data_total_length =", "OCF_LE_SET_DATA_LEN = 0x0022 OCF_LE_READ_SUGGESTED_DFLT_DATA_LEN = 0x0023 OCF_LE_READ_MAX_DATA_LEN = 0x002f OCF_LE_READ_PHY", "See the License for the # specific language governing permissions", "status self.supported_max_tx_octets = supported_max_tx_octets self.supported_max_tx_time = supported_max_tx_time self.supported_max_rx_octets = supported_max_rx_octets", "total_num_le_acl_data_packets=0, iso_data_packet_len=0, total_num_iso_data_packets=0): self.status = status self.le_acl_data_packet_length = le_acl_data_packet_length self.total_num_le_acl_data_packets", "set(self, le_scan_interval=0, le_scan_window=0, \\ initiator_filter_policy=0, peer_address_type=0, \\ peer_address='00:00:00:00:00:00', own_address_type=0, \\", "supervision_timeout: int central_clock_accuracy: int def __init__(self): self.set() def set(self, subevent_code=0,", "le_scan_window, own_address_type, scanning_filter_policy)) @dataclass class HCI_Connect: le_scan_interval: int le_scan_window: int", "= 1 suggested_dflt_data_len = None max_data_len = None phy =", "def set(self, le_scan_type=0, le_scan_interval=0, le_scan_window=0, own_address_type=0, scanning_filter_policy=0): self.le_scan_type = le_scan_type", "= connection_handle self.num_completed_packets = num_completed_packets class HCI_Ev_LE_Chan_Sel_Alg(HCI_Ev_LE_Meta): connection_handle: int algorithm:", "self.le_scan_window = le_scan_window 
self.initiator_filter_policy = initiator_filter_policy self.peer_address_type = peer_address_type self.peer_address", "num_handles self.connection_handle = connection_handle self.num_completed_packets = num_completed_packets class HCI_Ev_LE_Chan_Sel_Alg(HCI_Ev_LE_Meta): connection_handle:", "int min_ce_length: int max_ce_length: int ba_full_message: bytearray def __init__(self): self.set()", "int max_latency: int supervision_timeout: int min_ce_length: int max_ce_length: int ba_full_message:", "':'.join(addr_str[i:i+2] for i in range(len(addr_str), -2, -2))[1:] def gen_static_rand_addr(): while", "= advertising_filter_policy self.ba_full_message = bytearray(struct.pack('<HHBBBBB', advertising_interval_min, advertising_interval_max, advertising_type, own_address_type, peer_address_type,", "def set(self, status=0, connection_handle=0, tx_phy=0, rx_phy=0): self.status = status self.connection_handle", "connection_interval self.peripheral_latency = peripheral_latency self.supervision_timeout = supervision_timeout self.central_clock_accuracy = central_clock_accuracy", "class L2CAP_Data_Send: pdu_length: int channel_id: int data: bytearray ba_full_message: bytearray", "connection_handle=0, reason=0): self.status = status self.connection_handle = connection_handle self.reason =", "data=b''): self.packet_type = HCI_ACL_DATA_PACKET self.connection_handle = connection_handle self.pb_flag = pb_flag", "max_latency=0, supervision_timeout=0, min_ce_length=0, \\ max_ce_length=0): self.le_scan_interval = le_scan_interval self.le_scan_window =", "= bytearray(struct.pack('<HHBBBHHHHHH', le_scan_interval, le_scan_window, initiator_filter_policy, peer_address_type, own_address_type, connection_interval_min, connection_interval_max, max_latency,supervision_timeout,", "= supervision_timeout self.min_ce_length = min_ce_length self.max_ce_length = max_ce_length self.ba_full_message =", "pb_flag self.bc_flag = bc_flag self.data_total_len = total_data_len self.data = data", 
"iso_data_packet_len self.total_num_iso_data_packets = total_num_iso_data_packets @dataclass class LE_Read_PHY: status: int connection_handle:", "bytearray def __init__(self): self.set() def set(self, pdu_length=0, channel_id=0, data=b''): self.pdu_length", "See the NOTICE file # distributed with this work for", "def __init__(self): self.set() def set(self, status=0, connection_handle=0, tx_phy=0, rx_phy=0): self.status", "self.suggested_max_tx_octets = suggested_max_tx_octets self.suggested_max_tx_time = suggested_max_tx_time @dataclass class Max_Data_Length(): status:", "Apache License, Version 2.0 (the # \"License\"); you may not", "HCI_COMMAND_PACKET = 0x01 HCI_ACL_DATA_PACKET = 0x02 HCI_EVENT_PACKET = 0x04 HCI_EV_CODE_DISCONN_CMP", "25 ############ # GLOBAL VAR ############ num_of_bytes_to_send = None #", "= \"{0:0{1}x}\".format(addr_int, 12) addr = \":\".join(addr_hex[i:i+2] for i in range(0,", "int def __init__(self): self.set() def set(self, status=0, connection_handle=0, tx_phy=0, rx_phy=0):", "packet_len: int data: bytearray ba_full_message: bytearray def __init__(self): self.set() def", "class Max_Data_Length(): status: int supported_max_tx_octets: int supported_max_tx_time: int supported_max_rx_octets: int", "peripheral_latency=0, supervision_timeout=0, central_clock_accuracy=0): super().set(subevent_code) self.status = status self.connection_handle = connection_handle", "self.data = data self.ba_full_message = bytearray(struct.pack('<BHH', self.packet_type, ((self.connection_handle & 0x0eff)", "def __init__(self): self.set() def set(self, num_hci_cmd_packets=0, opcode=0, return_parameters=b''): self.num_hci_command_packets =", "self.set() def set(self, advertising_interval_min=0, advertising_interval_max=0, \\ advertising_type=0, own_address_type=0, peer_address_type=0, \\", "super().set(subevent_code) self.status = status self.connection_handle = connection_handle self.role = role", "@dataclass class HCI_Advertising: advertising_interval_min: int 
advertising_interval_max: int advertising_type: int own_address_type:", "int channel_id: int data: bytearray def __init__(self): self.set() def set(self,", "connection_interval_min=0, connection_interval_max=0, \\ max_latency=0, supervision_timeout=0, min_ce_length=0, \\ max_ce_length=0): self.le_scan_interval =", "= [] bdaddr = '00:00:00:00:00:00' static_addr = '00:00:00:00:00:00' le_read_buffer_size =", "0x04 OCF_READ_LOCAL_COMMANDS = 0x0002 OCF_READ_BD_ADDR = 0x0009 OGF_LE_CTL = 0x08", "WAIT_FOR_EVENT_TIMEOUT = 5 WAIT_FOR_EVENT_CONN_TIMEOUT = 25 ############ # GLOBAL VAR", "binascii import unhexlify import random ############ # DEFINES ############ AF_BLUETOOTH", "in range(0,len(x))]), 2) addr_hex = \"{0:0{1}x}\".format(addr_int, 12) addr = \":\".join(addr_hex[i:i+2]", "use this file except in compliance # with the License.", "advertising_type self.own_address_type = own_address_type self.peer_address_type = peer_address_type self.peer_address = peer_address", "bdaddr = '00:00:00:00:00:00' static_addr = '00:00:00:00:00:00' le_read_buffer_size = None conn_handle", "= connection_handle self.tx_phy = tx_phy self.rx_phy = rx_phy ############ #", "supported_max_rx_time @dataclass class LE_Read_Buffer_Size: status: int le_acl_data_packet_length: int total_num_le_acl_data_packets: int", "class HCI_Ev_LE_Enhanced_Connection_Complete(HCI_Ev_LE_Meta): status: int connection_handle: int role: int peer_address_type: int", "peer_address_type, own_address_type, connection_interval_min, connection_interval_max, max_latency,supervision_timeout, min_ce_length, max_ce_length)) peer_addr_ba = cmd_addr_to_ba(peer_address)", "= data @dataclass class HCI_Recv_L2CAP_Data: pdu_length: int channel_id: int data:", "= 0x3e HCI_SUBEV_CODE_LE_ENHANCED_CONN_CMP = 0x0a HCI_SUBEV_CODE_LE_DATA_LEN_CHANGE = 0x07 HCI_SUBEV_CODE_LE_PHY_UPDATE_CMP =", "return_parameters=b''): self.num_hci_command_packets = num_hci_cmd_packets self.opcode = opcode self.return_parameters = return_parameters", "# 
GLOBAL VAR CLASSES ############ @dataclass class Suggested_Dflt_Data_Length(): status: int", "status self.suggested_max_tx_octets = suggested_max_tx_octets self.suggested_max_tx_time = suggested_max_tx_time @dataclass class Max_Data_Length():", "= 1 requested_tx_time = 1 suggested_dflt_data_len = None max_data_len =", "int): return ((ocf & 0x03ff)|(ogf << 10)) def get_ogf_ocf(opcode: int):", "min_ce_length, max_ce_length)) peer_addr_ba = cmd_addr_to_ba(peer_address) self.ba_full_message[6:6] = peer_addr_ba ############ #", "x[:-2] and 1 in x[:-2]: x[0] = 1 x[1] =", "work for additional information # regarding copyright ownership. The ASF", "0x03ff return ogf, ocf def cmd_addr_to_ba(addr_str: str): return unhexlify(\"\".join(addr_str.split(':')))[::-1] def", "self.peer_address = peer_address self.advertising_channel_map = advertising_channel_map self.advertising_filter_policy = advertising_filter_policy self.ba_full_message", "# software distributed under the License is distributed on an", "= 0x01 HCI_ACL_DATA_PACKET = 0x02 HCI_EVENT_PACKET = 0x04 HCI_EV_CODE_DISCONN_CMP =", "= 0x07 HCI_SUBEV_CODE_LE_PHY_UPDATE_CMP = 0x0c HCI_SUBEV_CODE_LE_CHAN_SEL_ALG = 0x14 HCI_EV_NUM_COMP_PKTS =", "the License. 
You may obtain a copy of the License", "set(self, connection_handle=0, pb_flag=0b00, bc_flag=0b00, data=b''): self.packet_type = HCI_ACL_DATA_PACKET self.connection_handle =", "= le_scan_window self.initiator_filter_policy = initiator_filter_policy self.peer_address_type = peer_address_type self.peer_address =", "int reason: int def __init__(self): self.set() def set(self, status=0, connection_handle=0,", "distributed on an # \"AS IS\" BASIS, WITHOUT WARRANTIES OR", "= peer_address self.advertising_channel_map = advertising_channel_map self.advertising_filter_policy = advertising_filter_policy self.ba_full_message =", "self.peer_address_type = peer_address_type self.peer_address = peer_address self.own_address_type = own_address_type self.connection_interval_min", "self.max_tx_octets = max_tx_octets self.max_tx_time = max_tx_time self.max_rx_octets = max_rx_octets self.max_rx_time", "subevent_code=0): self.subevent_code = subevent_code @dataclass class HCI_Ev_LE_Enhanced_Connection_Complete(HCI_Ev_LE_Meta): status: int connection_handle:", "this work for additional information # regarding copyright ownership. 
The", "= advertising_channel_map self.advertising_filter_policy = advertising_filter_policy self.ba_full_message = bytearray(struct.pack('<HHBBBBB', advertising_interval_min, advertising_interval_max,", "packet_type: int ogf: int ocf: int packet_len: int data: bytearray", "the NOTICE file # distributed with this work for additional", "= peer_address self.local_resolvable_private_address = local_resolvable_private_address self.peer_resolvable_private_address = peer_resolvable_private_address self.connection_interval =", "import dataclass import struct from binascii import unhexlify import random", "num_hci_command_packets: int opcode: int return_parameters: int def __init__(self): self.set() def", "self.recv_data = recv_data[:packet_len] @dataclass class HCI_Recv_ACL_Data_Packet(HCI_Receive): connection_handle: int pb_flag: int", "return ((ocf & 0x03ff)|(ogf << 10)) def get_ogf_ocf(opcode: int): ogf", "= 0 STATIC_RANDOM_ADDRESS_TYPE = 1 WAIT_FOR_EVENT_TIMEOUT = 5 WAIT_FOR_EVENT_CONN_TIMEOUT =", "= 0x0e HCI_EV_CODE_CMD_STATUS = 0x0f HCI_EV_CODE_LE_META_EVENT = 0x3e HCI_SUBEV_CODE_LE_ENHANCED_CONN_CMP =", "# FUNCTIONS ############ def get_opcode(ogf: int, ocf: int): return ((ocf", "__init__(self): self.set() def set(self, status=0, suggested_max_tx_octets=0, suggested_max_tx_time=0): self.status = status", "= max_rx_time self.triggered = triggered @dataclass class HCI_Ev_LE_PHY_Update_Complete(HCI_Ev_LE_Meta): status: int", "<< 12) | (self.bc_flag << 14)), self.data_total_length)) self.ba_full_message.extend(self.data) @dataclass class", "__init__(self): self.set() def set(self, num_hci_cmd_packets=0, opcode=0, return_parameters=b''): self.num_hci_command_packets = num_hci_cmd_packets", "= 0x05 HCI_EV_CODE_CMD_CMP = 0x0e HCI_EV_CODE_CMD_STATUS = 0x0f HCI_EV_CODE_LE_META_EVENT =", "= 0x0c HCI_SUBEV_CODE_LE_CHAN_SEL_ALG = 0x14 HCI_EV_NUM_COMP_PKTS = 0x13 CONN_FAILED_TO_BE_ESTABLISHED =", "= None max_data_len = None phy = None ev_num_comp_pkts =", "@dataclass class 
HCI_Recv_Event_Packet(HCI_Receive): ev_code: int packet_len: int recv_data: bytearray current_event:", "= 0x02 HCI_EVENT_PACKET = 0x04 HCI_EV_CODE_DISCONN_CMP = 0x05 HCI_EV_CODE_CMD_CMP =", "self.status = status self.suggested_max_tx_octets = suggested_max_tx_octets self.suggested_max_tx_time = suggested_max_tx_time @dataclass", "peripheral_latency self.supervision_timeout = supervision_timeout self.central_clock_accuracy = central_clock_accuracy @dataclass class HCI_Ev_LE_Data_Length_Change(HCI_Ev_LE_Meta):", "= supported_max_rx_octets self.supported_max_rx_time = supported_max_rx_time @dataclass class LE_Read_Buffer_Size: status: int", "self.peripheral_latency = peripheral_latency self.supervision_timeout = supervision_timeout self.central_clock_accuracy = central_clock_accuracy @dataclass", "import struct from binascii import unhexlify import random ############ #", "max_latency: int supervision_timeout: int min_ce_length: int max_ce_length: int ba_full_message: bytearray", "= scanning_filter_policy self.ba_full_message = bytearray(struct.pack('<BHHBB',le_scan_type, le_scan_interval, le_scan_window, own_address_type, scanning_filter_policy)) @dataclass", "int pb_flag: int bc_flag: int data_total_length: int data: bytearray ba_full_message:", "HCI_ACL_Data_Send: packet_type: int connection_handle: int pb_flag: int bc_flag: int data_total_length:", "self.bc_flag = bc_flag self.data_total_len = total_data_len self.data = data @dataclass", "max_rx_octets self.max_rx_time = max_rx_time self.triggered = triggered @dataclass class HCI_Ev_LE_PHY_Update_Complete(HCI_Ev_LE_Meta):", "supported_max_tx_time self.supported_max_rx_octets = supported_max_rx_octets self.supported_max_rx_time = supported_max_rx_time @dataclass class LE_Read_Buffer_Size:", "0x04 HCI_EV_CODE_DISCONN_CMP = 0x05 HCI_EV_CODE_CMD_CMP = 0x0e HCI_EV_CODE_CMD_STATUS = 0x0f", "HCI_ACL_DATA_PACKET = 0x02 HCI_EVENT_PACKET = 0x04 HCI_EV_CODE_DISCONN_CMP = 0x05 HCI_EV_CODE_CMD_CMP", "num_of_bytes_to_send = 
None # based on supported_max_tx_octets num_of_packets_to_send = None", "############ @dataclass class Suggested_Dflt_Data_Length(): status: int suggested_max_tx_octets: int suggested_max_tx_time: int", "max_ce_length: int ba_full_message: bytearray def __init__(self): self.set() def set(self, le_scan_interval=0,", "= bytearray(struct.pack('<HHBBBBB', advertising_interval_min, advertising_interval_max, advertising_type, own_address_type, peer_address_type, advertising_channel_map, advertising_filter_policy)) peer_addr_ba", "connection_handle self.pb_flag = pb_flag self.bc_flag = bc_flag self.data_total_length = len(data)", "cmd_addr_to_ba(peer_address) self.ba_full_message[7:7] = peer_addr_ba @dataclass class HCI_Scan: le_scan_type: int le_scan_interval:", "int suggested_max_tx_time: int def __init__(self): self.set() def set(self, status=0, suggested_max_tx_octets=0,", "self.set() def set(self, status=0, connection_handle=0, reason=0): self.status = status self.connection_handle", "self.set() def set(self, num_handles=0, connection_handle=0, num_completed_packets=0): self.num_handles = num_handles self.connection_handle", "############ # GLOBAL VAR CLASSES ############ @dataclass class Suggested_Dflt_Data_Length(): status:", "status: int supported_max_tx_octets: int supported_max_tx_time: int supported_max_rx_octets: int supported_max_rx_time: int", "= \":\".join(addr_hex[i:i+2] for i in range(0, len(addr_hex), 2)) return addr.upper()", "= 0x0022 OCF_LE_READ_SUGGESTED_DFLT_DATA_LEN = 0x0023 OCF_LE_READ_MAX_DATA_LEN = 0x002f OCF_LE_READ_PHY =", "= total_num_le_acl_data_packets self.iso_data_packet_len = iso_data_packet_len self.total_num_iso_data_packets = total_num_iso_data_packets @dataclass class", "= suggested_max_tx_time @dataclass class Max_Data_Length(): status: int supported_max_tx_octets: int supported_max_tx_time:", "class LE_Read_PHY: status: int connection_handle: int tx_phy: int rx_phy: int", "pdu_length: int channel_id: int data: bytearray def 
__init__(self): self.set() def", "(self.bc_flag << 14)), self.data_total_length)) self.ba_full_message.extend(self.data) @dataclass class L2CAP_Data_Send: pdu_length: int", "bc_flag self.data_total_len = total_data_len self.data = data @dataclass class HCI_Recv_L2CAP_Data:", "int): ogf = opcode >> 10 ocf = opcode &", "rx_phy @dataclass class HCI_Number_Of_Completed_Packets: num_handles: int connection_handle: int num_completed_packets: int", "def __init__(self): self.set() def set(self, status=0, suggested_max_tx_octets=0, suggested_max_tx_time=0): self.status =", "= num_completed_packets class HCI_Ev_LE_Chan_Sel_Alg(HCI_Ev_LE_Meta): connection_handle: int algorithm: int def __init__(self):", "algorithm ############ # PARAMETERS ############ @dataclass class HCI_Advertising: advertising_interval_min: int", "own_address_type self.scanning_filter_policy = scanning_filter_policy self.ba_full_message = bytearray(struct.pack('<BHHBB',le_scan_type, le_scan_interval, le_scan_window, own_address_type,", "max_latency self.supervision_timeout = supervision_timeout self.min_ce_length = min_ce_length self.max_ce_length = max_ce_length", "int def __init__(self): self.set() def set(self, status=0, le_acl_data_packet_length=0, total_num_le_acl_data_packets=0, iso_data_packet_len=0,", "data: bytearray ba_full_message: bytearray def __init__(self): self.set() def set(self, connection_handle=0,", "int peer_resolvable_private_address: int connection_interval: int peripheral_latency: int supervision_timeout: int central_clock_accuracy:", "class HCI_Connect: le_scan_interval: int le_scan_window: int initiator_filter_policy: int peer_address_type: int", "int peer_address: str own_address_type: int connection_interval_min: int connection_interval_max: int max_latency:", "limitations # under the License. 
# from dataclasses import dataclass", "tx_phy=0, rx_phy=0): self.status = status self.connection_handle = connection_handle self.tx_phy =", "class HCI_Ev_LE_Chan_Sel_Alg(HCI_Ev_LE_Meta): connection_handle: int algorithm: int def __init__(self): self.set() def", "num_hci_cmd_packets self.opcode = opcode self.return_parameters = return_parameters @dataclass class HCI_Ev_Cmd_Status:", "= total_num_iso_data_packets @dataclass class LE_Read_PHY: status: int connection_handle: int tx_phy:", "self.peer_address_type = peer_address_type self.peer_address = peer_address self.advertising_channel_map = advertising_channel_map self.advertising_filter_policy", "int triggered: int def __init__(self): self.set() def set(self, subevent_code=0, conn_handle=0,", "0x0001 PUBLIC_ADDRESS_TYPE = 0 STATIC_RANDOM_ADDRESS_TYPE = 1 WAIT_FOR_EVENT_TIMEOUT = 5", "# \"License\"); you may not use this file except in", "def set(self, status = 0, num_hci_cmd_packets=0, opcode=0): self.status = status", "self.max_latency = max_latency self.supervision_timeout = supervision_timeout self.min_ce_length = min_ce_length self.max_ce_length", "############ @dataclass class HCI_Ev_Disconn_Complete: status: int connection_handle: int reason: int", "int suggested_max_tx_octets: int suggested_max_tx_time: int def __init__(self): self.set() def set(self,", "__init__(self): self.set() def set(self,packet_type=0, ev_code=0, packet_len=0, recv_data=bytearray(256)): super().set(packet_type) self.ev_code =", "0x0001 OCF_RESET = 0X0003 OGF_INFO_PARAM = 0x04 OCF_READ_LOCAL_COMMANDS = 0x0002", "def set(self, connection_handle=0, pb_flag=0b00, bc_flag=0b00, data=b''): self.packet_type = HCI_ACL_DATA_PACKET self.connection_handle", "le_scan_type: int le_scan_interval: int le_scan_window: int own_address_type: int scanning_filter_policy: int", "status=0, supported_max_tx_octets=0, supported_max_tx_time=0, supported_max_rx_octets=0, supported_max_rx_time=0): self.status = status self.supported_max_tx_octets =", 
"connection_handle self.role = role self.peer_address_type = peer_address_type self.peer_address = peer_address", "writing, # software distributed under the License is distributed on", "self.packet_type, ((self.connection_handle & 0x0eff) | (self.pb_flag << 12) | (self.bc_flag", "HCI_ACL_DATA_PACKET self.connection_handle = connection_handle self.pb_flag = pb_flag self.bc_flag = bc_flag", "& 0x03ff return ogf, ocf def cmd_addr_to_ba(addr_str: str): return unhexlify(\"\".join(addr_str.split(':')))[::-1]", "= max_tx_time self.max_rx_octets = max_rx_octets self.max_rx_time = max_rx_time self.triggered =", "advertising_interval_max self.advertising_type = advertising_type self.own_address_type = own_address_type self.peer_address_type = peer_address_type", "self.set() def set(self, connection_handle=0, pb_flag=0b00, bc_flag=0b00, data=b''): self.packet_type = HCI_ACL_DATA_PACKET", "self.total_num_iso_data_packets = total_num_iso_data_packets @dataclass class LE_Read_PHY: status: int connection_handle: int", "advertising_filter_policy self.ba_full_message = bytearray(struct.pack('<HHBBBBB', advertising_interval_min, advertising_interval_max, advertising_type, own_address_type, peer_address_type, advertising_channel_map,", "= 0x0001 PUBLIC_ADDRESS_TYPE = 0 STATIC_RANDOM_ADDRESS_TYPE = 1 WAIT_FOR_EVENT_TIMEOUT =", "le_acl_data_packet_length=0, total_num_le_acl_data_packets=0, iso_data_packet_len=0, total_num_iso_data_packets=0): self.status = status self.le_acl_data_packet_length = le_acl_data_packet_length", "dataclass import struct from binascii import unhexlify import random ############", "# # Unless required by applicable law or agreed to", "for i in range(0, len(addr_hex), 2)) return addr.upper() ############ #", "Version 2.0 (the # \"License\"); you may not use this", "peer_address='00:00:00:00:00:00', own_address_type=0, \\ connection_interval_min=0, connection_interval_max=0, \\ max_latency=0, supervision_timeout=0, min_ce_length=0, \\", "= 0x0023 
OCF_LE_READ_MAX_DATA_LEN = 0x002f OCF_LE_READ_PHY = 0x0030 OCF_LE_SET_DFLT_PHY =", "unhexlify import random ############ # DEFINES ############ AF_BLUETOOTH = 31", "one # or more contributor license agreements. See the NOTICE", "0x0006 OCF_LE_SET_ADVERTISE_ENABLE = 0x000a OCF_LE_SET_SCAN_PARAMETERS = 0x000b OCF_LE_SET_SCAN_ENABLE = 0x000c", "class HCI_Ev_LE_Meta: subevent_code: int def __init__(self): self.set() def set(self, subevent_code=0):", "own_address_type: int connection_interval_min: int connection_interval_max: int max_latency: int supervision_timeout: int", "le_scan_interval=0, le_scan_window=0, \\ initiator_filter_policy=0, peer_address_type=0, \\ peer_address='00:00:00:00:00:00', own_address_type=0, \\ connection_interval_min=0,", "while True: x = [random.randint(0,1) for _ in range(0,48)] if", "data: bytearray ba_full_message: bytearray def __init__(self): self.set() def set(self, pdu_length=0,", "def __init__(self): self.set() def set(self, status = 0, num_hci_cmd_packets=0, opcode=0):", "2)) return addr.upper() ############ # GLOBAL VAR CLASSES ############ @dataclass", "NOTICE file # distributed with this work for additional information", "peer_address_type=0, \\ peer_address='00:00:00:00:00:00', own_address_type=0, \\ connection_interval_min=0, connection_interval_max=0, \\ max_latency=0, supervision_timeout=0,", "int packet_len: int data: bytearray ba_full_message: bytearray def __init__(self): self.set()", "this file except in compliance # with the License. 
You", "PUBLIC_ADDRESS_TYPE = 0 STATIC_RANDOM_ADDRESS_TYPE = 1 WAIT_FOR_EVENT_TIMEOUT = 5 WAIT_FOR_EVENT_CONN_TIMEOUT", "def __init__(self): self.set() def set(self, subevent_code=0, conn_handle=0, max_tx_octets=0, max_tx_time=0, max_rx_octets=0,", "peer_address self.advertising_channel_map = advertising_channel_map self.advertising_filter_policy = advertising_filter_policy self.ba_full_message = bytearray(struct.pack('<HHBBBBB',", "0x0002 OCF_LE_READ_BUFFER_SIZE_V2 = 0x0060 OCF_LE_SET_RANDOM_ADDRESS = 0x0005 OCF_LE_SET_ADVERTISING_PARAMETERS = 0x0006", "pdu_length self.channel_id = channel_id self.data = data fmt_conf = \"<HH\"", "= 0x04 HCI_EV_CODE_DISCONN_CMP = 0x05 HCI_EV_CODE_CMD_CMP = 0x0e HCI_EV_CODE_CMD_STATUS =", "recv_data: bytearray current_event: None def __init__(self): self.set() def set(self,packet_type=0, ev_code=0,", "= pb_flag self.bc_flag = bc_flag self.data_total_len = total_data_len self.data =", "HCI_Number_Of_Completed_Packets: num_handles: int connection_handle: int num_completed_packets: int def __init__(self): self.set()", "= peer_addr_ba ############ # RX / TX ############ @dataclass class", "\\ advertising_type=0, own_address_type=0, peer_address_type=0, \\ peer_address='00:00:00:00:00:00', advertising_channel_map=0, \\ advertising_filter_policy=0): self.advertising_interval_min", "connection_handle=0, tx_phy=0, rx_phy=0): super().set(subevent_code) self.status = status self.connection_handle = connection_handle", "int packet_len: int recv_data: bytearray current_event: None def __init__(self): self.set()", "not use this file except in compliance # with the", "int le_scan_window: int initiator_filter_policy: int peer_address_type: int peer_address: str own_address_type:", "le_scan_interval self.le_scan_window = le_scan_window self.initiator_filter_policy = initiator_filter_policy self.peer_address_type = peer_address_type", "\\ initiator_filter_policy=0, peer_address_type=0, \\ peer_address='00:00:00:00:00:00', own_address_type=0, \\ 
connection_interval_min=0, connection_interval_max=0, \\", "and 1 in x[:-2]: x[0] = 1 x[1] = 1", "int def __init__(self): self.set() def set(self, num_hci_cmd_packets=0, opcode=0, return_parameters=b''): self.num_hci_command_packets", "= 0x0006 OCF_LE_SET_ADVERTISE_ENABLE = 0x000a OCF_LE_SET_SCAN_PARAMETERS = 0x000b OCF_LE_SET_SCAN_ENABLE =", "Unless required by applicable law or agreed to in writing,", "int ba_full_message: bytearray def __init__(self): self.set() def set(self, advertising_interval_min=0, advertising_interval_max=0,", "own_address_type, scanning_filter_policy)) @dataclass class HCI_Connect: le_scan_interval: int le_scan_window: int initiator_filter_policy:", "subevent_code=0, connection_handle=0, algorithm=0): super().set(subevent_code) self.connection_handle = connection_handle self.algorithm = algorithm", "= 0, num_hci_cmd_packets=0, opcode=0): self.status = status self.num_hci_command_packets = num_hci_cmd_packets", "bytearray): addr_str = addr_ba.hex().upper() return ':'.join(addr_str[i:i+2] for i in range(len(addr_str),", "self.opcode = opcode self.return_parameters = return_parameters @dataclass class HCI_Ev_Cmd_Status: status:", "HCI_SUBEV_CODE_LE_CHAN_SEL_ALG = 0x14 HCI_EV_NUM_COMP_PKTS = 0x13 CONN_FAILED_TO_BE_ESTABLISHED = 0x3e CONN_TIMEOUT", "int(\"\".join([str(x[i]) for i in range(0,len(x))]), 2) addr_hex = \"{0:0{1}x}\".format(addr_int, 12)", "OCF_LE_SET_RANDOM_ADDRESS = 0x0005 OCF_LE_SET_ADVERTISING_PARAMETERS = 0x0006 OCF_LE_SET_ADVERTISE_ENABLE = 0x000a OCF_LE_SET_SCAN_PARAMETERS", "def get_opcode(ogf: int, ocf: int): return ((ocf & 0x03ff)|(ogf <<", "addr_hex = \"{0:0{1}x}\".format(addr_int, 12) addr = \":\".join(addr_hex[i:i+2] for i in", "opcode: int def __init__(self): self.set() def set(self, status = 0,", "= 0x0009 OGF_LE_CTL = 0x08 OCF_LE_SET_EVENT_MASK = 0x0001 OCF_LE_READ_BUFFER_SIZE_V1 =", "peer_address_type: int peer_address: str own_address_type: int connection_interval_min: int connection_interval_max: int", "not 
pdu_length: self.pdu_length = len(data) else: self.pdu_length = pdu_length self.channel_id", "static_addr = '00:00:00:00:00:00' le_read_buffer_size = None conn_handle = 0 requested_tx_octets", "OGF_VENDOR_SPECIFIC = 0x003f BLE_HCI_OCF_VS_RD_STATIC_ADDR = 0x0001 PUBLIC_ADDRESS_TYPE = 0 STATIC_RANDOM_ADDRESS_TYPE", "0x0a HCI_SUBEV_CODE_LE_DATA_LEN_CHANGE = 0x07 HCI_SUBEV_CODE_LE_PHY_UPDATE_CMP = 0x0c HCI_SUBEV_CODE_LE_CHAN_SEL_ALG = 0x14", "for _ in range(0,48)] if 0 in x[:-2] and 1", "role: int peer_address_type: int peer_address: str local_resolvable_private_address: int peer_resolvable_private_address: int", "status: int connection_handle: int reason: int def __init__(self): self.set() def", "1 suggested_dflt_data_len = None max_data_len = None phy = None", "peripheral_latency: int supervision_timeout: int central_clock_accuracy: int def __init__(self): self.set() def", "channel_id: int data: bytearray def __init__(self): self.set() def set(self, pdu_length=0,", "on an # \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS", "advertising_interval_max, advertising_type, own_address_type, peer_address_type, advertising_channel_map, advertising_filter_policy)) peer_addr_ba = cmd_addr_to_ba(peer_address) self.ba_full_message[7:7]", "class HCI_Advertising: advertising_interval_min: int advertising_interval_max: int advertising_type: int own_address_type: int", "connection_handle: int reason: int def __init__(self): self.set() def set(self, status=0,", "bytearray(struct.pack('<HHBBBHHHHHH', le_scan_interval, le_scan_window, initiator_filter_policy, peer_address_type, own_address_type, connection_interval_min, connection_interval_max, max_latency,supervision_timeout, min_ce_length,", "= peer_address_type self.peer_address = peer_address self.advertising_channel_map = advertising_channel_map self.advertising_filter_policy =", "central_clock_accuracy=0): super().set(subevent_code) self.status = status self.connection_handle = connection_handle self.role =", 
"self.num_hci_command_packets = num_hci_cmd_packets self.opcode = opcode @dataclass class HCI_Ev_LE_Meta: subevent_code:", "own_address_type=0, \\ connection_interval_min=0, connection_interval_max=0, \\ max_latency=0, supervision_timeout=0, min_ce_length=0, \\ max_ce_length=0):", "def set(self, subevent_code=0, status=0, connection_handle=0, tx_phy=0, rx_phy=0): super().set(subevent_code) self.status =", "<< 10)) def get_ogf_ocf(opcode: int): ogf = opcode >> 10", "total_num_iso_data_packets=0): self.status = status self.le_acl_data_packet_length = le_acl_data_packet_length self.total_num_le_acl_data_packets = total_num_le_acl_data_packets", "= 0x000d OCF_LE_SET_DATA_LEN = 0x0022 OCF_LE_READ_SUGGESTED_DFLT_DATA_LEN = 0x0023 OCF_LE_READ_MAX_DATA_LEN =", "and limitations # under the License. # from dataclasses import", "self.set() def set(self, status=0, connection_handle=0, tx_phy=0, rx_phy=0): self.status = status", "len(addr_hex), 2)) return addr.upper() ############ # GLOBAL VAR CLASSES ############", "def set(self, status=0, suggested_max_tx_octets=0, suggested_max_tx_time=0): self.status = status self.suggested_max_tx_octets =", "int ogf: int ocf: int packet_len: int data: bytearray ba_full_message:", "max_tx_octets: int max_tx_time: int max_rx_octets: int max_rx_time: int triggered: int", "def __init__(self): self.set() def set(self, status=0, connection_handle=0, reason=0): self.status =", "data fmt_conf = \"<HH\" self.ba_full_message = bytearray(struct.pack(fmt_conf, self.pdu_length, self.channel_id)) self.ba_full_message.extend(data)", "Licensed to the Apache Software Foundation (ASF) under one #", "self.set() def set(self, subevent_code=0): self.subevent_code = subevent_code @dataclass class HCI_Ev_LE_Enhanced_Connection_Complete(HCI_Ev_LE_Meta):", "int def __init__(self): self.set() def set(self, status=0, suggested_max_tx_octets=0, suggested_max_tx_time=0): self.status", "set(self, pdu_length=0, channel_id=0, data=b''): if not pdu_length: self.pdu_length 
= len(data)", "iso_data_packet_len: int total_num_iso_data_packets: int def __init__(self): self.set() def set(self, status=0,", "suggested_max_tx_octets: int suggested_max_tx_time: int def __init__(self): self.set() def set(self, status=0,", "0x0005 OCF_LE_SET_ADVERTISING_PARAMETERS = 0x0006 OCF_LE_SET_ADVERTISE_ENABLE = 0x000a OCF_LE_SET_SCAN_PARAMETERS = 0x000b", "conn_handle = 0 requested_tx_octets = 1 requested_tx_time = 1 suggested_dflt_data_len", "return_parameters: int def __init__(self): self.set() def set(self, num_hci_cmd_packets=0, opcode=0, return_parameters=b''):", "= opcode & 0x03ff return ogf, ocf def cmd_addr_to_ba(addr_str: str):", "= 0x000b OCF_LE_SET_SCAN_ENABLE = 0x000c OCF_LE_CREATE_CONN = 0x000d OCF_LE_SET_DATA_LEN =", "self.tx_phy = tx_phy self.rx_phy = rx_phy ############ # EVENTS ############", "ba_full_message: bytearray def __init__(self): self.set() def set(self, ogf=0, ocf=0, data=b''):", "# KIND, either express or implied. See the License for", "def __init__(self): self.set() def set(self, num_handles=0, connection_handle=0, num_completed_packets=0): self.num_handles =", "int connection_interval: int peripheral_latency: int supervision_timeout: int central_clock_accuracy: int def", "self.advertising_channel_map = advertising_channel_map self.advertising_filter_policy = advertising_filter_policy self.ba_full_message = bytearray(struct.pack('<HHBBBBB', advertising_interval_min,", "= int(\"\".join([str(x[i]) for i in range(0,len(x))]), 2) addr_hex = \"{0:0{1}x}\".format(addr_int,", "__init__(self): self.set() def set(self, subevent_code=0): self.subevent_code = subevent_code @dataclass class", "set(self, status=0, le_acl_data_packet_length=0, total_num_le_acl_data_packets=0, iso_data_packet_len=0, total_num_iso_data_packets=0): self.status = status self.le_acl_data_packet_length", "class HCI_Ev_Cmd_Complete: num_hci_command_packets: int opcode: int return_parameters: int def __init__(self):", "int recv_data: bytearray current_event: None 
def __init__(self): self.set() def set(self,packet_type=0,", "self.max_rx_time = max_rx_time self.triggered = triggered @dataclass class HCI_Ev_LE_PHY_Update_Complete(HCI_Ev_LE_Meta): status:", "0 num_of_completed_packets_time = 0 ############ # FUNCTIONS ############ def get_opcode(ogf:", "int max_tx_octets: int max_tx_time: int max_rx_octets: int max_rx_time: int triggered:", "You may obtain a copy of the License at #", "triggered: int def __init__(self): self.set() def set(self, subevent_code=0, conn_handle=0, max_tx_octets=0,", "= 0x14 HCI_EV_NUM_COMP_PKTS = 0x13 CONN_FAILED_TO_BE_ESTABLISHED = 0x3e CONN_TIMEOUT =", "reason @dataclass class HCI_Ev_Cmd_Complete: num_hci_command_packets: int opcode: int return_parameters: int", "0 STATIC_RANDOM_ADDRESS_TYPE = 1 WAIT_FOR_EVENT_TIMEOUT = 5 WAIT_FOR_EVENT_CONN_TIMEOUT = 25", "min_ce_length self.max_ce_length = max_ce_length self.ba_full_message = bytearray(struct.pack('<HHBBBHHHHHH', le_scan_interval, le_scan_window, initiator_filter_policy,", "data: bytearray def __init__(self): self.set() def set(self, packet_type=0, connection_handle=0, pb_flag=0,", "def __init__(self): self.set() def set(self, pdu_length=0, channel_id=0, data=b''): self.pdu_length =", "OF ANY # KIND, either express or implied. 
See the", "= peer_addr_ba @dataclass class HCI_Scan: le_scan_type: int le_scan_interval: int le_scan_window:", "int peer_address_type: int peer_address: str local_resolvable_private_address: int peer_resolvable_private_address: int connection_interval:", "total_data_len self.data = data @dataclass class HCI_Recv_L2CAP_Data: pdu_length: int channel_id:", "Apache Software Foundation (ASF) under one # or more contributor", "set(self,packet_type=0): self.packet_type = packet_type @dataclass class HCI_Recv_Event_Packet(HCI_Receive): ev_code: int packet_len:", "int supervision_timeout: int central_clock_accuracy: int def __init__(self): self.set() def set(self,", "requested_tx_octets = 1 requested_tx_time = 1 suggested_dflt_data_len = None max_data_len", "self.le_scan_interval = le_scan_interval self.le_scan_window = le_scan_window self.initiator_filter_policy = initiator_filter_policy self.peer_address_type", "self.ba_full_message[6:6] = peer_addr_ba ############ # RX / TX ############ @dataclass", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "data: bytearray def __init__(self): self.set() def set(self, pdu_length=0, channel_id=0, data=b''):", "HCI_Cmd_Send: packet_type: int ogf: int ocf: int packet_len: int data:", "self.packet_type = HCI_COMMAND_PACKET self.ogf = ogf self.ocf = ocf self.opcode", "under the License is distributed on an # \"AS IS\"", "int data_total_length: int data: bytearray ba_full_message: bytearray def __init__(self): self.set()", "in x[:-2] and 1 in x[:-2]: x[0] = 1 x[1]", "BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either", "\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY #", "int peer_address_type: int peer_address: str advertising_channel_map: int advertising_filter_policy: int ba_full_message:", "status=0, connection_handle=0, tx_phy=0, rx_phy=0): super().set(subevent_code) self.status = status self.connection_handle =", "set(self, num_handles=0, connection_handle=0, 
num_completed_packets=0): self.num_handles = num_handles self.connection_handle = connection_handle", "= peer_address_type self.peer_address = peer_address self.own_address_type = own_address_type self.connection_interval_min =", "= subevent_code @dataclass class HCI_Ev_LE_Enhanced_Connection_Complete(HCI_Ev_LE_Meta): status: int connection_handle: int role:", "@dataclass class HCI_Receive: packet_type: int def __init__(self): self.set() def set(self,packet_type=0):", "= advertising_interval_min self.advertising_interval_max = advertising_interval_max self.advertising_type = advertising_type self.own_address_type =", "License. # from dataclasses import dataclass import struct from binascii", "= return_parameters @dataclass class HCI_Ev_Cmd_Status: status: int num_hci_command_packets: int opcode:", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "= 0x08 OCF_LE_SET_EVENT_MASK = 0x0001 OCF_LE_READ_BUFFER_SIZE_V1 = 0x0002 OCF_LE_READ_BUFFER_SIZE_V2 =", "self.ba_full_message = bytearray(struct.pack('<BHH', self.packet_type, ((self.connection_handle & 0x0eff) | (self.pb_flag <<", "self.le_scan_window = le_scan_window self.own_address_type = own_address_type self.scanning_filter_policy = scanning_filter_policy self.ba_full_message", "max_tx_octets self.max_tx_time = max_tx_time self.max_rx_octets = max_rx_octets self.max_rx_time = max_rx_time", "le_scan_window=0, own_address_type=0, scanning_filter_policy=0): self.le_scan_type = le_scan_type self.le_scan_interval = le_scan_interval self.le_scan_window", "self.advertising_type = advertising_type self.own_address_type = own_address_type self.peer_address_type = peer_address_type self.peer_address", "file # distributed with this work for additional information #", "le_scan_interval, le_scan_window, own_address_type, scanning_filter_policy)) @dataclass class HCI_Connect: le_scan_interval: int le_scan_window:", "set(self, subevent_code=0, status=0, connection_handle=0, tx_phy=0, rx_phy=0): 
super().set(subevent_code) self.status = status", "WAIT_FOR_EVENT_CONN_TIMEOUT = 25 ############ # GLOBAL VAR ############ num_of_bytes_to_send =", "connection_interval_min: int connection_interval_max: int max_latency: int supervision_timeout: int min_ce_length: int", "= 0X0003 OGF_INFO_PARAM = 0x04 OCF_READ_LOCAL_COMMANDS = 0x0002 OCF_READ_BD_ADDR =", "self.num_completed_packets = num_completed_packets class HCI_Ev_LE_Chan_Sel_Alg(HCI_Ev_LE_Meta): connection_handle: int algorithm: int def", "= conn_handle self.max_tx_octets = max_tx_octets self.max_tx_time = max_tx_time self.max_rx_octets =", "= max_latency self.supervision_timeout = supervision_timeout self.min_ce_length = min_ce_length self.max_ce_length =", "= opcode @dataclass class HCI_Ev_LE_Meta: subevent_code: int def __init__(self): self.set()", "self.max_tx_time = max_tx_time self.max_rx_octets = max_rx_octets self.max_rx_time = max_rx_time self.triggered", "def set(self,packet_type=0, ev_code=0, packet_len=0, recv_data=bytearray(256)): super().set(packet_type) self.ev_code = ev_code self.packet_len", "self.connection_handle = connection_handle self.algorithm = algorithm ############ # PARAMETERS ############", "self.pdu_length = len(data) else: self.pdu_length = pdu_length self.channel_id = channel_id", "int le_acl_data_packet_length: int total_num_le_acl_data_packets: int iso_data_packet_len: int total_num_iso_data_packets: int def", "int supported_max_rx_time: int def __init__(self): self.set() def set(self, status=0, supported_max_tx_octets=0,", "opcode & 0x03ff return ogf, ocf def cmd_addr_to_ba(addr_str: str): return", "0x0c HCI_SUBEV_CODE_LE_CHAN_SEL_ALG = 0x14 HCI_EV_NUM_COMP_PKTS = 0x13 CONN_FAILED_TO_BE_ESTABLISHED = 0x3e", "status: int num_hci_command_packets: int opcode: int def __init__(self): self.set() def", "License, Version 2.0 (the # \"License\"); you may not use", "advertising_type=0, own_address_type=0, peer_address_type=0, \\ peer_address='00:00:00:00:00:00', 
advertising_channel_map=0, \\ advertising_filter_policy=0): self.advertising_interval_min =", "connection_handle self.tx_phy = tx_phy self.rx_phy = rx_phy @dataclass class HCI_Number_Of_Completed_Packets:", "central_clock_accuracy: int def __init__(self): self.set() def set(self, subevent_code=0, status=0, connection_handle=0,", "HCI_Advertising: advertising_interval_min: int advertising_interval_max: int advertising_type: int own_address_type: int peer_address_type:", "status self.num_hci_command_packets = num_hci_cmd_packets self.opcode = opcode @dataclass class HCI_Ev_LE_Meta:", "self.min_ce_length = min_ce_length self.max_ce_length = max_ce_length self.ba_full_message = bytearray(struct.pack('<HHBBBHHHHHH', le_scan_interval,", "self.advertising_interval_max = advertising_interval_max self.advertising_type = advertising_type self.own_address_type = own_address_type self.peer_address_type", ">> 10 ocf = opcode & 0x03ff return ogf, ocf", "0x000c OCF_LE_CREATE_CONN = 0x000d OCF_LE_SET_DATA_LEN = 0x0022 OCF_LE_READ_SUGGESTED_DFLT_DATA_LEN = 0x0023", "HCI_SUBEV_CODE_LE_ENHANCED_CONN_CMP = 0x0a HCI_SUBEV_CODE_LE_DATA_LEN_CHANGE = 0x07 HCI_SUBEV_CODE_LE_PHY_UPDATE_CMP = 0x0c HCI_SUBEV_CODE_LE_CHAN_SEL_ALG", "OCF_LE_SET_SCAN_ENABLE = 0x000c OCF_LE_CREATE_CONN = 0x000d OCF_LE_SET_DATA_LEN = 0x0022 OCF_LE_READ_SUGGESTED_DFLT_DATA_LEN", "= le_scan_window self.own_address_type = own_address_type self.scanning_filter_policy = scanning_filter_policy self.ba_full_message =", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "own_address_type, peer_address_type, advertising_channel_map, advertising_filter_policy)) peer_addr_ba = cmd_addr_to_ba(peer_address) self.ba_full_message[7:7] = peer_addr_ba", "def __init__(self): self.set() def set(self, le_scan_type=0, le_scan_interval=0, le_scan_window=0, own_address_type=0, scanning_filter_policy=0):", "connection_interval_min self.connection_interval_max = connection_interval_max self.max_latency 
= max_latency self.supervision_timeout = supervision_timeout", "role self.peer_address_type = peer_address_type self.peer_address = peer_address self.local_resolvable_private_address = local_resolvable_private_address", "class Suggested_Dflt_Data_Length(): status: int suggested_max_tx_octets: int suggested_max_tx_time: int def __init__(self):", "self.channel_id = channel_id self.data = data @dataclass class HCI_Cmd_Send: packet_type:", "packet_len self.recv_data = recv_data self.recv_data = recv_data[:packet_len] @dataclass class HCI_Recv_ACL_Data_Packet(HCI_Receive):", "HCI_Connect: le_scan_interval: int le_scan_window: int initiator_filter_policy: int peer_address_type: int peer_address:", "= status self.supported_max_tx_octets = supported_max_tx_octets self.supported_max_tx_time = supported_max_tx_time self.supported_max_rx_octets =", "self.reason = reason @dataclass class HCI_Ev_Cmd_Complete: num_hci_command_packets: int opcode: int", "int peer_address: str local_resolvable_private_address: int peer_resolvable_private_address: int connection_interval: int peripheral_latency:", "= len(data) self.data = data self.ba_full_message = bytearray(struct.pack('<BHB', self.packet_type, self.opcode,", "= le_scan_interval self.le_scan_window = le_scan_window self.initiator_filter_policy = initiator_filter_policy self.peer_address_type =", "le_scan_window self.initiator_filter_policy = initiator_filter_policy self.peer_address_type = peer_address_type self.peer_address = peer_address", "supported_max_tx_time: int supported_max_rx_octets: int supported_max_rx_time: int def __init__(self): self.set() def", "pb_flag=0b00, bc_flag=0b00, data=b''): self.packet_type = HCI_ACL_DATA_PACKET self.connection_handle = connection_handle self.pb_flag", "0x0009 OGF_LE_CTL = 0x08 OCF_LE_SET_EVENT_MASK = 0x0001 OCF_LE_READ_BUFFER_SIZE_V1 = 0x0002", "return ogf, ocf def cmd_addr_to_ba(addr_str: str): return unhexlify(\"\".join(addr_str.split(':')))[::-1] def ba_addr_to_str(addr_ba:", 
"initiator_filter_policy self.peer_address_type = peer_address_type self.peer_address = peer_address self.own_address_type = own_address_type", "= supported_max_rx_time @dataclass class LE_Read_Buffer_Size: status: int le_acl_data_packet_length: int total_num_le_acl_data_packets:", "= supported_max_tx_time self.supported_max_rx_octets = supported_max_rx_octets self.supported_max_rx_time = supported_max_rx_time @dataclass class", "super().set(packet_type) self.ev_code = ev_code self.packet_len = packet_len self.recv_data = recv_data", "__init__(self): self.set() def set(self, subevent_code=0, status=0, connection_handle=0, tx_phy=0, rx_phy=0): super().set(subevent_code)", "int total_num_le_acl_data_packets: int iso_data_packet_len: int total_num_iso_data_packets: int def __init__(self): self.set()", "= connection_handle self.reason = reason @dataclass class HCI_Ev_Cmd_Complete: num_hci_command_packets: int", "def __init__(self): self.set() def set(self, status=0, supported_max_tx_octets=0, supported_max_tx_time=0, supported_max_rx_octets=0, supported_max_rx_time=0):", "__init__(self): self.set() def set(self, status=0, supported_max_tx_octets=0, supported_max_tx_time=0, supported_max_rx_octets=0, supported_max_rx_time=0): self.status", "self.num_handles = num_handles self.connection_handle = connection_handle self.num_completed_packets = num_completed_packets class", "addr = \":\".join(addr_hex[i:i+2] for i in range(0, len(addr_hex), 2)) return", "total_num_le_acl_data_packets: int iso_data_packet_len: int total_num_iso_data_packets: int def __init__(self): self.set() def", "self.connection_handle = connection_handle self.role = role self.peer_address_type = peer_address_type self.peer_address", "def set(self, subevent_code=0, conn_handle=0, max_tx_octets=0, max_tx_time=0, max_rx_octets=0, max_rx_time=0, triggered=0): super().set(subevent_code)", "= reason @dataclass class HCI_Ev_Cmd_Complete: num_hci_command_packets: int opcode: int return_parameters:", "set(self, 
subevent_code=0, connection_handle=0, algorithm=0): super().set(subevent_code) self.connection_handle = connection_handle self.algorithm =", "set(self, status=0, connection_handle=0, tx_phy=0, rx_phy=0): self.status = status self.connection_handle =", "own_address_type self.connection_interval_min = connection_interval_min self.connection_interval_max = connection_interval_max self.max_latency = max_latency", "True: x = [random.randint(0,1) for _ in range(0,48)] if 0", "def cmd_addr_to_ba(addr_str: str): return unhexlify(\"\".join(addr_str.split(':')))[::-1] def ba_addr_to_str(addr_ba: bytearray): addr_str =", "= own_address_type self.scanning_filter_policy = scanning_filter_policy self.ba_full_message = bytearray(struct.pack('<BHHBB',le_scan_type, le_scan_interval, le_scan_window,", "central_clock_accuracy @dataclass class HCI_Ev_LE_Data_Length_Change(HCI_Ev_LE_Meta): conn_handle: int max_tx_octets: int max_tx_time: int", "= bc_flag self.data_total_len = total_data_len self.data = data @dataclass class", "status=0, connection_handle=0, role=0, peer_address_type=0, peer_address='00:00:00:00:00:00', local_resolvable_private_address='00:00:00:00:00:00', peer_resolvable_private_address='00:00:00:00:00:00', connection_interval=0, peripheral_latency=0, supervision_timeout=0,", "num_handles=0, connection_handle=0, num_completed_packets=0): self.num_handles = num_handles self.connection_handle = connection_handle self.num_completed_packets", "= 0 requested_tx_octets = 1 requested_tx_time = 1 suggested_dflt_data_len =", "self.own_address_type = own_address_type self.scanning_filter_policy = scanning_filter_policy self.ba_full_message = bytearray(struct.pack('<BHHBB',le_scan_type, le_scan_interval,", "total_num_iso_data_packets: int def __init__(self): self.set() def set(self, status=0, le_acl_data_packet_length=0, total_num_le_acl_data_packets=0,", "__init__(self): self.set() def set(self, status=0, connection_handle=0, reason=0): self.status = status", "bytearray def 
__init__(self): self.set() def set(self, advertising_interval_min=0, advertising_interval_max=0, \\ advertising_type=0,", "@dataclass class HCI_Cmd_Send: packet_type: int ogf: int ocf: int packet_len:", "bytearray(struct.pack('<BHHBB',le_scan_type, le_scan_interval, le_scan_window, own_address_type, scanning_filter_policy)) @dataclass class HCI_Connect: le_scan_interval: int", "= num_hci_cmd_packets self.opcode = opcode @dataclass class HCI_Ev_LE_Meta: subevent_code: int", "connection_handle=0, algorithm=0): super().set(subevent_code) self.connection_handle = connection_handle self.algorithm = algorithm ############", "int def __init__(self): self.set() def set(self, subevent_code=0, status=0, connection_handle=0, role=0,", "supervision_timeout: int min_ce_length: int max_ce_length: int ba_full_message: bytearray def __init__(self):", "iso_data_packet_len=0, total_num_iso_data_packets=0): self.status = status self.le_acl_data_packet_length = le_acl_data_packet_length self.total_num_le_acl_data_packets =", "def get_ogf_ocf(opcode: int): ogf = opcode >> 10 ocf =", "# DEFINES ############ AF_BLUETOOTH = 31 HCI_CHANNEL_USER = 1 HCI_COMMAND_PACKET", "ev_code: int packet_len: int recv_data: bytearray current_event: None def __init__(self):", "connection_handle: int pb_flag: int bc_flag: int data_total_len: int data: bytearray", "self.opcode = get_opcode(ogf, ocf) self.packet_len = len(data) self.data = data", "le_scan_window self.own_address_type = own_address_type self.scanning_filter_policy = scanning_filter_policy self.ba_full_message = bytearray(struct.pack('<BHHBB',le_scan_type,", "subevent_code=0, status=0, connection_handle=0, tx_phy=0, rx_phy=0): super().set(subevent_code) self.status = status self.connection_handle", "ocf self.opcode = get_opcode(ogf, ocf) self.packet_len = len(data) self.data =", "RX / TX ############ @dataclass class HCI_Receive: packet_type: int def", "= 1 x[1] = 1 break addr_int = int(\"\".join([str(x[i]) for", "0x000b OCF_LE_SET_SCAN_ENABLE 
= 0x000c OCF_LE_CREATE_CONN = 0x000d OCF_LE_SET_DATA_LEN = 0x0022", "\\ connection_interval_min=0, connection_interval_max=0, \\ max_latency=0, supervision_timeout=0, min_ce_length=0, \\ max_ce_length=0): self.le_scan_interval", "connection_handle self.pb_flag = pb_flag self.bc_flag = bc_flag self.data_total_len = total_data_len", "express or implied. See the License for the # specific", "ogf=0, ocf=0, data=b''): self.packet_type = HCI_COMMAND_PACKET self.ogf = ogf self.ocf", "############ AF_BLUETOOTH = 31 HCI_CHANNEL_USER = 1 HCI_COMMAND_PACKET = 0x01", "data_total_length: int data: bytearray ba_full_message: bytearray def __init__(self): self.set() def", "int supported_max_rx_octets: int supported_max_rx_time: int def __init__(self): self.set() def set(self,", "self.channel_id = channel_id self.data = data fmt_conf = \"<HH\" self.ba_full_message", "may obtain a copy of the License at # #", "[random.randint(0,1) for _ in range(0,48)] if 0 in x[:-2] and", "self.supervision_timeout = supervision_timeout self.min_ce_length = min_ce_length self.max_ce_length = max_ce_length self.ba_full_message", "= local_resolvable_private_address self.peer_resolvable_private_address = peer_resolvable_private_address self.connection_interval = connection_interval self.peripheral_latency =", "None conn_handle = 0 requested_tx_octets = 1 requested_tx_time = 1", "local_resolvable_private_address: int peer_resolvable_private_address: int connection_interval: int peripheral_latency: int supervision_timeout: int", "tx_phy: int rx_phy: int def __init__(self): self.set() def set(self, subevent_code=0,", "############ # RX / TX ############ @dataclass class HCI_Receive: packet_type:", "def __init__(self): self.set() def set(self, packet_type=0, connection_handle=0, pb_flag=0, bc_flag=0, total_data_len=0,", "= '00:00:00:00:00:00' static_addr = '00:00:00:00:00:00' le_read_buffer_size = None conn_handle =", "= algorithm ############ # PARAMETERS ############ @dataclass class HCI_Advertising: 
advertising_interval_min:", "= cmd_addr_to_ba(peer_address) self.ba_full_message[7:7] = peer_addr_ba @dataclass class HCI_Scan: le_scan_type: int", "Foundation (ASF) under one # or more contributor license agreements.", "status self.connection_handle = connection_handle self.role = role self.peer_address_type = peer_address_type", "<reponame>t3zeng/mynewt-nimble<filename>tools/hci_throughput/hci.py # # Licensed to the Apache Software Foundation (ASF)", "advertising_interval_min self.advertising_interval_max = advertising_interval_max self.advertising_type = advertising_type self.own_address_type = own_address_type", "= triggered @dataclass class HCI_Ev_LE_PHY_Update_Complete(HCI_Ev_LE_Meta): status: int connection_handle: int tx_phy:", "scanning_filter_policy=0): self.le_scan_type = le_scan_type self.le_scan_interval = le_scan_interval self.le_scan_window = le_scan_window", "= 25 ############ # GLOBAL VAR ############ num_of_bytes_to_send = None", "in compliance # with the License. You may obtain a", "# to you under the Apache License, Version 2.0 (the", "License for the # specific language governing permissions and limitations", "self.packet_len = len(data) self.data = data self.ba_full_message = bytearray(struct.pack('<BHB', self.packet_type,", "= 0x0002 OCF_LE_READ_BUFFER_SIZE_V2 = 0x0060 OCF_LE_SET_RANDOM_ADDRESS = 0x0005 OCF_LE_SET_ADVERTISING_PARAMETERS =", "self.supported_max_tx_octets = supported_max_tx_octets self.supported_max_tx_time = supported_max_tx_time self.supported_max_rx_octets = supported_max_rx_octets self.supported_max_rx_time", "class HCI_Ev_Disconn_Complete: status: int connection_handle: int reason: int def __init__(self):", "= '00:00:00:00:00:00' le_read_buffer_size = None conn_handle = 0 requested_tx_octets =", "int supervision_timeout: int min_ce_length: int max_ce_length: int ba_full_message: bytearray def", "int data_total_len: int data: bytearray def __init__(self): self.set() def set(self,", "############ @dataclass class 
HCI_Advertising: advertising_interval_min: int advertising_interval_max: int advertising_type: int", "bytearray(struct.pack('<BHB', self.packet_type, self.opcode, self.packet_len)) self.ba_full_message.extend(self.data) @dataclass class HCI_ACL_Data_Send: packet_type: int", "for i in range(0,len(x))]), 2) addr_hex = \"{0:0{1}x}\".format(addr_int, 12) addr", "data self.ba_full_message = bytearray(struct.pack('<BHH', self.packet_type, ((self.connection_handle & 0x0eff) | (self.pb_flag", "int num_hci_command_packets: int opcode: int def __init__(self): self.set() def set(self,", "ev_num_comp_pkts = None num_of_completed_packets_cnt = 0 num_of_completed_packets_time = 0 ############", "peer_address_type: int peer_address: str local_resolvable_private_address: int peer_resolvable_private_address: int connection_interval: int", "peer_address_type self.peer_address = peer_address self.advertising_channel_map = advertising_channel_map self.advertising_filter_policy = advertising_filter_policy", "self.data = data @dataclass class HCI_Cmd_Send: packet_type: int ogf: int", "= recv_data[:packet_len] @dataclass class HCI_Recv_ACL_Data_Packet(HCI_Receive): connection_handle: int pb_flag: int bc_flag:", "= ocf self.opcode = get_opcode(ogf, ocf) self.packet_len = len(data) self.data", "num_completed_packets=0): self.num_handles = num_handles self.connection_handle = connection_handle self.num_completed_packets = num_completed_packets", "= peer_resolvable_private_address self.connection_interval = connection_interval self.peripheral_latency = peripheral_latency self.supervision_timeout =", "tx_phy self.rx_phy = rx_phy ############ # EVENTS ############ @dataclass class", "None # based on supported_max_tx_octets num_of_packets_to_send = None events_list =", "class HCI_Recv_L2CAP_Data: pdu_length: int channel_id: int data: bytearray def __init__(self):", "= ev_code self.packet_len = packet_len self.recv_data = recv_data self.recv_data =", "self.initiator_filter_policy = 
initiator_filter_policy self.peer_address_type = peer_address_type self.peer_address = peer_address self.own_address_type", "class HCI_Ev_Cmd_Status: status: int num_hci_command_packets: int opcode: int def __init__(self):", "License is distributed on an # \"AS IS\" BASIS, WITHOUT", "@dataclass class HCI_Ev_LE_Enhanced_Connection_Complete(HCI_Ev_LE_Meta): status: int connection_handle: int role: int peer_address_type:", "= None phy = None ev_num_comp_pkts = None num_of_completed_packets_cnt =", "0x0031 OCF_LE_SET_PHY = 0x0032 OGF_VENDOR_SPECIFIC = 0x003f BLE_HCI_OCF_VS_RD_STATIC_ADDR = 0x0001", "class HCI_Recv_Event_Packet(HCI_Receive): ev_code: int packet_len: int recv_data: bytearray current_event: None", "set(self, status=0, suggested_max_tx_octets=0, suggested_max_tx_time=0): self.status = status self.suggested_max_tx_octets = suggested_max_tx_octets", "connection_handle=0, role=0, peer_address_type=0, peer_address='00:00:00:00:00:00', local_resolvable_private_address='00:00:00:00:00:00', peer_resolvable_private_address='00:00:00:00:00:00', connection_interval=0, peripheral_latency=0, supervision_timeout=0, central_clock_accuracy=0):", "int peripheral_latency: int supervision_timeout: int central_clock_accuracy: int def __init__(self): self.set()", "<< 14)), self.data_total_length)) self.ba_full_message.extend(self.data) @dataclass class L2CAP_Data_Send: pdu_length: int channel_id:", "status: int suggested_max_tx_octets: int suggested_max_tx_time: int def __init__(self): self.set() def", "int total_num_iso_data_packets: int def __init__(self): self.set() def set(self, status=0, le_acl_data_packet_length=0,", "peer_address_type=0, peer_address='00:00:00:00:00:00', local_resolvable_private_address='00:00:00:00:00:00', peer_resolvable_private_address='00:00:00:00:00:00', connection_interval=0, peripheral_latency=0, supervision_timeout=0, central_clock_accuracy=0): super().set(subevent_code) self.status", "ogf: int ocf: int packet_len: int data: bytearray 
ba_full_message: bytearray", "range(0,48)] if 0 in x[:-2] and 1 in x[:-2]: x[0]", "self.le_scan_interval = le_scan_interval self.le_scan_window = le_scan_window self.own_address_type = own_address_type self.scanning_filter_policy", "permissions and limitations # under the License. # from dataclasses", "advertising_type: int own_address_type: int peer_address_type: int peer_address: str advertising_channel_map: int", "HCI_Scan: le_scan_type: int le_scan_interval: int le_scan_window: int own_address_type: int scanning_filter_policy:", "\\ peer_address='00:00:00:00:00:00', own_address_type=0, \\ connection_interval_min=0, connection_interval_max=0, \\ max_latency=0, supervision_timeout=0, min_ce_length=0,", "= opcode >> 10 ocf = opcode & 0x03ff return", "0x0060 OCF_LE_SET_RANDOM_ADDRESS = 0x0005 OCF_LE_SET_ADVERTISING_PARAMETERS = 0x0006 OCF_LE_SET_ADVERTISE_ENABLE = 0x000a", "int def __init__(self): self.set() def set(self, subevent_code=0, connection_handle=0, algorithm=0): super().set(subevent_code)", "def __init__(self): self.set() def set(self, connection_handle=0, pb_flag=0b00, bc_flag=0b00, data=b''): self.packet_type", "HCI_COMMAND_PACKET self.ogf = ogf self.ocf = ocf self.opcode = get_opcode(ogf,", "the Apache Software Foundation (ASF) under one # or more", "bytearray(struct.pack('<HHBBBBB', advertising_interval_min, advertising_interval_max, advertising_type, own_address_type, peer_address_type, advertising_channel_map, advertising_filter_policy)) peer_addr_ba =", "= data self.ba_full_message = bytearray(struct.pack('<BHH', self.packet_type, ((self.connection_handle & 0x0eff) |", "int opcode: int def __init__(self): self.set() def set(self, status =", "= advertising_type self.own_address_type = own_address_type self.peer_address_type = peer_address_type self.peer_address =", "= packet_type @dataclass class HCI_Recv_Event_Packet(HCI_Receive): ev_code: int packet_len: int recv_data:", "struct from binascii import unhexlify import random ############ # 
DEFINES", "@dataclass class HCI_ACL_Data_Send: packet_type: int connection_handle: int pb_flag: int bc_flag:", "except in compliance # with the License. You may obtain", "addr_str = addr_ba.hex().upper() return ':'.join(addr_str[i:i+2] for i in range(len(addr_str), -2,", "= status self.connection_handle = connection_handle self.reason = reason @dataclass class", "self.opcode = opcode @dataclass class HCI_Ev_LE_Meta: subevent_code: int def __init__(self):", "int def __init__(self): self.set() def set(self, subevent_code=0): self.subevent_code = subevent_code", "license agreements. See the NOTICE file # distributed with this", "required by applicable law or agreed to in writing, #", "self.conn_handle = conn_handle self.max_tx_octets = max_tx_octets self.max_tx_time = max_tx_time self.max_rx_octets", "le_scan_interval, le_scan_window, initiator_filter_policy, peer_address_type, own_address_type, connection_interval_min, connection_interval_max, max_latency,supervision_timeout, min_ce_length, max_ce_length))", "0x000a OCF_LE_SET_SCAN_PARAMETERS = 0x000b OCF_LE_SET_SCAN_ENABLE = 0x000c OCF_LE_CREATE_CONN = 0x000d", "int def __init__(self): self.set() def set(self, status = 0, num_hci_cmd_packets=0,", "= peripheral_latency self.supervision_timeout = supervision_timeout self.central_clock_accuracy = central_clock_accuracy @dataclass class", "suggested_dflt_data_len = None max_data_len = None phy = None ev_num_comp_pkts", "int data: bytearray ba_full_message: bytearray def __init__(self): self.set() def set(self,", "data @dataclass class HCI_Recv_L2CAP_Data: pdu_length: int channel_id: int data: bytearray", "-2, -2))[1:] def gen_static_rand_addr(): while True: x = [random.randint(0,1) for", "self.scanning_filter_policy = scanning_filter_policy self.ba_full_message = bytearray(struct.pack('<BHHBB',le_scan_type, le_scan_interval, le_scan_window, own_address_type, scanning_filter_policy))", "reason=0): self.status = status self.connection_handle = connection_handle 
self.reason = reason", "1 in x[:-2]: x[0] = 1 x[1] = 1 break", "int own_address_type: int scanning_filter_policy: int ba_full_message: bytearray def __init__(self): self.set()", "events_list = [] bdaddr = '00:00:00:00:00:00' static_addr = '00:00:00:00:00:00' le_read_buffer_size", "0x01 HCI_ACL_DATA_PACKET = 0x02 HCI_EVENT_PACKET = 0x04 HCI_EV_CODE_DISCONN_CMP = 0x05", "self.pb_flag = pb_flag self.bc_flag = bc_flag self.data_total_length = len(data) self.data", "tx_phy self.rx_phy = rx_phy @dataclass class HCI_Number_Of_Completed_Packets: num_handles: int connection_handle:", "int supported_max_tx_octets: int supported_max_tx_time: int supported_max_rx_octets: int supported_max_rx_time: int def", "rx_phy=0): super().set(subevent_code) self.status = status self.connection_handle = connection_handle self.tx_phy =", "bytearray def __init__(self): self.set() def set(self, packet_type=0, connection_handle=0, pb_flag=0, bc_flag=0,", "AF_BLUETOOTH = 31 HCI_CHANNEL_USER = 1 HCI_COMMAND_PACKET = 0x01 HCI_ACL_DATA_PACKET", "connection_handle self.tx_phy = tx_phy self.rx_phy = rx_phy ############ # EVENTS", "self.advertising_filter_policy = advertising_filter_policy self.ba_full_message = bytearray(struct.pack('<HHBBBBB', advertising_interval_min, advertising_interval_max, advertising_type, own_address_type,", "= advertising_interval_max self.advertising_type = advertising_type self.own_address_type = own_address_type self.peer_address_type =", "(ASF) under one # or more contributor license agreements. See", "OGF_INFO_PARAM = 0x04 OCF_READ_LOCAL_COMMANDS = 0x0002 OCF_READ_BD_ADDR = 0x0009 OGF_LE_CTL", "# or more contributor license agreements. 
See the NOTICE file", "self.connection_interval = connection_interval self.peripheral_latency = peripheral_latency self.supervision_timeout = supervision_timeout self.central_clock_accuracy", "self.connection_handle = connection_handle self.pb_flag = pb_flag self.bc_flag = bc_flag self.data_total_length", "self.set() def set(self, le_scan_interval=0, le_scan_window=0, \\ initiator_filter_policy=0, peer_address_type=0, \\ peer_address='00:00:00:00:00:00',", "0, num_hci_cmd_packets=0, opcode=0): self.status = status self.num_hci_command_packets = num_hci_cmd_packets self.opcode", "self.iso_data_packet_len = iso_data_packet_len self.total_num_iso_data_packets = total_num_iso_data_packets @dataclass class LE_Read_PHY: status:", "advertising_filter_policy=0): self.advertising_interval_min = advertising_interval_min self.advertising_interval_max = advertising_interval_max self.advertising_type = advertising_type", "self.data = data fmt_conf = \"<HH\" self.ba_full_message = bytearray(struct.pack(fmt_conf, self.pdu_length,", "0x0022 OCF_LE_READ_SUGGESTED_DFLT_DATA_LEN = 0x0023 OCF_LE_READ_MAX_DATA_LEN = 0x002f OCF_LE_READ_PHY = 0x0030", "= len(data) else: self.pdu_length = pdu_length self.channel_id = channel_id self.data", "len(data) self.data = data self.ba_full_message = bytearray(struct.pack('<BHH', self.packet_type, ((self.connection_handle &", "own_address_type=0, scanning_filter_policy=0): self.le_scan_type = le_scan_type self.le_scan_interval = le_scan_interval self.le_scan_window =", "advertising_interval_min, advertising_interval_max, advertising_type, own_address_type, peer_address_type, advertising_channel_map, advertising_filter_policy)) peer_addr_ba = cmd_addr_to_ba(peer_address)", "int iso_data_packet_len: int total_num_iso_data_packets: int def __init__(self): self.set() def set(self,", "= connection_interval self.peripheral_latency = peripheral_latency self.supervision_timeout = supervision_timeout self.central_clock_accuracy =", "0x0023 
OCF_LE_READ_MAX_DATA_LEN = 0x002f OCF_LE_READ_PHY = 0x0030 OCF_LE_SET_DFLT_PHY = 0x0031", "subevent_code: int def __init__(self): self.set() def set(self, subevent_code=0): self.subevent_code =", "num_hci_cmd_packets=0, opcode=0): self.status = status self.num_hci_command_packets = num_hci_cmd_packets self.opcode =", "0x03ff)|(ogf << 10)) def get_ogf_ocf(opcode: int): ogf = opcode >>", "self.connection_handle = connection_handle self.pb_flag = pb_flag self.bc_flag = bc_flag self.data_total_len", "self.status = status self.le_acl_data_packet_length = le_acl_data_packet_length self.total_num_le_acl_data_packets = total_num_le_acl_data_packets self.iso_data_packet_len", "ba_full_message: bytearray def __init__(self): self.set() def set(self, le_scan_interval=0, le_scan_window=0, \\", "OCF_LE_SET_DFLT_PHY = 0x0031 OCF_LE_SET_PHY = 0x0032 OGF_VENDOR_SPECIFIC = 0x003f BLE_HCI_OCF_VS_RD_STATIC_ADDR", "= max_ce_length self.ba_full_message = bytearray(struct.pack('<HHBBBHHHHHH', le_scan_interval, le_scan_window, initiator_filter_policy, peer_address_type, own_address_type,", "set(self, subevent_code=0, conn_handle=0, max_tx_octets=0, max_tx_time=0, max_rx_octets=0, max_rx_time=0, triggered=0): super().set(subevent_code) self.conn_handle", "data self.ba_full_message = bytearray(struct.pack('<BHB', self.packet_type, self.opcode, self.packet_len)) self.ba_full_message.extend(self.data) @dataclass class", "@dataclass class HCI_Ev_LE_Data_Length_Change(HCI_Ev_LE_Meta): conn_handle: int max_tx_octets: int max_tx_time: int max_rx_octets:", "def __init__(self): self.set() def set(self, subevent_code=0): self.subevent_code = subevent_code @dataclass", "_ in range(0,48)] if 0 in x[:-2] and 1 in", "pdu_length=0, channel_id=0, data=b''): self.pdu_length = pdu_length self.channel_id = channel_id self.data", "GLOBAL VAR CLASSES ############ @dataclass class Suggested_Dflt_Data_Length(): status: int suggested_max_tx_octets:", "ba_full_message: bytearray def __init__(self): self.set() def 
set(self, advertising_interval_min=0, advertising_interval_max=0, \\", "__init__(self): self.set() def set(self, pdu_length=0, channel_id=0, data=b''): if not pdu_length:", "| (self.pb_flag << 12) | (self.bc_flag << 14)), self.data_total_length)) self.ba_full_message.extend(self.data)", "= 0x03 OCF_SET_EVENT_MASK = 0x0001 OCF_RESET = 0X0003 OGF_INFO_PARAM =", "bytearray def __init__(self): self.set() def set(self, pdu_length=0, channel_id=0, data=b''): if", "= pdu_length self.channel_id = channel_id self.data = data fmt_conf =", "self.packet_type = packet_type @dataclass class HCI_Recv_Event_Packet(HCI_Receive): ev_code: int packet_len: int", "0x14 HCI_EV_NUM_COMP_PKTS = 0x13 CONN_FAILED_TO_BE_ESTABLISHED = 0x3e CONN_TIMEOUT = 0x08", "int advertising_interval_max: int advertising_type: int own_address_type: int peer_address_type: int peer_address:", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "@dataclass class HCI_Recv_L2CAP_Data: pdu_length: int channel_id: int data: bytearray def", "int data: bytearray def __init__(self): self.set() def set(self, packet_type=0, connection_handle=0,", "= connection_handle self.tx_phy = tx_phy self.rx_phy = rx_phy @dataclass class", "PARAMETERS ############ @dataclass class HCI_Advertising: advertising_interval_min: int advertising_interval_max: int advertising_type:", "self.bc_flag = bc_flag self.data_total_length = len(data) self.data = data self.ba_full_message", "max_rx_time self.triggered = triggered @dataclass class HCI_Ev_LE_PHY_Update_Complete(HCI_Ev_LE_Meta): status: int connection_handle:", "= connection_handle self.pb_flag = pb_flag self.bc_flag = bc_flag self.data_total_len =", "addr_int = int(\"\".join([str(x[i]) for i in range(0,len(x))]), 2) addr_hex =", "# based on supported_max_tx_octets num_of_packets_to_send = None events_list = []", "= max_rx_octets self.max_rx_time = max_rx_time self.triggered = triggered @dataclass class", "suggested_max_tx_octets 
self.suggested_max_tx_time = suggested_max_tx_time @dataclass class Max_Data_Length(): status: int supported_max_tx_octets:", "self.pdu_length = pdu_length self.channel_id = channel_id self.data = data @dataclass", "__init__(self): self.set() def set(self, status=0, le_acl_data_packet_length=0, total_num_le_acl_data_packets=0, iso_data_packet_len=0, total_num_iso_data_packets=0): self.status", "OGF_LE_CTL = 0x08 OCF_LE_SET_EVENT_MASK = 0x0001 OCF_LE_READ_BUFFER_SIZE_V1 = 0x0002 OCF_LE_READ_BUFFER_SIZE_V2", "role=0, peer_address_type=0, peer_address='00:00:00:00:00:00', local_resolvable_private_address='00:00:00:00:00:00', peer_resolvable_private_address='00:00:00:00:00:00', connection_interval=0, peripheral_latency=0, supervision_timeout=0, central_clock_accuracy=0): super().set(subevent_code)", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "= role self.peer_address_type = peer_address_type self.peer_address = peer_address self.local_resolvable_private_address =", "the Apache License, Version 2.0 (the # \"License\"); you may", "class HCI_Number_Of_Completed_Packets: num_handles: int connection_handle: int num_completed_packets: int def __init__(self):", "self.peer_resolvable_private_address = peer_resolvable_private_address self.connection_interval = connection_interval self.peripheral_latency = peripheral_latency self.supervision_timeout", "int connection_handle: int reason: int def __init__(self): self.set() def set(self,", "'00:00:00:00:00:00' le_read_buffer_size = None conn_handle = 0 requested_tx_octets = 1", "you under the Apache License, Version 2.0 (the # \"License\");", "governing permissions and limitations # under the License. 
# from", "def set(self,packet_type=0): self.packet_type = packet_type @dataclass class HCI_Recv_Event_Packet(HCI_Receive): ev_code: int", "self.data_total_len = total_data_len self.data = data @dataclass class HCI_Recv_L2CAP_Data: pdu_length:", "peer_addr_ba = cmd_addr_to_ba(peer_address) self.ba_full_message[7:7] = peer_addr_ba @dataclass class HCI_Scan: le_scan_type:", "self.set() def set(self,packet_type=0, ev_code=0, packet_len=0, recv_data=bytearray(256)): super().set(packet_type) self.ev_code = ev_code", "def set(self, ogf=0, ocf=0, data=b''): self.packet_type = HCI_COMMAND_PACKET self.ogf =", "max_ce_length)) peer_addr_ba = cmd_addr_to_ba(peer_address) self.ba_full_message[6:6] = peer_addr_ba ############ # RX", "advertising_interval_max=0, \\ advertising_type=0, own_address_type=0, peer_address_type=0, \\ peer_address='00:00:00:00:00:00', advertising_channel_map=0, \\ advertising_filter_policy=0):", "int bc_flag: int data_total_len: int data: bytearray def __init__(self): self.set()", "HCI_Recv_L2CAP_Data: pdu_length: int channel_id: int data: bytearray def __init__(self): self.set()", "int role: int peer_address_type: int peer_address: str local_resolvable_private_address: int peer_resolvable_private_address:", "# Unless required by applicable law or agreed to in", "self.peer_address_type = peer_address_type self.peer_address = peer_address self.local_resolvable_private_address = local_resolvable_private_address self.peer_resolvable_private_address", "int le_scan_window: int own_address_type: int scanning_filter_policy: int ba_full_message: bytearray def", "int tx_phy: int rx_phy: int def __init__(self): self.set() def set(self,", "Max_Data_Length(): status: int supported_max_tx_octets: int supported_max_tx_time: int supported_max_rx_octets: int supported_max_rx_time:", "IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND,", "__init__(self): self.set() def set(self, num_handles=0, connection_handle=0, num_completed_packets=0): self.num_handles = 
num_handles", "connection_handle: int num_completed_packets: int def __init__(self): self.set() def set(self, num_handles=0,", "phy = None ev_num_comp_pkts = None num_of_completed_packets_cnt = 0 num_of_completed_packets_time", "0x08 OGF_HOST_CTL = 0x03 OCF_SET_EVENT_MASK = 0x0001 OCF_RESET = 0X0003", "1 requested_tx_time = 1 suggested_dflt_data_len = None max_data_len = None", "1 WAIT_FOR_EVENT_TIMEOUT = 5 WAIT_FOR_EVENT_CONN_TIMEOUT = 25 ############ # GLOBAL", "advertising_channel_map, advertising_filter_policy)) peer_addr_ba = cmd_addr_to_ba(peer_address) self.ba_full_message[7:7] = peer_addr_ba @dataclass class", "data=b''): super().set(packet_type) self.connection_handle = connection_handle self.pb_flag = pb_flag self.bc_flag =", "more contributor license agreements. See the NOTICE file # distributed", "@dataclass class LE_Read_PHY: status: int connection_handle: int tx_phy: int rx_phy:", "super().set(subevent_code) self.conn_handle = conn_handle self.max_tx_octets = max_tx_octets self.max_tx_time = max_tx_time", "own_address_type=0, peer_address_type=0, \\ peer_address='00:00:00:00:00:00', advertising_channel_map=0, \\ advertising_filter_policy=0): self.advertising_interval_min = advertising_interval_min", "@dataclass class L2CAP_Data_Send: pdu_length: int channel_id: int data: bytearray ba_full_message:", "int own_address_type: int peer_address_type: int peer_address: str advertising_channel_map: int advertising_filter_policy:", "0X0003 OGF_INFO_PARAM = 0x04 OCF_READ_LOCAL_COMMANDS = 0x0002 OCF_READ_BD_ADDR = 0x0009", "def set(self, subevent_code=0): self.subevent_code = subevent_code @dataclass class HCI_Ev_LE_Enhanced_Connection_Complete(HCI_Ev_LE_Meta): status:", "specific language governing permissions and limitations # under the License.", "peer_address='00:00:00:00:00:00', advertising_channel_map=0, \\ advertising_filter_policy=0): self.advertising_interval_min = advertising_interval_min self.advertising_interval_max = advertising_interval_max", 
"triggered=0): super().set(subevent_code) self.conn_handle = conn_handle self.max_tx_octets = max_tx_octets self.max_tx_time =", "le_scan_window, initiator_filter_policy, peer_address_type, own_address_type, connection_interval_min, connection_interval_max, max_latency,supervision_timeout, min_ce_length, max_ce_length)) peer_addr_ba", "num_hci_cmd_packets=0, opcode=0, return_parameters=b''): self.num_hci_command_packets = num_hci_cmd_packets self.opcode = opcode self.return_parameters", "__init__(self): self.set() def set(self, pdu_length=0, channel_id=0, data=b''): self.pdu_length = pdu_length", "bytearray def __init__(self): self.set() def set(self, ogf=0, ocf=0, data=b''): self.packet_type", "advertising_type, own_address_type, peer_address_type, advertising_channel_map, advertising_filter_policy)) peer_addr_ba = cmd_addr_to_ba(peer_address) self.ba_full_message[7:7] =", "supervision_timeout=0, central_clock_accuracy=0): super().set(subevent_code) self.status = status self.connection_handle = connection_handle self.role", "distributed with this work for additional information # regarding copyright", "self.return_parameters = return_parameters @dataclass class HCI_Ev_Cmd_Status: status: int num_hci_command_packets: int", "conn_handle self.max_tx_octets = max_tx_octets self.max_tx_time = max_tx_time self.max_rx_octets = max_rx_octets", "class LE_Read_Buffer_Size: status: int le_acl_data_packet_length: int total_num_le_acl_data_packets: int iso_data_packet_len: int", "le_acl_data_packet_length self.total_num_le_acl_data_packets = total_num_le_acl_data_packets self.iso_data_packet_len = iso_data_packet_len self.total_num_iso_data_packets = total_num_iso_data_packets", "self.rx_phy = rx_phy @dataclass class HCI_Number_Of_Completed_Packets: num_handles: int connection_handle: int", "data @dataclass class HCI_Cmd_Send: packet_type: int ogf: int ocf: int", "for the # specific language governing permissions and limitations #", "int algorithm: int def __init__(self): 
self.set() def set(self, subevent_code=0, connection_handle=0,", "pdu_length=0, channel_id=0, data=b''): if not pdu_length: self.pdu_length = len(data) else:", "range(0,len(x))]), 2) addr_hex = \"{0:0{1}x}\".format(addr_int, 12) addr = \":\".join(addr_hex[i:i+2] for", "the License. # from dataclasses import dataclass import struct from", "data: bytearray ba_full_message: bytearray def __init__(self): self.set() def set(self, ogf=0,", "i in range(0,len(x))]), 2) addr_hex = \"{0:0{1}x}\".format(addr_int, 12) addr =", "requested_tx_time = 1 suggested_dflt_data_len = None max_data_len = None phy", "@dataclass class LE_Read_Buffer_Size: status: int le_acl_data_packet_length: int total_num_le_acl_data_packets: int iso_data_packet_len:", "0 in x[:-2] and 1 in x[:-2]: x[0] = 1", "peer_address_type self.peer_address = peer_address self.local_resolvable_private_address = local_resolvable_private_address self.peer_resolvable_private_address = peer_resolvable_private_address", "HCI_Recv_ACL_Data_Packet(HCI_Receive): connection_handle: int pb_flag: int bc_flag: int data_total_len: int data:", "bytearray def __init__(self): self.set() def set(self, connection_handle=0, pb_flag=0b00, bc_flag=0b00, data=b''):", "# RX / TX ############ @dataclass class HCI_Receive: packet_type: int", "else: self.pdu_length = pdu_length self.channel_id = channel_id self.data = data", "= 0x0f HCI_EV_CODE_LE_META_EVENT = 0x3e HCI_SUBEV_CODE_LE_ENHANCED_CONN_CMP = 0x0a HCI_SUBEV_CODE_LE_DATA_LEN_CHANGE =", "to you under the Apache License, Version 2.0 (the #", "channel_id=0, data=b''): self.pdu_length = pdu_length self.channel_id = channel_id self.data =", "may not use this file except in compliance # with", "0 ############ # FUNCTIONS ############ def get_opcode(ogf: int, ocf: int):", "super().set(packet_type) self.connection_handle = connection_handle self.pb_flag = pb_flag self.bc_flag = bc_flag", "HCI_Ev_Cmd_Complete: num_hci_command_packets: int opcode: int return_parameters: int def 
__init__(self): self.set()", "ev_code self.packet_len = packet_len self.recv_data = recv_data self.recv_data = recv_data[:packet_len]", "le_scan_window: int initiator_filter_policy: int peer_address_type: int peer_address: str own_address_type: int", "in range(0,48)] if 0 in x[:-2] and 1 in x[:-2]:", "1 x[1] = 1 break addr_int = int(\"\".join([str(x[i]) for i", "= own_address_type self.connection_interval_min = connection_interval_min self.connection_interval_max = connection_interval_max self.max_latency =", "self.data_total_length = len(data) self.data = data self.ba_full_message = bytearray(struct.pack('<BHH', self.packet_type,", "advertising_filter_policy)) peer_addr_ba = cmd_addr_to_ba(peer_address) self.ba_full_message[7:7] = peer_addr_ba @dataclass class HCI_Scan:", "= recv_data self.recv_data = recv_data[:packet_len] @dataclass class HCI_Recv_ACL_Data_Packet(HCI_Receive): connection_handle: int", "= 5 WAIT_FOR_EVENT_CONN_TIMEOUT = 25 ############ # GLOBAL VAR ############", "def __init__(self): self.set() def set(self, subevent_code=0, connection_handle=0, algorithm=0): super().set(subevent_code) self.connection_handle", "= 0x002f OCF_LE_READ_PHY = 0x0030 OCF_LE_SET_DFLT_PHY = 0x0031 OCF_LE_SET_PHY =", "self.le_acl_data_packet_length = le_acl_data_packet_length self.total_num_le_acl_data_packets = total_num_le_acl_data_packets self.iso_data_packet_len = iso_data_packet_len self.total_num_iso_data_packets", "conn_handle: int max_tx_octets: int max_tx_time: int max_rx_octets: int max_rx_time: int", "= None num_of_completed_packets_cnt = 0 num_of_completed_packets_time = 0 ############ #", "/ TX ############ @dataclass class HCI_Receive: packet_type: int def __init__(self):", "= cmd_addr_to_ba(peer_address) self.ba_full_message[6:6] = peer_addr_ba ############ # RX / TX", "additional information # regarding copyright ownership. 
The ASF licenses this", "int bc_flag: int data_total_length: int data: bytearray ba_full_message: bytearray def", "conn_handle=0, max_tx_octets=0, max_tx_time=0, max_rx_octets=0, max_rx_time=0, triggered=0): super().set(subevent_code) self.conn_handle = conn_handle", "0x0030 OCF_LE_SET_DFLT_PHY = 0x0031 OCF_LE_SET_PHY = 0x0032 OGF_VENDOR_SPECIFIC = 0x003f", "OCF_LE_READ_MAX_DATA_LEN = 0x002f OCF_LE_READ_PHY = 0x0030 OCF_LE_SET_DFLT_PHY = 0x0031 OCF_LE_SET_PHY", "ocf) self.packet_len = len(data) self.data = data self.ba_full_message = bytearray(struct.pack('<BHB',", "self.subevent_code = subevent_code @dataclass class HCI_Ev_LE_Enhanced_Connection_Complete(HCI_Ev_LE_Meta): status: int connection_handle: int", "@dataclass class HCI_Ev_LE_PHY_Update_Complete(HCI_Ev_LE_Meta): status: int connection_handle: int tx_phy: int rx_phy:", "0x0002 OCF_READ_BD_ADDR = 0x0009 OGF_LE_CTL = 0x08 OCF_LE_SET_EVENT_MASK = 0x0001", "OCF_LE_SET_PHY = 0x0032 OGF_VENDOR_SPECIFIC = 0x003f BLE_HCI_OCF_VS_RD_STATIC_ADDR = 0x0001 PUBLIC_ADDRESS_TYPE", "algorithm=0): super().set(subevent_code) self.connection_handle = connection_handle self.algorithm = algorithm ############ #", "self.supported_max_tx_time = supported_max_tx_time self.supported_max_rx_octets = supported_max_rx_octets self.supported_max_rx_time = supported_max_rx_time @dataclass", "def set(self, advertising_interval_min=0, advertising_interval_max=0, \\ advertising_type=0, own_address_type=0, peer_address_type=0, \\ peer_address='00:00:00:00:00:00',", "= suggested_max_tx_octets self.suggested_max_tx_time = suggested_max_tx_time @dataclass class Max_Data_Length(): status: int", "# specific language governing permissions and limitations # under the", "self.max_rx_octets = max_rx_octets self.max_rx_time = max_rx_time self.triggered = triggered @dataclass", "0x13 CONN_FAILED_TO_BE_ESTABLISHED = 0x3e CONN_TIMEOUT = 0x08 OGF_HOST_CTL = 0x03", "status=0, le_acl_data_packet_length=0, total_num_le_acl_data_packets=0, 
iso_data_packet_len=0, total_num_iso_data_packets=0): self.status = status self.le_acl_data_packet_length =", "you may not use this file except in compliance #", "class HCI_Ev_LE_PHY_Update_Complete(HCI_Ev_LE_Meta): status: int connection_handle: int tx_phy: int rx_phy: int", "0x0032 OGF_VENDOR_SPECIFIC = 0x003f BLE_HCI_OCF_VS_RD_STATIC_ADDR = 0x0001 PUBLIC_ADDRESS_TYPE = 0", "rx_phy ############ # EVENTS ############ @dataclass class HCI_Ev_Disconn_Complete: status: int", "based on supported_max_tx_octets num_of_packets_to_send = None events_list = [] bdaddr", "__init__(self): self.set() def set(self, ogf=0, ocf=0, data=b''): self.packet_type = HCI_COMMAND_PACKET", "own_address_type: int peer_address_type: int peer_address: str advertising_channel_map: int advertising_filter_policy: int", "0x02 HCI_EVENT_PACKET = 0x04 HCI_EV_CODE_DISCONN_CMP = 0x05 HCI_EV_CODE_CMD_CMP = 0x0e", "own_address_type, connection_interval_min, connection_interval_max, max_latency,supervision_timeout, min_ce_length, max_ce_length)) peer_addr_ba = cmd_addr_to_ba(peer_address) self.ba_full_message[6:6]", "int def __init__(self): self.set() def set(self, status=0, supported_max_tx_octets=0, supported_max_tx_time=0, supported_max_rx_octets=0,", "OGF_HOST_CTL = 0x03 OCF_SET_EVENT_MASK = 0x0001 OCF_RESET = 0X0003 OGF_INFO_PARAM", "# under the License. 
# from dataclasses import dataclass import", "with this work for additional information # regarding copyright ownership.", "& 0x0eff) | (self.pb_flag << 12) | (self.bc_flag << 14)),", "int def __init__(self): self.set() def set(self, status=0, connection_handle=0, reason=0): self.status", "\\ advertising_filter_policy=0): self.advertising_interval_min = advertising_interval_min self.advertising_interval_max = advertising_interval_max self.advertising_type =", "supervision_timeout self.central_clock_accuracy = central_clock_accuracy @dataclass class HCI_Ev_LE_Data_Length_Change(HCI_Ev_LE_Meta): conn_handle: int max_tx_octets:", "connection_handle=0, pb_flag=0b00, bc_flag=0b00, data=b''): self.packet_type = HCI_ACL_DATA_PACKET self.connection_handle = connection_handle", "connection_handle=0, tx_phy=0, rx_phy=0): self.status = status self.connection_handle = connection_handle self.tx_phy", "OCF_SET_EVENT_MASK = 0x0001 OCF_RESET = 0X0003 OGF_INFO_PARAM = 0x04 OCF_READ_LOCAL_COMMANDS", "0x3e HCI_SUBEV_CODE_LE_ENHANCED_CONN_CMP = 0x0a HCI_SUBEV_CODE_LE_DATA_LEN_CHANGE = 0x07 HCI_SUBEV_CODE_LE_PHY_UPDATE_CMP = 0x0c", "connection_handle self.algorithm = algorithm ############ # PARAMETERS ############ @dataclass class", "random ############ # DEFINES ############ AF_BLUETOOTH = 31 HCI_CHANNEL_USER =", "opcode >> 10 ocf = opcode & 0x03ff return ogf,", "bytearray def __init__(self): self.set() def set(self, le_scan_type=0, le_scan_interval=0, le_scan_window=0, own_address_type=0,", "regarding copyright ownership. 
The ASF licenses this file # to", "self.ba_full_message[7:7] = peer_addr_ba @dataclass class HCI_Scan: le_scan_type: int le_scan_interval: int", "or agreed to in writing, # software distributed under the", "# PARAMETERS ############ @dataclass class HCI_Advertising: advertising_interval_min: int advertising_interval_max: int", "peer_address self.local_resolvable_private_address = local_resolvable_private_address self.peer_resolvable_private_address = peer_resolvable_private_address self.connection_interval = connection_interval", "LE_Read_Buffer_Size: status: int le_acl_data_packet_length: int total_num_le_acl_data_packets: int iso_data_packet_len: int total_num_iso_data_packets:", "HCI_EVENT_PACKET = 0x04 HCI_EV_CODE_DISCONN_CMP = 0x05 HCI_EV_CODE_CMD_CMP = 0x0e HCI_EV_CODE_CMD_STATUS", "= min_ce_length self.max_ce_length = max_ce_length self.ba_full_message = bytearray(struct.pack('<HHBBBHHHHHH', le_scan_interval, le_scan_window,", "advertising_channel_map: int advertising_filter_policy: int ba_full_message: bytearray def __init__(self): self.set() def", "10 ocf = opcode & 0x03ff return ogf, ocf def", "supported_max_rx_octets: int supported_max_rx_time: int def __init__(self): self.set() def set(self, status=0,", "############ num_of_bytes_to_send = None # based on supported_max_tx_octets num_of_packets_to_send =", "self.ba_full_message.extend(self.data) @dataclass class HCI_ACL_Data_Send: packet_type: int connection_handle: int pb_flag: int", "= 0 ############ # FUNCTIONS ############ def get_opcode(ogf: int, ocf:", "le_acl_data_packet_length: int total_num_le_acl_data_packets: int iso_data_packet_len: int total_num_iso_data_packets: int def __init__(self):", "connection_handle self.reason = reason @dataclass class HCI_Ev_Cmd_Complete: num_hci_command_packets: int opcode:", "OCF_LE_SET_ADVERTISING_PARAMETERS = 0x0006 OCF_LE_SET_ADVERTISE_ENABLE = 0x000a OCF_LE_SET_SCAN_PARAMETERS = 0x000b OCF_LE_SET_SCAN_ENABLE", "in range(len(addr_str), -2, -2))[1:] def 
gen_static_rand_addr(): while True: x =", "self.ev_code = ev_code self.packet_len = packet_len self.recv_data = recv_data self.recv_data", "= None ev_num_comp_pkts = None num_of_completed_packets_cnt = 0 num_of_completed_packets_time =", "def set(self, subevent_code=0, connection_handle=0, algorithm=0): super().set(subevent_code) self.connection_handle = connection_handle self.algorithm", "__init__(self): self.set() def set(self, subevent_code=0, status=0, connection_handle=0, role=0, peer_address_type=0, peer_address='00:00:00:00:00:00',", "= 0x0002 OCF_READ_BD_ADDR = 0x0009 OGF_LE_CTL = 0x08 OCF_LE_SET_EVENT_MASK =", "int peer_address_type: int peer_address: str own_address_type: int connection_interval_min: int connection_interval_max:", "class HCI_Scan: le_scan_type: int le_scan_interval: int le_scan_window: int own_address_type: int", "range(0, len(addr_hex), 2)) return addr.upper() ############ # GLOBAL VAR CLASSES", "HCI_Ev_LE_Meta: subevent_code: int def __init__(self): self.set() def set(self, subevent_code=0): self.subevent_code", "supported_max_tx_time=0, supported_max_rx_octets=0, supported_max_rx_time=0): self.status = status self.supported_max_tx_octets = supported_max_tx_octets self.supported_max_tx_time", "in x[:-2]: x[0] = 1 x[1] = 1 break addr_int", "= connection_interval_min self.connection_interval_max = connection_interval_max self.max_latency = max_latency self.supervision_timeout =", "self.set() def set(self,packet_type=0): self.packet_type = packet_type @dataclass class HCI_Recv_Event_Packet(HCI_Receive): ev_code:", "KIND, either express or implied. 
See the License for the", "@dataclass class Max_Data_Length(): status: int supported_max_tx_octets: int supported_max_tx_time: int supported_max_rx_octets:", "scanning_filter_policy self.ba_full_message = bytearray(struct.pack('<BHHBB',le_scan_type, le_scan_interval, le_scan_window, own_address_type, scanning_filter_policy)) @dataclass class", "self.data = data @dataclass class HCI_Recv_L2CAP_Data: pdu_length: int channel_id: int", "channel_id self.data = data @dataclass class HCI_Cmd_Send: packet_type: int ogf:", "peer_resolvable_private_address='00:00:00:00:00:00', connection_interval=0, peripheral_latency=0, supervision_timeout=0, central_clock_accuracy=0): super().set(subevent_code) self.status = status self.connection_handle", "= 31 HCI_CHANNEL_USER = 1 HCI_COMMAND_PACKET = 0x01 HCI_ACL_DATA_PACKET =", "peer_addr_ba = cmd_addr_to_ba(peer_address) self.ba_full_message[6:6] = peer_addr_ba ############ # RX /", "= 0x0031 OCF_LE_SET_PHY = 0x0032 OGF_VENDOR_SPECIFIC = 0x003f BLE_HCI_OCF_VS_RD_STATIC_ADDR =", "opcode: int return_parameters: int def __init__(self): self.set() def set(self, num_hci_cmd_packets=0,", "self.recv_data = recv_data self.recv_data = recv_data[:packet_len] @dataclass class HCI_Recv_ACL_Data_Packet(HCI_Receive): connection_handle:", "int initiator_filter_policy: int peer_address_type: int peer_address: str own_address_type: int connection_interval_min:", "= iso_data_packet_len self.total_num_iso_data_packets = total_num_iso_data_packets @dataclass class LE_Read_PHY: status: int", "self.status = status self.num_hci_command_packets = num_hci_cmd_packets self.opcode = opcode @dataclass", "def set(self, status=0, le_acl_data_packet_length=0, total_num_le_acl_data_packets=0, iso_data_packet_len=0, total_num_iso_data_packets=0): self.status = status", "-2))[1:] def gen_static_rand_addr(): while True: x = [random.randint(0,1) for _", "__init__(self): self.set() def set(self,packet_type=0): self.packet_type = packet_type @dataclass class 
HCI_Recv_Event_Packet(HCI_Receive):", "__init__(self): self.set() def set(self, connection_handle=0, pb_flag=0b00, bc_flag=0b00, data=b''): self.packet_type =", "= bc_flag self.data_total_length = len(data) self.data = data self.ba_full_message =", "peer_address: str advertising_channel_map: int advertising_filter_policy: int ba_full_message: bytearray def __init__(self):", "scanning_filter_policy: int ba_full_message: bytearray def __init__(self): self.set() def set(self, le_scan_type=0,", "= status self.connection_handle = connection_handle self.tx_phy = tx_phy self.rx_phy =", "set(self, advertising_interval_min=0, advertising_interval_max=0, \\ advertising_type=0, own_address_type=0, peer_address_type=0, \\ peer_address='00:00:00:00:00:00', advertising_channel_map=0,", "connection_interval_max self.max_latency = max_latency self.supervision_timeout = supervision_timeout self.min_ce_length = min_ce_length", "# Licensed to the Apache Software Foundation (ASF) under one", "= 0x003f BLE_HCI_OCF_VS_RD_STATIC_ADDR = 0x0001 PUBLIC_ADDRESS_TYPE = 0 STATIC_RANDOM_ADDRESS_TYPE =", "def __init__(self): self.set() def set(self,packet_type=0): self.packet_type = packet_type @dataclass class", "self.data_total_length)) self.ba_full_message.extend(self.data) @dataclass class L2CAP_Data_Send: pdu_length: int channel_id: int data:", "= 0x13 CONN_FAILED_TO_BE_ESTABLISHED = 0x3e CONN_TIMEOUT = 0x08 OGF_HOST_CTL =", "self.connection_handle = connection_handle self.reason = reason @dataclass class HCI_Ev_Cmd_Complete: num_hci_command_packets:", "FUNCTIONS ############ def get_opcode(ogf: int, ocf: int): return ((ocf &", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "= 0x0060 OCF_LE_SET_RANDOM_ADDRESS = 0x0005 OCF_LE_SET_ADVERTISING_PARAMETERS = 0x0006 OCF_LE_SET_ADVERTISE_ENABLE =", "return_parameters @dataclass class HCI_Ev_Cmd_Status: status: int num_hci_command_packets: int opcode: int", "advertising_channel_map 
self.advertising_filter_policy = advertising_filter_policy self.ba_full_message = bytearray(struct.pack('<HHBBBBB', advertising_interval_min, advertising_interval_max, advertising_type,", "# from dataclasses import dataclass import struct from binascii import", "= 0x0032 OGF_VENDOR_SPECIFIC = 0x003f BLE_HCI_OCF_VS_RD_STATIC_ADDR = 0x0001 PUBLIC_ADDRESS_TYPE =", "Software Foundation (ASF) under one # or more contributor license", "connection_interval: int peripheral_latency: int supervision_timeout: int central_clock_accuracy: int def __init__(self):", "\\ peer_address='00:00:00:00:00:00', advertising_channel_map=0, \\ advertising_filter_policy=0): self.advertising_interval_min = advertising_interval_min self.advertising_interval_max =", "= connection_handle self.algorithm = algorithm ############ # PARAMETERS ############ @dataclass", "# regarding copyright ownership. The ASF licenses this file #", "= supported_max_tx_octets self.supported_max_tx_time = supported_max_tx_time self.supported_max_rx_octets = supported_max_rx_octets self.supported_max_rx_time =", "self.ba_full_message.extend(self.data) @dataclass class L2CAP_Data_Send: pdu_length: int channel_id: int data: bytearray", "set(self, num_hci_cmd_packets=0, opcode=0, return_parameters=b''): self.num_hci_command_packets = num_hci_cmd_packets self.opcode = opcode", "channel_id=0, data=b''): if not pdu_length: self.pdu_length = len(data) else: self.pdu_length", "peer_addr_ba @dataclass class HCI_Scan: le_scan_type: int le_scan_interval: int le_scan_window: int", "int connection_interval_min: int connection_interval_max: int max_latency: int supervision_timeout: int min_ce_length:", "0x3e CONN_TIMEOUT = 0x08 OGF_HOST_CTL = 0x03 OCF_SET_EVENT_MASK = 0x0001", "def set(self, pdu_length=0, channel_id=0, data=b''): self.pdu_length = pdu_length self.channel_id =", "status: int connection_handle: int tx_phy: int rx_phy: int def __init__(self):", "OCF_LE_READ_SUGGESTED_DFLT_DATA_LEN = 0x0023 OCF_LE_READ_MAX_DATA_LEN = 
0x002f OCF_LE_READ_PHY = 0x0030 OCF_LE_SET_DFLT_PHY", "this file # to you under the Apache License, Version", "= num_hci_cmd_packets self.opcode = opcode self.return_parameters = return_parameters @dataclass class", "suggested_max_tx_time @dataclass class Max_Data_Length(): status: int supported_max_tx_octets: int supported_max_tx_time: int", "i in range(0, len(addr_hex), 2)) return addr.upper() ############ # GLOBAL", "self.packet_len)) self.ba_full_message.extend(self.data) @dataclass class HCI_ACL_Data_Send: packet_type: int connection_handle: int pb_flag:", "data_total_len: int data: bytearray def __init__(self): self.set() def set(self, packet_type=0,", "= 0x04 OCF_READ_LOCAL_COMMANDS = 0x0002 OCF_READ_BD_ADDR = 0x0009 OGF_LE_CTL =", "ocf: int packet_len: int data: bytearray ba_full_message: bytearray def __init__(self):", "ocf: int): return ((ocf & 0x03ff)|(ogf << 10)) def get_ogf_ocf(opcode:", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "own_address_type: int scanning_filter_policy: int ba_full_message: bytearray def __init__(self): self.set() def", "int max_rx_time: int triggered: int def __init__(self): self.set() def set(self,", "############ def get_opcode(ogf: int, ocf: int): return ((ocf & 0x03ff)|(ogf", "= connection_interval_max self.max_latency = max_latency self.supervision_timeout = supervision_timeout self.min_ce_length =", "self.own_address_type = own_address_type self.connection_interval_min = connection_interval_min self.connection_interval_max = connection_interval_max self.max_latency", "connection_handle: int tx_phy: int rx_phy: int def __init__(self): self.set() def", "bytearray ba_full_message: bytearray def __init__(self): self.set() def set(self, ogf=0, ocf=0,", "# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY", "return unhexlify(\"\".join(addr_str.split(':')))[::-1] def ba_addr_to_str(addr_ba: bytearray): addr_str = addr_ba.hex().upper() return ':'.join(addr_str[i:i+2]", "to 
the Apache Software Foundation (ASF) under one # or", "\"License\"); you may not use this file except in compliance", "self.ba_full_message = bytearray(struct.pack('<HHBBBBB', advertising_interval_min, advertising_interval_max, advertising_type, own_address_type, peer_address_type, advertising_channel_map, advertising_filter_policy))", "int max_rx_octets: int max_rx_time: int triggered: int def __init__(self): self.set()", "max_latency,supervision_timeout, min_ce_length, max_ce_length)) peer_addr_ba = cmd_addr_to_ba(peer_address) self.ba_full_message[6:6] = peer_addr_ba ############", "# distributed with this work for additional information # regarding", "len(data) self.data = data self.ba_full_message = bytearray(struct.pack('<BHB', self.packet_type, self.opcode, self.packet_len))", "@dataclass class HCI_Recv_ACL_Data_Packet(HCI_Receive): connection_handle: int pb_flag: int bc_flag: int data_total_len:", "self.status = status self.connection_handle = connection_handle self.reason = reason @dataclass", "OCF_LE_CREATE_CONN = 0x000d OCF_LE_SET_DATA_LEN = 0x0022 OCF_LE_READ_SUGGESTED_DFLT_DATA_LEN = 0x0023 OCF_LE_READ_MAX_DATA_LEN", "addr.upper() ############ # GLOBAL VAR CLASSES ############ @dataclass class Suggested_Dflt_Data_Length():", "= 1 WAIT_FOR_EVENT_TIMEOUT = 5 WAIT_FOR_EVENT_CONN_TIMEOUT = 25 ############ #", "self.ba_full_message = bytearray(struct.pack('<HHBBBHHHHHH', le_scan_interval, le_scan_window, initiator_filter_policy, peer_address_type, own_address_type, connection_interval_min, connection_interval_max,", "max_rx_time: int triggered: int def __init__(self): self.set() def set(self, subevent_code=0,", "data=b''): self.pdu_length = pdu_length self.channel_id = channel_id self.data = data", "le_scan_type self.le_scan_interval = le_scan_interval self.le_scan_window = le_scan_window self.own_address_type = own_address_type", "def set(self, status=0, connection_handle=0, reason=0): self.status = status self.connection_handle =", "bc_flag 
self.data_total_length = len(data) self.data = data self.ba_full_message = bytearray(struct.pack('<BHH',", "\\ max_ce_length=0): self.le_scan_interval = le_scan_interval self.le_scan_window = le_scan_window self.initiator_filter_policy =", "int connection_handle: int num_completed_packets: int def __init__(self): self.set() def set(self,", "for additional information # regarding copyright ownership. The ASF licenses", "= None events_list = [] bdaddr = '00:00:00:00:00:00' static_addr =", "= peer_address_type self.peer_address = peer_address self.local_resolvable_private_address = local_resolvable_private_address self.peer_resolvable_private_address =", "packet_type: int connection_handle: int pb_flag: int bc_flag: int data_total_length: int", "def __init__(self): self.set() def set(self,packet_type=0, ev_code=0, packet_len=0, recv_data=bytearray(256)): super().set(packet_type) self.ev_code", "((ocf & 0x03ff)|(ogf << 10)) def get_ogf_ocf(opcode: int): ogf =", "packet_type=0, connection_handle=0, pb_flag=0, bc_flag=0, total_data_len=0, data=b''): super().set(packet_type) self.connection_handle = connection_handle", "= le_scan_type self.le_scan_interval = le_scan_interval self.le_scan_window = le_scan_window self.own_address_type =", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "= channel_id self.data = data @dataclass class HCI_Cmd_Send: packet_type: int", "set(self, status=0, connection_handle=0, reason=0): self.status = status self.connection_handle = connection_handle", "total_num_le_acl_data_packets self.iso_data_packet_len = iso_data_packet_len self.total_num_iso_data_packets = total_num_iso_data_packets @dataclass class LE_Read_PHY:", "num_of_completed_packets_cnt = 0 num_of_completed_packets_time = 0 ############ # FUNCTIONS ############", "max_tx_time=0, max_rx_octets=0, max_rx_time=0, triggered=0): super().set(subevent_code) self.conn_handle = conn_handle self.max_tx_octets =", "= rx_phy ############ # EVENTS ############ 
@dataclass class HCI_Ev_Disconn_Complete: status:", "= status self.connection_handle = connection_handle self.role = role self.peer_address_type =", "supervision_timeout self.min_ce_length = min_ce_length self.max_ce_length = max_ce_length self.ba_full_message = bytearray(struct.pack('<HHBBBHHHHHH',", "@dataclass class HCI_Scan: le_scan_type: int le_scan_interval: int le_scan_window: int own_address_type:", "status self.connection_handle = connection_handle self.tx_phy = tx_phy self.rx_phy = rx_phy", "0x0e HCI_EV_CODE_CMD_STATUS = 0x0f HCI_EV_CODE_LE_META_EVENT = 0x3e HCI_SUBEV_CODE_LE_ENHANCED_CONN_CMP = 0x0a", "= HCI_ACL_DATA_PACKET self.connection_handle = connection_handle self.pb_flag = pb_flag self.bc_flag =", "le_scan_type=0, le_scan_interval=0, le_scan_window=0, own_address_type=0, scanning_filter_policy=0): self.le_scan_type = le_scan_type self.le_scan_interval =", "0x0eff) | (self.pb_flag << 12) | (self.bc_flag << 14)), self.data_total_length))", "None max_data_len = None phy = None ev_num_comp_pkts = None", "int supported_max_tx_time: int supported_max_rx_octets: int supported_max_rx_time: int def __init__(self): self.set()", "None ev_num_comp_pkts = None num_of_completed_packets_cnt = 0 num_of_completed_packets_time = 0", "def __init__(self): self.set() def set(self, status=0, le_acl_data_packet_length=0, total_num_le_acl_data_packets=0, iso_data_packet_len=0, total_num_iso_data_packets=0):", "= get_opcode(ogf, ocf) self.packet_len = len(data) self.data = data self.ba_full_message", "= data @dataclass class HCI_Cmd_Send: packet_type: int ogf: int ocf:", "packet_type @dataclass class HCI_Recv_Event_Packet(HCI_Receive): ev_code: int packet_len: int recv_data: bytearray", "the License for the # specific language governing permissions and", "@dataclass class HCI_Ev_LE_Meta: subevent_code: int def __init__(self): self.set() def set(self,", "self.packet_type, self.opcode, self.packet_len)) self.ba_full_message.extend(self.data) @dataclass class 
HCI_ACL_Data_Send: packet_type: int connection_handle:", "opcode=0): self.status = status self.num_hci_command_packets = num_hci_cmd_packets self.opcode = opcode", "HCI_Ev_Disconn_Complete: status: int connection_handle: int reason: int def __init__(self): self.set()", "ANY # KIND, either express or implied. See the License", "supported_max_tx_octets num_of_packets_to_send = None events_list = [] bdaddr = '00:00:00:00:00:00'", "= own_address_type self.peer_address_type = peer_address_type self.peer_address = peer_address self.advertising_channel_map =", "= total_data_len self.data = data @dataclass class HCI_Recv_L2CAP_Data: pdu_length: int", "# # Licensed to the Apache Software Foundation (ASF) under", "peer_resolvable_private_address: int connection_interval: int peripheral_latency: int supervision_timeout: int central_clock_accuracy: int", "VAR CLASSES ############ @dataclass class Suggested_Dflt_Data_Length(): status: int suggested_max_tx_octets: int", "= 1 break addr_int = int(\"\".join([str(x[i]) for i in range(0,len(x))]),", "le_scan_interval: int le_scan_window: int own_address_type: int scanning_filter_policy: int ba_full_message: bytearray", "OCF_LE_READ_BUFFER_SIZE_V2 = 0x0060 OCF_LE_SET_RANDOM_ADDRESS = 0x0005 OCF_LE_SET_ADVERTISING_PARAMETERS = 0x0006 OCF_LE_SET_ADVERTISE_ENABLE", "5 WAIT_FOR_EVENT_CONN_TIMEOUT = 25 ############ # GLOBAL VAR ############ num_of_bytes_to_send", "= len(data) self.data = data self.ba_full_message = bytearray(struct.pack('<BHH', self.packet_type, ((self.connection_handle", "status: int connection_handle: int role: int peer_address_type: int peer_address: str", "31 HCI_CHANNEL_USER = 1 HCI_COMMAND_PACKET = 0x01 HCI_ACL_DATA_PACKET = 0x02", "= 0x000c OCF_LE_CREATE_CONN = 0x000d OCF_LE_SET_DATA_LEN = 0x0022 OCF_LE_READ_SUGGESTED_DFLT_DATA_LEN =", "OCF_LE_SET_EVENT_MASK = 0x0001 OCF_LE_READ_BUFFER_SIZE_V1 = 0x0002 OCF_LE_READ_BUFFER_SIZE_V2 = 0x0060 OCF_LE_SET_RANDOM_ADDRESS", "agreed to in writing, # software distributed 
under the License", "pb_flag: int bc_flag: int data_total_length: int data: bytearray ba_full_message: bytearray", "int connection_handle: int role: int peer_address_type: int peer_address: str local_resolvable_private_address:", "num_handles: int connection_handle: int num_completed_packets: int def __init__(self): self.set() def", "super().set(subevent_code) self.status = status self.connection_handle = connection_handle self.tx_phy = tx_phy", "int advertising_type: int own_address_type: int peer_address_type: int peer_address: str advertising_channel_map:", "self.set() def set(self, num_hci_cmd_packets=0, opcode=0, return_parameters=b''): self.num_hci_command_packets = num_hci_cmd_packets self.opcode", "def __init__(self): self.set() def set(self, subevent_code=0, status=0, connection_handle=0, tx_phy=0, rx_phy=0):", "self.peer_address = peer_address self.own_address_type = own_address_type self.connection_interval_min = connection_interval_min self.connection_interval_max", "class HCI_Receive: packet_type: int def __init__(self): self.set() def set(self,packet_type=0): self.packet_type", "set(self, packet_type=0, connection_handle=0, pb_flag=0, bc_flag=0, total_data_len=0, data=b''): super().set(packet_type) self.connection_handle =", "packet_type: int def __init__(self): self.set() def set(self,packet_type=0): self.packet_type = packet_type", "pb_flag: int bc_flag: int data_total_len: int data: bytearray def __init__(self):", "ASF licenses this file # to you under the Apache", "status self.connection_handle = connection_handle self.reason = reason @dataclass class HCI_Ev_Cmd_Complete:", "connection_interval_min, connection_interval_max, max_latency,supervision_timeout, min_ce_length, max_ce_length)) peer_addr_ba = cmd_addr_to_ba(peer_address) self.ba_full_message[6:6] =", "self.central_clock_accuracy = central_clock_accuracy @dataclass class HCI_Ev_LE_Data_Length_Change(HCI_Ev_LE_Meta): conn_handle: int max_tx_octets: int", "ogf = opcode >> 10 ocf = opcode & 
0x03ff", "ownership. The ASF licenses this file # to you under", "self.set() def set(self, subevent_code=0, connection_handle=0, algorithm=0): super().set(subevent_code) self.connection_handle = connection_handle", "x[0] = 1 x[1] = 1 break addr_int = int(\"\".join([str(x[i])", "set(self, status = 0, num_hci_cmd_packets=0, opcode=0): self.status = status self.num_hci_command_packets", "triggered @dataclass class HCI_Ev_LE_PHY_Update_Complete(HCI_Ev_LE_Meta): status: int connection_handle: int tx_phy: int", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "min_ce_length=0, \\ max_ce_length=0): self.le_scan_interval = le_scan_interval self.le_scan_window = le_scan_window self.initiator_filter_policy", "self.opcode, self.packet_len)) self.ba_full_message.extend(self.data) @dataclass class HCI_ACL_Data_Send: packet_type: int connection_handle: int", "= None conn_handle = 0 requested_tx_octets = 1 requested_tx_time =", "int max_tx_time: int max_rx_octets: int max_rx_time: int triggered: int def", "self.advertising_interval_min = advertising_interval_min self.advertising_interval_max = advertising_interval_max self.advertising_type = advertising_type self.own_address_type", "connection_interval_max, max_latency,supervision_timeout, min_ce_length, max_ce_length)) peer_addr_ba = cmd_addr_to_ba(peer_address) self.ba_full_message[6:6] = peer_addr_ba", "0x0f HCI_EV_CODE_LE_META_EVENT = 0x3e HCI_SUBEV_CODE_LE_ENHANCED_CONN_CMP = 0x0a HCI_SUBEV_CODE_LE_DATA_LEN_CHANGE = 0x07", "dataclasses import dataclass import struct from binascii import unhexlify import", "algorithm: int def __init__(self): self.set() def set(self, subevent_code=0, connection_handle=0, algorithm=0):", "EVENTS ############ @dataclass class HCI_Ev_Disconn_Complete: status: int connection_handle: int reason:", "0x03 OCF_SET_EVENT_MASK = 0x0001 OCF_RESET = 0X0003 OGF_INFO_PARAM = 0x04", "@dataclass class HCI_Number_Of_Completed_Packets: num_handles: int connection_handle: int 
num_completed_packets: int def", "int channel_id: int data: bytearray ba_full_message: bytearray def __init__(self): self.set()", "gen_static_rand_addr(): while True: x = [random.randint(0,1) for _ in range(0,48)]", "language governing permissions and limitations # under the License. #", "import random ############ # DEFINES ############ AF_BLUETOOTH = 31 HCI_CHANNEL_USER", "None events_list = [] bdaddr = '00:00:00:00:00:00' static_addr = '00:00:00:00:00:00'", "set(self, status=0, supported_max_tx_octets=0, supported_max_tx_time=0, supported_max_rx_octets=0, supported_max_rx_time=0): self.status = status self.supported_max_tx_octets", "software distributed under the License is distributed on an #", "int pb_flag: int bc_flag: int data_total_len: int data: bytearray def", "self.set() def set(self, status=0, suggested_max_tx_octets=0, suggested_max_tx_time=0): self.status = status self.suggested_max_tx_octets", "in range(0, len(addr_hex), 2)) return addr.upper() ############ # GLOBAL VAR", "class HCI_Cmd_Send: packet_type: int ogf: int ocf: int packet_len: int", "pdu_length: int channel_id: int data: bytearray ba_full_message: bytearray def __init__(self):", "from binascii import unhexlify import random ############ # DEFINES ############", "set(self, subevent_code=0, status=0, connection_handle=0, role=0, peer_address_type=0, peer_address='00:00:00:00:00:00', local_resolvable_private_address='00:00:00:00:00:00', peer_resolvable_private_address='00:00:00:00:00:00', connection_interval=0,", "return addr.upper() ############ # GLOBAL VAR CLASSES ############ @dataclass class", "information # regarding copyright ownership. 
The ASF licenses this file", "int data: bytearray def __init__(self): self.set() def set(self, pdu_length=0, channel_id=0,", "min_ce_length: int max_ce_length: int ba_full_message: bytearray def __init__(self): self.set() def", "14)), self.data_total_length)) self.ba_full_message.extend(self.data) @dataclass class L2CAP_Data_Send: pdu_length: int channel_id: int", "# EVENTS ############ @dataclass class HCI_Ev_Disconn_Complete: status: int connection_handle: int", "return ':'.join(addr_str[i:i+2] for i in range(len(addr_str), -2, -2))[1:] def gen_static_rand_addr():", "self.set() def set(self, subevent_code=0, conn_handle=0, max_tx_octets=0, max_tx_time=0, max_rx_octets=0, max_rx_time=0, triggered=0):", "max_rx_time=0, triggered=0): super().set(subevent_code) self.conn_handle = conn_handle self.max_tx_octets = max_tx_octets self.max_tx_time", "self.status = status self.supported_max_tx_octets = supported_max_tx_octets self.supported_max_tx_time = supported_max_tx_time self.supported_max_rx_octets", "VAR ############ num_of_bytes_to_send = None # based on supported_max_tx_octets num_of_packets_to_send", "= peer_address self.own_address_type = own_address_type self.connection_interval_min = connection_interval_min self.connection_interval_max =", "None num_of_completed_packets_cnt = 0 num_of_completed_packets_time = 0 ############ # FUNCTIONS", "= data self.ba_full_message = bytearray(struct.pack('<BHB', self.packet_type, self.opcode, self.packet_len)) self.ba_full_message.extend(self.data) @dataclass", "self.set() def set(self, status=0, le_acl_data_packet_length=0, total_num_le_acl_data_packets=0, iso_data_packet_len=0, total_num_iso_data_packets=0): self.status =", "peer_address: str local_resolvable_private_address: int peer_resolvable_private_address: int connection_interval: int peripheral_latency: int", "& 0x03ff)|(ogf << 10)) def get_ogf_ocf(opcode: int): ogf = opcode", "licenses this file # to you under the Apache License,", "self.status = status 
self.connection_handle = connection_handle self.tx_phy = tx_phy self.rx_phy", "int opcode: int return_parameters: int def __init__(self): self.set() def set(self,", "HCI_Ev_LE_Chan_Sel_Alg(HCI_Ev_LE_Meta): connection_handle: int algorithm: int def __init__(self): self.set() def set(self,", "by applicable law or agreed to in writing, # software", "2) addr_hex = \"{0:0{1}x}\".format(addr_int, 12) addr = \":\".join(addr_hex[i:i+2] for i", "CONN_TIMEOUT = 0x08 OGF_HOST_CTL = 0x03 OCF_SET_EVENT_MASK = 0x0001 OCF_RESET", "len(data) else: self.pdu_length = pdu_length self.channel_id = channel_id self.data =", "__init__(self): self.set() def set(self, subevent_code=0, conn_handle=0, max_tx_octets=0, max_tx_time=0, max_rx_octets=0, max_rx_time=0,", "num_hci_command_packets: int opcode: int def __init__(self): self.set() def set(self, status", "############ # PARAMETERS ############ @dataclass class HCI_Advertising: advertising_interval_min: int advertising_interval_max:", "bytearray ba_full_message: bytearray def __init__(self): self.set() def set(self, connection_handle=0, pb_flag=0b00,", "le_scan_interval: int le_scan_window: int initiator_filter_policy: int peer_address_type: int peer_address: str", "OCF_RESET = 0X0003 OGF_INFO_PARAM = 0x04 OCF_READ_LOCAL_COMMANDS = 0x0002 OCF_READ_BD_ADDR", "str local_resolvable_private_address: int peer_resolvable_private_address: int connection_interval: int peripheral_latency: int supervision_timeout:", "compliance # with the License. 
You may obtain a copy", "LE_Read_PHY: status: int connection_handle: int tx_phy: int rx_phy: int def", "max_rx_octets: int max_rx_time: int triggered: int def __init__(self): self.set() def", "self.set() def set(self, pdu_length=0, channel_id=0, data=b''): if not pdu_length: self.pdu_length", "self.ba_full_message = bytearray(struct.pack('<BHB', self.packet_type, self.opcode, self.packet_len)) self.ba_full_message.extend(self.data) @dataclass class HCI_ACL_Data_Send:", "10)) def get_ogf_ocf(opcode: int): ogf = opcode >> 10 ocf", "0x0001 OCF_LE_READ_BUFFER_SIZE_V1 = 0x0002 OCF_LE_READ_BUFFER_SIZE_V2 = 0x0060 OCF_LE_SET_RANDOM_ADDRESS = 0x0005", "int connection_handle: int pb_flag: int bc_flag: int data_total_length: int data:", "peer_addr_ba ############ # RX / TX ############ @dataclass class HCI_Receive:", "__init__(self): self.set() def set(self, le_scan_interval=0, le_scan_window=0, \\ initiator_filter_policy=0, peer_address_type=0, \\", "pdu_length self.channel_id = channel_id self.data = data @dataclass class HCI_Cmd_Send:", "own_address_type self.peer_address_type = peer_address_type self.peer_address = peer_address self.advertising_channel_map = advertising_channel_map", "peer_address='00:00:00:00:00:00', local_resolvable_private_address='00:00:00:00:00:00', peer_resolvable_private_address='00:00:00:00:00:00', connection_interval=0, peripheral_latency=0, supervision_timeout=0, central_clock_accuracy=0): super().set(subevent_code) self.status =", "@dataclass class HCI_Ev_Cmd_Complete: num_hci_command_packets: int opcode: int return_parameters: int def", "WARRANTIES OR CONDITIONS OF ANY # KIND, either express or", "= status self.suggested_max_tx_octets = suggested_max_tx_octets self.suggested_max_tx_time = suggested_max_tx_time @dataclass class", "def __init__(self): self.set() def set(self, subevent_code=0, status=0, connection_handle=0, role=0, peer_address_type=0,", "le_scan_window: int own_address_type: int scanning_filter_policy: int ba_full_message: 
bytearray def __init__(self):", "[] bdaddr = '00:00:00:00:00:00' static_addr = '00:00:00:00:00:00' le_read_buffer_size = None", "num_hci_cmd_packets self.opcode = opcode @dataclass class HCI_Ev_LE_Meta: subevent_code: int def", "max_tx_time self.max_rx_octets = max_rx_octets self.max_rx_time = max_rx_time self.triggered = triggered", "self.packet_len = packet_len self.recv_data = recv_data self.recv_data = recv_data[:packet_len] @dataclass", "1 break addr_int = int(\"\".join([str(x[i]) for i in range(0,len(x))]), 2)", "__init__(self): self.set() def set(self, subevent_code=0, connection_handle=0, algorithm=0): super().set(subevent_code) self.connection_handle =", "@dataclass class Suggested_Dflt_Data_Length(): status: int suggested_max_tx_octets: int suggested_max_tx_time: int def", "= ogf self.ocf = ocf self.opcode = get_opcode(ogf, ocf) self.packet_len", "ogf self.ocf = ocf self.opcode = get_opcode(ogf, ocf) self.packet_len =", "@dataclass class HCI_Connect: le_scan_interval: int le_scan_window: int initiator_filter_policy: int peer_address_type:", "set(self, pdu_length=0, channel_id=0, data=b''): self.pdu_length = pdu_length self.channel_id = channel_id", "HCI_Ev_Cmd_Status: status: int num_hci_command_packets: int opcode: int def __init__(self): self.set()", "to in writing, # software distributed under the License is", "connection_handle=0, num_completed_packets=0): self.num_handles = num_handles self.connection_handle = connection_handle self.num_completed_packets =", "get_ogf_ocf(opcode: int): ogf = opcode >> 10 ocf = opcode", "x[1] = 1 break addr_int = int(\"\".join([str(x[i]) for i in", "OCF_LE_SET_ADVERTISE_ENABLE = 0x000a OCF_LE_SET_SCAN_PARAMETERS = 0x000b OCF_LE_SET_SCAN_ENABLE = 0x000c OCF_LE_CREATE_CONN", "int def __init__(self): self.set() def set(self, num_handles=0, connection_handle=0, num_completed_packets=0): self.num_handles", "def __init__(self): self.set() def set(self, ogf=0, ocf=0, data=b''): self.packet_type =", "= 
bytearray(struct.pack('<BHH', self.packet_type, ((self.connection_handle & 0x0eff) | (self.pb_flag << 12)", "= data fmt_conf = \"<HH\" self.ba_full_message = bytearray(struct.pack(fmt_conf, self.pdu_length, self.channel_id))", "self.ogf = ogf self.ocf = ocf self.opcode = get_opcode(ogf, ocf)", "BLE_HCI_OCF_VS_RD_STATIC_ADDR = 0x0001 PUBLIC_ADDRESS_TYPE = 0 STATIC_RANDOM_ADDRESS_TYPE = 1 WAIT_FOR_EVENT_TIMEOUT", "bc_flag: int data_total_len: int data: bytearray def __init__(self): self.set() def", "implied. See the License for the # specific language governing", "self.local_resolvable_private_address = local_resolvable_private_address self.peer_resolvable_private_address = peer_resolvable_private_address self.connection_interval = connection_interval self.peripheral_latency", "supported_max_rx_time=0): self.status = status self.supported_max_tx_octets = supported_max_tx_octets self.supported_max_tx_time = supported_max_tx_time", "self.algorithm = algorithm ############ # PARAMETERS ############ @dataclass class HCI_Advertising:", "addr_ba.hex().upper() return ':'.join(addr_str[i:i+2] for i in range(len(addr_str), -2, -2))[1:] def", "self.max_ce_length = max_ce_length self.ba_full_message = bytearray(struct.pack('<HHBBBHHHHHH', le_scan_interval, le_scan_window, initiator_filter_policy, peer_address_type,", "= pdu_length self.channel_id = channel_id self.data = data @dataclass class", "0x07 HCI_SUBEV_CODE_LE_PHY_UPDATE_CMP = 0x0c HCI_SUBEV_CODE_LE_CHAN_SEL_ALG = 0x14 HCI_EV_NUM_COMP_PKTS = 0x13", "x[:-2]: x[0] = 1 x[1] = 1 break addr_int =", "def __init__(self): self.set() def set(self, le_scan_interval=0, le_scan_window=0, \\ initiator_filter_policy=0, peer_address_type=0,", "@dataclass class HCI_Ev_Cmd_Status: status: int num_hci_command_packets: int opcode: int def", "str advertising_channel_map: int advertising_filter_policy: int ba_full_message: bytearray def __init__(self): self.set()", "CLASSES ############ @dataclass class Suggested_Dflt_Data_Length(): status: 
int suggested_max_tx_octets: int suggested_max_tx_time:", "0x002f OCF_LE_READ_PHY = 0x0030 OCF_LE_SET_DFLT_PHY = 0x0031 OCF_LE_SET_PHY = 0x0032", "on supported_max_tx_octets num_of_packets_to_send = None events_list = [] bdaddr =", "either express or implied. See the License for the #", "get_opcode(ogf, ocf) self.packet_len = len(data) self.data = data self.ba_full_message =", "= tx_phy self.rx_phy = rx_phy @dataclass class HCI_Number_Of_Completed_Packets: num_handles: int", "############ @dataclass class HCI_Receive: packet_type: int def __init__(self): self.set() def", "max_ce_length=0): self.le_scan_interval = le_scan_interval self.le_scan_window = le_scan_window self.initiator_filter_policy = initiator_filter_policy", "bytearray(struct.pack('<BHH', self.packet_type, ((self.connection_handle & 0x0eff) | (self.pb_flag << 12) |", "channel_id self.data = data fmt_conf = \"<HH\" self.ba_full_message = bytearray(struct.pack(fmt_conf,", "def __init__(self): self.set() def set(self, advertising_interval_min=0, advertising_interval_max=0, \\ advertising_type=0, own_address_type=0,", "supported_max_tx_octets self.supported_max_tx_time = supported_max_tx_time self.supported_max_rx_octets = supported_max_rx_octets self.supported_max_rx_time = supported_max_rx_time", "supported_max_rx_octets=0, supported_max_rx_time=0): self.status = status self.supported_max_tx_octets = supported_max_tx_octets self.supported_max_tx_time =", "= supervision_timeout self.central_clock_accuracy = central_clock_accuracy @dataclass class HCI_Ev_LE_Data_Length_Change(HCI_Ev_LE_Meta): conn_handle: int", "self.pdu_length = pdu_length self.channel_id = channel_id self.data = data fmt_conf", "self.rx_phy = rx_phy ############ # EVENTS ############ @dataclass class HCI_Ev_Disconn_Complete:", "file except in compliance # with the License. 
You may", "le_read_buffer_size = None conn_handle = 0 requested_tx_octets = 1 requested_tx_time", "advertising_interval_min=0, advertising_interval_max=0, \\ advertising_type=0, own_address_type=0, peer_address_type=0, \\ peer_address='00:00:00:00:00:00', advertising_channel_map=0, \\", "peer_resolvable_private_address self.connection_interval = connection_interval self.peripheral_latency = peripheral_latency self.supervision_timeout = supervision_timeout", "int rx_phy: int def __init__(self): self.set() def set(self, status=0, connection_handle=0,", "int peer_address: str advertising_channel_map: int advertising_filter_policy: int ba_full_message: bytearray def", "recv_data=bytearray(256)): super().set(packet_type) self.ev_code = ev_code self.packet_len = packet_len self.recv_data =", "peer_address_type=0, \\ peer_address='00:00:00:00:00:00', advertising_channel_map=0, \\ advertising_filter_policy=0): self.advertising_interval_min = advertising_interval_min self.advertising_interval_max", "rx_phy: int def __init__(self): self.set() def set(self, subevent_code=0, status=0, connection_handle=0,", "(self.pb_flag << 12) | (self.bc_flag << 14)), self.data_total_length)) self.ba_full_message.extend(self.data) @dataclass", "class HCI_Ev_LE_Data_Length_Change(HCI_Ev_LE_Meta): conn_handle: int max_tx_octets: int max_tx_time: int max_rx_octets: int", "contributor license agreements. 
See the NOTICE file # distributed with", "HCI_CHANNEL_USER = 1 HCI_COMMAND_PACKET = 0x01 HCI_ACL_DATA_PACKET = 0x02 HCI_EVENT_PACKET", "int def __init__(self): self.set() def set(self, subevent_code=0, conn_handle=0, max_tx_octets=0, max_tx_time=0,", "def set(self, le_scan_interval=0, le_scan_window=0, \\ initiator_filter_policy=0, peer_address_type=0, \\ peer_address='00:00:00:00:00:00', own_address_type=0,", "= 1 HCI_COMMAND_PACKET = 0x01 HCI_ACL_DATA_PACKET = 0x02 HCI_EVENT_PACKET =", "= opcode self.return_parameters = return_parameters @dataclass class HCI_Ev_Cmd_Status: status: int", "set(self, ogf=0, ocf=0, data=b''): self.packet_type = HCI_COMMAND_PACKET self.ogf = ogf", "status=0, connection_handle=0, reason=0): self.status = status self.connection_handle = connection_handle self.reason", "int advertising_filter_policy: int ba_full_message: bytearray def __init__(self): self.set() def set(self,", "an # \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF", "((self.connection_handle & 0x0eff) | (self.pb_flag << 12) | (self.bc_flag <<", "class HCI_Recv_ACL_Data_Packet(HCI_Receive): connection_handle: int pb_flag: int bc_flag: int data_total_len: int", "WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express", "connection_interval_max: int max_latency: int supervision_timeout: int min_ce_length: int max_ce_length: int", "HCI_Recv_Event_Packet(HCI_Receive): ev_code: int packet_len: int recv_data: bytearray current_event: None def", "self.triggered = triggered @dataclass class HCI_Ev_LE_PHY_Update_Complete(HCI_Ev_LE_Meta): status: int connection_handle: int", "self.own_address_type = own_address_type self.peer_address_type = peer_address_type self.peer_address = peer_address self.advertising_channel_map", "distributed under the License is distributed on an # \"AS", "12) addr = \":\".join(addr_hex[i:i+2] for i in range(0, len(addr_hex), 2))", "set(self, le_scan_type=0, le_scan_interval=0, le_scan_window=0, own_address_type=0, scanning_filter_policy=0): 
self.le_scan_type = le_scan_type self.le_scan_interval", "self.suggested_max_tx_time = suggested_max_tx_time @dataclass class Max_Data_Length(): status: int supported_max_tx_octets: int", "range(len(addr_str), -2, -2))[1:] def gen_static_rand_addr(): while True: x = [random.randint(0,1)", "self.ba_full_message = bytearray(struct.pack('<BHHBB',le_scan_type, le_scan_interval, le_scan_window, own_address_type, scanning_filter_policy)) @dataclass class HCI_Connect:", "= connection_handle self.role = role self.peer_address_type = peer_address_type self.peer_address =", "reason: int def __init__(self): self.set() def set(self, status=0, connection_handle=0, reason=0):", "int return_parameters: int def __init__(self): self.set() def set(self, num_hci_cmd_packets=0, opcode=0,", "under the Apache License, Version 2.0 (the # \"License\"); you", "HCI_EV_CODE_DISCONN_CMP = 0x05 HCI_EV_CODE_CMD_CMP = 0x0e HCI_EV_CODE_CMD_STATUS = 0x0f HCI_EV_CODE_LE_META_EVENT", "self.supported_max_rx_time = supported_max_rx_time @dataclass class LE_Read_Buffer_Size: status: int le_acl_data_packet_length: int", "import unhexlify import random ############ # DEFINES ############ AF_BLUETOOTH =", "cmd_addr_to_ba(peer_address) self.ba_full_message[6:6] = peer_addr_ba ############ # RX / TX ############", "CONN_FAILED_TO_BE_ESTABLISHED = 0x3e CONN_TIMEOUT = 0x08 OGF_HOST_CTL = 0x03 OCF_SET_EVENT_MASK", "or more contributor license agreements. 
See the NOTICE file #", "max_tx_time: int max_rx_octets: int max_rx_time: int triggered: int def __init__(self):", "__init__(self): self.set() def set(self, advertising_interval_min=0, advertising_interval_max=0, \\ advertising_type=0, own_address_type=0, peer_address_type=0,", "if not pdu_length: self.pdu_length = len(data) else: self.pdu_length = pdu_length", "= 0x0001 OCF_LE_READ_BUFFER_SIZE_V1 = 0x0002 OCF_LE_READ_BUFFER_SIZE_V2 = 0x0060 OCF_LE_SET_RANDOM_ADDRESS =", "max_rx_octets=0, max_rx_time=0, triggered=0): super().set(subevent_code) self.conn_handle = conn_handle self.max_tx_octets = max_tx_octets", "connection_handle=0, pb_flag=0, bc_flag=0, total_data_len=0, data=b''): super().set(packet_type) self.connection_handle = connection_handle self.pb_flag", "self.supervision_timeout = supervision_timeout self.central_clock_accuracy = central_clock_accuracy @dataclass class HCI_Ev_LE_Data_Length_Change(HCI_Ev_LE_Meta): conn_handle:", "= le_acl_data_packet_length self.total_num_le_acl_data_packets = total_num_le_acl_data_packets self.iso_data_packet_len = iso_data_packet_len self.total_num_iso_data_packets =", "bc_flag=0, total_data_len=0, data=b''): super().set(packet_type) self.connection_handle = connection_handle self.pb_flag = pb_flag", "############ # EVENTS ############ @dataclass class HCI_Ev_Disconn_Complete: status: int connection_handle:", "self.le_scan_type = le_scan_type self.le_scan_interval = le_scan_interval self.le_scan_window = le_scan_window self.own_address_type", "opcode @dataclass class HCI_Ev_LE_Meta: subevent_code: int def __init__(self): self.set() def", "int ba_full_message: bytearray def __init__(self): self.set() def set(self, le_scan_interval=0, le_scan_window=0,", "self.status = status self.connection_handle = connection_handle self.role = role self.peer_address_type", "self.packet_type = HCI_ACL_DATA_PACKET self.connection_handle = connection_handle self.pb_flag = pb_flag self.bc_flag", "= max_tx_octets self.max_tx_time = 
max_tx_time self.max_rx_octets = max_rx_octets self.max_rx_time =", "total_num_iso_data_packets @dataclass class LE_Read_PHY: status: int connection_handle: int tx_phy: int", "ocf def cmd_addr_to_ba(addr_str: str): return unhexlify(\"\".join(addr_str.split(':')))[::-1] def ba_addr_to_str(addr_ba: bytearray): addr_str", "le_scan_interval=0, le_scan_window=0, own_address_type=0, scanning_filter_policy=0): self.le_scan_type = le_scan_type self.le_scan_interval = le_scan_interval", "data=b''): if not pdu_length: self.pdu_length = len(data) else: self.pdu_length =", "subevent_code=0, conn_handle=0, max_tx_octets=0, max_tx_time=0, max_rx_octets=0, max_rx_time=0, triggered=0): super().set(subevent_code) self.conn_handle =", "tx_phy=0, rx_phy=0): super().set(subevent_code) self.status = status self.connection_handle = connection_handle self.tx_phy", "pb_flag=0, bc_flag=0, total_data_len=0, data=b''): super().set(packet_type) self.connection_handle = connection_handle self.pb_flag =", "self.set() def set(self, subevent_code=0, status=0, connection_handle=0, role=0, peer_address_type=0, peer_address='00:00:00:00:00:00', local_resolvable_private_address='00:00:00:00:00:00',", "0 requested_tx_octets = 1 requested_tx_time = 1 suggested_dflt_data_len = None", "= addr_ba.hex().upper() return ':'.join(addr_str[i:i+2] for i in range(len(addr_str), -2, -2))[1:]", "0x000d OCF_LE_SET_DATA_LEN = 0x0022 OCF_LE_READ_SUGGESTED_DFLT_DATA_LEN = 0x0023 OCF_LE_READ_MAX_DATA_LEN = 0x002f", "or implied. 
See the License for the # specific language", "= pb_flag self.bc_flag = bc_flag self.data_total_length = len(data) self.data =", "suggested_max_tx_time=0): self.status = status self.suggested_max_tx_octets = suggested_max_tx_octets self.suggested_max_tx_time = suggested_max_tx_time", "OCF_LE_SET_SCAN_PARAMETERS = 0x000b OCF_LE_SET_SCAN_ENABLE = 0x000c OCF_LE_CREATE_CONN = 0x000d OCF_LE_SET_DATA_LEN", "= initiator_filter_policy self.peer_address_type = peer_address_type self.peer_address = peer_address self.own_address_type =", "the # specific language governing permissions and limitations # under", "self.set() def set(self, ogf=0, ocf=0, data=b''): self.packet_type = HCI_COMMAND_PACKET self.ogf", "def set(self, packet_type=0, connection_handle=0, pb_flag=0, bc_flag=0, total_data_len=0, data=b''): super().set(packet_type) self.connection_handle", "ba_full_message: bytearray def __init__(self): self.set() def set(self, connection_handle=0, pb_flag=0b00, bc_flag=0b00,", "unhexlify(\"\".join(addr_str.split(':')))[::-1] def ba_addr_to_str(addr_ba: bytearray): addr_str = addr_ba.hex().upper() return ':'.join(addr_str[i:i+2] for", "bytearray ba_full_message: bytearray def __init__(self): self.set() def set(self, pdu_length=0, channel_id=0,", "The ASF licenses this file # to you under the", "tx_phy: int rx_phy: int def __init__(self): self.set() def set(self, status=0,", "self.supported_max_rx_octets = supported_max_rx_octets self.supported_max_rx_time = supported_max_rx_time @dataclass class LE_Read_Buffer_Size: status:", "status: int le_acl_data_packet_length: int total_num_le_acl_data_packets: int iso_data_packet_len: int total_num_iso_data_packets: int", "status=0, suggested_max_tx_octets=0, suggested_max_tx_time=0): self.status = status self.suggested_max_tx_octets = suggested_max_tx_octets self.suggested_max_tx_time", "advertising_channel_map=0, \\ advertising_filter_policy=0): self.advertising_interval_min = advertising_interval_min self.advertising_interval_max = 
advertising_interval_max self.advertising_type", "ba_full_message: bytearray def __init__(self): self.set() def set(self, le_scan_type=0, le_scan_interval=0, le_scan_window=0,", "= rx_phy @dataclass class HCI_Number_Of_Completed_Packets: num_handles: int connection_handle: int num_completed_packets:", "if 0 in x[:-2] and 1 in x[:-2]: x[0] =", "connection_interval=0, peripheral_latency=0, supervision_timeout=0, central_clock_accuracy=0): super().set(subevent_code) self.status = status self.connection_handle =", "ocf=0, data=b''): self.packet_type = HCI_COMMAND_PACKET self.ogf = ogf self.ocf =", "law or agreed to in writing, # software distributed under", "L2CAP_Data_Send: pdu_length: int channel_id: int data: bytearray ba_full_message: bytearray def", "bc_flag: int data_total_length: int data: bytearray ba_full_message: bytearray def __init__(self):", "1 HCI_COMMAND_PACKET = 0x01 HCI_ACL_DATA_PACKET = 0x02 HCI_EVENT_PACKET = 0x04", "for i in range(len(addr_str), -2, -2))[1:] def gen_static_rand_addr(): while True:", "connection_handle: int role: int peer_address_type: int peer_address: str local_resolvable_private_address: int", "self.num_hci_command_packets = num_hci_cmd_packets self.opcode = opcode self.return_parameters = return_parameters @dataclass", "self.connection_handle = connection_handle self.num_completed_packets = num_completed_packets class HCI_Ev_LE_Chan_Sel_Alg(HCI_Ev_LE_Meta): connection_handle: int", "subevent_code=0, status=0, connection_handle=0, role=0, peer_address_type=0, peer_address='00:00:00:00:00:00', local_resolvable_private_address='00:00:00:00:00:00', peer_resolvable_private_address='00:00:00:00:00:00', connection_interval=0, peripheral_latency=0,", "GLOBAL VAR ############ num_of_bytes_to_send = None # based on supported_max_tx_octets", "OR CONDITIONS OF ANY # KIND, either express or implied.", "HCI_Receive: packet_type: int def __init__(self): self.set() def set(self,packet_type=0): self.packet_type =", 
"HCI_Ev_LE_Data_Length_Change(HCI_Ev_LE_Meta): conn_handle: int max_tx_octets: int max_tx_time: int max_rx_octets: int max_rx_time:", "i in range(len(addr_str), -2, -2))[1:] def gen_static_rand_addr(): while True: x", "initiator_filter_policy, peer_address_type, own_address_type, connection_interval_min, connection_interval_max, max_latency,supervision_timeout, min_ce_length, max_ce_length)) peer_addr_ba =", "int ocf: int packet_len: int data: bytearray ba_full_message: bytearray def", "advertising_interval_max: int advertising_type: int own_address_type: int peer_address_type: int peer_address: str", "copyright ownership. The ASF licenses this file # to you", "self.role = role self.peer_address_type = peer_address_type self.peer_address = peer_address self.local_resolvable_private_address", "@dataclass class HCI_Ev_Disconn_Complete: status: int connection_handle: int reason: int def", "in writing, # software distributed under the License is distributed", "int le_scan_interval: int le_scan_window: int own_address_type: int scanning_filter_policy: int ba_full_message:", "HCI_Ev_LE_PHY_Update_Complete(HCI_Ev_LE_Meta): status: int connection_handle: int tx_phy: int rx_phy: int def", "cmd_addr_to_ba(addr_str: str): return unhexlify(\"\".join(addr_str.split(':')))[::-1] def ba_addr_to_str(addr_ba: bytearray): addr_str = addr_ba.hex().upper()", "opcode=0, return_parameters=b''): self.num_hci_command_packets = num_hci_cmd_packets self.opcode = opcode self.return_parameters =", "recv_data self.recv_data = recv_data[:packet_len] @dataclass class HCI_Recv_ACL_Data_Packet(HCI_Receive): connection_handle: int pb_flag:", "class HCI_ACL_Data_Send: packet_type: int connection_handle: int pb_flag: int bc_flag: int", "= tx_phy self.rx_phy = rx_phy ############ # EVENTS ############ @dataclass", "def ba_addr_to_str(addr_ba: bytearray): addr_str = addr_ba.hex().upper() return ':'.join(addr_str[i:i+2] for i", "############ # GLOBAL VAR ############ num_of_bytes_to_send = None # 
based", "peer_address self.own_address_type = own_address_type self.connection_interval_min = connection_interval_min self.connection_interval_max = connection_interval_max", "int max_ce_length: int ba_full_message: bytearray def __init__(self): self.set() def set(self,", "= 0x08 OGF_HOST_CTL = 0x03 OCF_SET_EVENT_MASK = 0x0001 OCF_RESET =", "total_data_len=0, data=b''): super().set(packet_type) self.connection_handle = connection_handle self.pb_flag = pb_flag self.bc_flag", "ogf, ocf def cmd_addr_to_ba(addr_str: str): return unhexlify(\"\".join(addr_str.split(':')))[::-1] def ba_addr_to_str(addr_ba: bytearray):", "break addr_int = int(\"\".join([str(x[i]) for i in range(0,len(x))]), 2) addr_hex", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "= None # based on supported_max_tx_octets num_of_packets_to_send = None events_list", "HCI_SUBEV_CODE_LE_PHY_UPDATE_CMP = 0x0c HCI_SUBEV_CODE_LE_CHAN_SEL_ALG = 0x14 HCI_EV_NUM_COMP_PKTS = 0x13 CONN_FAILED_TO_BE_ESTABLISHED", "HCI_SUBEV_CODE_LE_DATA_LEN_CHANGE = 0x07 HCI_SUBEV_CODE_LE_PHY_UPDATE_CMP = 0x0c HCI_SUBEV_CODE_LE_CHAN_SEL_ALG = 0x14 HCI_EV_NUM_COMP_PKTS", "HCI_EV_CODE_LE_META_EVENT = 0x3e HCI_SUBEV_CODE_LE_ENHANCED_CONN_CMP = 0x0a HCI_SUBEV_CODE_LE_DATA_LEN_CHANGE = 0x07 HCI_SUBEV_CODE_LE_PHY_UPDATE_CMP", "def set(self, num_handles=0, connection_handle=0, num_completed_packets=0): self.num_handles = num_handles self.connection_handle =", "| (self.bc_flag << 14)), self.data_total_length)) self.ba_full_message.extend(self.data) @dataclass class L2CAP_Data_Send: pdu_length:", "from dataclasses import dataclass import struct from binascii import unhexlify", "# GLOBAL VAR ############ num_of_bytes_to_send = None # based on", "connection_handle self.num_completed_packets = num_completed_packets class HCI_Ev_LE_Chan_Sel_Alg(HCI_Ev_LE_Meta): connection_handle: int algorithm: int", "int connection_handle: int tx_phy: int rx_phy: int def __init__(self): self.set()", "def set(self, 
num_hci_cmd_packets=0, opcode=0, return_parameters=b''): self.num_hci_command_packets = num_hci_cmd_packets self.opcode =", "CONDITIONS OF ANY # KIND, either express or implied. See", "TX ############ @dataclass class HCI_Receive: packet_type: int def __init__(self): self.set()", "bytearray def __init__(self): self.set() def set(self, le_scan_interval=0, le_scan_window=0, \\ initiator_filter_policy=0,", "self.data = data self.ba_full_message = bytearray(struct.pack('<BHB', self.packet_type, self.opcode, self.packet_len)) self.ba_full_message.extend(self.data)", "self.connection_interval_max = connection_interval_max self.max_latency = max_latency self.supervision_timeout = supervision_timeout self.min_ce_length", "subevent_code @dataclass class HCI_Ev_LE_Enhanced_Connection_Complete(HCI_Ev_LE_Meta): status: int connection_handle: int role: int", "= central_clock_accuracy @dataclass class HCI_Ev_LE_Data_Length_Change(HCI_Ev_LE_Meta): conn_handle: int max_tx_octets: int max_tx_time:", "OCF_LE_READ_BUFFER_SIZE_V1 = 0x0002 OCF_LE_READ_BUFFER_SIZE_V2 = 0x0060 OCF_LE_SET_RANDOM_ADDRESS = 0x0005 OCF_LE_SET_ADVERTISING_PARAMETERS", "0x05 HCI_EV_CODE_CMD_CMP = 0x0e HCI_EV_CODE_CMD_STATUS = 0x0f HCI_EV_CODE_LE_META_EVENT = 0x3e", "= HCI_COMMAND_PACKET self.ogf = ogf self.ocf = ocf self.opcode =", "bytearray current_event: None def __init__(self): self.set() def set(self,packet_type=0, ev_code=0, packet_len=0,", "int def __init__(self): self.set() def set(self,packet_type=0): self.packet_type = packet_type @dataclass", "int def __init__(self): self.set() def set(self, subevent_code=0, status=0, connection_handle=0, tx_phy=0,", "= le_scan_interval self.le_scan_window = le_scan_window self.own_address_type = own_address_type self.scanning_filter_policy =", "rx_phy: int def __init__(self): self.set() def set(self, status=0, connection_handle=0, tx_phy=0,", "= 0x3e CONN_TIMEOUT = 0x08 OGF_HOST_CTL = 0x03 OCF_SET_EVENT_MASK =", "status=0, connection_handle=0, tx_phy=0, rx_phy=0): 
self.status = status self.connection_handle = connection_handle", "num_completed_packets class HCI_Ev_LE_Chan_Sel_Alg(HCI_Ev_LE_Meta): connection_handle: int algorithm: int def __init__(self): self.set()", "super().set(subevent_code) self.connection_handle = connection_handle self.algorithm = algorithm ############ # PARAMETERS", "HCI_EV_CODE_CMD_CMP = 0x0e HCI_EV_CODE_CMD_STATUS = 0x0f HCI_EV_CODE_LE_META_EVENT = 0x3e HCI_SUBEV_CODE_LE_ENHANCED_CONN_CMP", "OCF_LE_READ_PHY = 0x0030 OCF_LE_SET_DFLT_PHY = 0x0031 OCF_LE_SET_PHY = 0x0032 OGF_VENDOR_SPECIFIC", "self.ocf = ocf self.opcode = get_opcode(ogf, ocf) self.packet_len = len(data)", "0x003f BLE_HCI_OCF_VS_RD_STATIC_ADDR = 0x0001 PUBLIC_ADDRESS_TYPE = 0 STATIC_RANDOM_ADDRESS_TYPE = 1", "DEFINES ############ AF_BLUETOOTH = 31 HCI_CHANNEL_USER = 1 HCI_COMMAND_PACKET =", "num_completed_packets: int def __init__(self): self.set() def set(self, num_handles=0, connection_handle=0, num_completed_packets=0):", "channel_id: int data: bytearray ba_full_message: bytearray def __init__(self): self.set() def", "opcode self.return_parameters = return_parameters @dataclass class HCI_Ev_Cmd_Status: status: int num_hci_command_packets:", "= 0x0001 OCF_RESET = 0X0003 OGF_INFO_PARAM = 0x04 OCF_READ_LOCAL_COMMANDS =", "local_resolvable_private_address self.peer_resolvable_private_address = peer_resolvable_private_address self.connection_interval = connection_interval self.peripheral_latency = peripheral_latency", "int connection_interval_max: int max_latency: int supervision_timeout: int min_ce_length: int max_ce_length:", "the License is distributed on an # \"AS IS\" BASIS,", "= 0x0030 OCF_LE_SET_DFLT_PHY = 0x0031 OCF_LE_SET_PHY = 0x0032 OGF_VENDOR_SPECIFIC =", "\\ max_latency=0, supervision_timeout=0, min_ce_length=0, \\ max_ce_length=0): self.le_scan_interval = le_scan_interval self.le_scan_window", "supported_max_rx_time: int def __init__(self): self.set() def set(self, status=0, supported_max_tx_octets=0, 
supported_max_tx_time=0,", "supported_max_tx_octets=0, supported_max_tx_time=0, supported_max_rx_octets=0, supported_max_rx_time=0): self.status = status self.supported_max_tx_octets = supported_max_tx_octets", "STATIC_RANDOM_ADDRESS_TYPE = 1 WAIT_FOR_EVENT_TIMEOUT = 5 WAIT_FOR_EVENT_CONN_TIMEOUT = 25 ############", "current_event: None def __init__(self): self.set() def set(self,packet_type=0, ev_code=0, packet_len=0, recv_data=bytearray(256)):", "def __init__(self): self.set() def set(self, pdu_length=0, channel_id=0, data=b''): if not", "status = 0, num_hci_cmd_packets=0, opcode=0): self.status = status self.num_hci_command_packets =", "pb_flag self.bc_flag = bc_flag self.data_total_length = len(data) self.data = data", "def gen_static_rand_addr(): while True: x = [random.randint(0,1) for _ in", "= bytearray(struct.pack('<BHHBB',le_scan_type, le_scan_interval, le_scan_window, own_address_type, scanning_filter_policy)) @dataclass class HCI_Connect: le_scan_interval:", "self.connection_interval_min = connection_interval_min self.connection_interval_max = connection_interval_max self.max_latency = max_latency self.supervision_timeout", "suggested_max_tx_octets=0, suggested_max_tx_time=0): self.status = status self.suggested_max_tx_octets = suggested_max_tx_octets self.suggested_max_tx_time =", "HCI_EV_CODE_CMD_STATUS = 0x0f HCI_EV_CODE_LE_META_EVENT = 0x3e HCI_SUBEV_CODE_LE_ENHANCED_CONN_CMP = 0x0a HCI_SUBEV_CODE_LE_DATA_LEN_CHANGE", "set(self,packet_type=0, ev_code=0, packet_len=0, recv_data=bytearray(256)): super().set(packet_type) self.ev_code = ev_code self.packet_len =", "str): return unhexlify(\"\".join(addr_str.split(':')))[::-1] def ba_addr_to_str(addr_ba: bytearray): addr_str = addr_ba.hex().upper() return", "connection_handle: int pb_flag: int bc_flag: int data_total_length: int data: bytearray", "self.connection_handle = connection_handle self.tx_phy = tx_phy self.rx_phy = rx_phy @dataclass", "= channel_id self.data = data fmt_conf = \"<HH\" 
self.ba_full_message =", "self.set() def set(self, le_scan_type=0, le_scan_interval=0, le_scan_window=0, own_address_type=0, scanning_filter_policy=0): self.le_scan_type =", "int central_clock_accuracy: int def __init__(self): self.set() def set(self, subevent_code=0, status=0,", "packet_len: int recv_data: bytearray current_event: None def __init__(self): self.set() def", "__init__(self): self.set() def set(self, status=0, connection_handle=0, tx_phy=0, rx_phy=0): self.status =", "__init__(self): self.set() def set(self, packet_type=0, connection_handle=0, pb_flag=0, bc_flag=0, total_data_len=0, data=b''):", "None def __init__(self): self.set() def set(self,packet_type=0, ev_code=0, packet_len=0, recv_data=bytearray(256)): super().set(packet_type)", "le_scan_interval self.le_scan_window = le_scan_window self.own_address_type = own_address_type self.scanning_filter_policy = scanning_filter_policy", "pdu_length: self.pdu_length = len(data) else: self.pdu_length = pdu_length self.channel_id =", "(the # \"License\"); you may not use this file except", "12) | (self.bc_flag << 14)), self.data_total_length)) self.ba_full_message.extend(self.data) @dataclass class L2CAP_Data_Send:", "peer_address_type, advertising_channel_map, advertising_filter_policy)) peer_addr_ba = cmd_addr_to_ba(peer_address) self.ba_full_message[7:7] = peer_addr_ba @dataclass", "\"{0:0{1}x}\".format(addr_int, 12) addr = \":\".join(addr_hex[i:i+2] for i in range(0, len(addr_hex),", "rx_phy=0): self.status = status self.connection_handle = connection_handle self.tx_phy = tx_phy", "\":\".join(addr_hex[i:i+2] for i in range(0, len(addr_hex), 2)) return addr.upper() ############", "self.set() def set(self, status = 0, num_hci_cmd_packets=0, opcode=0): self.status =", "__init__(self): self.set() def set(self, le_scan_type=0, le_scan_interval=0, le_scan_window=0, own_address_type=0, scanning_filter_policy=0): self.le_scan_type", "self.set() def set(self, status=0, supported_max_tx_octets=0, 
supported_max_tx_time=0, supported_max_rx_octets=0, supported_max_rx_time=0): self.status =", "HCI_EV_NUM_COMP_PKTS = 0x13 CONN_FAILED_TO_BE_ESTABLISHED = 0x3e CONN_TIMEOUT = 0x08 OGF_HOST_CTL", "self.pb_flag = pb_flag self.bc_flag = bc_flag self.data_total_len = total_data_len self.data", "advertising_interval_min: int advertising_interval_max: int advertising_type: int own_address_type: int peer_address_type: int", "peer_address_type: int peer_address: str advertising_channel_map: int advertising_filter_policy: int ba_full_message: bytearray", "le_scan_window=0, \\ initiator_filter_policy=0, peer_address_type=0, \\ peer_address='00:00:00:00:00:00', own_address_type=0, \\ connection_interval_min=0, connection_interval_max=0,", "self.tx_phy = tx_phy self.rx_phy = rx_phy @dataclass class HCI_Number_Of_Completed_Packets: num_handles:", "with the License. You may obtain a copy of the", "under the License. # from dataclasses import dataclass import struct", "'00:00:00:00:00:00' static_addr = '00:00:00:00:00:00' le_read_buffer_size = None conn_handle = 0", "applicable law or agreed to in writing, # software distributed", "bc_flag=0b00, data=b''): self.packet_type = HCI_ACL_DATA_PACKET self.connection_handle = connection_handle self.pb_flag =", "def set(self, subevent_code=0, status=0, connection_handle=0, role=0, peer_address_type=0, peer_address='00:00:00:00:00:00', local_resolvable_private_address='00:00:00:00:00:00', peer_resolvable_private_address='00:00:00:00:00:00',", "initiator_filter_policy: int peer_address_type: int peer_address: str own_address_type: int connection_interval_min: int", "int ba_full_message: bytearray def __init__(self): self.set() def set(self, le_scan_type=0, le_scan_interval=0,", "is distributed on an # \"AS IS\" BASIS, WITHOUT WARRANTIES", "self.set() def set(self, pdu_length=0, channel_id=0, data=b''): self.pdu_length = pdu_length self.channel_id", "data=b''): self.packet_type = HCI_COMMAND_PACKET self.ogf = ogf self.ocf = ocf", "file # to 
you under the Apache License, Version 2.0", "= bytearray(struct.pack('<BHB', self.packet_type, self.opcode, self.packet_len)) self.ba_full_message.extend(self.data) @dataclass class HCI_ACL_Data_Send: packet_type:", "= packet_len self.recv_data = recv_data self.recv_data = recv_data[:packet_len] @dataclass class", "# with the License. You may obtain a copy of", "OCF_READ_BD_ADDR = 0x0009 OGF_LE_CTL = 0x08 OCF_LE_SET_EVENT_MASK = 0x0001 OCF_LE_READ_BUFFER_SIZE_V1", "status self.le_acl_data_packet_length = le_acl_data_packet_length self.total_num_le_acl_data_packets = total_num_le_acl_data_packets self.iso_data_packet_len = iso_data_packet_len", "str own_address_type: int connection_interval_min: int connection_interval_max: int max_latency: int supervision_timeout:", "supervision_timeout=0, min_ce_length=0, \\ max_ce_length=0): self.le_scan_interval = le_scan_interval self.le_scan_window = le_scan_window", "suggested_max_tx_time: int def __init__(self): self.set() def set(self, status=0, suggested_max_tx_octets=0, suggested_max_tx_time=0):", "under one # or more contributor license agreements. 
See the", "initiator_filter_policy=0, peer_address_type=0, \\ peer_address='00:00:00:00:00:00', own_address_type=0, \\ connection_interval_min=0, connection_interval_max=0, \\ max_latency=0,", "set(self, subevent_code=0): self.subevent_code = subevent_code @dataclass class HCI_Ev_LE_Enhanced_Connection_Complete(HCI_Ev_LE_Meta): status: int", "recv_data[:packet_len] @dataclass class HCI_Recv_ACL_Data_Packet(HCI_Receive): connection_handle: int pb_flag: int bc_flag: int", "x = [random.randint(0,1) for _ in range(0,48)] if 0 in", "connection_interval_max=0, \\ max_latency=0, supervision_timeout=0, min_ce_length=0, \\ max_ce_length=0): self.le_scan_interval = le_scan_interval", "int num_completed_packets: int def __init__(self): self.set() def set(self, num_handles=0, connection_handle=0,", "int scanning_filter_policy: int ba_full_message: bytearray def __init__(self): self.set() def set(self,", "def set(self, pdu_length=0, channel_id=0, data=b''): if not pdu_length: self.pdu_length =", "= 0 num_of_completed_packets_time = 0 ############ # FUNCTIONS ############ def", "self.set() def set(self, subevent_code=0, status=0, connection_handle=0, tx_phy=0, rx_phy=0): super().set(subevent_code) self.status", "get_opcode(ogf: int, ocf: int): return ((ocf & 0x03ff)|(ogf << 10))", "HCI_Ev_LE_Enhanced_Connection_Complete(HCI_Ev_LE_Meta): status: int connection_handle: int role: int peer_address_type: int peer_address:", "agreements. 
See the NOTICE file # distributed with this work", "self.connection_handle = connection_handle self.tx_phy = tx_phy self.rx_phy = rx_phy ############", "OCF_READ_LOCAL_COMMANDS = 0x0002 OCF_READ_BD_ADDR = 0x0009 OGF_LE_CTL = 0x08 OCF_LE_SET_EVENT_MASK", "peer_address_type self.peer_address = peer_address self.own_address_type = own_address_type self.connection_interval_min = connection_interval_min", "ocf = opcode & 0x03ff return ogf, ocf def cmd_addr_to_ba(addr_str:", "max_tx_octets=0, max_tx_time=0, max_rx_octets=0, max_rx_time=0, triggered=0): super().set(subevent_code) self.conn_handle = conn_handle self.max_tx_octets", "def set(self, status=0, supported_max_tx_octets=0, supported_max_tx_time=0, supported_max_rx_octets=0, supported_max_rx_time=0): self.status = status", "ba_full_message: bytearray def __init__(self): self.set() def set(self, pdu_length=0, channel_id=0, data=b''):", "int rx_phy: int def __init__(self): self.set() def set(self, subevent_code=0, status=0,", "0x08 OCF_LE_SET_EVENT_MASK = 0x0001 OCF_LE_READ_BUFFER_SIZE_V1 = 0x0002 OCF_LE_READ_BUFFER_SIZE_V2 = 0x0060", "= status self.num_hci_command_packets = num_hci_cmd_packets self.opcode = opcode @dataclass class", "self.peer_address = peer_address self.local_resolvable_private_address = local_resolvable_private_address self.peer_resolvable_private_address = peer_resolvable_private_address self.connection_interval", "peer_address: str own_address_type: int connection_interval_min: int connection_interval_max: int max_latency: int", "advertising_filter_policy: int ba_full_message: bytearray def __init__(self): self.set() def set(self, advertising_interval_min=0,", "License. 
You may obtain a copy of the License at", "int, ocf: int): return ((ocf & 0x03ff)|(ogf << 10)) def", "connection_handle: int algorithm: int def __init__(self): self.set() def set(self, subevent_code=0,", "= 0x000a OCF_LE_SET_SCAN_PARAMETERS = 0x000b OCF_LE_SET_SCAN_ENABLE = 0x000c OCF_LE_CREATE_CONN =", "None phy = None ev_num_comp_pkts = None num_of_completed_packets_cnt = 0", "= [random.randint(0,1) for _ in range(0,48)] if 0 in x[:-2]", "supported_max_tx_octets: int supported_max_tx_time: int supported_max_rx_octets: int supported_max_rx_time: int def __init__(self):", "num_of_completed_packets_time = 0 ############ # FUNCTIONS ############ def get_opcode(ogf: int,", "supported_max_rx_octets self.supported_max_rx_time = supported_max_rx_time @dataclass class LE_Read_Buffer_Size: status: int le_acl_data_packet_length:" ]
[ "call_rate_plot = hl.plot.histogram( mt_qc.sample_qc.call_rate, range=(0, 1), legend='Call rate' ) with", "get_screenshot_as_png from analysis_runner import output_path GNOMAD_HGDP_1KG_MT = ( 'gs://gcp-public-data--gnomad/release/3.1/mt/genomes/' 'gnomad.genomes.v3.1.hgdp_1kg_subset_dense.mt'", "hl.read_matrix_table(sample_qc_path) plot_filename = output_path('call_rate_plot.png', 'web') if rerun or not hl.hadoop_exists(plot_filename):", "range=(0, 1), legend='Call rate' ) with hl.hadoop_open(plot_filename, 'wb') as f:", ") @click.command() @click.option('--rerun', help='Whether to overwrite cached files', default=False) def", "hl.hadoop_exists(sample_qc_path): mt = hl.read_matrix_table(GNOMAD_HGDP_1KG_MT) mt = mt.head(100, n_cols=100) mt_qc =", "if rerun or not hl.hadoop_exists(sample_qc_path): mt = hl.read_matrix_table(GNOMAD_HGDP_1KG_MT) mt =", "output_path GNOMAD_HGDP_1KG_MT = ( 'gs://gcp-public-data--gnomad/release/3.1/mt/genomes/' 'gnomad.genomes.v3.1.hgdp_1kg_subset_dense.mt' ) @click.command() @click.option('--rerun', help='Whether", "hl.hadoop_exists(plot_filename): call_rate_plot = hl.plot.histogram( mt_qc.sample_qc.call_rate, range=(0, 1), legend='Call rate' )", "@click.option('--rerun', help='Whether to overwrite cached files', default=False) def query(rerun): \"\"\"Query", "default=False) def query(rerun): \"\"\"Query script entry point.\"\"\" hl.init(default_reference='GRCh38') sample_qc_path =", "= hl.read_matrix_table(GNOMAD_HGDP_1KG_MT) mt = mt.head(100, n_cols=100) mt_qc = hl.sample_qc(mt) mt_qc.write(sample_qc_path)", "= mt.head(100, n_cols=100) mt_qc = hl.sample_qc(mt) mt_qc.write(sample_qc_path) mt_qc = hl.read_matrix_table(sample_qc_path)", "'web') if rerun or not hl.hadoop_exists(plot_filename): call_rate_plot = hl.plot.histogram( mt_qc.sample_qc.call_rate,", "mt_qc.write(sample_qc_path) mt_qc = hl.read_matrix_table(sample_qc_path) plot_filename = output_path('call_rate_plot.png', 'web') if rerun", "hail as hl from bokeh.io.export import 
get_screenshot_as_png from analysis_runner import", "import hail as hl from bokeh.io.export import get_screenshot_as_png from analysis_runner", "or not hl.hadoop_exists(plot_filename): call_rate_plot = hl.plot.histogram( mt_qc.sample_qc.call_rate, range=(0, 1), legend='Call", "get_screenshot_as_png(call_rate_plot).save(f, format='PNG') if __name__ == '__main__': query() # pylint: disable=no-value-for-parameter", "plot_filename = output_path('call_rate_plot.png', 'web') if rerun or not hl.hadoop_exists(plot_filename): call_rate_plot", "mt_qc.sample_qc.call_rate, range=(0, 1), legend='Call rate' ) with hl.hadoop_open(plot_filename, 'wb') as", "@click.command() @click.option('--rerun', help='Whether to overwrite cached files', default=False) def query(rerun):", "not hl.hadoop_exists(sample_qc_path): mt = hl.read_matrix_table(GNOMAD_HGDP_1KG_MT) mt = mt.head(100, n_cols=100) mt_qc", "example.\"\"\" import click import hail as hl from bokeh.io.export import", "hl.read_matrix_table(GNOMAD_HGDP_1KG_MT) mt = mt.head(100, n_cols=100) mt_qc = hl.sample_qc(mt) mt_qc.write(sample_qc_path) mt_qc", "files', default=False) def query(rerun): \"\"\"Query script entry point.\"\"\" hl.init(default_reference='GRCh38') sample_qc_path", "'gnomad.genomes.v3.1.hgdp_1kg_subset_dense.mt' ) @click.command() @click.option('--rerun', help='Whether to overwrite cached files', default=False)", "query example.\"\"\" import click import hail as hl from bokeh.io.export", "rerun or not hl.hadoop_exists(sample_qc_path): mt = hl.read_matrix_table(GNOMAD_HGDP_1KG_MT) mt = mt.head(100,", "mt_qc = hl.sample_qc(mt) mt_qc.write(sample_qc_path) mt_qc = hl.read_matrix_table(sample_qc_path) plot_filename = output_path('call_rate_plot.png',", "with hl.hadoop_open(plot_filename, 'wb') as f: get_screenshot_as_png(call_rate_plot).save(f, format='PNG') if __name__ ==", "def query(rerun): \"\"\"Query script entry point.\"\"\" hl.init(default_reference='GRCh38') sample_qc_path = output_path('sample_qc.mt')", "= 
hl.read_matrix_table(sample_qc_path) plot_filename = output_path('call_rate_plot.png', 'web') if rerun or not", "hl.plot.histogram( mt_qc.sample_qc.call_rate, range=(0, 1), legend='Call rate' ) with hl.hadoop_open(plot_filename, 'wb')", "mt = mt.head(100, n_cols=100) mt_qc = hl.sample_qc(mt) mt_qc.write(sample_qc_path) mt_qc =", "'wb') as f: get_screenshot_as_png(call_rate_plot).save(f, format='PNG') if __name__ == '__main__': query()", "rate' ) with hl.hadoop_open(plot_filename, 'wb') as f: get_screenshot_as_png(call_rate_plot).save(f, format='PNG') if", "as f: get_screenshot_as_png(call_rate_plot).save(f, format='PNG') if __name__ == '__main__': query() #", "output_path('sample_qc.mt') if rerun or not hl.hadoop_exists(sample_qc_path): mt = hl.read_matrix_table(GNOMAD_HGDP_1KG_MT) mt", "hl.hadoop_open(plot_filename, 'wb') as f: get_screenshot_as_png(call_rate_plot).save(f, format='PNG') if __name__ == '__main__':", "= output_path('call_rate_plot.png', 'web') if rerun or not hl.hadoop_exists(plot_filename): call_rate_plot =", "output_path('call_rate_plot.png', 'web') if rerun or not hl.hadoop_exists(plot_filename): call_rate_plot = hl.plot.histogram(", "from bokeh.io.export import get_screenshot_as_png from analysis_runner import output_path GNOMAD_HGDP_1KG_MT =", "hl from bokeh.io.export import get_screenshot_as_png from analysis_runner import output_path GNOMAD_HGDP_1KG_MT", "overwrite cached files', default=False) def query(rerun): \"\"\"Query script entry point.\"\"\"", "point.\"\"\" hl.init(default_reference='GRCh38') sample_qc_path = output_path('sample_qc.mt') if rerun or not hl.hadoop_exists(sample_qc_path):", "mt_qc = hl.read_matrix_table(sample_qc_path) plot_filename = output_path('call_rate_plot.png', 'web') if rerun or", "to overwrite cached files', default=False) def query(rerun): \"\"\"Query script entry", "n_cols=100) mt_qc = hl.sample_qc(mt) mt_qc.write(sample_qc_path) mt_qc = hl.read_matrix_table(sample_qc_path) plot_filename =", "import 
get_screenshot_as_png from analysis_runner import output_path GNOMAD_HGDP_1KG_MT = ( 'gs://gcp-public-data--gnomad/release/3.1/mt/genomes/'", "= output_path('sample_qc.mt') if rerun or not hl.hadoop_exists(sample_qc_path): mt = hl.read_matrix_table(GNOMAD_HGDP_1KG_MT)", "import click import hail as hl from bokeh.io.export import get_screenshot_as_png", "legend='Call rate' ) with hl.hadoop_open(plot_filename, 'wb') as f: get_screenshot_as_png(call_rate_plot).save(f, format='PNG')", "( 'gs://gcp-public-data--gnomad/release/3.1/mt/genomes/' 'gnomad.genomes.v3.1.hgdp_1kg_subset_dense.mt' ) @click.command() @click.option('--rerun', help='Whether to overwrite cached", "hl.init(default_reference='GRCh38') sample_qc_path = output_path('sample_qc.mt') if rerun or not hl.hadoop_exists(sample_qc_path): mt", "as hl from bokeh.io.export import get_screenshot_as_png from analysis_runner import output_path", "or not hl.hadoop_exists(sample_qc_path): mt = hl.read_matrix_table(GNOMAD_HGDP_1KG_MT) mt = mt.head(100, n_cols=100)", "rerun or not hl.hadoop_exists(plot_filename): call_rate_plot = hl.plot.histogram( mt_qc.sample_qc.call_rate, range=(0, 1),", "= hl.plot.histogram( mt_qc.sample_qc.call_rate, range=(0, 1), legend='Call rate' ) with hl.hadoop_open(plot_filename,", "GNOMAD_HGDP_1KG_MT = ( 'gs://gcp-public-data--gnomad/release/3.1/mt/genomes/' 'gnomad.genomes.v3.1.hgdp_1kg_subset_dense.mt' ) @click.command() @click.option('--rerun', help='Whether to", "entry point.\"\"\" hl.init(default_reference='GRCh38') sample_qc_path = output_path('sample_qc.mt') if rerun or not", "\"\"\"Query script entry point.\"\"\" hl.init(default_reference='GRCh38') sample_qc_path = output_path('sample_qc.mt') if rerun", "import output_path GNOMAD_HGDP_1KG_MT = ( 'gs://gcp-public-data--gnomad/release/3.1/mt/genomes/' 'gnomad.genomes.v3.1.hgdp_1kg_subset_dense.mt' ) @click.command() @click.option('--rerun',", "from analysis_runner import output_path GNOMAD_HGDP_1KG_MT = ( 
'gs://gcp-public-data--gnomad/release/3.1/mt/genomes/' 'gnomad.genomes.v3.1.hgdp_1kg_subset_dense.mt' )", "mt = hl.read_matrix_table(GNOMAD_HGDP_1KG_MT) mt = mt.head(100, n_cols=100) mt_qc = hl.sample_qc(mt)", "analysis_runner import output_path GNOMAD_HGDP_1KG_MT = ( 'gs://gcp-public-data--gnomad/release/3.1/mt/genomes/' 'gnomad.genomes.v3.1.hgdp_1kg_subset_dense.mt' ) @click.command()", "Hail query example.\"\"\" import click import hail as hl from", "\"\"\"Simple Hail query example.\"\"\" import click import hail as hl", "= ( 'gs://gcp-public-data--gnomad/release/3.1/mt/genomes/' 'gnomad.genomes.v3.1.hgdp_1kg_subset_dense.mt' ) @click.command() @click.option('--rerun', help='Whether to overwrite", "sample_qc_path = output_path('sample_qc.mt') if rerun or not hl.hadoop_exists(sample_qc_path): mt =", "not hl.hadoop_exists(plot_filename): call_rate_plot = hl.plot.histogram( mt_qc.sample_qc.call_rate, range=(0, 1), legend='Call rate'", "query(rerun): \"\"\"Query script entry point.\"\"\" hl.init(default_reference='GRCh38') sample_qc_path = output_path('sample_qc.mt') if", "bokeh.io.export import get_screenshot_as_png from analysis_runner import output_path GNOMAD_HGDP_1KG_MT = (", "if rerun or not hl.hadoop_exists(plot_filename): call_rate_plot = hl.plot.histogram( mt_qc.sample_qc.call_rate, range=(0,", "hl.sample_qc(mt) mt_qc.write(sample_qc_path) mt_qc = hl.read_matrix_table(sample_qc_path) plot_filename = output_path('call_rate_plot.png', 'web') if", "f: get_screenshot_as_png(call_rate_plot).save(f, format='PNG') if __name__ == '__main__': query() # pylint:", "'gs://gcp-public-data--gnomad/release/3.1/mt/genomes/' 'gnomad.genomes.v3.1.hgdp_1kg_subset_dense.mt' ) @click.command() @click.option('--rerun', help='Whether to overwrite cached files',", "= hl.sample_qc(mt) mt_qc.write(sample_qc_path) mt_qc = hl.read_matrix_table(sample_qc_path) plot_filename = output_path('call_rate_plot.png', 'web')", "script entry point.\"\"\" hl.init(default_reference='GRCh38') 
sample_qc_path = output_path('sample_qc.mt') if rerun or", "cached files', default=False) def query(rerun): \"\"\"Query script entry point.\"\"\" hl.init(default_reference='GRCh38')", "mt.head(100, n_cols=100) mt_qc = hl.sample_qc(mt) mt_qc.write(sample_qc_path) mt_qc = hl.read_matrix_table(sample_qc_path) plot_filename", "help='Whether to overwrite cached files', default=False) def query(rerun): \"\"\"Query script", ") with hl.hadoop_open(plot_filename, 'wb') as f: get_screenshot_as_png(call_rate_plot).save(f, format='PNG') if __name__", "click import hail as hl from bokeh.io.export import get_screenshot_as_png from", "1), legend='Call rate' ) with hl.hadoop_open(plot_filename, 'wb') as f: get_screenshot_as_png(call_rate_plot).save(f," ]
[ "\"\"\" Operation Code Description 0x1001 GetDeviceInfo 0x1002 OpenSession 0x1003 CloseSession", "the reply is of type PtpIpInitCmdAck ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) if", "def __init__(self, data=None): self.cmdtype = struct.pack('I', 0x12) super(PtpIpEndDataPacket, self).__init__() if", "= PtpIpPacket().factory(data=self.recieve_data(session)) if isinstance(ptpip_packet_reply, PtpIpStartDataPacket): data_length = struct.unpack('I', ptpip_packet_reply.length)[0] ptpip_packet_reply", "the session id of the object if the reply is", "0x9805 GetObjectPropList \"\"\" def __init__(self, data=None, cmd=None, param1=None, param2=None, param3=None,", "None: self.args = self.args + struct.pack('L', self.param5) def data(self): return", "generieren self.transaction_id = struct.pack('I', 0x06) self.args = '' if self.param1", "ptip_cmd = self.cmd_queue.pop() ptpip_packet_reply = self.send_recieve_ptpip_packet(ptip_cmd, self.session) if (ptpip_packet_reply.ptp_response_code ==", "if data is not None: self.session_id = data[0:4] elif session_id", "Object WriteProtected 0x200E Store Read-Only 0x200F Access Denied 0x2010 No", "open socket: {message}\") return s def send_recieve_ptpip_packet(self, ptpip_packet, session): if", "if len(self.cmd_queue) == 0: # do a ping receive a", "data is not None: self.transaction_id = data[0:4] self.length = data[4:8]", "PtpIpEventAck(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\" def __init__(self, data=None): super(PtpIpEventAck, self).__init__() self.cmdtype", "0x2019): print(\"Cmd send successfully\") else: print(f\"cmd reply is: {ptpip_packet_reply.ptp_response_code}\") #", "0x09) super(PtpIpStartDataPacket, self).__init__() if data is not None: self.transaction_id =", "struct.pack('I', 0x05) class PtpIpCmdRequest(PtpIpPacket): \"\"\" Operation Code Description 0x1001 GetDeviceInfo", "= PtpIpPacket().factory(data=self.recieve_data(session)) data = ptpip_packet_reply.data while 
isinstance(ptpip_packet_reply, PtpIpDataPacket): data =", "= struct.unpack('I', data[0:4])[0] if self.cmdtype == 1: return PtpIpInitCmdReq(data[4:]) elif", "self.transaction_id = data[0:4] self.length = data[4:8] class PtpIpDataPacket(PtpIpPacket): \"\"\"docstring for", "= struct.pack('I', 0x02) if data is not None: self.session_id =", "ptpip_packet_reply.data ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) if data_length == len(data): self.object_queue.append(PtpIpDataObject(ptpip_packet.param1, data))", "event_code which consists of two bytes event_code = str(struct.unpack('H', data[offset:offset+2])[0])", "is None: self.cmdtype = None else: print(f\"Cmd Type: {struct.unpack('I', data[0:4])[0]}\")", "data=None): super(PtpIpCmdResponse, self).__init__() self.cmdtype = struct.pack('I', 0x07) if data is", "AfDriveCancel 0x9207 InitiateCaptureRecInMedia 0x9209 GetVendorStorageIDs 0x920A StartMovieRecInCard 0x920B EndMovieRec 0x920C", "of events passed from the data passed to the factory", "Invalid ObjectFormatCode 0x200C Store Full 0x200D Object WriteProtected 0x200E Store", "None self.cmd_queue = [] self.event_queue = [] self.object_queue = []", "return self.events class PtpIpDataObject(object): \"\"\"docstring for PtpIpDataObject\"\"\" def __init__(self, object_handle,", "PtpIpInitCmdAck(data[4:]) elif self.cmdtype == 3: return PtpIpEventReq(data[4:]) elif self.cmdtype ==", "class PtpIpInitCmdAck(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\" def __init__(self, data=None): super(PtpIpInitCmdAck, self).__init__()", "consists of two bytes event_code = str(struct.unpack('H', data[offset:offset+2])[0]) # get", "for Start_Data_Packet\"\"\" def __init__(self, data=None): self.cmdtype = struct.pack('I', 0x10) super(PtpIpDataPacket,", "to the factory amount_of_events = struct.unpack('H', data[0:2])[0] # set an", "socket import struct class PtpIpConnection(object): \"\"\"docstring for PtpIP\"\"\" def __init__(self):", "0x2002 General Error 
0x2003 Session Not Open 0x2004 Invalid TransactionID", "\"\"\"docstring for Start_Data_Packet\"\"\" def __init__(self, data=None): self.cmdtype = struct.pack('I', 0x09)", "a pong (same as ping) as reply to keep the", "0x201E Session Already Open 0x201F Transaction Cancelled 0x2020 Specification of", "Present 0x2011 SelfTest Failed 0x2012 Partial Deletion 0x2013 Store Not", "to keep the connection alive # couldnt get any reply", "self.args + struct.pack('L', self.param3) if self.param4 is not None: self.args", "Capture Already Terminated 0x2019 Device Busy 0x201A Invalid ParentObject 0x201B", "pair offset = offset + 6 counter = counter +", "and event_parameter pair offset = offset + 6 counter =", "+ 6 counter = counter + 1 def get_events(self): return", "data is not None: self.transaction_id = data[0:4] self.data = data[4:]", "a GetEvent request 0x90C7 \"\"\" def __init__(self, data): super(PtpIpEventFactory, self).__init__()", "= offset + 6 counter = counter + 1 def", "def data(self): return self.cmdtype + self.unkown + struct.pack('H', self.ptp_cmd) +", "get any reply onto a propper PtpIpPing packet so i", "status # of the device ptpip_packet_reply = self.send_recieve_ptpip_packet(PtpIpCmdRequest(cmd=0x90C8), self.session) if", "as the first two bytes are already processed counter =", "0x100B DeleteObject 0x100C SendObjectInfo 0x100D SendObject 0x100E InitiateCapture 0x100F FormatStore", "0x01) self.ptp_cmd = cmd self.param1 = param1 self.param2 = param2", "counter and an offset of 2 as the first two", "data=None): self.cmdtype = struct.pack('I', 0x10) super(PtpIpDataPacket, self).__init__() if data is", "class PtpIpEndDataPacket(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\" def __init__(self, data=None): self.cmdtype =", "ptpip_packet_reply = self.send_recieve_ptpip_packet(PtpIpCmdRequest(cmd=0x90C8), self.session) if isinstance(ptpip_packet_reply, PtpIpCmdResponse): time.sleep(1) continue else:", "PtpIpPacket(object): \"\"\"docstring for 
PtpIpCmd\"\"\" def __init__(self): super(PtpIpPacket, self).__init__() def factory(self,", "and an offset of 2 as the first two bytes", "event_parameter)) # increase the offset by 6 to get to", "None: self.args = self.args + struct.pack('L', self.param3) if self.param4 is", "isinstance(ptpip_packet_reply, PtpIpStartDataPacket): data_length = struct.unpack('I', ptpip_packet_reply.length)[0] ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) data", "super(PtpIpPacket, self).__init__() def factory(self, data=None): if data is None: self.cmdtype", "the object itself if it is not specified in the", "is not None: self.session_id = data[0:4] self.guid = data[4:20] self.hostname", "struct.pack('L', self.param2) if self.param3 is not None: self.args = self.args", "None: ptpip_packet.session_id = self.session_id self.send_data(ptpip_packet.data(), session) def send_data(self, data, session):", "data=None): self.cmdtype = struct.pack('I', 0x09) super(PtpIpStartDataPacket, self).__init__() if data is", "TerminateCapture 0x9400 GetPartialObjectHighSpeed 0x9407 SetTransferListLock 0x9408 GetTransferList 0x9409 NotifyFileAcquisitionStart 0x940A", "bytes event_code = str(struct.unpack('H', data[offset:offset+2])[0]) # get the event_parameter which", "of 2 as the first two bytes are already processed", "PtpIpPing packet so i am querying the status # of", "session id of the object itself if it is not", "recieve_data(self, session): data = session.recv(4) (data_length,) = struct.unpack('I', data) print(f\"Packet", "if self.param1 is not None: self.args = self.args + struct.pack('L',", "return data[4:] class PtpIpPacket(object): \"\"\"docstring for PtpIpCmd\"\"\" def __init__(self): super(PtpIpPacket,", "self).__init__() if data is not None: self.transaction_id = data[0:4] print(f\"transaction_id:", "super(PtpIpEvent, self).__init__() self.event_code = int(event_code) self.event_parameter = int(event_parameter) class PtpIpEventFactory(object):", "GetLargeThumb 
0x90C7 GetEvent 0x90C8 DeviceReady 0x90C9 SetPreWbData 0x90CA GetVendorPropCodes 0x90CB", "def __init__(self): super(PtpIpPacket, self).__init__() def factory(self, data=None): if data is", "self.session_id: return self.cmdtype + self.session_id return self.cmdtype class PtpIpEventAck(PtpIpPacket): \"\"\"docstring", "for PtpIpCmd\"\"\" def __init__(self): super(PtpIpPacket, self).__init__() def factory(self, data=None): if", "GetStorageInfo 0x1006 GetNumObjects 0x1007 GetObjectHandles 0x1008 GetObjectInfo 0x1009 GetObject 0x100A", "PtpIpDataObject(object): \"\"\"docstring for PtpIpDataObject\"\"\" def __init__(self, object_handle, data): super(PtpIpDataObject, self).__init__()", "StorageID 0x2009 Invalid ObjectHandle 0x200A DeviceProp Not Supported 0x200B Invalid", "data passed to the factory amount_of_events = struct.unpack('H', data[0:2])[0] #", "3: return PtpIpEventReq(data[4:]) elif self.cmdtype == 4: return PtpIpEventAck(data[4:]) elif", "== 0x1009: self.send_data(ptpip_packet.data(), session) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) if isinstance(ptpip_packet_reply, PtpIpStartDataPacket):", "isinstance(ptpip_packet, PtpIpEventReq): self.send_ptpip_event_req(ptpip_packet, session) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) elif isinstance(ptpip_packet, PtpIpCmdRequest)", "len(data): data += session.recv(data_length - len(data)) return data[4:] class PtpIpPacket(object):", "offset + 6 counter = counter + 1 def get_events(self):", "not None: self.transaction_id = data[0:4] class PtpIpEndDataPacket(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\"", "self.param4 is not None: self.args = self.args + struct.pack('L', self.param4)", "offset = offset + 6 counter = counter + 1", "struct.pack('I', 0x06) self.unkown = struct.pack('I', 0x01) self.ptp_cmd = cmd self.param1", "self.args + struct.pack('L', self.param2) if self.param3 is not None: self.args", "request 0x90C7 \"\"\" def __init__(self, 
data): super(PtpIpEventFactory, self).__init__() # create", "ResponseCode Description 0x2000 Undefined 0x2001 OK 0x2002 General Error 0x2003", "session, first one for for commands, second for events self.session", "data) print(f\"Packet length: {data_length}\") while (data_length) > len(data): data +=", "= '' if self.param1 is not None: self.args = self.args", "Session Not Open 0x2004 Invalid TransactionID 0x2005 Operation Not Supported", "socket.error as message: if s: s.close() print(f\"Could not open socket:", "of PtpIpEvent objects if it got passd a data reply", "class PtpIpPing(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\" def __init__(self, data=None): self.cmdtype =", "0x940B GetSpecificSizeObject 0x9801 GetObjectPropsSupported 0x9802 GetObjectPropDesc 0x9803 GetObjectPropValue 0x9805 GetObjectPropList", "= '' def data(self): return self.cmdtype class PtpIpEvent(object): \"\"\" EventCode", "0x1002 OpenSession 0x1003 CloseSession 0x1004 GetStorageIDs 0x1005 GetStorageInfo 0x1006 GetNumObjects", "self.session) self.session_events = self.connect(host=host, port=port) self.send_recieve_ptpip_packet(PtpIpEventReq(), self.session_events) # 0x1002 OpenSession", "pass class PtpIpInitCmdReq(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\" def __init__(self, data=None): super(PtpIpInitCmdReq,", "+ struct.pack('L', self.param1) if self.param2 is not None: self.args =", "import socket import struct class PtpIpConnection(object): \"\"\"docstring for PtpIP\"\"\" def", "self.guid = guid.bytes self.hostname = socket.gethostname() + '\\x00' self.hostname =", "0x9409 NotifyFileAcquisitionStart 0x940A NotifyFileAcquisitionEnd 0x940B GetSpecificSizeObject 0x9801 GetObjectPropsSupported 0x9802 GetObjectPropDesc", "the object if the reply is of type PtpIpInitCmdAck ptpip_packet_reply", "self.cmdtype = struct.pack('I', 0x02) if data is not None: self.session_id", "self.hostname.encode('utf-16-le') else: self.guid = data[0:16] self.hostname = data[16:0] def 
data(self):", "data=None): self.cmdtype = struct.pack('I', 0x12) super(PtpIpEndDataPacket, self).__init__() if data is", "= self.args + struct.pack('L', self.param1) if self.param2 is not None:", "while True: if len(self.cmd_queue) == 0: # do a ping", "data(self): return self.cmdtype class PtpIpEvent(object): \"\"\" EventCode Description 0x4001 CancelTransaction", "Store Read-Only 0x200F Access Denied 0x2010 No Thumbnail Present 0x2011", "is None: guid = uuid.uuid4() self.guid = guid.bytes self.hostname =", "ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) if isinstance(ptpip_packet_reply, PtpIpStartDataPacket): data_length = struct.unpack('I', ptpip_packet_reply.length)[0]", "0x9803 GetObjectPropValue 0x9805 GetObjectPropList \"\"\" def __init__(self, data=None, cmd=None, param1=None,", "param4=None, param5=None): super(PtpIpCmdRequest, self).__init__() self.cmdtype = struct.pack('I', 0x06) self.unkown =", "the data passed to the factory amount_of_events = struct.unpack('H', data[0:2])[0]", "def get_events(self): return self.events class PtpIpDataObject(object): \"\"\"docstring for PtpIpDataObject\"\"\" def", "struct.pack('I', 0x09) super(PtpIpStartDataPacket, self).__init__() if data is not None: self.transaction_id", "= data[4:8] class PtpIpDataPacket(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\" def __init__(self, data=None):", "self.param1 = param1 self.param2 = param2 self.param3 = param3 self.param4", "True: if len(self.cmd_queue) == 0: # do a ping receive", "super(PtpIpCmdResponse, self).__init__() self.cmdtype = struct.pack('I', 0x07) if data is not", "session) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) elif isinstance(ptpip_packet, PtpIpCmdRequest) and ptpip_packet.ptp_cmd ==", "data[0:4] self.guid = data[4:20] self.hostname = data[20:] class PtpIpEventReq(PtpIpPacket): \"\"\"docstring", "PtpIpPacket().factory(data=self.recieve_data(session)) if isinstance(ptpip_packet_reply, 
PtpIpInitCmdAck): self.session_id = ptpip_packet_reply.session_id elif isinstance(ptpip_packet, PtpIpEventReq):", "def send_data(self, data, session): session.send(struct.pack('I', len(data) + 4) + data)", "self.unkown + struct.pack('H', self.ptp_cmd) + \\ self.transaction_id + self.args class", "DeleteObject 0x100C SendObjectInfo 0x100D SendObject 0x100E InitiateCapture 0x100F FormatStore 0x1014", "ptpip_packet_reply.length)[0] ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) data = ptpip_packet_reply.data while isinstance(ptpip_packet_reply, PtpIpDataPacket):", "param1 self.param2 = param2 self.param3 = param3 self.param4 = param4", "Unsupported 0x2015 No Valid ObjectInfo 0x2016 Invalid Code Format 0x2017", "passed from the data passed to the factory amount_of_events =", "data): super(PtpIpEventFactory, self).__init__() # create an empty array for the", "session): if isinstance(ptpip_packet, PtpIpInitCmdReq): self.send_data(ptpip_packet.data(), session) # set the session", "0x2005 Operation Not Supported 0x2006 Parameter Not Supported 0x2007 Incomplete", "which consists of 4 bytes event_parameter = str(struct.unpack('I', data[offset+2:offset+6])[0]) self.events.append(PtpIpEvent(event_code,", "= param5 # Todo: Transaction ID generieren self.transaction_id = struct.pack('I',", "struct.pack('I', 0x04) class PtpIpInitFail(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\" def __init__(self, data=None):", "def __init__(self, data=None): self.cmdtype = struct.pack('I', 0x09) super(PtpIpStartDataPacket, self).__init__() if", "self).__init__() self.cmdtype = struct.pack('I', 0x04) class PtpIpInitFail(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\"", "len(self.cmd_queue) == 0: # do a ping receive a pong", "= PtpIpPacket().factory(data=self.recieve_data(session)) return ptpip_packet_reply def send_ptpip_event_req(self, ptpip_packet, session): # add", "self.param1 is not None: self.args = self.args + struct.pack('L', self.param1)", 
"GetDevicePropDesc 0x1015 GetDevicePropValue 0x1016 SetDevicePropValue 0x101B GetPartialObject 0x90C0 InitiateCaptureRecInSdram 0x90C1", "1) s.connect((host, port)) except socket.error as message: if s: s.close()", "self.cmdtype = struct.unpack('I', data[0:4])[0] if self.cmdtype == 1: return PtpIpInitCmdReq(data[4:])", "= PtpIpPacket().factory(data=self.recieve_data(session)) elif isinstance(ptpip_packet, PtpIpCmdRequest) and ptpip_packet.ptp_cmd == 0x1009: self.send_data(ptpip_packet.data(),", "9: return PtpIpStartDataPacket(data[4:]) elif self.cmdtype == 10: return PtpIpDataPacket(data[4:]) elif", "receive a pong (same as ping) as reply to keep", "4: return PtpIpEventAck(data[4:]) elif self.cmdtype == 5: return PtpIpInitFail(data[4:]) elif", "session) # set the session id of the object if", "= data[0:4] self.length = data[4:8] class PtpIpDataPacket(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\"", "PtpIpInitCmdReq(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\" def __init__(self, data=None): super(PtpIpInitCmdReq, self).__init__() self.cmdtype", "__init__(self, data=None): super(PtpIpInitCmdReq, self).__init__() self.cmdtype = struct.pack('I', 0x01) self.version =", "PtpIpEvent objects if it got passd a data reply from", "packets are processed/send to the camera time.sleep(1) pass def send_ptpip_cmd(self,", "+= session.recv(data_length - len(data)) return data[4:] class PtpIpPacket(object): \"\"\"docstring for", "PtpIpEventReq(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\" def __init__(self, data=None, session_id=None): super(PtpIpEventReq, self).__init__()", "class PtpIpStartDataPacket(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\" def __init__(self, data=None): self.cmdtype =", "self.ptp_cmd = cmd self.param1 = param1 self.param2 = param2 self.param3", "self.param2 is not None: self.args = self.args + struct.pack('L', self.param2)", "Parameter 0x201E Session Already Open 0x201F Transaction Cancelled 0x2020 Specification", "data) 
def recieve_data(self, session): data = session.recv(4) (data_length,) = struct.unpack('I',", "NotifyFileAcquisitionEnd 0x940B GetSpecificSizeObject 0x9801 GetObjectPropsSupported 0x9802 GetObjectPropDesc 0x9803 GetObjectPropValue 0x9805", "Format 0x201C Invalid DeviceProp Value 0x201D Invalid Parameter 0x201E Session", "self.cmdtype == 12: return PtpIpEndDataPacket(data[4:]) elif self.cmdtype == 13: return", "Store Full 0x200D Object WriteProtected 0x200E Store Read-Only 0x200F Access", "port=15740): try: s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) s.connect((host,", "self.args + struct.pack('L', self.param4) if self.param5 is not None: self.args", "if isinstance(ptpip_packet_reply, PtpIpStartDataPacket): data_length = struct.unpack('I', ptpip_packet_reply.length)[0] ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session))", "elif self.cmdtype == 13: return PtpIpPing(data[4:]) def data(self): pass class", "== 0x90C7: self.send_data(ptpip_packet.data(), session) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) if isinstance(ptpip_packet_reply, PtpIpStartDataPacket):", "0x400C StorageInfoChanged 0x400D CaptureComplete 0xC101 ObjectAddedInSdram 0xC102 CaptureCompleteRecInSdram 0xC105 RecordingInterrupted", "as message: if s: s.close() print(f\"Could not open socket: {message}\")", "len(data): self.object_queue.append(PtpIpDataObject(ptpip_packet.param1, data)) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) else: self.send_data(ptpip_packet.data(), session) ptpip_packet_reply", "GetObjectHandles 0x1008 GetObjectInfo 0x1009 GetObject 0x100A GetThumb 0x100B DeleteObject 0x100C", "== 0: # do a ping receive a pong (same", "Invalid Code Format 0x2017 Unknown Vendor Code 0x2018 Capture Already", "socket.SOCK_STREAM) s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) s.connect((host, port)) except socket.error as message:", "= 
struct.pack('I', 0x10) super(PtpIpDataPacket, self).__init__() if data is not None:", "get to the next event_code and event_parameter pair offset =", "data is not None: self.transaction_id = data[0:4] print(f\"transaction_id: {struct.unpack('I', self.transaction_id)[0]}\")", "\"\"\" def __init__(self, data=None, cmd=None, param1=None, param2=None, param3=None, param4=None, param5=None):", "+ self.hostname + self.version class PtpIpInitCmdAck(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\" def", "SetPicCtrlData 0x90CE DeleteCustomPicCtrl 0x90CF GetPicCtrlCapability 0x9201 StartLiveView 0x9202 EndLiveView 0x9203", "GetDeviceInfo 0x1002 OpenSession 0x1003 CloseSession 0x1004 GetStorageIDs 0x1005 GetStorageInfo 0x1006", "type PtpIpInitCmdAck ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) if isinstance(ptpip_packet_reply, PtpIpInitCmdAck): self.session_id =", "Already Terminated 0x2019 Device Busy 0x201A Invalid ParentObject 0x201B Invalid", "0x07) if data is not None: self.ptp_response_code = struct.unpack('H', data[0:2])[0]", "Start_Data_Packet\"\"\" def __init__(self, data=None): self.cmdtype = struct.pack('I', 0x13) super(PtpIpPing, self).__init__()", "self.send_data(ptpip_packet.data(), session) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) if isinstance(ptpip_packet_reply, PtpIpStartDataPacket): data_length =", "None: guid = uuid.uuid4() self.guid = guid.bytes self.hostname = socket.gethostname()", "= self.send_recieve_ptpip_packet(PtpIpCmdRequest(cmd=0x90C8), self.session) if isinstance(ptpip_packet_reply, PtpIpCmdResponse): time.sleep(1) continue else: #", "get the amount of events passed from the data passed", "def communication_thread(self): while True: if len(self.cmd_queue) == 0: # do", "None self.session_id = None self.cmd_queue = [] self.event_queue = []", "PtpIpInitCmd\"\"\" def __init__(self, data=None): super(PtpIpEventAck, self).__init__() self.cmdtype = struct.pack('I', 0x04)", "0x90C8 
DeviceReady 0x90C9 SetPreWbData 0x90CA GetVendorPropCodes 0x90CB AfAndCaptureRecInSdram 0x90CC GetPicCtrlData", "self.param5 is not None: self.args = self.args + struct.pack('L', self.param5)", "the amount of events passed from the data passed to", "object itself if it is not specified in the package", "\"\"\" def __init__(self, data=None): super(PtpIpCmdResponse, self).__init__() self.cmdtype = struct.pack('I', 0x07)", "bytes are already processed counter = 1 offset = 2", "\"\"\" def __init__(self, data): super(PtpIpEventFactory, self).__init__() # create an empty", "EventCode Description 0x4001 CancelTransaction 0x4002 ObjectAdded 0x4003 ObjectRemoved 0x4004 StoreAdded", "and \\ ptpip_packet_reply.ptp_response_code == 0x2019): print(\"Cmd send successfully\") else: print(f\"cmd", "for PtpIpInitCmd\"\"\" def __init__(self, data=None): super(PtpIpInitFail, self).__init__() self.cmdtype = struct.pack('I',", "host='192.168.1.1', port=15740): try: s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)", "= counter + 1 def get_events(self): return self.events class PtpIpDataObject(object):", "Code Description 0x1001 GetDeviceInfo 0x1002 OpenSession 0x1003 CloseSession 0x1004 GetStorageIDs", "struct class PtpIpConnection(object): \"\"\"docstring for PtpIP\"\"\" def __init__(self): super(PtpIpConnection, self).__init__()", "propper PtpIpPing packet so i am querying the status #", "for commands, second for events self.session = self.connect(host=host, port=port) self.send_recieve_ptpip_packet(PtpIpInitCmdReq(),", "self.session_id = data[0:4] self.guid = data[4:20] self.hostname = data[20:] class", "a propper PtpIpPing packet so i am querying the status", "self.cmdtype + self.guid + self.hostname + self.version class PtpIpInitCmdAck(PtpIpPacket): \"\"\"docstring", "0x2018 Capture Already Terminated 0x2019 Device Busy 0x201A Invalid ParentObject", "param2 self.param3 = param3 self.param4 = param4 self.param5 = param5", 
"\"\"\"docstring for PtpIpInitCmd\"\"\" def __init__(self, data=None): super(PtpIpInitCmdReq, self).__init__() self.cmdtype =", "is not None: self.args = self.args + struct.pack('L', self.param1) if", "cmd self.param1 = param1 self.param2 = param2 self.param3 = param3", "an empty array for the PtpIpEvent object which will be", "s def send_recieve_ptpip_packet(self, ptpip_packet, session): if isinstance(ptpip_packet, PtpIpInitCmdReq): self.send_data(ptpip_packet.data(), session)", "ptpip_packet.ptp_cmd == 0x1009: self.send_data(ptpip_packet.data(), session) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) if isinstance(ptpip_packet_reply,", "Open both session, first one for for commands, second for", "data_length == len(data): events = PtpIpEventFactory(data).get_events() for event in events:", "return PtpIpStartDataPacket(data[4:]) elif self.cmdtype == 10: return PtpIpDataPacket(data[4:]) elif self.cmdtype", "\"\"\"docstring for Start_Data_Packet\"\"\" def __init__(self, data=None): self.cmdtype = struct.pack('I', 0x10)", "Invalid DeviceProp Value 0x201D Invalid Parameter 0x201E Session Already Open", "struct.pack('I', 0x10) super(PtpIpDataPacket, self).__init__() if data is not None: self.transaction_id", "self.session_events = None self.session_id = None self.cmd_queue = [] self.event_queue", "data=None, session_id=None): super(PtpIpEventReq, self).__init__() self.cmdtype = struct.pack('I', 0x03) self.session_id =", "not None: self.data = '' def data(self): return self.cmdtype class", "as reply to keep the connection alive # couldnt get", "0x2016 Invalid Code Format 0x2017 Unknown Vendor Code 0x2018 Capture", "def data(self): return self.cmdtype + self.guid + self.hostname + self.version", "1 def get_events(self): return self.events class PtpIpDataObject(object): \"\"\"docstring for PtpIpDataObject\"\"\"", "is a factory to produce an array of PtpIpEvent objects", "Format Unsupported 0x2015 No Valid ObjectInfo 0x2016 Invalid Code 
Format", "class PtpIpCmdResponse(PtpIpPacket): \"\"\" ResponseCode Description 0x2000 Undefined 0x2001 OK 0x2002", "0x100D SendObject 0x100E InitiateCapture 0x100F FormatStore 0x1014 GetDevicePropDesc 0x1015 GetDevicePropValue", "two bytes are already processed counter = 1 offset =", "def send_recieve_ptpip_packet(self, ptpip_packet, session): if isinstance(ptpip_packet, PtpIpInitCmdReq): self.send_data(ptpip_packet.data(), session) #", "0x200F Access Denied 0x2010 No Thumbnail Present 0x2011 SelfTest Failed", "== 4: return PtpIpEventAck(data[4:]) elif self.cmdtype == 5: return PtpIpInitFail(data[4:])", "import time import socket import struct class PtpIpConnection(object): \"\"\"docstring for", "0x1007 GetObjectHandles 0x1008 GetObjectInfo 0x1009 GetObject 0x100A GetThumb 0x100B DeleteObject", "self.session_events) # 0x1002 OpenSession ptip_cmd = PtpIpCmdRequest(cmd=0x1002, param1=struct.unpack('L', self.session_id)[0]) self.send_recieve_ptpip_packet(ptip_cmd,", "event_parameter pair offset = offset + 6 counter = counter", "0x1002 OpenSession ptip_cmd = PtpIpCmdRequest(cmd=0x1002, param1=struct.unpack('L', self.session_id)[0]) self.send_recieve_ptpip_packet(ptip_cmd, self.session) def", "= data[0:16] self.hostname = data[16:0] def data(self): return self.cmdtype +", "events self.session = self.connect(host=host, port=port) self.send_recieve_ptpip_packet(PtpIpInitCmdReq(), self.session) self.session_events = self.connect(host=host,", "EndLiveView 0x9203 GetLiveViewImage 0x9204 MfDrive 0x9205 ChangeAfArea 0x9206 AfDriveCancel 0x9207", "0x2012 Partial Deletion 0x2013 Store Not Available 0x2014 Specification By", "CloseSession 0x1004 GetStorageIDs 0x1005 GetStorageInfo 0x1006 GetNumObjects 0x1007 GetObjectHandles 0x1008", "an array of PtpIpEvent objects if it got passd a", "= [] self.object_queue = [] def open(self, host='192.168.1.1', port=15740): #", "Invalid DeviceProp Format 0x201C Invalid DeviceProp Value 0x201D Invalid Parameter", "ObjectHandle 0x200A DeviceProp 
Not Supported 0x200B Invalid ObjectFormatCode 0x200C Store", "+ struct.pack('L', self.param3) if self.param4 is not None: self.args =", "0x2001 OK 0x2002 General Error 0x2003 Session Not Open 0x2004", "return PtpIpCmdRequest(data[4:]) elif self.cmdtype == 7: return PtpIpCmdResponse(data[4:]) elif self.cmdtype", "= struct.unpack('H', data[0:2])[0] self.transaction_id = data[2:6] self.args = data[6:] class", "PtpIpInitCmd\"\"\" def __init__(self, data=None): super(PtpIpInitCmdReq, self).__init__() self.cmdtype = struct.pack('I', 0x01)", "# create an empty array for the PtpIpEvent object which", "if self.param3 is not None: self.args = self.args + struct.pack('L',", "= int(event_parameter) class PtpIpEventFactory(object): \"\"\" This is a factory to", "class PtpIpEventReq(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\" def __init__(self, data=None, session_id=None): super(PtpIpEventReq,", "StoreFull 0x400C StorageInfoChanged 0x400D CaptureComplete 0xC101 ObjectAddedInSdram 0xC102 CaptureCompleteRecInSdram 0xC105", "elif self.cmdtype == 9: return PtpIpStartDataPacket(data[4:]) elif self.cmdtype == 10:", "\"\"\"docstring for PtpIP\"\"\" def __init__(self): super(PtpIpConnection, self).__init__() self.session = None", "data[0:4] print(f\"transaction_id: {struct.unpack('I', self.transaction_id)[0]}\") self.data = data[4:] class PtpIpPing(PtpIpPacket): \"\"\"docstring", "self).__init__() if data is not None: self.transaction_id = data[0:4] self.length", "__init__(self, data=None): self.cmdtype = struct.pack('I', 0x12) super(PtpIpEndDataPacket, self).__init__() if data", "self.cmdtype = struct.pack('I', 0x12) super(PtpIpEndDataPacket, self).__init__() if data is not", "0x1004 GetStorageIDs 0x1005 GetStorageInfo 0x1006 GetNumObjects 0x1007 GetObjectHandles 0x1008 GetObjectInfo", "Start_Data_Packet\"\"\" def __init__(self, data=None): self.cmdtype = struct.pack('I', 0x11) super(PtpIpCancelTransaction, self).__init__()", "def data(self): pass class 
PtpIpInitCmdReq(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\" def __init__(self,", "create an empty array for the PtpIpEvent object which will", "object which will be replied self.events = [] # get", "__init__(self, data=None): super(PtpIpInitFail, self).__init__() self.cmdtype = struct.pack('I', 0x05) class PtpIpCmdRequest(PtpIpPacket):", "self.cmdtype == 13: return PtpIpPing(data[4:]) def data(self): pass class PtpIpInitCmdReq(PtpIpPacket):", "== 9: return PtpIpStartDataPacket(data[4:]) elif self.cmdtype == 10: return PtpIpDataPacket(data[4:])", "str(struct.unpack('I', data[offset+2:offset+6])[0]) self.events.append(PtpIpEvent(event_code, event_parameter)) # increase the offset by 6", "0x1005 GetStorageInfo 0x1006 GetNumObjects 0x1007 GetObjectHandles 0x1008 GetObjectInfo 0x1009 GetObject", "__init__(self, data=None): super(PtpIpEventAck, self).__init__() self.cmdtype = struct.pack('I', 0x04) class PtpIpInitFail(PtpIpPacket):", "not open socket: {message}\") return s def send_recieve_ptpip_packet(self, ptpip_packet, session):", "\"\"\"docstring for Start_Data_Packet\"\"\" def __init__(self, data=None): self.cmdtype = struct.pack('I', 0x12)", "def factory(self, data=None): if data is None: self.cmdtype = None", "already processed counter = 1 offset = 2 while counter", "self.transaction_id = data[0:4] class PtpIpEndDataPacket(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\" def __init__(self,", "GetPartialObjectHighSpeed 0x9407 SetTransferListLock 0x9408 GetTransferList 0x9409 NotifyFileAcquisitionStart 0x940A NotifyFileAcquisitionEnd 0x940B", "SetPreWbData 0x90CA GetVendorPropCodes 0x90CB AfAndCaptureRecInSdram 0x90CC GetPicCtrlData 0x90CD SetPicCtrlData 0x90CE", "+ ptpip_packet_reply.data ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) if data_length == len(data): self.object_queue.append(PtpIpDataObject(ptpip_packet.param1,", "0x0100) if data is None: guid = uuid.uuid4() self.guid =", "isinstance(ptpip_packet_reply, 
PtpIpInitCmdAck): self.session_id = ptpip_packet_reply.session_id elif isinstance(ptpip_packet, PtpIpEventReq): self.send_ptpip_event_req(ptpip_packet, session)", "for events self.session = self.connect(host=host, port=port) self.send_recieve_ptpip_packet(PtpIpInitCmdReq(), self.session) self.session_events =", "ptpip_packet, session): if isinstance(ptpip_packet, PtpIpInitCmdReq): self.send_data(ptpip_packet.data(), session) # set the", "message: if s: s.close() print(f\"Could not open socket: {message}\") return", "= param1 self.param2 = param2 self.param3 = param3 self.param4 =", "__init__(self, data=None): self.cmdtype = struct.pack('I', 0x10) super(PtpIpDataPacket, self).__init__() if data", "event_parameter which consists of 4 bytes event_parameter = str(struct.unpack('I', data[offset+2:offset+6])[0])", "data = ptpip_packet_reply.data while isinstance(ptpip_packet_reply, PtpIpDataPacket): data = data +", "\"\"\" def __init__(self, event_code, event_parameter): super(PtpIpEvent, self).__init__() self.event_code = int(event_code)", "else: self.send_data(ptpip_packet.data(), session) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) return ptpip_packet_reply def send_ptpip_event_req(self,", "is not None: self.args = self.args + struct.pack('L', self.param3) if", "Busy 0x201A Invalid ParentObject 0x201B Invalid DeviceProp Format 0x201C Invalid", "PtpIpEventReq): self.send_ptpip_event_req(ptpip_packet, session) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) elif isinstance(ptpip_packet, PtpIpCmdRequest) and", "class PtpIpInitCmdReq(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\" def __init__(self, data=None): super(PtpIpInitCmdReq, self).__init__()", "return PtpIpInitCmdReq(data[4:]) elif self.cmdtype == 2: return PtpIpInitCmdAck(data[4:]) elif self.cmdtype", "which consists of two bytes event_code = str(struct.unpack('H', data[offset:offset+2])[0]) #", "0xC102 CaptureCompleteRecInSdram 0xC105 
RecordingInterrupted \"\"\" def __init__(self, event_code, event_parameter): super(PtpIpEvent,", "\"\"\"docstring for PtpIpDataObject\"\"\" def __init__(self, object_handle, data): super(PtpIpDataObject, self).__init__() self.object_handle", "class PtpIpEventFactory(object): \"\"\" This is a factory to produce an", "No Valid ObjectInfo 0x2016 Invalid Code Format 0x2017 Unknown Vendor", "{message}\") return s def send_recieve_ptpip_packet(self, ptpip_packet, session): if isinstance(ptpip_packet, PtpIpInitCmdReq):", "4 bytes event_parameter = str(struct.unpack('I', data[offset+2:offset+6])[0]) self.events.append(PtpIpEvent(event_code, event_parameter)) # increase", "0x9203 GetLiveViewImage 0x9204 MfDrive 0x9205 ChangeAfArea 0x9206 AfDriveCancel 0x9207 InitiateCaptureRecInMedia", "= self.args + struct.pack('L', self.param2) if self.param3 is not None:", "not None: self.session_id = session_id def data(self): if self.session_id: return", "GetObjectPropDesc 0x9803 GetObjectPropValue 0x9805 GetObjectPropList \"\"\" def __init__(self, data=None, cmd=None,", "0x920B EndMovieRec 0x920C TerminateCapture 0x9400 GetPartialObjectHighSpeed 0x9407 SetTransferListLock 0x9408 GetTransferList", "class PtpIpCancelTransaction(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\" def __init__(self, data=None): self.cmdtype =", "else: print(f\"Cmd Type: {struct.unpack('I', data[0:4])[0]}\") self.cmdtype = struct.unpack('I', data[0:4])[0] if", "PtpIpInitCmd\"\"\" def __init__(self, data=None): super(PtpIpInitCmdAck, self).__init__() self.cmdtype = struct.pack('I', 0x02)", "send successfully\") else: print(f\"cmd reply is: {ptpip_packet_reply.ptp_response_code}\") # wait 1", "specified in the package if ptpip_packet.session_id is None: ptpip_packet.session_id =", "StartLiveView 0x9202 EndLiveView 0x9203 GetLiveViewImage 0x9204 MfDrive 0x9205 ChangeAfArea 0x9206", "send_ptpip_cmd(self, ptpip_packet): self.cmd_queue.append(ptpip_packet) def connect(self, host='192.168.1.1', port=15740): 
try: s =", "2: return PtpIpInitCmdAck(data[4:]) elif self.cmdtype == 3: return PtpIpEventReq(data[4:]) elif", "super(PtpIpCancelTransaction, self).__init__() if data is not None: self.transaction_id = data[0:4]", "objects if it got passd a data reply from a", "= 2 while counter <= amount_of_events: # get the event_code", "do a ping receive a pong (same as ping) as", "Supported 0x200B Invalid ObjectFormatCode 0x200C Store Full 0x200D Object WriteProtected", "ptpip_packet.ptp_cmd == 0x90C7: self.send_data(ptpip_packet.data(), session) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) if isinstance(ptpip_packet_reply,", "self.hostname = data[16:0] def data(self): return self.cmdtype + self.guid +", "0x2006 Parameter Not Supported 0x2007 Incomplete Transfer 0x2008 Invalid StorageID", "struct.pack('I', 0x03) self.session_id = None if data is not None:", "self.version = struct.pack('>I', 0x0100) if data is None: guid =", "self.cmdtype == 9: return PtpIpStartDataPacket(data[4:]) elif self.cmdtype == 10: return", "= data[0:4] self.guid = data[4:20] self.hostname = data[20:] class PtpIpEventReq(PtpIpPacket):", "0x2014 Specification By Format Unsupported 0x2015 No Valid ObjectInfo 0x2016", "StorageInfoChanged 0x400D CaptureComplete 0xC101 ObjectAddedInSdram 0xC102 CaptureCompleteRecInSdram 0xC105 RecordingInterrupted \"\"\"", "PtpIpInitCmdAck ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) if isinstance(ptpip_packet_reply, PtpIpInitCmdAck): self.session_id = ptpip_packet_reply.session_id", "Start_Data_Packet\"\"\" def __init__(self, data=None): self.cmdtype = struct.pack('I', 0x09) super(PtpIpStartDataPacket, self).__init__()", "self.cmdtype + self.unkown + struct.pack('H', self.ptp_cmd) + \\ self.transaction_id +", "self.version class PtpIpInitCmdAck(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\" def __init__(self, data=None): super(PtpIpInitCmdAck,", "0x400A StoreFull 0x400C StorageInfoChanged 0x400D 
CaptureComplete 0xC101 ObjectAddedInSdram 0xC102 CaptureCompleteRecInSdram", "else: # get the next command from command the queue", "Operation Not Supported 0x2006 Parameter Not Supported 0x2007 Incomplete Transfer", "0x90CE DeleteCustomPicCtrl 0x90CF GetPicCtrlCapability 0x9201 StartLiveView 0x9202 EndLiveView 0x9203 GetLiveViewImage", "self).__init__() self.cmdtype = struct.pack('I', 0x03) self.session_id = None if data", "two bytes event_code = str(struct.unpack('H', data[offset:offset+2])[0]) # get the event_parameter", "DeviceProp Not Supported 0x200B Invalid ObjectFormatCode 0x200C Store Full 0x200D", "\\ self.transaction_id + self.args class PtpIpCmdResponse(PtpIpPacket): \"\"\" ResponseCode Description 0x2000", "isinstance(ptpip_packet, PtpIpCmdRequest) and ptpip_packet.ptp_cmd == 0x1009: self.send_data(ptpip_packet.data(), session) ptpip_packet_reply =", "GetObjectPropsSupported 0x9802 GetObjectPropDesc 0x9803 GetObjectPropValue 0x9805 GetObjectPropList \"\"\" def __init__(self,", "param1=struct.unpack('L', self.session_id)[0]) self.send_recieve_ptpip_packet(ptip_cmd, self.session) def communication_thread(self): while True: if len(self.cmd_queue)", "object_handle, data): super(PtpIpDataObject, self).__init__() self.object_handle = object_handle self.data = data", "+ 4) + data) def recieve_data(self, session): data = session.recv(4)", "host='192.168.1.1', port=15740): # Open both session, first one for for", "data[0:4] self.length = data[4:8] class PtpIpDataPacket(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\" def", "Failed 0x2012 Partial Deletion 0x2013 Store Not Available 0x2014 Specification", "self.param2) if self.param3 is not None: self.args = self.args +", "for for commands, second for events self.session = self.connect(host=host, port=port)", "PtpIpStartDataPacket(data[4:]) elif self.cmdtype == 10: return PtpIpDataPacket(data[4:]) elif self.cmdtype ==", "+ struct.pack('L', self.param4) if self.param5 is not None: self.args =", 
"self.cmdtype class PtpIpEvent(object): \"\"\" EventCode Description 0x4001 CancelTransaction 0x4002 ObjectAdded", "0x90C7 GetEvent 0x90C8 DeviceReady 0x90C9 SetPreWbData 0x90CA GetVendorPropCodes 0x90CB AfAndCaptureRecInSdram", "Not Available 0x2014 Specification By Format Unsupported 0x2015 No Valid", "def connect(self, host='192.168.1.1', port=15740): try: s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.setsockopt(socket.SOL_SOCKET,", "self.args = self.args + struct.pack('L', self.param3) if self.param4 is not", "communication_thread(self): while True: if len(self.cmd_queue) == 0: # do a", "PtpIpDataObject\"\"\" def __init__(self, object_handle, data): super(PtpIpDataObject, self).__init__() self.object_handle = object_handle", "0x01) self.version = struct.pack('>I', 0x0100) if data is None: guid", "def __init__(self, data=None): super(PtpIpInitCmdReq, self).__init__() self.cmdtype = struct.pack('I', 0x01) self.version", "0x90CD SetPicCtrlData 0x90CE DeleteCustomPicCtrl 0x90CF GetPicCtrlCapability 0x9201 StartLiveView 0x9202 EndLiveView", "pong (same as ping) as reply to keep the connection", "self.transaction_id = data[2:6] self.args = data[6:] class PtpIpStartDataPacket(PtpIpPacket): \"\"\"docstring for", "Not Supported 0x2007 Incomplete Transfer 0x2008 Invalid StorageID 0x2009 Invalid", "0x2003 Session Not Open 0x2004 Invalid TransactionID 0x2005 Operation Not", "Supported 0x2006 Parameter Not Supported 0x2007 Incomplete Transfer 0x2008 Invalid", "super(PtpIpDataPacket, self).__init__() if data is not None: self.transaction_id = data[0:4]", "in events: self.event_queue.append(event) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) elif isinstance(ptpip_packet, PtpIpCmdRequest) and", "GetPartialObject 0x90C0 InitiateCaptureRecInSdram 0x90C1 AfDrive 0x90C2 ChangeCameraMode 0x90C3 DeleteImagesInSdram 0x90C4", "ParentObject 0x201B Invalid DeviceProp Format 0x201C Invalid DeviceProp Value 0x201D", "factory(self, data=None): if 
data is None: self.cmdtype = None else:", "PtpIpInitCmd\"\"\" def __init__(self, data=None): super(PtpIpInitFail, self).__init__() self.cmdtype = struct.pack('I', 0x05)", "super(PtpIpInitFail, self).__init__() self.cmdtype = struct.pack('I', 0x05) class PtpIpCmdRequest(PtpIpPacket): \"\"\" Operation", "OpenSession 0x1003 CloseSession 0x1004 GetStorageIDs 0x1005 GetStorageInfo 0x1006 GetNumObjects 0x1007", "def __init__(self, data=None): super(PtpIpInitFail, self).__init__() self.cmdtype = struct.pack('I', 0x05) class", "self.send_recieve_ptpip_packet(PtpIpInitCmdReq(), self.session) self.session_events = self.connect(host=host, port=port) self.send_recieve_ptpip_packet(PtpIpEventReq(), self.session_events) # 0x1002", "for PtpIP\"\"\" def __init__(self): super(PtpIpConnection, self).__init__() self.session = None self.session_events", "0x2004 Invalid TransactionID 0x2005 Operation Not Supported 0x2006 Parameter Not", "is not specified in the package if ptpip_packet.session_id is None:", "0x2013 Store Not Available 0x2014 Specification By Format Unsupported 0x2015", "counter <= amount_of_events: # get the event_code which consists of", "0x90CB AfAndCaptureRecInSdram 0x90CC GetPicCtrlData 0x90CD SetPicCtrlData 0x90CE DeleteCustomPicCtrl 0x90CF GetPicCtrlCapability", "__init__(self): super(PtpIpPacket, self).__init__() def factory(self, data=None): if data is None:", "elif self.cmdtype == 3: return PtpIpEventReq(data[4:]) elif self.cmdtype == 4:", "self.send_recieve_ptpip_packet(ptip_cmd, self.session) def communication_thread(self): while True: if len(self.cmd_queue) == 0:", "= int(event_code) self.event_parameter = int(event_parameter) class PtpIpEventFactory(object): \"\"\" This is", "0x101B GetPartialObject 0x90C0 InitiateCaptureRecInSdram 0x90C1 AfDrive 0x90C2 ChangeCameraMode 0x90C3 DeleteImagesInSdram", "reply from a GetEvent request 0x90C7 \"\"\" def __init__(self, data):", "self.args = data[6:] class PtpIpStartDataPacket(PtpIpPacket): \"\"\"docstring for 
Start_Data_Packet\"\"\" def __init__(self,", "= self.args + struct.pack('L', self.param3) if self.param4 is not None:", "session): # add the session id of the object itself", "+ struct.pack('L', self.param5) def data(self): return self.cmdtype + self.unkown +", "Valid ObjectInfo 0x2016 Invalid Code Format 0x2017 Unknown Vendor Code", "struct.pack('I', 0x01) self.version = struct.pack('>I', 0x0100) if data is None:", "data is not None: self.transaction_id = data[0:4] class PtpIpEndDataPacket(PtpIpPacket): \"\"\"docstring", "device ptpip_packet_reply = self.send_recieve_ptpip_packet(PtpIpCmdRequest(cmd=0x90C8), self.session) if isinstance(ptpip_packet_reply, PtpIpCmdResponse): time.sleep(1) continue", "if self.cmdtype == 1: return PtpIpInitCmdReq(data[4:]) elif self.cmdtype == 2:", "amount_of_events = struct.unpack('H', data[0:2])[0] # set an counter and an", "self.args = self.args + struct.pack('L', self.param2) if self.param3 is not", "not specified in the package if ptpip_packet.session_id is None: ptpip_packet.session_id", "0x940A NotifyFileAcquisitionEnd 0x940B GetSpecificSizeObject 0x9801 GetObjectPropsSupported 0x9802 GetObjectPropDesc 0x9803 GetObjectPropValue", "self.args = self.args + struct.pack('L', self.param5) def data(self): return self.cmdtype", "= data[0:4] class PtpIpEndDataPacket(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\" def __init__(self, data=None):", "PtpIpEvent object which will be replied self.events = [] #", "Not Supported 0x2006 Parameter Not Supported 0x2007 Incomplete Transfer 0x2008", "Device Busy 0x201A Invalid ParentObject 0x201B Invalid DeviceProp Format 0x201C", "increase the offset by 6 to get to the next", "PtpIpEventReq(data[4:]) elif self.cmdtype == 4: return PtpIpEventAck(data[4:]) elif self.cmdtype ==", "= self.args + struct.pack('L', self.param4) if self.param5 is not None:", "self.args = self.args + struct.pack('L', self.param1) if self.param2 is not", "s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) 
s.connect((host, port)) except socket.error as message: if", "self.events.append(PtpIpEvent(event_code, event_parameter)) # increase the offset by 6 to get", "am querying the status # of the device ptpip_packet_reply =", "= param2 self.param3 = param3 self.param4 = param4 self.param5 =", "self.transaction_id = data[0:4] print(f\"transaction_id: {struct.unpack('I', self.transaction_id)[0]}\") self.data = data[4:] class", "PtpIpPacket().factory(data=self.recieve_data(session)) elif isinstance(ptpip_packet, PtpIpCmdRequest) and ptpip_packet.ptp_cmd == 0x1009: self.send_data(ptpip_packet.data(), session)", "= None self.session_events = None self.session_id = None self.cmd_queue =", "PtpIpPacket().factory(data=self.recieve_data(session)) if isinstance(ptpip_packet_reply, PtpIpStartDataPacket): data_length = struct.unpack('I', ptpip_packet_reply.length)[0] ptpip_packet_reply =", "super(PtpIpInitCmdReq, self).__init__() self.cmdtype = struct.pack('I', 0x01) self.version = struct.pack('>I', 0x0100)", "set the session id of the object if the reply", "# add the session id of the object itself if", "self.param4 = param4 self.param5 = param5 # Todo: Transaction ID", "passd a data reply from a GetEvent request 0x90C7 \"\"\"", "2 while counter <= amount_of_events: # get the event_code which", "= uuid.uuid4() self.guid = guid.bytes self.hostname = socket.gethostname() + '\\x00'", "self.cmdtype = None else: print(f\"Cmd Type: {struct.unpack('I', data[0:4])[0]}\") self.cmdtype =", "GetTransferList 0x9409 NotifyFileAcquisitionStart 0x940A NotifyFileAcquisitionEnd 0x940B GetSpecificSizeObject 0x9801 GetObjectPropsSupported 0x9802", "def __init__(self, data=None): super(PtpIpCmdResponse, self).__init__() self.cmdtype = struct.pack('I', 0x07) if", "def send_ptpip_event_req(self, ptpip_packet, session): # add the session id of", "PtpIpCmdRequest(cmd=0x1002, param1=struct.unpack('L', self.session_id)[0]) self.send_recieve_ptpip_packet(ptip_cmd, self.session) def 
communication_thread(self): while True: if", "= [] def open(self, host='192.168.1.1', port=15740): # Open both session,", "self.object_queue.append(PtpIpDataObject(ptpip_packet.param1, data)) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) else: self.send_data(ptpip_packet.data(), session) ptpip_packet_reply =", "= struct.pack('I', 0x12) super(PtpIpEndDataPacket, self).__init__() if data is not None:", "so i am querying the status # of the device", "ptpip_packet_reply.data ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) if data_length == len(data): events =", "object if the reply is of type PtpIpInitCmdAck ptpip_packet_reply =", "self.param4) if self.param5 is not None: self.args = self.args +", "return ptpip_packet_reply def send_ptpip_event_req(self, ptpip_packet, session): # add the session", "data[20:] class PtpIpEventReq(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\" def __init__(self, data=None, session_id=None):", "an offset of 2 as the first two bytes are", "self.events = [] # get the amount of events passed", "if the reply is of type PtpIpInitCmdAck ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session))", "self.cmdtype == 4: return PtpIpEventAck(data[4:]) elif self.cmdtype == 5: return", "if it got passd a data reply from a GetEvent", "PtpIpEventFactory(object): \"\"\" This is a factory to produce an array", "factory to produce an array of PtpIpEvent objects if it", "ptpip_packet_reply.session_id elif isinstance(ptpip_packet, PtpIpEventReq): self.send_ptpip_event_req(ptpip_packet, session) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) elif", "10: return PtpIpDataPacket(data[4:]) elif self.cmdtype == 12: return PtpIpEndDataPacket(data[4:]) elif", "= struct.pack('I', 0x01) self.version = struct.pack('>I', 0x0100) if data is", "struct.pack('L', self.param1) if self.param2 is not None: self.args = self.args", "SetDevicePropValue 0x101B 
GetPartialObject 0x90C0 InitiateCaptureRecInSdram 0x90C1 AfDrive 0x90C2 ChangeCameraMode 0x90C3", "4) + data) def recieve_data(self, session): data = session.recv(4) (data_length,)", "0x2011 SelfTest Failed 0x2012 Partial Deletion 0x2013 Store Not Available", "if data is not None: self.transaction_id = data[0:4] self.length =", "Store Not Available 0x2014 Specification By Format Unsupported 0x2015 No", "+ data) def recieve_data(self, session): data = session.recv(4) (data_length,) =", "None: self.transaction_id = data[0:4] class PtpIpEndDataPacket(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\" def", "continue else: # get the next command from command the", "param3=None, param4=None, param5=None): super(PtpIpCmdRequest, self).__init__() self.cmdtype = struct.pack('I', 0x06) self.unkown", "data[0:16] self.hostname = data[16:0] def data(self): return self.cmdtype + self.guid", "PtpIpEventAck(data[4:]) elif self.cmdtype == 5: return PtpIpInitFail(data[4:]) elif self.cmdtype ==", "12: return PtpIpEndDataPacket(data[4:]) elif self.cmdtype == 13: return PtpIpPing(data[4:]) def", "EndMovieRec 0x920C TerminateCapture 0x9400 GetPartialObjectHighSpeed 0x9407 SetTransferListLock 0x9408 GetTransferList 0x9409", "= struct.pack('I', 0x13) super(PtpIpPing, self).__init__() if data is not None:", "struct.pack('H', self.ptp_cmd) + \\ self.transaction_id + self.args class PtpIpCmdResponse(PtpIpPacket): \"\"\"", "self.session_id)[0]) self.send_recieve_ptpip_packet(ptip_cmd, self.session) def communication_thread(self): while True: if len(self.cmd_queue) ==", "No Thumbnail Present 0x2011 SelfTest Failed 0x2012 Partial Deletion 0x2013", "counter + 1 def get_events(self): return self.events class PtpIpDataObject(object): \"\"\"docstring", "elif isinstance(ptpip_packet, PtpIpEventReq): self.send_ptpip_event_req(ptpip_packet, session) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) elif isinstance(ptpip_packet,", "return self.cmdtype + self.guid + 
self.hostname + self.version class PtpIpInitCmdAck(PtpIpPacket):", "if isinstance(ptpip_packet_reply, PtpIpCmdResponse): time.sleep(1) continue else: # get the next", "ptpip_packet, session): # add the session id of the object", "replied self.events = [] # get the amount of events", "len(data)) return data[4:] class PtpIpPacket(object): \"\"\"docstring for PtpIpCmd\"\"\" def __init__(self):", "Transaction Cancelled 0x2020 Specification of Destination Unsupported \"\"\" def __init__(self,", "self.cmdtype = struct.pack('I', 0x04) class PtpIpInitFail(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\" def", "0x2009 Invalid ObjectHandle 0x200A DeviceProp Not Supported 0x200B Invalid ObjectFormatCode", "class PtpIpEvent(object): \"\"\" EventCode Description 0x4001 CancelTransaction 0x4002 ObjectAdded 0x4003", "the next command from command the queue ptip_cmd = self.cmd_queue.pop()", "0x11) super(PtpIpCancelTransaction, self).__init__() if data is not None: self.transaction_id =", "DeviceProp Format 0x201C Invalid DeviceProp Value 0x201D Invalid Parameter 0x201E", "PtpIpEvent(object): \"\"\" EventCode Description 0x4001 CancelTransaction 0x4002 ObjectAdded 0x4003 ObjectRemoved", "event_code, event_parameter): super(PtpIpEvent, self).__init__() self.event_code = int(event_code) self.event_parameter = int(event_parameter)", "offset by 6 to get to the next event_code and", "Partial Deletion 0x2013 Store Not Available 0x2014 Specification By Format", "self.ptp_cmd) + \\ self.transaction_id + self.args class PtpIpCmdResponse(PtpIpPacket): \"\"\" ResponseCode", "0x13) super(PtpIpPing, self).__init__() if data is not None: self.data =", "PtpIpInitCmdReq(data[4:]) elif self.cmdtype == 2: return PtpIpInitCmdAck(data[4:]) elif self.cmdtype ==", "= str(struct.unpack('H', data[offset:offset+2])[0]) # get the event_parameter which consists of", "param2=None, param3=None, param4=None, param5=None): super(PtpIpCmdRequest, self).__init__() self.cmdtype = struct.pack('I', 0x06)", 
"Parameter Not Supported 0x2007 Incomplete Transfer 0x2008 Invalid StorageID 0x2009", "if data_length == len(data): events = PtpIpEventFactory(data).get_events() for event in", "self.cmdtype class PtpIpEventAck(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\" def __init__(self, data=None): super(PtpIpEventAck,", "data=None): super(PtpIpInitCmdAck, self).__init__() self.cmdtype = struct.pack('I', 0x02) if data is", "PtpIP\"\"\" def __init__(self): super(PtpIpConnection, self).__init__() self.session = None self.session_events =", "print(\"Cmd send successfully\") else: print(f\"cmd reply is: {ptpip_packet_reply.ptp_response_code}\") # wait", "return self.cmdtype + self.session_id return self.cmdtype class PtpIpEventAck(PtpIpPacket): \"\"\"docstring for", "data is not None: self.session_id = data[0:4] elif session_id is", "= session_id def data(self): if self.session_id: return self.cmdtype + self.session_id", "def __init__(self, data=None): super(PtpIpInitCmdAck, self).__init__() self.cmdtype = struct.pack('I', 0x02) if", "self.cmdtype == 1: return PtpIpInitCmdReq(data[4:]) elif self.cmdtype == 2: return", "uuid.uuid4() self.guid = guid.bytes self.hostname = socket.gethostname() + '\\x00' self.hostname", "0x1014 GetDevicePropDesc 0x1015 GetDevicePropValue 0x1016 SetDevicePropValue 0x101B GetPartialObject 0x90C0 InitiateCaptureRecInSdram", "for PtpIpInitCmd\"\"\" def __init__(self, data=None, session_id=None): super(PtpIpEventReq, self).__init__() self.cmdtype =", "0x2007 Incomplete Transfer 0x2008 Invalid StorageID 0x2009 Invalid ObjectHandle 0x200A", "Not Supported 0x200B Invalid ObjectFormatCode 0x200C Store Full 0x200D Object", "if isinstance(ptpip_packet_reply, PtpIpInitCmdAck): self.session_id = ptpip_packet_reply.session_id elif isinstance(ptpip_packet, PtpIpEventReq): self.send_ptpip_event_req(ptpip_packet,", "# couldnt get any reply onto a propper PtpIpPing packet", "data[0:4] elif session_id is not None: self.session_id = session_id def", 
"self.transaction_id = struct.pack('I', 0x06) self.args = '' if self.param1 is", "self.hostname = data[20:] class PtpIpEventReq(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\" def __init__(self,", "0x9202 EndLiveView 0x9203 GetLiveViewImage 0x9204 MfDrive 0x9205 ChangeAfArea 0x9206 AfDriveCancel", "port=port) self.send_recieve_ptpip_packet(PtpIpInitCmdReq(), self.session) self.session_events = self.connect(host=host, port=port) self.send_recieve_ptpip_packet(PtpIpEventReq(), self.session_events) #", "self.param1) if self.param2 is not None: self.args = self.args +", "print(f\"transaction_id: {struct.unpack('I', self.transaction_id)[0]}\") self.data = data[4:] class PtpIpPing(PtpIpPacket): \"\"\"docstring for", "0x4009 RequestObjectTransfer 0x400A StoreFull 0x400C StorageInfoChanged 0x400D CaptureComplete 0xC101 ObjectAddedInSdram", "self.transaction_id + self.args class PtpIpCmdResponse(PtpIpPacket): \"\"\" ResponseCode Description 0x2000 Undefined", "add the session id of the object itself if it", "= self.connect(host=host, port=port) self.send_recieve_ptpip_packet(PtpIpEventReq(), self.session_events) # 0x1002 OpenSession ptip_cmd =", "got passd a data reply from a GetEvent request 0x90C7", "0x2001 and \\ ptpip_packet_reply.ptp_response_code == 0x2019): print(\"Cmd send successfully\") else:", "return s def send_recieve_ptpip_packet(self, ptpip_packet, session): if isinstance(ptpip_packet, PtpIpInitCmdReq): self.send_data(ptpip_packet.data(),", "0x2008 Invalid StorageID 0x2009 Invalid ObjectHandle 0x200A DeviceProp Not Supported", "Specification of Destination Unsupported \"\"\" def __init__(self, data=None): super(PtpIpCmdResponse, self).__init__()", "\"\"\"docstring for PtpIpCmd\"\"\" def __init__(self): super(PtpIpPacket, self).__init__() def factory(self, data=None):", "{struct.unpack('I', self.transaction_id)[0]}\") self.data = data[4:] class PtpIpPing(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\"", "is not None: self.transaction_id = 
data[0:4] self.data = data[4:] class", "self.session_id = None self.cmd_queue = [] self.event_queue = [] self.object_queue", "CaptureComplete 0xC101 ObjectAddedInSdram 0xC102 CaptureCompleteRecInSdram 0xC105 RecordingInterrupted \"\"\" def __init__(self,", "s.close() print(f\"Could not open socket: {message}\") return s def send_recieve_ptpip_packet(self,", "PtpIpCmdResponse(data[4:]) elif self.cmdtype == 9: return PtpIpStartDataPacket(data[4:]) elif self.cmdtype ==", "self.cmdtype == 6: return PtpIpCmdRequest(data[4:]) elif self.cmdtype == 7: return", "if s: s.close() print(f\"Could not open socket: {message}\") return s", "Read-Only 0x200F Access Denied 0x2010 No Thumbnail Present 0x2011 SelfTest", "if self.session_id: return self.cmdtype + self.session_id return self.cmdtype class PtpIpEventAck(PtpIpPacket):", "# get the event_code which consists of two bytes event_code", "command from command the queue ptip_cmd = self.cmd_queue.pop() ptpip_packet_reply =", "self.cmdtype == 5: return PtpIpInitFail(data[4:]) elif self.cmdtype == 6: return", "0x90C7 \"\"\" def __init__(self, data): super(PtpIpEventFactory, self).__init__() # create an", "InitiateCapture 0x100F FormatStore 0x1014 GetDevicePropDesc 0x1015 GetDevicePropValue 0x1016 SetDevicePropValue 0x101B", "\"\"\"docstring for Start_Data_Packet\"\"\" def __init__(self, data=None): self.cmdtype = struct.pack('I', 0x11)", "By Format Unsupported 0x2015 No Valid ObjectInfo 0x2016 Invalid Code", "0x04) class PtpIpInitFail(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\" def __init__(self, data=None): super(PtpIpInitFail,", "Supported 0x2007 Incomplete Transfer 0x2008 Invalid StorageID 0x2009 Invalid ObjectHandle", "Specification By Format Unsupported 0x2015 No Valid ObjectInfo 0x2016 Invalid", "return PtpIpInitFail(data[4:]) elif self.cmdtype == 6: return PtpIpCmdRequest(data[4:]) elif self.cmdtype", "struct.unpack('H', data[0:2])[0] # set an counter and an offset of", "ping receive a pong (same as ping) as reply 
to", "GetPicCtrlCapability 0x9201 StartLiveView 0x9202 EndLiveView 0x9203 GetLiveViewImage 0x9204 MfDrive 0x9205", "wait 1 second before new packets are processed/send to the", "== 12: return PtpIpEndDataPacket(data[4:]) elif self.cmdtype == 13: return PtpIpPing(data[4:])", "super(PtpIpStartDataPacket, self).__init__() if data is not None: self.transaction_id = data[0:4]", "PtpIpPing(data[4:]) def data(self): pass class PtpIpInitCmdReq(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\" def", "= data[2:6] self.args = data[6:] class PtpIpStartDataPacket(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\"", "length: {data_length}\") while (data_length) > len(data): data += session.recv(data_length -", "'' if self.param1 is not None: self.args = self.args +", "struct.pack('I', 0x06) self.args = '' if self.param1 is not None:", "[] self.object_queue = [] def open(self, host='192.168.1.1', port=15740): # Open", "isinstance(ptpip_packet_reply, PtpIpCmdResponse): time.sleep(1) continue else: # get the next command", "querying the status # of the device ptpip_packet_reply = self.send_recieve_ptpip_packet(PtpIpCmdRequest(cmd=0x90C8),", "self.send_recieve_ptpip_packet(ptip_cmd, self.session) if (ptpip_packet_reply.ptp_response_code == 0x2001 and \\ ptpip_packet_reply.ptp_response_code ==", "0x4008 DeviceInfoChanged 0x4009 RequestObjectTransfer 0x400A StoreFull 0x400C StorageInfoChanged 0x400D CaptureComplete", "self.cmd_queue = [] self.event_queue = [] self.object_queue = [] def", "0x201B Invalid DeviceProp Format 0x201C Invalid DeviceProp Value 0x201D Invalid", "for Start_Data_Packet\"\"\" def __init__(self, data=None): self.cmdtype = struct.pack('I', 0x09) super(PtpIpStartDataPacket,", "ptip_cmd = PtpIpCmdRequest(cmd=0x1002, param1=struct.unpack('L', self.session_id)[0]) self.send_recieve_ptpip_packet(ptip_cmd, self.session) def communication_thread(self): while", "the status # of the device ptpip_packet_reply = 
self.send_recieve_ptpip_packet(PtpIpCmdRequest(cmd=0x90C8), self.session)", "0x10) super(PtpIpDataPacket, self).__init__() if data is not None: self.transaction_id =", "for event in events: self.event_queue.append(event) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) elif isinstance(ptpip_packet,", "struct.pack('>I', 0x0100) if data is None: guid = uuid.uuid4() self.guid", "connection alive # couldnt get any reply onto a propper", "0x9209 GetVendorStorageIDs 0x920A StartMovieRecInCard 0x920B EndMovieRec 0x920C TerminateCapture 0x9400 GetPartialObjectHighSpeed", "PtpIpDataPacket(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\" def __init__(self, data=None): self.cmdtype = struct.pack('I',", "== 3: return PtpIpEventReq(data[4:]) elif self.cmdtype == 4: return PtpIpEventAck(data[4:])", "data is None: guid = uuid.uuid4() self.guid = guid.bytes self.hostname", "PtpIpEndDataPacket(data[4:]) elif self.cmdtype == 13: return PtpIpPing(data[4:]) def data(self): pass", "next command from command the queue ptip_cmd = self.cmd_queue.pop() ptpip_packet_reply", "s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) s.connect((host, port)) except", "a data reply from a GetEvent request 0x90C7 \"\"\" def", "super(PtpIpInitCmdAck, self).__init__() self.cmdtype = struct.pack('I', 0x02) if data is not", "= struct.pack('I', 0x09) super(PtpIpStartDataPacket, self).__init__() if data is not None:", "the event_code which consists of two bytes event_code = str(struct.unpack('H',", "time import socket import struct class PtpIpConnection(object): \"\"\"docstring for PtpIP\"\"\"", "PtpIpConnection(object): \"\"\"docstring for PtpIP\"\"\" def __init__(self): super(PtpIpConnection, self).__init__() self.session =", "except socket.error as message: if s: s.close() print(f\"Could not open", "consists of 4 bytes event_parameter = str(struct.unpack('I', data[offset+2:offset+6])[0]) 
self.events.append(PtpIpEvent(event_code, event_parameter))", "= struct.pack('I', 0x03) self.session_id = None if data is not", "any reply onto a propper PtpIpPing packet so i am", "0x9205 ChangeAfArea 0x9206 AfDriveCancel 0x9207 InitiateCaptureRecInMedia 0x9209 GetVendorStorageIDs 0x920A StartMovieRecInCard", "__init__(self, data=None): self.cmdtype = struct.pack('I', 0x11) super(PtpIpCancelTransaction, self).__init__() if data", "Already Open 0x201F Transaction Cancelled 0x2020 Specification of Destination Unsupported", "connect(self, host='192.168.1.1', port=15740): try: s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE,", "6 to get to the next event_code and event_parameter pair", "next event_code and event_parameter pair offset = offset + 6", "i am querying the status # of the device ptpip_packet_reply", "= self.cmd_queue.pop() ptpip_packet_reply = self.send_recieve_ptpip_packet(ptip_cmd, self.session) if (ptpip_packet_reply.ptp_response_code == 0x2001", "ObjectAdded 0x4003 ObjectRemoved 0x4004 StoreAdded 0x4005 StoreRemoved 0x4006 DevicePropChanged 0x4007", "events passed from the data passed to the factory amount_of_events", "0x90CA GetVendorPropCodes 0x90CB AfAndCaptureRecInSdram 0x90CC GetPicCtrlData 0x90CD SetPicCtrlData 0x90CE DeleteCustomPicCtrl", "= struct.unpack('H', data[0:2])[0] # set an counter and an offset", "self.args = self.args + struct.pack('L', self.param4) if self.param5 is not", "return PtpIpEndDataPacket(data[4:]) elif self.cmdtype == 13: return PtpIpPing(data[4:]) def data(self):", "import uuid import time import socket import struct class PtpIpConnection(object):", "self.cmdtype = struct.pack('I', 0x06) self.unkown = struct.pack('I', 0x01) self.ptp_cmd =", "0x90CC GetPicCtrlData 0x90CD SetPicCtrlData 0x90CE DeleteCustomPicCtrl 0x90CF GetPicCtrlCapability 0x9201 StartLiveView", "None: self.transaction_id = data[0:4] self.data = data[4:] class PtpIpCancelTransaction(PtpIpPacket): 
\"\"\"docstring", "data[4:20] self.hostname = data[20:] class PtpIpEventReq(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\" def", "class PtpIpPacket(object): \"\"\"docstring for PtpIpCmd\"\"\" def __init__(self): super(PtpIpPacket, self).__init__() def", "data[4:] class PtpIpPing(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\" def __init__(self, data=None): self.cmdtype", "self.hostname = socket.gethostname() + '\\x00' self.hostname = self.hostname.encode('utf-16-le') else: self.guid", "isinstance(ptpip_packet, PtpIpCmdRequest) and ptpip_packet.ptp_cmd == 0x90C7: self.send_data(ptpip_packet.data(), session) ptpip_packet_reply =", "elif self.cmdtype == 6: return PtpIpCmdRequest(data[4:]) elif self.cmdtype == 7:", "to the next event_code and event_parameter pair offset = offset", "5: return PtpIpInitFail(data[4:]) elif self.cmdtype == 6: return PtpIpCmdRequest(data[4:]) elif", "self.param3 = param3 self.param4 = param4 self.param5 = param5 #", "PtpIpInitFail(data[4:]) elif self.cmdtype == 6: return PtpIpCmdRequest(data[4:]) elif self.cmdtype ==", "is not None: self.session_id = data[0:4] elif session_id is not", "data is not None: self.session_id = data[0:4] self.guid = data[4:20]", "self.cmdtype == 10: return PtpIpDataPacket(data[4:]) elif self.cmdtype == 12: return", "processed/send to the camera time.sleep(1) pass def send_ptpip_cmd(self, ptpip_packet): self.cmd_queue.append(ptpip_packet)", "= str(struct.unpack('I', data[offset+2:offset+6])[0]) self.events.append(PtpIpEvent(event_code, event_parameter)) # increase the offset by", "0x200E Store Read-Only 0x200F Access Denied 0x2010 No Thumbnail Present", "General Error 0x2003 Session Not Open 0x2004 Invalid TransactionID 0x2005", "DeviceInfoChanged 0x4009 RequestObjectTransfer 0x400A StoreFull 0x400C StorageInfoChanged 0x400D CaptureComplete 0xC101", "alive # couldnt get any reply onto a propper PtpIpPing", "0x06) self.args = '' if self.param1 is not None: self.args", "is of type PtpIpInitCmdAck 
ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) if isinstance(ptpip_packet_reply, PtpIpInitCmdAck):", "PtpIpCancelTransaction(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\" def __init__(self, data=None): self.cmdtype = struct.pack('I',", "None: self.session_id = session_id def data(self): if self.session_id: return self.cmdtype", "ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) elif isinstance(ptpip_packet, PtpIpCmdRequest) and ptpip_packet.ptp_cmd == 0x1009:", "session_id=None): super(PtpIpEventReq, self).__init__() self.cmdtype = struct.pack('I', 0x03) self.session_id = None", "None: self.args = self.args + struct.pack('L', self.param4) if self.param5 is", "not None: self.ptp_response_code = struct.unpack('H', data[0:2])[0] self.transaction_id = data[2:6] self.args", "is not None: self.transaction_id = data[0:4] self.length = data[4:8] class", "self.cmdtype = struct.pack('I', 0x03) self.session_id = None if data is", "self.session) if isinstance(ptpip_packet_reply, PtpIpCmdResponse): time.sleep(1) continue else: # get the", "== 5: return PtpIpInitFail(data[4:]) elif self.cmdtype == 6: return PtpIpCmdRequest(data[4:])", "not None: self.session_id = data[0:4] elif session_id is not None:", "None: self.transaction_id = data[0:4] self.length = data[4:8] class PtpIpDataPacket(PtpIpPacket): \"\"\"docstring", "param1=None, param2=None, param3=None, param4=None, param5=None): super(PtpIpCmdRequest, self).__init__() self.cmdtype = struct.pack('I',", "{data_length}\") while (data_length) > len(data): data += session.recv(data_length - len(data))", "events = PtpIpEventFactory(data).get_events() for event in events: self.event_queue.append(event) ptpip_packet_reply =", "# Todo: Transaction ID generieren self.transaction_id = struct.pack('I', 0x06) self.args", "PtpIpCmdRequest) and ptpip_packet.ptp_cmd == 0x1009: self.send_data(ptpip_packet.data(), session) ptpip_packet_reply = 
PtpIpPacket().factory(data=self.recieve_data(session))", "self.cmdtype = struct.pack('I', 0x11) super(PtpIpCancelTransaction, self).__init__() if data is not", "PtpIpPacket().factory(data=self.recieve_data(session)) return ptpip_packet_reply def send_ptpip_event_req(self, ptpip_packet, session): # add the", "(data_length) > len(data): data += session.recv(data_length - len(data)) return data[4:]", "Format 0x2017 Unknown Vendor Code 0x2018 Capture Already Terminated 0x2019", "import struct class PtpIpConnection(object): \"\"\"docstring for PtpIP\"\"\" def __init__(self): super(PtpIpConnection,", "self.event_code = int(event_code) self.event_parameter = int(event_parameter) class PtpIpEventFactory(object): \"\"\" This", "0x9408 GetTransferList 0x9409 NotifyFileAcquisitionStart 0x940A NotifyFileAcquisitionEnd 0x940B GetSpecificSizeObject 0x9801 GetObjectPropsSupported", "PtpIpInitFail(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\" def __init__(self, data=None): super(PtpIpInitFail, self).__init__() self.cmdtype", "0x201D Invalid Parameter 0x201E Session Already Open 0x201F Transaction Cancelled", "self.cmdtype == 7: return PtpIpCmdResponse(data[4:]) elif self.cmdtype == 9: return", "0x200A DeviceProp Not Supported 0x200B Invalid ObjectFormatCode 0x200C Store Full", "def __init__(self, data=None): super(PtpIpEventAck, self).__init__() self.cmdtype = struct.pack('I', 0x04) class", "int(event_parameter) class PtpIpEventFactory(object): \"\"\" This is a factory to produce", "ObjectInfo 0x2016 Invalid Code Format 0x2017 Unknown Vendor Code 0x2018", "ChangeCameraMode 0x90C3 DeleteImagesInSdram 0x90C4 GetLargeThumb 0x90C7 GetEvent 0x90C8 DeviceReady 0x90C9", "Description 0x2000 Undefined 0x2001 OK 0x2002 General Error 0x2003 Session", "it is not specified in the package if ptpip_packet.session_id is", "id of the object if the reply is of type", "data[offset+2:offset+6])[0]) self.events.append(PtpIpEvent(event_code, event_parameter)) # increase the offset by 6 to", 
"data=None, cmd=None, param1=None, param2=None, param3=None, param4=None, param5=None): super(PtpIpCmdRequest, self).__init__() self.cmdtype", "PtpIpCmd\"\"\" def __init__(self): super(PtpIpPacket, self).__init__() def factory(self, data=None): if data", "PtpIpEndDataPacket(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\" def __init__(self, data=None): self.cmdtype = struct.pack('I',", "for PtpIpInitCmd\"\"\" def __init__(self, data=None): super(PtpIpEventAck, self).__init__() self.cmdtype = struct.pack('I',", "data[6:] class PtpIpStartDataPacket(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\" def __init__(self, data=None): self.cmdtype", "self.param3 is not None: self.args = self.args + struct.pack('L', self.param3)", "get the next command from command the queue ptip_cmd =", "0x400D CaptureComplete 0xC101 ObjectAddedInSdram 0xC102 CaptureCompleteRecInSdram 0xC105 RecordingInterrupted \"\"\" def", "+ self.version class PtpIpInitCmdAck(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\" def __init__(self, data=None):", "print(f\"Cmd Type: {struct.unpack('I', data[0:4])[0]}\") self.cmdtype = struct.unpack('I', data[0:4])[0] if self.cmdtype", "0x1009: self.send_data(ptpip_packet.data(), session) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) if isinstance(ptpip_packet_reply, PtpIpStartDataPacket): data_length", "self.send_data(ptpip_packet.data(), session) def send_data(self, data, session): session.send(struct.pack('I', len(data) + 4)", "event_code and event_parameter pair offset = offset + 6 counter", "== 10: return PtpIpDataPacket(data[4:]) elif self.cmdtype == 12: return PtpIpEndDataPacket(data[4:])", "Value 0x201D Invalid Parameter 0x201E Session Already Open 0x201F Transaction", "event_parameter): super(PtpIpEvent, self).__init__() self.event_code = int(event_code) self.event_parameter = int(event_parameter) class", "data[16:0] def data(self): return self.cmdtype + self.guid + self.hostname +", "== 2: return 
PtpIpInitCmdAck(data[4:]) elif self.cmdtype == 3: return PtpIpEventReq(data[4:])", "ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) if data_length == len(data): self.object_queue.append(PtpIpDataObject(ptpip_packet.param1, data)) ptpip_packet_reply", "self.connect(host=host, port=port) self.send_recieve_ptpip_packet(PtpIpEventReq(), self.session_events) # 0x1002 OpenSession ptip_cmd = PtpIpCmdRequest(cmd=0x1002,", "= guid.bytes self.hostname = socket.gethostname() + '\\x00' self.hostname = self.hostname.encode('utf-16-le')", "== 13: return PtpIpPing(data[4:]) def data(self): pass class PtpIpInitCmdReq(PtpIpPacket): \"\"\"docstring", "of 4 bytes event_parameter = str(struct.unpack('I', data[offset+2:offset+6])[0]) self.events.append(PtpIpEvent(event_code, event_parameter)) #", "self.session) def communication_thread(self): while True: if len(self.cmd_queue) == 0: #", "def __init__(self, data=None): self.cmdtype = struct.pack('I', 0x10) super(PtpIpDataPacket, self).__init__() if", "0x4006 DevicePropChanged 0x4007 ObjectInfoChanged 0x4008 DeviceInfoChanged 0x4009 RequestObjectTransfer 0x400A StoreFull", "self.length = data[4:8] class PtpIpDataPacket(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\" def __init__(self,", "0x9801 GetObjectPropsSupported 0x9802 GetObjectPropDesc 0x9803 GetObjectPropValue 0x9805 GetObjectPropList \"\"\" def", "second before new packets are processed/send to the camera time.sleep(1)", "def __init__(self, event_code, event_parameter): super(PtpIpEvent, self).__init__() self.event_code = int(event_code) self.event_parameter", "ptpip_packet_reply.data while isinstance(ptpip_packet_reply, PtpIpDataPacket): data = data + ptpip_packet_reply.data ptpip_packet_reply", "data(self): if self.session_id: return self.cmdtype + self.session_id return self.cmdtype class", "0x9204 MfDrive 0x9205 ChangeAfArea 0x9206 AfDriveCancel 0x9207 InitiateCaptureRecInMedia 0x9209 GetVendorStorageIDs", "0x2019 Device Busy 0x201A 
Invalid ParentObject 0x201B Invalid DeviceProp Format", "+ self.session_id return self.cmdtype class PtpIpEventAck(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\" def", "= struct.pack('I', 0x05) class PtpIpCmdRequest(PtpIpPacket): \"\"\" Operation Code Description 0x1001", "self).__init__() self.cmdtype = struct.pack('I', 0x05) class PtpIpCmdRequest(PtpIpPacket): \"\"\" Operation Code", "data = data + ptpip_packet_reply.data ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) if data_length", "ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) elif isinstance(ptpip_packet, PtpIpCmdRequest) and ptpip_packet.ptp_cmd == 0x90C7:", "6: return PtpIpCmdRequest(data[4:]) elif self.cmdtype == 7: return PtpIpCmdResponse(data[4:]) elif", "produce an array of PtpIpEvent objects if it got passd", "elif isinstance(ptpip_packet, PtpIpCmdRequest) and ptpip_packet.ptp_cmd == 0x1009: self.send_data(ptpip_packet.data(), session) ptpip_packet_reply", "13: return PtpIpPing(data[4:]) def data(self): pass class PtpIpInitCmdReq(PtpIpPacket): \"\"\"docstring for", "Open 0x2004 Invalid TransactionID 0x2005 Operation Not Supported 0x2006 Parameter", "[] self.event_queue = [] self.object_queue = [] def open(self, host='192.168.1.1',", "Full 0x200D Object WriteProtected 0x200E Store Read-Only 0x200F Access Denied", "passed to the factory amount_of_events = struct.unpack('H', data[0:2])[0] # set", "0x12) super(PtpIpEndDataPacket, self).__init__() if data is not None: self.transaction_id =", "0x9400 GetPartialObjectHighSpeed 0x9407 SetTransferListLock 0x9408 GetTransferList 0x9409 NotifyFileAcquisitionStart 0x940A NotifyFileAcquisitionEnd", "try: s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) s.connect((host, port))", "if self.param4 is not None: self.args = self.args + struct.pack('L',", "elif self.cmdtype == 5: return PtpIpInitFail(data[4:]) elif self.cmdtype == 6:", "class 
PtpIpCmdRequest(PtpIpPacket): \"\"\" Operation Code Description 0x1001 GetDeviceInfo 0x1002 OpenSession", "1 second before new packets are processed/send to the camera", "self).__init__() self.cmdtype = struct.pack('I', 0x06) self.unkown = struct.pack('I', 0x01) self.ptp_cmd", "ObjectAddedInSdram 0xC102 CaptureCompleteRecInSdram 0xC105 RecordingInterrupted \"\"\" def __init__(self, event_code, event_parameter):", "is not None: self.transaction_id = data[0:4] class PtpIpEndDataPacket(PtpIpPacket): \"\"\"docstring for", "self).__init__() if data is not None: self.data = '' def", "FormatStore 0x1014 GetDevicePropDesc 0x1015 GetDevicePropValue 0x1016 SetDevicePropValue 0x101B GetPartialObject 0x90C0", "the PtpIpEvent object which will be replied self.events = []", "data is not None: self.ptp_response_code = struct.unpack('H', data[0:2])[0] self.transaction_id =", "1: return PtpIpInitCmdReq(data[4:]) elif self.cmdtype == 2: return PtpIpInitCmdAck(data[4:]) elif", "elif self.cmdtype == 12: return PtpIpEndDataPacket(data[4:]) elif self.cmdtype == 13:", "the camera time.sleep(1) pass def send_ptpip_cmd(self, ptpip_packet): self.cmd_queue.append(ptpip_packet) def connect(self,", "# of the device ptpip_packet_reply = self.send_recieve_ptpip_packet(PtpIpCmdRequest(cmd=0x90C8), self.session) if isinstance(ptpip_packet_reply,", "= data[20:] class PtpIpEventReq(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\" def __init__(self, data=None,", "reply to keep the connection alive # couldnt get any", "class PtpIpInitFail(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\" def __init__(self, data=None): super(PtpIpInitFail, self).__init__()", "= data[0:4] self.data = data[4:] class PtpIpCancelTransaction(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\"", "if isinstance(ptpip_packet, PtpIpInitCmdReq): self.send_data(ptpip_packet.data(), session) # set the session id", "0x9201 StartLiveView 0x9202 EndLiveView 0x9203 GetLiveViewImage 0x9204 MfDrive 0x9205 ChangeAfArea", 
"events: self.event_queue.append(event) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) elif isinstance(ptpip_packet, PtpIpCmdRequest) and ptpip_packet.ptp_cmd", "== len(data): self.object_queue.append(PtpIpDataObject(ptpip_packet.param1, data)) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) else: self.send_data(ptpip_packet.data(), session)", "[] # get the amount of events passed from the", "0: # do a ping receive a pong (same as", "= struct.unpack('I', ptpip_packet_reply.length)[0] ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) data = ptpip_packet_reply.data while", "0x4003 ObjectRemoved 0x4004 StoreAdded 0x4005 StoreRemoved 0x4006 DevicePropChanged 0x4007 ObjectInfoChanged", "0x200B Invalid ObjectFormatCode 0x200C Store Full 0x200D Object WriteProtected 0x200E", "NotifyFileAcquisitionStart 0x940A NotifyFileAcquisitionEnd 0x940B GetSpecificSizeObject 0x9801 GetObjectPropsSupported 0x9802 GetObjectPropDesc 0x9803", "struct.pack('I', 0x02) if data is not None: self.session_id = data[0:4]", "ObjectRemoved 0x4004 StoreAdded 0x4005 StoreRemoved 0x4006 DevicePropChanged 0x4007 ObjectInfoChanged 0x4008", "event in events: self.event_queue.append(event) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) elif isinstance(ptpip_packet, PtpIpCmdRequest)", "def data(self): return self.cmdtype class PtpIpEvent(object): \"\"\" EventCode Description 0x4001", "GetStorageIDs 0x1005 GetStorageInfo 0x1006 GetNumObjects 0x1007 GetObjectHandles 0x1008 GetObjectInfo 0x1009", "self.guid = data[0:16] self.hostname = data[16:0] def data(self): return self.cmdtype", "self.events class PtpIpDataObject(object): \"\"\"docstring for PtpIpDataObject\"\"\" def __init__(self, object_handle, data):", "0x2017 Unknown Vendor Code 0x2018 Capture Already Terminated 0x2019 Device", "both session, first one for for commands, second for events", "def __init__(self, data=None, session_id=None): 
super(PtpIpEventReq, self).__init__() self.cmdtype = struct.pack('I', 0x03)", "time.sleep(1) pass def send_ptpip_cmd(self, ptpip_packet): self.cmd_queue.append(ptpip_packet) def connect(self, host='192.168.1.1', port=15740):", "1 offset = 2 while counter <= amount_of_events: # get", "str(struct.unpack('H', data[offset:offset+2])[0]) # get the event_parameter which consists of 4", "Transfer 0x2008 Invalid StorageID 0x2009 Invalid ObjectHandle 0x200A DeviceProp Not", "PtpIpInitCmdAck): self.session_id = ptpip_packet_reply.session_id elif isinstance(ptpip_packet, PtpIpEventReq): self.send_ptpip_event_req(ptpip_packet, session) ptpip_packet_reply", "self.object_queue = [] def open(self, host='192.168.1.1', port=15740): # Open both", "guid.bytes self.hostname = socket.gethostname() + '\\x00' self.hostname = self.hostname.encode('utf-16-le') else:", "uuid import time import socket import struct class PtpIpConnection(object): \"\"\"docstring", "s.connect((host, port)) except socket.error as message: if s: s.close() print(f\"Could", "the offset by 6 to get to the next event_code", "session.send(struct.pack('I', len(data) + 4) + data) def recieve_data(self, session): data", "PtpIpDataPacket(data[4:]) elif self.cmdtype == 12: return PtpIpEndDataPacket(data[4:]) elif self.cmdtype ==", "self.session_id = ptpip_packet_reply.session_id elif isinstance(ptpip_packet, PtpIpEventReq): self.send_ptpip_event_req(ptpip_packet, session) ptpip_packet_reply =", "self.session_id = data[0:4] elif session_id is not None: self.session_id =", "0x9802 GetObjectPropDesc 0x9803 GetObjectPropValue 0x9805 GetObjectPropList \"\"\" def __init__(self, data=None,", "open(self, host='192.168.1.1', port=15740): # Open both session, first one for", "= struct.pack('>I', 0x0100) if data is None: guid = uuid.uuid4()", "'' def data(self): return self.cmdtype class PtpIpEvent(object): \"\"\" EventCode Description", "self.session = None self.session_events = None self.session_id = None self.cmd_queue", 
"socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) s.connect((host, port)) except socket.error as", "MfDrive 0x9205 ChangeAfArea 0x9206 AfDriveCancel 0x9207 InitiateCaptureRecInMedia 0x9209 GetVendorStorageIDs 0x920A", "0x90CF GetPicCtrlCapability 0x9201 StartLiveView 0x9202 EndLiveView 0x9203 GetLiveViewImage 0x9204 MfDrive", "self.ptp_response_code = struct.unpack('H', data[0:2])[0] self.transaction_id = data[2:6] self.args = data[6:]", "{struct.unpack('I', data[0:4])[0]}\") self.cmdtype = struct.unpack('I', data[0:4])[0] if self.cmdtype == 1:", "struct.unpack('I', data[0:4])[0] if self.cmdtype == 1: return PtpIpInitCmdReq(data[4:]) elif self.cmdtype", "+ self.guid + self.hostname + self.version class PtpIpInitCmdAck(PtpIpPacket): \"\"\"docstring for", "self.session_id = session_id def data(self): if self.session_id: return self.cmdtype +", "self.session = self.connect(host=host, port=port) self.send_recieve_ptpip_packet(PtpIpInitCmdReq(), self.session) self.session_events = self.connect(host=host, port=port)", "event_parameter = str(struct.unpack('I', data[offset+2:offset+6])[0]) self.events.append(PtpIpEvent(event_code, event_parameter)) # increase the offset", "+ struct.pack('L', self.param2) if self.param3 is not None: self.args =", "DeviceProp Value 0x201D Invalid Parameter 0x201E Session Already Open 0x201F", "PtpIpPacket().factory(data=self.recieve_data(session)) if data_length == len(data): events = PtpIpEventFactory(data).get_events() for event", "amount of events passed from the data passed to the", "class PtpIpDataObject(object): \"\"\"docstring for PtpIpDataObject\"\"\" def __init__(self, object_handle, data): super(PtpIpDataObject,", "session id of the object if the reply is of", "class PtpIpDataPacket(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\" def __init__(self, data=None): self.cmdtype =", "def __init__(self, object_handle, data): super(PtpIpDataObject, self).__init__() 
self.object_handle = object_handle self.data", "SetTransferListLock 0x9408 GetTransferList 0x9409 NotifyFileAcquisitionStart 0x940A NotifyFileAcquisitionEnd 0x940B GetSpecificSizeObject 0x9801", "__init__(self): super(PtpIpConnection, self).__init__() self.session = None self.session_events = None self.session_id", "of Destination Unsupported \"\"\" def __init__(self, data=None): super(PtpIpCmdResponse, self).__init__() self.cmdtype", "not None: self.session_id = data[0:4] self.guid = data[4:20] self.hostname =", "ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) if isinstance(ptpip_packet_reply, PtpIpInitCmdAck): self.session_id = ptpip_packet_reply.session_id elif", "GetNumObjects 0x1007 GetObjectHandles 0x1008 GetObjectInfo 0x1009 GetObject 0x100A GetThumb 0x100B", "int(event_code) self.event_parameter = int(event_parameter) class PtpIpEventFactory(object): \"\"\" This is a", "0x05) class PtpIpCmdRequest(PtpIpPacket): \"\"\" Operation Code Description 0x1001 GetDeviceInfo 0x1002", "the queue ptip_cmd = self.cmd_queue.pop() ptpip_packet_reply = self.send_recieve_ptpip_packet(ptip_cmd, self.session) if", "is not None: self.ptp_response_code = struct.unpack('H', data[0:2])[0] self.transaction_id = data[2:6]", "Invalid ParentObject 0x201B Invalid DeviceProp Format 0x201C Invalid DeviceProp Value", "data=None): self.cmdtype = struct.pack('I', 0x11) super(PtpIpCancelTransaction, self).__init__() if data is", "= PtpIpPacket().factory(data=self.recieve_data(session)) if data_length == len(data): events = PtpIpEventFactory(data).get_events() for", "command the queue ptip_cmd = self.cmd_queue.pop() ptpip_packet_reply = self.send_recieve_ptpip_packet(ptip_cmd, self.session)", "data is None: self.cmdtype = None else: print(f\"Cmd Type: {struct.unpack('I',", "class PtpIpConnection(object): \"\"\"docstring for PtpIP\"\"\" def __init__(self): super(PtpIpConnection, self).__init__() self.session", "0x90C9 SetPreWbData 0x90CA GetVendorPropCodes 0x90CB 
AfAndCaptureRecInSdram 0x90CC GetPicCtrlData 0x90CD SetPicCtrlData", "self.args class PtpIpCmdResponse(PtpIpPacket): \"\"\" ResponseCode Description 0x2000 Undefined 0x2001 OK", "self.session_events = self.connect(host=host, port=port) self.send_recieve_ptpip_packet(PtpIpEventReq(), self.session_events) # 0x1002 OpenSession ptip_cmd", "0x02) if data is not None: self.session_id = data[0:4] self.guid", "session_id is not None: self.session_id = session_id def data(self): if", "= cmd self.param1 = param1 self.param2 = param2 self.param3 =", "offset of 2 as the first two bytes are already", "__init__(self, data=None, cmd=None, param1=None, param2=None, param3=None, param4=None, param5=None): super(PtpIpCmdRequest, self).__init__()", "def __init__(self, data=None): self.cmdtype = struct.pack('I', 0x13) super(PtpIpPing, self).__init__() if", "0x9207 InitiateCaptureRecInMedia 0x9209 GetVendorStorageIDs 0x920A StartMovieRecInCard 0x920B EndMovieRec 0x920C TerminateCapture", "= 1 offset = 2 while counter <= amount_of_events: #", "\"\"\" ResponseCode Description 0x2000 Undefined 0x2001 OK 0x2002 General Error", "while isinstance(ptpip_packet_reply, PtpIpDataPacket): data = data + ptpip_packet_reply.data ptpip_packet_reply =", "data[0:2])[0] # set an counter and an offset of 2", "Vendor Code 0x2018 Capture Already Terminated 0x2019 Device Busy 0x201A", "self.data = '' def data(self): return self.cmdtype class PtpIpEvent(object): \"\"\"", "self.cmdtype = struct.pack('I', 0x01) self.version = struct.pack('>I', 0x0100) if data", "if data is None: guid = uuid.uuid4() self.guid = guid.bytes", "WriteProtected 0x200E Store Read-Only 0x200F Access Denied 0x2010 No Thumbnail", "= PtpIpPacket().factory(data=self.recieve_data(session)) if data_length == len(data): self.object_queue.append(PtpIpDataObject(ptpip_packet.param1, data)) ptpip_packet_reply =", "GetEvent request 0x90C7 \"\"\" def __init__(self, data): super(PtpIpEventFactory, self).__init__() #", "set an counter and an offset 
of 2 as the", "== 6: return PtpIpCmdRequest(data[4:]) elif self.cmdtype == 7: return PtpIpCmdResponse(data[4:])", "0x90C1 AfDrive 0x90C2 ChangeCameraMode 0x90C3 DeleteImagesInSdram 0x90C4 GetLargeThumb 0x90C7 GetEvent", "None: self.args = self.args + struct.pack('L', self.param2) if self.param3 is", "PtpIpCmdRequest(PtpIpPacket): \"\"\" Operation Code Description 0x1001 GetDeviceInfo 0x1002 OpenSession 0x1003", "struct.unpack('I', ptpip_packet_reply.length)[0] ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) data = ptpip_packet_reply.data while isinstance(ptpip_packet_reply,", "0x90C3 DeleteImagesInSdram 0x90C4 GetLargeThumb 0x90C7 GetEvent 0x90C8 DeviceReady 0x90C9 SetPreWbData", "DeleteImagesInSdram 0x90C4 GetLargeThumb 0x90C7 GetEvent 0x90C8 DeviceReady 0x90C9 SetPreWbData 0x90CA", "get the event_code which consists of two bytes event_code =", "= socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) s.connect((host, port)) except socket.error", "data + ptpip_packet_reply.data ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) if data_length == len(data):", "queue ptip_cmd = self.cmd_queue.pop() ptpip_packet_reply = self.send_recieve_ptpip_packet(ptip_cmd, self.session) if (ptpip_packet_reply.ptp_response_code", "__init__(self, data=None): super(PtpIpInitCmdAck, self).__init__() self.cmdtype = struct.pack('I', 0x02) if data", "of the object itself if it is not specified in", "def recieve_data(self, session): data = session.recv(4) (data_length,) = struct.unpack('I', data)", "PtpIpCmdResponse): time.sleep(1) continue else: # get the next command from", "return self.cmdtype class PtpIpEventAck(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\" def __init__(self, data=None):", "Not Open 0x2004 Invalid TransactionID 0x2005 Operation Not Supported 0x2006", "struct.unpack('I', data) print(f\"Packet length: {data_length}\") while (data_length) > len(data): data", 
"PtpIpCmdResponse(PtpIpPacket): \"\"\" ResponseCode Description 0x2000 Undefined 0x2001 OK 0x2002 General", "= data[16:0] def data(self): return self.cmdtype + self.guid + self.hostname", "self).__init__() if data is not None: self.transaction_id = data[0:4] class", "from a GetEvent request 0x90C7 \"\"\" def __init__(self, data): super(PtpIpEventFactory,", "PtpIpInitCmdReq): self.send_data(ptpip_packet.data(), session) # set the session id of the", "= data + ptpip_packet_reply.data ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) if data_length ==", "ptpip_packet_reply = self.send_recieve_ptpip_packet(ptip_cmd, self.session) if (ptpip_packet_reply.ptp_response_code == 0x2001 and \\", "= struct.pack('I', 0x06) self.args = '' if self.param1 is not", "if data is not None: self.session_id = data[0:4] self.guid =", "__init__(self, data=None, session_id=None): super(PtpIpEventReq, self).__init__() self.cmdtype = struct.pack('I', 0x03) self.session_id", "not None: self.args = self.args + struct.pack('L', self.param5) def data(self):", "for Start_Data_Packet\"\"\" def __init__(self, data=None): self.cmdtype = struct.pack('I', 0x13) super(PtpIpPing,", "and ptpip_packet.ptp_cmd == 0x90C7: self.send_data(ptpip_packet.data(), session) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) if", "port)) except socket.error as message: if s: s.close() print(f\"Could not", "Invalid TransactionID 0x2005 Operation Not Supported 0x2006 Parameter Not Supported", "0xC101 ObjectAddedInSdram 0xC102 CaptureCompleteRecInSdram 0xC105 RecordingInterrupted \"\"\" def __init__(self, event_code,", "port=15740): # Open both session, first one for for commands,", "id of the object itself if it is not specified", "counter = 1 offset = 2 while counter <= amount_of_events:", "data_length == len(data): self.object_queue.append(PtpIpDataObject(ptpip_packet.param1, data)) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) else: 
self.send_data(ptpip_packet.data(),", "return PtpIpCmdResponse(data[4:]) elif self.cmdtype == 9: return PtpIpStartDataPacket(data[4:]) elif self.cmdtype", "data)) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) else: self.send_data(ptpip_packet.data(), session) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session))", "> len(data): data += session.recv(data_length - len(data)) return data[4:] class", "\"\"\" This is a factory to produce an array of", "self).__init__() self.cmdtype = struct.pack('I', 0x07) if data is not None:", "Invalid Parameter 0x201E Session Already Open 0x201F Transaction Cancelled 0x2020", "couldnt get any reply onto a propper PtpIpPing packet so", "+ 1 def get_events(self): return self.events class PtpIpDataObject(object): \"\"\"docstring for", "\\ ptpip_packet_reply.ptp_response_code == 0x2019): print(\"Cmd send successfully\") else: print(f\"cmd reply", "+ '\\x00' self.hostname = self.hostname.encode('utf-16-le') else: self.guid = data[0:16] self.hostname", "self.param5) def data(self): return self.cmdtype + self.unkown + struct.pack('H', self.ptp_cmd)", "struct.pack('L', self.param3) if self.param4 is not None: self.args = self.args", "while counter <= amount_of_events: # get the event_code which consists", "self.session_id return self.cmdtype class PtpIpEventAck(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\" def __init__(self,", "and ptpip_packet.ptp_cmd == 0x1009: self.send_data(ptpip_packet.data(), session) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) if", "return PtpIpInitCmdAck(data[4:]) elif self.cmdtype == 3: return PtpIpEventReq(data[4:]) elif self.cmdtype", "== 1: return PtpIpInitCmdReq(data[4:]) elif self.cmdtype == 2: return PtpIpInitCmdAck(data[4:])", "data[0:4] self.data = data[4:] class PtpIpCancelTransaction(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\" def", "self.cmdtype = struct.pack('I', 0x05) class 
PtpIpCmdRequest(PtpIpPacket): \"\"\" Operation Code Description", "SendObjectInfo 0x100D SendObject 0x100E InitiateCapture 0x100F FormatStore 0x1014 GetDevicePropDesc 0x1015", "PtpIpCmdRequest) and ptpip_packet.ptp_cmd == 0x90C7: self.send_data(ptpip_packet.data(), session) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session))", "None self.session_events = None self.session_id = None self.cmd_queue = []", "0x201C Invalid DeviceProp Value 0x201D Invalid Parameter 0x201E Session Already", "def __init__(self, data): super(PtpIpEventFactory, self).__init__() # create an empty array", "if ptpip_packet.session_id is None: ptpip_packet.session_id = self.session_id self.send_data(ptpip_packet.data(), session) def", "= data[4:] class PtpIpCancelTransaction(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\" def __init__(self, data=None):", "data=None): super(PtpIpInitFail, self).__init__() self.cmdtype = struct.pack('I', 0x05) class PtpIpCmdRequest(PtpIpPacket): \"\"\"", "self.args + struct.pack('L', self.param1) if self.param2 is not None: self.args", "self.cmdtype + self.session_id return self.cmdtype class PtpIpEventAck(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\"", "(same as ping) as reply to keep the connection alive", "guid = uuid.uuid4() self.guid = guid.bytes self.hostname = socket.gethostname() +", "Cancelled 0x2020 Specification of Destination Unsupported \"\"\" def __init__(self, data=None):", "__init__(self, data=None): self.cmdtype = struct.pack('I', 0x09) super(PtpIpStartDataPacket, self).__init__() if data", "= None self.cmd_queue = [] self.event_queue = [] self.object_queue =", "0x1009 GetObject 0x100A GetThumb 0x100B DeleteObject 0x100C SendObjectInfo 0x100D SendObject", "one for for commands, second for events self.session = self.connect(host=host,", "+ self.args class PtpIpCmdResponse(PtpIpPacket): \"\"\" ResponseCode Description 0x2000 Undefined 0x2001", "is not None: self.transaction_id = data[0:4] 
print(f\"transaction_id: {struct.unpack('I', self.transaction_id)[0]}\") self.data", "= [] self.event_queue = [] self.object_queue = [] def open(self,", "__init__(self, data=None): self.cmdtype = struct.pack('I', 0x13) super(PtpIpPing, self).__init__() if data", "if data is not None: self.data = '' def data(self):", "PtpIpInitCmd\"\"\" def __init__(self, data=None, session_id=None): super(PtpIpEventReq, self).__init__() self.cmdtype = struct.pack('I',", "self.send_recieve_ptpip_packet(PtpIpEventReq(), self.session_events) # 0x1002 OpenSession ptip_cmd = PtpIpCmdRequest(cmd=0x1002, param1=struct.unpack('L', self.session_id)[0])", "session) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) return ptpip_packet_reply def send_ptpip_event_req(self, ptpip_packet, session):", "bytes event_parameter = str(struct.unpack('I', data[offset+2:offset+6])[0]) self.events.append(PtpIpEvent(event_code, event_parameter)) # increase the", "not None: self.args = self.args + struct.pack('L', self.param2) if self.param3", "# set the session id of the object if the", "onto a propper PtpIpPing packet so i am querying the", "0x4002 ObjectAdded 0x4003 ObjectRemoved 0x4004 StoreAdded 0x4005 StoreRemoved 0x4006 DevicePropChanged", "GetObjectInfo 0x1009 GetObject 0x100A GetThumb 0x100B DeleteObject 0x100C SendObjectInfo 0x100D", "TransactionID 0x2005 Operation Not Supported 0x2006 Parameter Not Supported 0x2007", "\"\"\"docstring for PtpIpInitCmd\"\"\" def __init__(self, data=None): super(PtpIpEventAck, self).__init__() self.cmdtype =", "GetObjectPropList \"\"\" def __init__(self, data=None, cmd=None, param1=None, param2=None, param3=None, param4=None,", "get the event_parameter which consists of 4 bytes event_parameter =", "ObjectInfoChanged 0x4008 DeviceInfoChanged 0x4009 RequestObjectTransfer 0x400A StoreFull 0x400C StorageInfoChanged 0x400D", "are already processed counter = 1 offset = 2 while", "+ self.unkown + struct.pack('H', self.ptp_cmd) + \\ 
self.transaction_id + self.args", "\"\"\"docstring for PtpIpInitCmd\"\"\" def __init__(self, data=None, session_id=None): super(PtpIpEventReq, self).__init__() self.cmdtype", "= self.session_id self.send_data(ptpip_packet.data(), session) def send_data(self, data, session): session.send(struct.pack('I', len(data)", "elif self.cmdtype == 2: return PtpIpInitCmdAck(data[4:]) elif self.cmdtype == 3:", "elif isinstance(ptpip_packet, PtpIpCmdRequest) and ptpip_packet.ptp_cmd == 0x90C7: self.send_data(ptpip_packet.data(), session) ptpip_packet_reply", "None if data is not None: self.session_id = data[0:4] elif", "ping) as reply to keep the connection alive # couldnt", "= struct.pack('I', 0x07) if data is not None: self.ptp_response_code =", "not None: self.args = self.args + struct.pack('L', self.param3) if self.param4", "== len(data): events = PtpIpEventFactory(data).get_events() for event in events: self.event_queue.append(event)", "(ptpip_packet_reply.ptp_response_code == 0x2001 and \\ ptpip_packet_reply.ptp_response_code == 0x2019): print(\"Cmd send", "self.cmdtype == 2: return PtpIpInitCmdAck(data[4:]) elif self.cmdtype == 3: return", "self.cmdtype = struct.pack('I', 0x09) super(PtpIpStartDataPacket, self).__init__() if data is not", "GetVendorPropCodes 0x90CB AfAndCaptureRecInSdram 0x90CC GetPicCtrlData 0x90CD SetPicCtrlData 0x90CE DeleteCustomPicCtrl 0x90CF", "# Open both session, first one for for commands, second", "Deletion 0x2013 Store Not Available 0x2014 Specification By Format Unsupported", "0x100E InitiateCapture 0x100F FormatStore 0x1014 GetDevicePropDesc 0x1015 GetDevicePropValue 0x1016 SetDevicePropValue", "self.cmd_queue.append(ptpip_packet) def connect(self, host='192.168.1.1', port=15740): try: s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)", "Undefined 0x2001 OK 0x2002 General Error 0x2003 Session Not Open", "GetObjectPropValue 0x9805 GetObjectPropList \"\"\" def __init__(self, data=None, cmd=None, param1=None, param2=None,", "a ping receive a 
pong (same as ping) as reply", "struct.pack('I', 0x11) super(PtpIpCancelTransaction, self).__init__() if data is not None: self.transaction_id", "not None: self.args = self.args + struct.pack('L', self.param4) if self.param5", "InitiateCaptureRecInSdram 0x90C1 AfDrive 0x90C2 ChangeCameraMode 0x90C3 DeleteImagesInSdram 0x90C4 GetLargeThumb 0x90C7", "Thumbnail Present 0x2011 SelfTest Failed 0x2012 Partial Deletion 0x2013 Store", "from the data passed to the factory amount_of_events = struct.unpack('H',", "self).__init__() self.cmdtype = struct.pack('I', 0x02) if data is not None:", "data(self): pass class PtpIpInitCmdReq(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\" def __init__(self, data=None):", "self).__init__() self.cmdtype = struct.pack('I', 0x01) self.version = struct.pack('>I', 0x0100) if", "0x200D Object WriteProtected 0x200E Store Read-Only 0x200F Access Denied 0x2010", "Incomplete Transfer 0x2008 Invalid StorageID 0x2009 Invalid ObjectHandle 0x200A DeviceProp", "# get the amount of events passed from the data", "0x90C2 ChangeCameraMode 0x90C3 DeleteImagesInSdram 0x90C4 GetLargeThumb 0x90C7 GetEvent 0x90C8 DeviceReady", "GetObject 0x100A GetThumb 0x100B DeleteObject 0x100C SendObjectInfo 0x100D SendObject 0x100E", "ChangeAfArea 0x9206 AfDriveCancel 0x9207 InitiateCaptureRecInMedia 0x9209 GetVendorStorageIDs 0x920A StartMovieRecInCard 0x920B", "= param3 self.param4 = param4 self.param5 = param5 # Todo:", "an counter and an offset of 2 as the first", "= None if data is not None: self.session_id = data[0:4]", "the first two bytes are already processed counter = 1", "self.cmdtype = struct.pack('I', 0x07) if data is not None: self.ptp_response_code", "super(PtpIpEndDataPacket, self).__init__() if data is not None: self.transaction_id = data[0:4]", "struct.pack('I', 0x13) super(PtpIpPing, self).__init__() if data is not None: self.data", "0x201A Invalid ParentObject 0x201B Invalid DeviceProp Format 0x201C Invalid DeviceProp", "0xC105 RecordingInterrupted 
\"\"\" def __init__(self, event_code, event_parameter): super(PtpIpEvent, self).__init__() self.event_code", "<= amount_of_events: # get the event_code which consists of two", "__init__(self, data): super(PtpIpEventFactory, self).__init__() # create an empty array for", "struct.pack('L', self.param5) def data(self): return self.cmdtype + self.unkown + struct.pack('H',", "None else: print(f\"Cmd Type: {struct.unpack('I', data[0:4])[0]}\") self.cmdtype = struct.unpack('I', data[0:4])[0]", "are processed/send to the camera time.sleep(1) pass def send_ptpip_cmd(self, ptpip_packet):", "(data_length,) = struct.unpack('I', data) print(f\"Packet length: {data_length}\") while (data_length) >", "SendObject 0x100E InitiateCapture 0x100F FormatStore 0x1014 GetDevicePropDesc 0x1015 GetDevicePropValue 0x1016", "as ping) as reply to keep the connection alive #", "= self.send_recieve_ptpip_packet(ptip_cmd, self.session) if (ptpip_packet_reply.ptp_response_code == 0x2001 and \\ ptpip_packet_reply.ptp_response_code", "if self.param5 is not None: self.args = self.args + struct.pack('L',", "session.recv(4) (data_length,) = struct.unpack('I', data) print(f\"Packet length: {data_length}\") while (data_length)", "PtpIpPacket().factory(data=self.recieve_data(session)) data = ptpip_packet_reply.data while isinstance(ptpip_packet_reply, PtpIpDataPacket): data = data", "port=port) self.send_recieve_ptpip_packet(PtpIpEventReq(), self.session_events) # 0x1002 OpenSession ptip_cmd = PtpIpCmdRequest(cmd=0x1002, param1=struct.unpack('L',", "= socket.gethostname() + '\\x00' self.hostname = self.hostname.encode('utf-16-le') else: self.guid =", "= struct.unpack('I', data) print(f\"Packet length: {data_length}\") while (data_length) > len(data):", "of type PtpIpInitCmdAck ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) if isinstance(ptpip_packet_reply, PtpIpInitCmdAck): self.session_id", "ptpip_packet): self.cmd_queue.append(ptpip_packet) def connect(self, 
host='192.168.1.1', port=15740): try: s = socket.socket(socket.AF_INET,", "= self.args + struct.pack('L', self.param5) def data(self): return self.cmdtype +", "# 0x1002 OpenSession ptip_cmd = PtpIpCmdRequest(cmd=0x1002, param1=struct.unpack('L', self.session_id)[0]) self.send_recieve_ptpip_packet(ptip_cmd, self.session)", "s: s.close() print(f\"Could not open socket: {message}\") return s def", "0x4001 CancelTransaction 0x4002 ObjectAdded 0x4003 ObjectRemoved 0x4004 StoreAdded 0x4005 StoreRemoved", "super(PtpIpEventFactory, self).__init__() # create an empty array for the PtpIpEvent", "len(data) + 4) + data) def recieve_data(self, session): data =", "super(PtpIpEventAck, self).__init__() self.cmdtype = struct.pack('I', 0x04) class PtpIpInitFail(PtpIpPacket): \"\"\"docstring for", "data[0:4])[0] if self.cmdtype == 1: return PtpIpInitCmdReq(data[4:]) elif self.cmdtype ==", "InitiateCaptureRecInMedia 0x9209 GetVendorStorageIDs 0x920A StartMovieRecInCard 0x920B EndMovieRec 0x920C TerminateCapture 0x9400", "data[0:4])[0]}\") self.cmdtype = struct.unpack('I', data[0:4])[0] if self.cmdtype == 1: return", "in the package if ptpip_packet.session_id is None: ptpip_packet.session_id = self.session_id", "keep the connection alive # couldnt get any reply onto", "Start_Data_Packet\"\"\" def __init__(self, data=None): self.cmdtype = struct.pack('I', 0x10) super(PtpIpDataPacket, self).__init__()", "to produce an array of PtpIpEvent objects if it got", "[] def open(self, host='192.168.1.1', port=15740): # Open both session, first", "\"\"\" EventCode Description 0x4001 CancelTransaction 0x4002 ObjectAdded 0x4003 ObjectRemoved 0x4004", "0x2000 Undefined 0x2001 OK 0x2002 General Error 0x2003 Session Not", "<filename>ptpip/ptpip.py import uuid import time import socket import struct class", "0x1008 GetObjectInfo 0x1009 GetObject 0x100A GetThumb 0x100B DeleteObject 0x100C SendObjectInfo", "= PtpIpPacket().factory(data=self.recieve_data(session)) elif isinstance(ptpip_packet, 
PtpIpCmdRequest) and ptpip_packet.ptp_cmd == 0x90C7: self.send_data(ptpip_packet.data(),", "GetPicCtrlData 0x90CD SetPicCtrlData 0x90CE DeleteCustomPicCtrl 0x90CF GetPicCtrlCapability 0x9201 StartLiveView 0x9202", "{ptpip_packet_reply.ptp_response_code}\") # wait 1 second before new packets are processed/send", "array of PtpIpEvent objects if it got passd a data", "# get the event_parameter which consists of 4 bytes event_parameter", "data is not None: self.data = '' def data(self): return", "return PtpIpEventAck(data[4:]) elif self.cmdtype == 5: return PtpIpInitFail(data[4:]) elif self.cmdtype", "self.transaction_id)[0]}\") self.data = data[4:] class PtpIpPing(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\" def", "ID generieren self.transaction_id = struct.pack('I', 0x06) self.args = '' if", "= struct.pack('I', 0x04) class PtpIpInitFail(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\" def __init__(self,", "event_code = str(struct.unpack('H', data[offset:offset+2])[0]) # get the event_parameter which consists", "0x2020 Specification of Destination Unsupported \"\"\" def __init__(self, data=None): super(PtpIpCmdResponse,", "to the camera time.sleep(1) pass def send_ptpip_cmd(self, ptpip_packet): self.cmd_queue.append(ptpip_packet) def", "ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) data = ptpip_packet_reply.data while isinstance(ptpip_packet_reply, PtpIpDataPacket): data", "== 0x2001 and \\ ptpip_packet_reply.ptp_response_code == 0x2019): print(\"Cmd send successfully\")", "None: self.session_id = data[0:4] elif session_id is not None: self.session_id", "if self.param2 is not None: self.args = self.args + struct.pack('L',", "session): data = session.recv(4) (data_length,) = struct.unpack('I', data) print(f\"Packet length:", "else: print(f\"cmd reply is: {ptpip_packet_reply.ptp_response_code}\") # wait 1 second before", "GetThumb 0x100B DeleteObject 0x100C SendObjectInfo 0x100D SendObject 0x100E InitiateCapture 0x100F", 
"array for the PtpIpEvent object which will be replied self.events", "= PtpIpPacket().factory(data=self.recieve_data(session)) if isinstance(ptpip_packet_reply, PtpIpInitCmdAck): self.session_id = ptpip_packet_reply.session_id elif isinstance(ptpip_packet,", "= data[4:20] self.hostname = data[20:] class PtpIpEventReq(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\"", "data(self): return self.cmdtype + self.guid + self.hostname + self.version class", "self.event_queue.append(event) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) elif isinstance(ptpip_packet, PtpIpCmdRequest) and ptpip_packet.ptp_cmd ==", "0x9206 AfDriveCancel 0x9207 InitiateCaptureRecInMedia 0x9209 GetVendorStorageIDs 0x920A StartMovieRecInCard 0x920B EndMovieRec", "param5=None): super(PtpIpCmdRequest, self).__init__() self.cmdtype = struct.pack('I', 0x06) self.unkown = struct.pack('I',", "super(PtpIpPing, self).__init__() if data is not None: self.data = ''", "if data_length == len(data): self.object_queue.append(PtpIpDataObject(ptpip_packet.param1, data)) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) else:", "StoreAdded 0x4005 StoreRemoved 0x4006 DevicePropChanged 0x4007 ObjectInfoChanged 0x4008 DeviceInfoChanged 0x4009", "0x90C4 GetLargeThumb 0x90C7 GetEvent 0x90C8 DeviceReady 0x90C9 SetPreWbData 0x90CA GetVendorPropCodes", "This is a factory to produce an array of PtpIpEvent", "- len(data)) return data[4:] class PtpIpPacket(object): \"\"\"docstring for PtpIpCmd\"\"\" def", "0x4007 ObjectInfoChanged 0x4008 DeviceInfoChanged 0x4009 RequestObjectTransfer 0x400A StoreFull 0x400C StorageInfoChanged", "for PtpIpInitCmd\"\"\" def __init__(self, data=None): super(PtpIpInitCmdReq, self).__init__() self.cmdtype = struct.pack('I',", "is: {ptpip_packet_reply.ptp_response_code}\") # wait 1 second before new packets are", "0x1003 CloseSession 0x1004 GetStorageIDs 0x1005 GetStorageInfo 0x1006 GetNumObjects 0x1007 GetObjectHandles", 
"PtpIpEventFactory(data).get_events() for event in events: self.event_queue.append(event) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) elif", "0x1015 GetDevicePropValue 0x1016 SetDevicePropValue 0x101B GetPartialObject 0x90C0 InitiateCaptureRecInSdram 0x90C1 AfDrive", "# set an counter and an offset of 2 as", "for PtpIpInitCmd\"\"\" def __init__(self, data=None): super(PtpIpInitCmdAck, self).__init__() self.cmdtype = struct.pack('I',", "is not None: self.args = self.args + struct.pack('L', self.param4) if", "0x2015 No Valid ObjectInfo 0x2016 Invalid Code Format 0x2017 Unknown", "7: return PtpIpCmdResponse(data[4:]) elif self.cmdtype == 9: return PtpIpStartDataPacket(data[4:]) elif", "of the device ptpip_packet_reply = self.send_recieve_ptpip_packet(PtpIpCmdRequest(cmd=0x90C8), self.session) if isinstance(ptpip_packet_reply, PtpIpCmdResponse):", "if data is None: self.cmdtype = None else: print(f\"Cmd Type:", "0x9407 SetTransferListLock 0x9408 GetTransferList 0x9409 NotifyFileAcquisitionStart 0x940A NotifyFileAcquisitionEnd 0x940B GetSpecificSizeObject", "None: self.data = '' def data(self): return self.cmdtype class PtpIpEvent(object):", "= [] # get the amount of events passed from", "Code 0x2018 Capture Already Terminated 0x2019 Device Busy 0x201A Invalid", "by 6 to get to the next event_code and event_parameter", "= self.connect(host=host, port=port) self.send_recieve_ptpip_packet(PtpIpInitCmdReq(), self.session) self.session_events = self.connect(host=host, port=port) self.send_recieve_ptpip_packet(PtpIpEventReq(),", "data=None): if data is None: self.cmdtype = None else: print(f\"Cmd", "0x2010 No Thumbnail Present 0x2011 SelfTest Failed 0x2012 Partial Deletion", "data[4:] class PtpIpPacket(object): \"\"\"docstring for PtpIpCmd\"\"\" def __init__(self): super(PtpIpPacket, self).__init__()", "Denied 0x2010 No Thumbnail Present 0x2011 SelfTest Failed 0x2012 Partial", "while (data_length) > len(data): data += session.recv(data_length 
- len(data)) return", "super(PtpIpEventReq, self).__init__() self.cmdtype = struct.pack('I', 0x03) self.session_id = None if", "session_id def data(self): if self.session_id: return self.cmdtype + self.session_id return", "Todo: Transaction ID generieren self.transaction_id = struct.pack('I', 0x06) self.args =", "itself if it is not specified in the package if", "elif self.cmdtype == 7: return PtpIpCmdResponse(data[4:]) elif self.cmdtype == 9:", "return PtpIpPing(data[4:]) def data(self): pass class PtpIpInitCmdReq(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\"", "data[4:] class PtpIpCancelTransaction(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\" def __init__(self, data=None): self.cmdtype", "ptpip_packet_reply.ptp_response_code == 0x2019): print(\"Cmd send successfully\") else: print(f\"cmd reply is:", "param4 self.param5 = param5 # Todo: Transaction ID generieren self.transaction_id", "ptpip_packet.session_id = self.session_id self.send_data(ptpip_packet.data(), session) def send_data(self, data, session): session.send(struct.pack('I',", "if data is not None: self.transaction_id = data[0:4] class PtpIpEndDataPacket(PtpIpPacket):", "print(f\"Packet length: {data_length}\") while (data_length) > len(data): data += session.recv(data_length", "struct.pack('I', 0x07) if data is not None: self.ptp_response_code = struct.unpack('H',", "return self.cmdtype + self.unkown + struct.pack('H', self.ptp_cmd) + \\ self.transaction_id", "self.unkown = struct.pack('I', 0x01) self.ptp_cmd = cmd self.param1 = param1", "+ struct.pack('H', self.ptp_cmd) + \\ self.transaction_id + self.args class PtpIpCmdResponse(PtpIpPacket):", "StoreRemoved 0x4006 DevicePropChanged 0x4007 ObjectInfoChanged 0x4008 DeviceInfoChanged 0x4009 RequestObjectTransfer 0x400A", "print(f\"cmd reply is: {ptpip_packet_reply.ptp_response_code}\") # wait 1 second before new", "DevicePropChanged 0x4007 ObjectInfoChanged 0x4008 DeviceInfoChanged 0x4009 RequestObjectTransfer 0x400A StoreFull 
0x400C", "reply onto a propper PtpIpPing packet so i am querying", "commands, second for events self.session = self.connect(host=host, port=port) self.send_recieve_ptpip_packet(PtpIpInitCmdReq(), self.session)", "data[offset:offset+2])[0]) # get the event_parameter which consists of 4 bytes", "len(data): events = PtpIpEventFactory(data).get_events() for event in events: self.event_queue.append(event) ptpip_packet_reply", "== 7: return PtpIpCmdResponse(data[4:]) elif self.cmdtype == 9: return PtpIpStartDataPacket(data[4:])", "PtpIpInitCmdAck(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\" def __init__(self, data=None): super(PtpIpInitCmdAck, self).__init__() self.cmdtype", "PtpIpStartDataPacket): data_length = struct.unpack('I', ptpip_packet_reply.length)[0] ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) data =", "data reply from a GetEvent request 0x90C7 \"\"\" def __init__(self,", "CaptureCompleteRecInSdram 0xC105 RecordingInterrupted \"\"\" def __init__(self, event_code, event_parameter): super(PtpIpEvent, self).__init__()", "class PtpIpEventAck(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\" def __init__(self, data=None): super(PtpIpEventAck, self).__init__()", "None: self.cmdtype = None else: print(f\"Cmd Type: {struct.unpack('I', data[0:4])[0]}\") self.cmdtype", "0x90C7: self.send_data(ptpip_packet.data(), session) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) if isinstance(ptpip_packet_reply, PtpIpStartDataPacket): data_length", "self.data = data[4:] class PtpIpCancelTransaction(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\" def __init__(self,", "= struct.pack('I', 0x06) self.unkown = struct.pack('I', 0x01) self.ptp_cmd = cmd", "= PtpIpEventFactory(data).get_events() for event in events: self.event_queue.append(event) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session))", "\"\"\"docstring for PtpIpInitCmd\"\"\" def __init__(self, data=None): 
super(PtpIpInitCmdAck, self).__init__() self.cmdtype =", "self.session_id self.send_data(ptpip_packet.data(), session) def send_data(self, data, session): session.send(struct.pack('I', len(data) +", "StartMovieRecInCard 0x920B EndMovieRec 0x920C TerminateCapture 0x9400 GetPartialObjectHighSpeed 0x9407 SetTransferListLock 0x9408", "= param4 self.param5 = param5 # Todo: Transaction ID generieren", "self.send_data(ptpip_packet.data(), session) # set the session id of the object", "cmd=None, param1=None, param2=None, param3=None, param4=None, param5=None): super(PtpIpCmdRequest, self).__init__() self.cmdtype =", "socket.SO_KEEPALIVE, 1) s.connect((host, port)) except socket.error as message: if s:", "if it is not specified in the package if ptpip_packet.session_id", "self.guid = data[4:20] self.hostname = data[20:] class PtpIpEventReq(PtpIpPacket): \"\"\"docstring for", "second for events self.session = self.connect(host=host, port=port) self.send_recieve_ptpip_packet(PtpIpInitCmdReq(), self.session) self.session_events", "PtpIpPacket().factory(data=self.recieve_data(session)) if data_length == len(data): self.object_queue.append(PtpIpDataObject(ptpip_packet.param1, data)) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session))", "0x1006 GetNumObjects 0x1007 GetObjectHandles 0x1008 GetObjectInfo 0x1009 GetObject 0x100A GetThumb", "self.args = '' if self.param1 is not None: self.args =", "PtpIpStartDataPacket(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\" def __init__(self, data=None): self.cmdtype = struct.pack('I',", "def data(self): if self.session_id: return self.cmdtype + self.session_id return self.cmdtype", "param3 self.param4 = param4 self.param5 = param5 # Todo: Transaction", "not None: self.transaction_id = data[0:4] print(f\"transaction_id: {struct.unpack('I', self.transaction_id)[0]}\") self.data =", "= struct.pack('I', 0x11) super(PtpIpCancelTransaction, self).__init__() if data is not None:", "send_recieve_ptpip_packet(self, 
ptpip_packet, session): if isinstance(ptpip_packet, PtpIpInitCmdReq): self.send_data(ptpip_packet.data(), session) # set", "data[2:6] self.args = data[6:] class PtpIpStartDataPacket(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\" def", "Unknown Vendor Code 0x2018 Capture Already Terminated 0x2019 Device Busy", "= PtpIpCmdRequest(cmd=0x1002, param1=struct.unpack('L', self.session_id)[0]) self.send_recieve_ptpip_packet(ptip_cmd, self.session) def communication_thread(self): while True:", "struct.pack('I', 0x12) super(PtpIpEndDataPacket, self).__init__() if data is not None: self.transaction_id", "0x200C Store Full 0x200D Object WriteProtected 0x200E Store Read-Only 0x200F", "of two bytes event_code = str(struct.unpack('H', data[offset:offset+2])[0]) # get the", "Available 0x2014 Specification By Format Unsupported 0x2015 No Valid ObjectInfo", "data(self): return self.cmdtype + self.unkown + struct.pack('H', self.ptp_cmd) + \\", "= data[0:4] print(f\"transaction_id: {struct.unpack('I', self.transaction_id)[0]}\") self.data = data[4:] class PtpIpPing(PtpIpPacket):", "socket.gethostname() + '\\x00' self.hostname = self.hostname.encode('utf-16-le') else: self.guid = data[0:16]", "super(PtpIpCmdRequest, self).__init__() self.cmdtype = struct.pack('I', 0x06) self.unkown = struct.pack('I', 0x01)", "None: self.transaction_id = data[0:4] print(f\"transaction_id: {struct.unpack('I', self.transaction_id)[0]}\") self.data = data[4:]", "amount_of_events: # get the event_code which consists of two bytes", "is None: ptpip_packet.session_id = self.session_id self.send_data(ptpip_packet.data(), session) def send_data(self, data,", "struct.unpack('H', data[0:2])[0] self.transaction_id = data[2:6] self.args = data[6:] class PtpIpStartDataPacket(PtpIpPacket):", "send_ptpip_event_req(self, ptpip_packet, session): # add the session id of the", "def open(self, host='192.168.1.1', port=15740): # Open both session, first one", "== 0x2019): print(\"Cmd send successfully\") else: 
print(f\"cmd reply is: {ptpip_packet_reply.ptp_response_code}\")", "self.send_recieve_ptpip_packet(PtpIpCmdRequest(cmd=0x90C8), self.session) if isinstance(ptpip_packet_reply, PtpIpCmdResponse): time.sleep(1) continue else: # get", "not None: self.args = self.args + struct.pack('L', self.param1) if self.param2", "0x100C SendObjectInfo 0x100D SendObject 0x100E InitiateCapture 0x100F FormatStore 0x1014 GetDevicePropDesc", "OK 0x2002 General Error 0x2003 Session Not Open 0x2004 Invalid", "Invalid ObjectHandle 0x200A DeviceProp Not Supported 0x200B Invalid ObjectFormatCode 0x200C", "= data[6:] class PtpIpStartDataPacket(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\" def __init__(self, data=None):", "self.send_ptpip_event_req(ptpip_packet, session) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) elif isinstance(ptpip_packet, PtpIpCmdRequest) and ptpip_packet.ptp_cmd", "None: self.args = self.args + struct.pack('L', self.param1) if self.param2 is", "None: self.session_id = data[0:4] self.guid = data[4:20] self.hostname = data[20:]", "PtpIpPacket().factory(data=self.recieve_data(session)) else: self.send_data(ptpip_packet.data(), session) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) return ptpip_packet_reply def", "packet so i am querying the status # of the", "ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) return ptpip_packet_reply def send_ptpip_event_req(self, ptpip_packet, session): #", "self).__init__() def factory(self, data=None): if data is None: self.cmdtype =", "= struct.pack('I', 0x01) self.ptp_cmd = cmd self.param1 = param1 self.param2", "the event_parameter which consists of 4 bytes event_parameter = str(struct.unpack('I',", "for PtpIpDataObject\"\"\" def __init__(self, object_handle, data): super(PtpIpDataObject, self).__init__() self.object_handle =", "RequestObjectTransfer 0x400A StoreFull 0x400C StorageInfoChanged 0x400D CaptureComplete 0xC101 
ObjectAddedInSdram 0xC102", "reply is: {ptpip_packet_reply.ptp_response_code}\") # wait 1 second before new packets", "Start_Data_Packet\"\"\" def __init__(self, data=None): self.cmdtype = struct.pack('I', 0x12) super(PtpIpEndDataPacket, self).__init__()", "if (ptpip_packet_reply.ptp_response_code == 0x2001 and \\ ptpip_packet_reply.ptp_response_code == 0x2019): print(\"Cmd", "get_events(self): return self.events class PtpIpDataObject(object): \"\"\"docstring for PtpIpDataObject\"\"\" def __init__(self,", "= self.hostname.encode('utf-16-le') else: self.guid = data[0:16] self.hostname = data[16:0] def", "data[0:2])[0] self.transaction_id = data[2:6] self.args = data[6:] class PtpIpStartDataPacket(PtpIpPacket): \"\"\"docstring", "ptpip_packet.session_id is None: ptpip_packet.session_id = self.session_id self.send_data(ptpip_packet.data(), session) def send_data(self,", "GetEvent 0x90C8 DeviceReady 0x90C9 SetPreWbData 0x90CA GetVendorPropCodes 0x90CB AfAndCaptureRecInSdram 0x90CC", "package if ptpip_packet.session_id is None: ptpip_packet.session_id = self.session_id self.send_data(ptpip_packet.data(), session)", "param5 # Todo: Transaction ID generieren self.transaction_id = struct.pack('I', 0x06)", "the package if ptpip_packet.session_id is None: ptpip_packet.session_id = self.session_id self.send_data(ptpip_packet.data(),", "if data is not None: self.transaction_id = data[0:4] print(f\"transaction_id: {struct.unpack('I',", "Description 0x1001 GetDeviceInfo 0x1002 OpenSession 0x1003 CloseSession 0x1004 GetStorageIDs 0x1005", "0x920C TerminateCapture 0x9400 GetPartialObjectHighSpeed 0x9407 SetTransferListLock 0x9408 GetTransferList 0x9409 NotifyFileAcquisitionStart", "self.data = data[4:] class PtpIpPing(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\" def __init__(self,", "the device ptpip_packet_reply = self.send_recieve_ptpip_packet(PtpIpCmdRequest(cmd=0x90C8), self.session) if isinstance(ptpip_packet_reply, PtpIpCmdResponse): time.sleep(1)", "data=None): 
super(PtpIpInitCmdReq, self).__init__() self.cmdtype = struct.pack('I', 0x01) self.version = struct.pack('>I',", "ptpip_packet_reply def send_ptpip_event_req(self, ptpip_packet, session): # add the session id", "Transaction ID generieren self.transaction_id = struct.pack('I', 0x06) self.args = ''", "CancelTransaction 0x4002 ObjectAdded 0x4003 ObjectRemoved 0x4004 StoreAdded 0x4005 StoreRemoved 0x4006", "will be replied self.events = [] # get the amount", "the next event_code and event_parameter pair offset = offset +", "DeleteCustomPicCtrl 0x90CF GetPicCtrlCapability 0x9201 StartLiveView 0x9202 EndLiveView 0x9203 GetLiveViewImage 0x9204", "def send_ptpip_cmd(self, ptpip_packet): self.cmd_queue.append(ptpip_packet) def connect(self, host='192.168.1.1', port=15740): try: s", "empty array for the PtpIpEvent object which will be replied", "0x06) self.unkown = struct.pack('I', 0x01) self.ptp_cmd = cmd self.param1 =", "GetDevicePropValue 0x1016 SetDevicePropValue 0x101B GetPartialObject 0x90C0 InitiateCaptureRecInSdram 0x90C1 AfDrive 0x90C2", "data[0:4] class PtpIpEndDataPacket(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\" def __init__(self, data=None): self.cmdtype", "for Start_Data_Packet\"\"\" def __init__(self, data=None): self.cmdtype = struct.pack('I', 0x11) super(PtpIpCancelTransaction,", "SelfTest Failed 0x2012 Partial Deletion 0x2013 Store Not Available 0x2014", "0x1001 GetDeviceInfo 0x1002 OpenSession 0x1003 CloseSession 0x1004 GetStorageIDs 0x1005 GetStorageInfo", "ObjectFormatCode 0x200C Store Full 0x200D Object WriteProtected 0x200E Store Read-Only", "0x4005 StoreRemoved 0x4006 DevicePropChanged 0x4007 ObjectInfoChanged 0x4008 DeviceInfoChanged 0x4009 RequestObjectTransfer", "= data[0:4] elif session_id is not None: self.session_id = session_id", "elif self.cmdtype == 10: return PtpIpDataPacket(data[4:]) elif self.cmdtype == 12:", "from command the queue ptip_cmd = self.cmd_queue.pop() ptpip_packet_reply = 
self.send_recieve_ptpip_packet(ptip_cmd,", "'\\x00' self.hostname = self.hostname.encode('utf-16-le') else: self.guid = data[0:16] self.hostname =", "print(f\"Could not open socket: {message}\") return s def send_recieve_ptpip_packet(self, ptpip_packet,", "session) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) if isinstance(ptpip_packet_reply, PtpIpStartDataPacket): data_length = struct.unpack('I',", "isinstance(ptpip_packet, PtpIpInitCmdReq): self.send_data(ptpip_packet.data(), session) # set the session id of", "self.cmdtype = struct.pack('I', 0x10) super(PtpIpDataPacket, self).__init__() if data is not", "Unsupported \"\"\" def __init__(self, data=None): super(PtpIpCmdResponse, self).__init__() self.cmdtype = struct.pack('I',", "first one for for commands, second for events self.session =", "a factory to produce an array of PtpIpEvent objects if", "counter = counter + 1 def get_events(self): return self.events class", "elif self.cmdtype == 4: return PtpIpEventAck(data[4:]) elif self.cmdtype == 5:", "def __init__(self, data=None): self.cmdtype = struct.pack('I', 0x11) super(PtpIpCancelTransaction, self).__init__() if", "ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) else: self.send_data(ptpip_packet.data(), session) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) return", "before new packets are processed/send to the camera time.sleep(1) pass", "data, session): session.send(struct.pack('I', len(data) + 4) + data) def recieve_data(self,", "0x4004 StoreAdded 0x4005 StoreRemoved 0x4006 DevicePropChanged 0x4007 ObjectInfoChanged 0x4008 DeviceInfoChanged", "time.sleep(1) continue else: # get the next command from command", "0x920A StartMovieRecInCard 0x920B EndMovieRec 0x920C TerminateCapture 0x9400 GetPartialObjectHighSpeed 0x9407 SetTransferListLock", "session.recv(data_length - len(data)) return data[4:] class PtpIpPacket(object): \"\"\"docstring for PtpIpCmd\"\"\"", 
"self.event_parameter = int(event_parameter) class PtpIpEventFactory(object): \"\"\" This is a factory", "reply is of type PtpIpInitCmdAck ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) if isinstance(ptpip_packet_reply,", "= None self.session_id = None self.cmd_queue = [] self.event_queue =", "Type: {struct.unpack('I', data[0:4])[0]}\") self.cmdtype = struct.unpack('I', data[0:4])[0] if self.cmdtype ==", "PtpIpPing(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\" def __init__(self, data=None): self.cmdtype = struct.pack('I',", "session): session.send(struct.pack('I', len(data) + 4) + data) def recieve_data(self, session):", "Access Denied 0x2010 No Thumbnail Present 0x2011 SelfTest Failed 0x2012", "data=None): super(PtpIpEventAck, self).__init__() self.cmdtype = struct.pack('I', 0x04) class PtpIpInitFail(PtpIpPacket): \"\"\"docstring", "Terminated 0x2019 Device Busy 0x201A Invalid ParentObject 0x201B Invalid DeviceProp", "data[4:8] class PtpIpDataPacket(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\" def __init__(self, data=None): self.cmdtype", "= PtpIpPacket().factory(data=self.recieve_data(session)) else: self.send_data(ptpip_packet.data(), session) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) return ptpip_packet_reply", "self.connect(host=host, port=port) self.send_recieve_ptpip_packet(PtpIpInitCmdReq(), self.session) self.session_events = self.connect(host=host, port=port) self.send_recieve_ptpip_packet(PtpIpEventReq(), self.session_events)", "else: self.guid = data[0:16] self.hostname = data[16:0] def data(self): return", "self.cmd_queue.pop() ptpip_packet_reply = self.send_recieve_ptpip_packet(ptip_cmd, self.session) if (ptpip_packet_reply.ptp_response_code == 0x2001 and", "None: self.ptp_response_code = struct.unpack('H', data[0:2])[0] self.transaction_id = data[2:6] self.args =", "self.hostname + self.version class PtpIpInitCmdAck(PtpIpPacket): \"\"\"docstring for 
PtpIpInitCmd\"\"\" def __init__(self,", "PtpIpPacket().factory(data=self.recieve_data(session)) elif isinstance(ptpip_packet, PtpIpCmdRequest) and ptpip_packet.ptp_cmd == 0x90C7: self.send_data(ptpip_packet.data(), session)", "data += session.recv(data_length - len(data)) return data[4:] class PtpIpPacket(object): \"\"\"docstring", "RecordingInterrupted \"\"\" def __init__(self, event_code, event_parameter): super(PtpIpEvent, self).__init__() self.event_code =", "self.guid + self.hostname + self.version class PtpIpInitCmdAck(PtpIpPacket): \"\"\"docstring for PtpIpInitCmd\"\"\"", "new packets are processed/send to the camera time.sleep(1) pass def", "self.cmdtype = struct.pack('I', 0x13) super(PtpIpPing, self).__init__() if data is not", "GetLiveViewImage 0x9204 MfDrive 0x9205 ChangeAfArea 0x9206 AfDriveCancel 0x9207 InitiateCaptureRecInMedia 0x9209", "Code Format 0x2017 Unknown Vendor Code 0x2018 Capture Already Terminated", "OpenSession ptip_cmd = PtpIpCmdRequest(cmd=0x1002, param1=struct.unpack('L', self.session_id)[0]) self.send_recieve_ptpip_packet(ptip_cmd, self.session) def communication_thread(self):", "not None: self.transaction_id = data[0:4] self.data = data[4:] class PtpIpCancelTransaction(PtpIpPacket):", "Invalid StorageID 0x2009 Invalid ObjectHandle 0x200A DeviceProp Not Supported 0x200B", "GetVendorStorageIDs 0x920A StartMovieRecInCard 0x920B EndMovieRec 0x920C TerminateCapture 0x9400 GetPartialObjectHighSpeed 0x9407", "+ \\ self.transaction_id + self.args class PtpIpCmdResponse(PtpIpPacket): \"\"\" ResponseCode Description", "__init__(self, object_handle, data): super(PtpIpDataObject, self).__init__() self.object_handle = object_handle self.data =", "return PtpIpEventReq(data[4:]) elif self.cmdtype == 4: return PtpIpEventAck(data[4:]) elif self.cmdtype", "0x03) self.session_id = None if data is not None: self.session_id", "if data is not None: self.transaction_id = data[0:4] self.data =", "ptpip_packet_reply = 
PtpIpPacket().factory(data=self.recieve_data(session)) if data_length == len(data): events = PtpIpEventFactory(data).get_events()", "self.transaction_id = data[0:4] self.data = data[4:] class PtpIpCancelTransaction(PtpIpPacket): \"\"\"docstring for", "0x100F FormatStore 0x1014 GetDevicePropDesc 0x1015 GetDevicePropValue 0x1016 SetDevicePropValue 0x101B GetPartialObject", "AfDrive 0x90C2 ChangeCameraMode 0x90C3 DeleteImagesInSdram 0x90C4 GetLargeThumb 0x90C7 GetEvent 0x90C8", "+ ptpip_packet_reply.data ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) if data_length == len(data): events", "\"\"\"docstring for Start_Data_Packet\"\"\" def __init__(self, data=None): self.cmdtype = struct.pack('I', 0x13)", "self.cmdtype == 3: return PtpIpEventReq(data[4:]) elif self.cmdtype == 4: return", "is not None: self.session_id = session_id def data(self): if self.session_id:", "= ptpip_packet_reply.data while isinstance(ptpip_packet_reply, PtpIpDataPacket): data = data + ptpip_packet_reply.data", "self.args + struct.pack('L', self.param5) def data(self): return self.cmdtype + self.unkown", "Error 0x2003 Session Not Open 0x2004 Invalid TransactionID 0x2005 Operation", "return PtpIpDataPacket(data[4:]) elif self.cmdtype == 12: return PtpIpEndDataPacket(data[4:]) elif self.cmdtype", "data=None): self.cmdtype = struct.pack('I', 0x13) super(PtpIpPing, self).__init__() if data is", "GetSpecificSizeObject 0x9801 GetObjectPropsSupported 0x9802 GetObjectPropDesc 0x9803 GetObjectPropValue 0x9805 GetObjectPropList \"\"\"", "session) def send_data(self, data, session): session.send(struct.pack('I', len(data) + 4) +", "not None: self.transaction_id = data[0:4] self.length = data[4:8] class PtpIpDataPacket(PtpIpPacket):", "0x100A GetThumb 0x100B DeleteObject 0x100C SendObjectInfo 0x100D SendObject 0x100E InitiateCapture", "socket: {message}\") return s def send_recieve_ptpip_packet(self, ptpip_packet, session): if isinstance(ptpip_packet,", "of the object if the 
reply is of type PtpIpInitCmdAck", "self.param3) if self.param4 is not None: self.args = self.args +", "def __init__(self, data=None, cmd=None, param1=None, param2=None, param3=None, param4=None, param5=None): super(PtpIpCmdRequest,", "to get to the next event_code and event_parameter pair offset", "0x90C0 InitiateCaptureRecInSdram 0x90C1 AfDrive 0x90C2 ChangeCameraMode 0x90C3 DeleteImagesInSdram 0x90C4 GetLargeThumb", "= data[4:] class PtpIpPing(PtpIpPacket): \"\"\"docstring for Start_Data_Packet\"\"\" def __init__(self, data=None):", "= ptpip_packet_reply.session_id elif isinstance(ptpip_packet, PtpIpEventReq): self.send_ptpip_event_req(ptpip_packet, session) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session))", "the factory amount_of_events = struct.unpack('H', data[0:2])[0] # set an counter", "be replied self.events = [] # get the amount of", "self.event_queue = [] self.object_queue = [] def open(self, host='192.168.1.1', port=15740):", "PtpIpCmdRequest(data[4:]) elif self.cmdtype == 7: return PtpIpCmdResponse(data[4:]) elif self.cmdtype ==", "isinstance(ptpip_packet_reply, PtpIpDataPacket): data = data + ptpip_packet_reply.data ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session))", "PtpIpDataPacket): data = data + ptpip_packet_reply.data ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) if", "self.session_id = None if data is not None: self.session_id =", "\"\"\"docstring for PtpIpInitCmd\"\"\" def __init__(self, data=None): super(PtpIpInitFail, self).__init__() self.cmdtype =", "Description 0x4001 CancelTransaction 0x4002 ObjectAdded 0x4003 ObjectRemoved 0x4004 StoreAdded 0x4005", "factory amount_of_events = struct.unpack('H', data[0:2])[0] # set an counter and", "is not None: self.data = '' def data(self): return self.cmdtype", "self.send_data(ptpip_packet.data(), session) ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) return ptpip_packet_reply def 
send_ptpip_event_req(self, ptpip_packet,", "offset = 2 while counter <= amount_of_events: # get the", "self.param5 = param5 # Todo: Transaction ID generieren self.transaction_id =", "Open 0x201F Transaction Cancelled 0x2020 Specification of Destination Unsupported \"\"\"", "is not None: self.args = self.args + struct.pack('L', self.param2) if", "self.param2 = param2 self.param3 = param3 self.param4 = param4 self.param5", "def __init__(self): super(PtpIpConnection, self).__init__() self.session = None self.session_events = None", "Destination Unsupported \"\"\" def __init__(self, data=None): super(PtpIpCmdResponse, self).__init__() self.cmdtype =", "for the PtpIpEvent object which will be replied self.events =", "for Start_Data_Packet\"\"\" def __init__(self, data=None): self.cmdtype = struct.pack('I', 0x12) super(PtpIpEndDataPacket,", "6 counter = counter + 1 def get_events(self): return self.events", "0x201F Transaction Cancelled 0x2020 Specification of Destination Unsupported \"\"\" def", "AfAndCaptureRecInSdram 0x90CC GetPicCtrlData 0x90CD SetPicCtrlData 0x90CE DeleteCustomPicCtrl 0x90CF GetPicCtrlCapability 0x9201", "__init__(self, data=None): super(PtpIpCmdResponse, self).__init__() self.cmdtype = struct.pack('I', 0x07) if data", "data = session.recv(4) (data_length,) = struct.unpack('I', data) print(f\"Packet length: {data_length}\")", "0x1016 SetDevicePropValue 0x101B GetPartialObject 0x90C0 InitiateCaptureRecInSdram 0x90C1 AfDrive 0x90C2 ChangeCameraMode", "pass def send_ptpip_cmd(self, ptpip_packet): self.cmd_queue.append(ptpip_packet) def connect(self, host='192.168.1.1', port=15740): try:", "= None else: print(f\"Cmd Type: {struct.unpack('I', data[0:4])[0]}\") self.cmdtype = struct.unpack('I',", "send_data(self, data, session): session.send(struct.pack('I', len(data) + 4) + data) def", "is not None: self.args = self.args + struct.pack('L', self.param5) def", "self).__init__() self.event_code = int(event_code) self.event_parameter = int(event_parameter) 
class PtpIpEventFactory(object): \"\"\"", "struct.pack('I', 0x01) self.ptp_cmd = cmd self.param1 = param1 self.param2 =", "# increase the offset by 6 to get to the", "which will be replied self.events = [] # get the", "struct.pack('L', self.param4) if self.param5 is not None: self.args = self.args", "self).__init__() if data is not None: self.transaction_id = data[0:4] self.data", "Operation Code Description 0x1001 GetDeviceInfo 0x1002 OpenSession 0x1003 CloseSession 0x1004", "# wait 1 second before new packets are processed/send to", "DeviceReady 0x90C9 SetPreWbData 0x90CA GetVendorPropCodes 0x90CB AfAndCaptureRecInSdram 0x90CC GetPicCtrlData 0x90CD", "= session.recv(4) (data_length,) = struct.unpack('I', data) print(f\"Packet length: {data_length}\") while", "first two bytes are already processed counter = 1 offset", "it got passd a data reply from a GetEvent request", "2 as the first two bytes are already processed counter", "self).__init__() self.session = None self.session_events = None self.session_id = None", "the session id of the object itself if it is", "processed counter = 1 offset = 2 while counter <=", "data_length = struct.unpack('I', ptpip_packet_reply.length)[0] ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session)) data = ptpip_packet_reply.data", "self.session) if (ptpip_packet_reply.ptp_response_code == 0x2001 and \\ ptpip_packet_reply.ptp_response_code == 0x2019):", "# do a ping receive a pong (same as ping)", "the connection alive # couldnt get any reply onto a", "super(PtpIpConnection, self).__init__() self.session = None self.session_events = None self.session_id =", "__init__(self, event_code, event_parameter): super(PtpIpEvent, self).__init__() self.event_code = int(event_code) self.event_parameter =", "Session Already Open 0x201F Transaction Cancelled 0x2020 Specification of Destination", "return self.cmdtype class PtpIpEvent(object): \"\"\" EventCode Description 0x4001 CancelTransaction 0x4002", "successfully\") else: 
print(f\"cmd reply is: {ptpip_packet_reply.ptp_response_code}\") # wait 1 second", "camera time.sleep(1) pass def send_ptpip_cmd(self, ptpip_packet): self.cmd_queue.append(ptpip_packet) def connect(self, host='192.168.1.1',", "if data is not None: self.ptp_response_code = struct.unpack('H', data[0:2])[0] self.transaction_id", "self).__init__() # create an empty array for the PtpIpEvent object", "# get the next command from command the queue ptip_cmd", "elif session_id is not None: self.session_id = session_id def data(self):", "self.hostname = self.hostname.encode('utf-16-le') else: self.guid = data[0:16] self.hostname = data[16:0]" ]
[ "aval._morphology() out = StringIO() morph.export(out, 0) # we're printing it", "an expected failure right now, as morphology is not implemented", "object to work with. aval = ctx(Worm)().get_neuron_network().aneuron('AVAL') #Get the morphology", "would normally do something else with the morphology object. print(str(out.read()))", "load morphologies of certain cells from the database. \"\"\" #this", "from __future__ import absolute_import from __future__ import print_function import PyOpenWorm", "morph.export(out, 0) # we're printing it here, but we would", "associated with the Cell. Returns a neuroml.Morphology object. morph =", "as P from PyOpenWorm.context import Context from PyOpenWorm.worm import Worm", "the morphology associated with the Cell. Returns a neuroml.Morphology object.", "StringIO #Connect to database. with P.connect('default.conf') as conn: ctx =", "is an expected failure right now, as morphology is not", "database. \"\"\" #this is an expected failure right now, as", "from PyOpenWorm.context import Context from PyOpenWorm.worm import Worm from six", "= ctx(Worm)().get_neuron_network().aneuron('AVAL') #Get the morphology associated with the Cell. Returns", "PyOpenWorm.worm import Worm from six import StringIO #Connect to database.", "the Cell. Returns a neuroml.Morphology object. morph = aval._morphology() out", "\"\"\" How to load morphologies of certain cells from the", "from the database. \"\"\" #this is an expected failure right", "a neuroml.Morphology object. morph = aval._morphology() out = StringIO() morph.export(out,", "0) # we're printing it here, but we would normally", "import Context from PyOpenWorm.worm import Worm from six import StringIO", "Cell. Returns a neuroml.Morphology object. morph = aval._morphology() out =", "ctx(Worm)().get_neuron_network().aneuron('AVAL') #Get the morphology associated with the Cell. Returns a", "certain cells from the database. \"\"\" #this is an expected", "object. 
morph = aval._morphology() out = StringIO() morph.export(out, 0) #", "#Connect to database. with P.connect('default.conf') as conn: ctx = Context(ident=\"http://openworm.org/data\",", "PyOpenWorm.context import Context from PyOpenWorm.worm import Worm from six import", "cells from the database. \"\"\" #this is an expected failure", "not implemented from __future__ import absolute_import from __future__ import print_function", "absolute_import from __future__ import print_function import PyOpenWorm as P from", "to database. with P.connect('default.conf') as conn: ctx = Context(ident=\"http://openworm.org/data\", conf=conn.conf).stored", "from __future__ import print_function import PyOpenWorm as P from PyOpenWorm.context", "import absolute_import from __future__ import print_function import PyOpenWorm as P", "but we would normally do something else with the morphology", "to work with. aval = ctx(Worm)().get_neuron_network().aneuron('AVAL') #Get the morphology associated", "aval = ctx(Worm)().get_neuron_network().aneuron('AVAL') #Get the morphology associated with the Cell.", "work with. aval = ctx(Worm)().get_neuron_network().aneuron('AVAL') #Get the morphology associated with", "from six import StringIO #Connect to database. with P.connect('default.conf') as", "as morphology is not implemented from __future__ import absolute_import from", "Returns a neuroml.Morphology object. morph = aval._morphology() out = StringIO()", "#this is an expected failure right now, as morphology is", "import StringIO #Connect to database. with P.connect('default.conf') as conn: ctx", "P from PyOpenWorm.context import Context from PyOpenWorm.worm import Worm from", "neuroml.Morphology object. morph = aval._morphology() out = StringIO() morph.export(out, 0)", "is not implemented from __future__ import absolute_import from __future__ import", "conf=conn.conf).stored #Create a new Cell object to work with. 
aval", "we're printing it here, but we would normally do something", "Worm from six import StringIO #Connect to database. with P.connect('default.conf')", "import print_function import PyOpenWorm as P from PyOpenWorm.context import Context", "printing it here, but we would normally do something else", "expected failure right now, as morphology is not implemented from", "with P.connect('default.conf') as conn: ctx = Context(ident=\"http://openworm.org/data\", conf=conn.conf).stored #Create a", "How to load morphologies of certain cells from the database.", "with the Cell. Returns a neuroml.Morphology object. morph = aval._morphology()", "= StringIO() morph.export(out, 0) # we're printing it here, but", "database. with P.connect('default.conf') as conn: ctx = Context(ident=\"http://openworm.org/data\", conf=conn.conf).stored #Create", "out = StringIO() morph.export(out, 0) # we're printing it here,", "# we're printing it here, but we would normally do", "PyOpenWorm as P from PyOpenWorm.context import Context from PyOpenWorm.worm import", "Cell object to work with. aval = ctx(Worm)().get_neuron_network().aneuron('AVAL') #Get the", "print_function import PyOpenWorm as P from PyOpenWorm.context import Context from", "to load morphologies of certain cells from the database. \"\"\"", "<gh_stars>0 \"\"\" How to load morphologies of certain cells from", "a new Cell object to work with. aval = ctx(Worm)().get_neuron_network().aneuron('AVAL')", "it here, but we would normally do something else with", "morphology associated with the Cell. Returns a neuroml.Morphology object. 
morph", "we would normally do something else with the morphology object.", "morphology is not implemented from __future__ import absolute_import from __future__", "implemented from __future__ import absolute_import from __future__ import print_function import", "ctx = Context(ident=\"http://openworm.org/data\", conf=conn.conf).stored #Create a new Cell object to", "as conn: ctx = Context(ident=\"http://openworm.org/data\", conf=conn.conf).stored #Create a new Cell", "failure right now, as morphology is not implemented from __future__", "six import StringIO #Connect to database. with P.connect('default.conf') as conn:", "Context(ident=\"http://openworm.org/data\", conf=conn.conf).stored #Create a new Cell object to work with.", "Context from PyOpenWorm.worm import Worm from six import StringIO #Connect", "__future__ import absolute_import from __future__ import print_function import PyOpenWorm as", "morphologies of certain cells from the database. \"\"\" #this is", "\"\"\" #this is an expected failure right now, as morphology", "= aval._morphology() out = StringIO() morph.export(out, 0) # we're printing", "morph = aval._morphology() out = StringIO() morph.export(out, 0) # we're", "__future__ import print_function import PyOpenWorm as P from PyOpenWorm.context import", "with. aval = ctx(Worm)().get_neuron_network().aneuron('AVAL') #Get the morphology associated with the", "P.connect('default.conf') as conn: ctx = Context(ident=\"http://openworm.org/data\", conf=conn.conf).stored #Create a new", "import Worm from six import StringIO #Connect to database. with", "of certain cells from the database. \"\"\" #this is an", "conn: ctx = Context(ident=\"http://openworm.org/data\", conf=conn.conf).stored #Create a new Cell object", "new Cell object to work with. 
aval = ctx(Worm)().get_neuron_network().aneuron('AVAL') #Get", "StringIO() morph.export(out, 0) # we're printing it here, but we", "= Context(ident=\"http://openworm.org/data\", conf=conn.conf).stored #Create a new Cell object to work", "#Get the morphology associated with the Cell. Returns a neuroml.Morphology", "right now, as morphology is not implemented from __future__ import", "the database. \"\"\" #this is an expected failure right now,", "import PyOpenWorm as P from PyOpenWorm.context import Context from PyOpenWorm.worm", "from PyOpenWorm.worm import Worm from six import StringIO #Connect to", "here, but we would normally do something else with the", "now, as morphology is not implemented from __future__ import absolute_import", "#Create a new Cell object to work with. aval =" ]
[ "WORKFLOW_FORM m1f0.form_links = [ FormLink(xpath=\"true()\", form_id=m1f1.unique_id), ] self.assertXmlPartialEqual(self.get_xml('form_link_submodule'), factory.app.create_suite(), \"./entry\")", "case_type='visit', is_subcase=True) m2, m2f0 = factory.new_advanced_module('visit history', 'visit', parent_module=m1) factory.form_requires_case(m2f0,", "'load_episode_0') m1f0.post_form_workflow = WORKFLOW_FORM m1f0.form_links = [ FormLink(xpath=\"true()\", form_id=m2f0.unique_id, datums=[", "] self.assertXmlPartialEqual(self.get_xml('form_link_enikshay'), factory.app.create_suite(), \"./entry\") def test_return_to_parent_module(self): factory = AppFactory(build_version='2.9.0') m0,", "WORKFLOW_MODULE expected = \"\"\" <partial> <stack> <create> <command value=\"'m1'\"/> <datum", "factory.form_requires_case(m2f0, 'visit', parent_case_type='child') m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath=\"true()\",", "from corehq.apps.app_manager.const import ( AUTO_SELECT_RAW, AUTO_SELECT_CASE, WORKFLOW_FORM, WORKFLOW_MODULE, WORKFLOW_PREVIOUS, WORKFLOW_ROOT,", "'form_workflow') def test_basic(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('m0',", "def test_with_case_management_both_update(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('m0', 'frog')", "case_type='patient') m4f1 = factory.new_form(m4) factory.form_requires_case(m4f1, case_type='patient') factory.form_requires_case(m4f1, case_type='patient') factory.form_requires_case(m4f1, case_type='patient')", "def test_return_to_parent_module(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('enroll child',", "FormLink(xpath=\"true()\", form_id=m1f0.unique_id, datums=[ FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id_new_child_0\") ]), ] m1f0.post_form_workflow = WORKFLOW_FORM", "m1f0 = factory.new_advanced_module('episode registration', 'episode') 
factory.form_requires_case(m1f0, case_type='person') factory.form_opens_case(m1f0, case_type='episode', is_subcase=True,", "match up the # session variables between the source and", "that session variable references. # # See corehq.apps.app_manager.suite_xml.post_process.workflow._replace_session_references_in_stack factory =", "corehq.apps.app_manager.const import ( AUTO_SELECT_RAW, AUTO_SELECT_CASE, WORKFLOW_FORM, WORKFLOW_MODULE, WORKFLOW_PREVIOUS, WORKFLOW_ROOT, WORKFLOW_PARENT_MODULE,", "Stack create blocks do not update the session after each", "factory.app.get_modules(): for form in module.get_forms(): form.post_form_workflow = mode return factory.app", "factory.form_opens_case(m1f0, case_type='episode', is_subcase=True, is_extension=True) m2, m2f0 = factory.new_advanced_module('tests', 'episode') factory.form_requires_case(m2f0,", "CommandId('m0'), StackDatum(id='a', value=session_var('new_a')), StackDatum(id='b', value=session_var('new_b')), StackDatum(id='c', value=\"instance('casedb')/case/[@case_id = {a}]/index/parent\".format(a=session_var('a'))), StackDatum(id='d',", "= factory.new_basic_module('m0', 'frog') m1, m1f0 = factory.new_basic_module('m1', 'frog') m0f0.post_form_workflow =", "= factory.new_advanced_module('tests', 'episode') factory.form_requires_case(m2f0, 'episode') factory.advanced_form_autoloads(m2f0, AUTO_SELECT_CASE, 'host', 'load_episode_0') m1f0.post_form_workflow", "&lt; 7\", form_id=m1f0.unique_id) ] self.assertXmlPartialEqual(self.get_xml('form_link_basic'), factory.app.create_suite(), \"./entry[1]\") def test_with_case_management_both_update(self): factory", "parent_case_type='child') m2f0.post_form_workflow = WORKFLOW_PARENT_MODULE expected = \"\"\" <partial> <stack> <create>", "factory.advanced_form_autoloads(m2f0, AUTO_SELECT_CASE, 'host', 'load_episode_0') m1f0.post_form_workflow = WORKFLOW_FORM m1f0.form_links = [", "in clean: if isinstance(child, CommandId): clean_raw.append(child.id) else: clean_raw.append((child.id, 
child.value)) new_c", "FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id_new_child_0\") ]), ] m1f0.post_form_workflow = WORKFLOW_FORM m1f0.form_links = [", "child', 'child') factory.form_opens_case(m0f0) m1, m1f0 = factory.new_basic_module('child visit', 'child') factory.form_requires_case(m1f0)", "'host', 'load_episode_0') m1f0.post_form_workflow = WORKFLOW_FORM m1f0.form_links = [ FormLink(xpath=\"true()\", form_id=m2f0.unique_id,", "self._build_workflow_app(WORKFLOW_MODULE) self.assertXmlPartialEqual(self.get_xml('suite-workflow-module'), app.create_suite(), \"./entry\") def test_form_workflow_module_in_root(self): app = self._build_workflow_app(WORKFLOW_PREVIOUS) for", "Test that when linking between two forms in a submodule", "from django.test import SimpleTestCase from corehq.apps.app_manager.const import ( AUTO_SELECT_RAW, AUTO_SELECT_CASE,", "the session after each datum # so items put into", "form_id=m2f0.unique_id, datums=[ FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id\"), FormDatum(name='case_id_load_visit_0', xpath=\"instance('commcaresession')/session/data/case_id_new_visit_0\"), ]), FormLink(xpath=condition_for_xpath, form_id=m2f0.unique_id, datums=[", "being added to the session and then # later referenced.", "\"./entry[1]\") def test_link_to_child_module(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('enroll", "m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath='true()', form_id=m1f0.unique_id) ] self.assertXmlPartialEqual(self.get_xml('form_link_create_update_case'),", "m1f0 = factory.new_basic_module('m1', 'frog') factory.form_requires_case(m1f0) m1f1 = factory.new_form(m1) factory.form_opens_case(m1f1) m0f0.post_form_workflow", "import SimpleTestCase from corehq.apps.app_manager.const import ( AUTO_SELECT_RAW, AUTO_SELECT_CASE, WORKFLOW_FORM, WORKFLOW_MODULE,", "m0f0 = factory.new_basic_module('m0', 'frog') m1, 
m1f0 = factory.new_basic_module('m1', 'frog') m0f0.post_form_workflow", "factory.form_requires_case(m4f0, case_type='patient') factory.form_requires_case(m4f0, case_type='patient') m4f1 = factory.new_form(m4) factory.form_requires_case(m4f1, case_type='patient') factory.form_requires_case(m4f1,", "m5f0 = factory.new_basic_module('m5', 'patient', parent_module=m1) factory.form_requires_case(m5f0) for module in factory.app.get_modules():", "# that session variable references. # # See corehq.apps.app_manager.suite_xml.post_process.workflow._replace_session_references_in_stack factory", "__future__ import unicode_literals from django.test import SimpleTestCase from corehq.apps.app_manager.const import", "factory.form_opens_case(m0f0) m1, m1f0 = factory.new_basic_module('child visit', 'child') factory.form_requires_case(m1f0) m2, m2f0", "= 1\", form_id=m1f0.unique_id), FormLink(xpath=\"a = 2\", form_id=m1f1.unique_id) ] self.assertXmlPartialEqual(self.get_xml('form_link_multiple'), factory.app.create_suite(),", "forms in a submodule we match up the # session", "test_with_case_management_multiple_links(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('m0', 'frog') factory.form_opens_case(m0f0)", "= factory.new_form(m4) factory.form_requires_case(m4f1, case_type='patient') factory.form_requires_case(m4f1, case_type='patient') factory.form_requires_case(m4f1, case_type='patient') m4f2 =", "WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath=\"(today() - dob) &lt; 7\", form_id=m1f0.unique_id)", "case_type='patient') factory.form_requires_case(m4f1, case_type='patient') factory.form_requires_case(m4f1, case_type='patient') m4f2 = factory.new_form(m4) factory.form_requires_case(m4f2, case_type='patient')", "= AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('child visit', 'child') factory.form_requires_case(m0f0) factory.form_opens_case(m0f0,", "clean_raw.append((child.id, child.value)) new_c = 
\"instance('casedb')/case/[@case_id = {a}]/index/parent\".format(a=session_var('new_a')) self.assertEqual(clean_raw, [ 'm0',", "app.create_suite(), \"./entry\") class TestReplaceSessionRefs(SimpleTestCase): def test_replace_session_references_in_stack(self): children = [ CommandId('m0'),", "m1, m1f0 = factory.new_advanced_module('episode registration', 'episode') factory.form_requires_case(m1f0, case_type='person') factory.form_opens_case(m1f0, case_type='episode',", "'visit', parent_module=m1) factory.form_requires_case(m2f0, 'child') # link to child -> edit", "factory.app.create_suite(), \"./entry\") m1f0.post_form_workflow_fallback = WORKFLOW_ROOT self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_root'), factory.app.create_suite(), \"./entry\") def test_reference_to_missing_session_variable_in_stack(self):", "= [ FormLink(xpath=\"true()\", form_id=m1f0.unique_id), ] self.assertXmlPartialEqual(self.get_xml('form_link_child_modules'), factory.app.create_suite(), \"./entry[3]\") def test_form_links_submodule(self):", "AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('person registration', 'person') factory.form_opens_case(m0f0) m1, m1f0", "def test_manual_form_link(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('enroll child',", "datums=[ FormDatum(name='case_id_load_episode_0', xpath=\"instance('commcaresession')/session/data/case_id_new_episode_0\") ]), ] self.assertXmlPartialEqual(self.get_xml('form_link_enikshay'), factory.app.create_suite(), \"./entry\") def test_return_to_parent_module(self):", "clean_raw = [] for child in clean: if isinstance(child, CommandId):", "test_with_case_management_create_update(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('m0', 'frog') factory.form_opens_case(m0f0)", "self.assertXmlPartialEqual(self.get_xml('form_link_basic'), factory.app.create_suite(), \"./entry[1]\") def 
test_with_case_management_both_update(self): factory = AppFactory(build_version='2.9.0') m0, m0f0", "update the session after each datum # so items put", "so items put into the session in one step aren't", "when linking between two forms in a submodule we match", "# # <datum id=\"case_id_A\" value=\"instance('commcaresession')/session/data/case_id_new_A\"/> # - <datum id=\"case_id_B\" value=\"instance('casedb')/casedb/case[@case_id=instance('commcaresession')/session/data/case_id_A]/index/host\"/>", "# - <datum id=\"case_id_B\" value=\"instance('casedb')/casedb/case[@case_id=instance('commcaresession')/session/data/case_id_A]/index/host\"/> # + <datum id=\"case_id_B\" value=\"instance('casedb')/casedb/case[@case_id=instance('commcaresession')/session/data/case_id_new_A]/index/host\"/>", "factory.form_requires_case(m2f0, 'child') factory.form_requires_case(m2f0, 'visit', parent_case_type='child') m2f0.post_form_workflow = WORKFLOW_PARENT_MODULE expected =", "parent_case_type='child') m2f0.post_form_workflow = WORKFLOW_MODULE expected = \"\"\" <partial> <stack> <create>", "factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('enroll child', 'child') factory.form_opens_case(m0f0)", "FormLink(xpath=\"true()\", form_id=m2f0.unique_id, datums=[ FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id\"), FormDatum(name='case_id_load_visit_0', xpath=\"instance('commcaresession')/session/data/case_id_new_visit_0\"), ]), FormLink(xpath=condition_for_xpath, form_id=m2f0.unique_id,", "m0f0.form_links = [ FormLink(xpath=\"a = 1\", form_id=m1f0.unique_id), FormLink(xpath=\"a = 2\",", "datums=[ FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id_new_child_0\") ]), ] m1f0.post_form_workflow = WORKFLOW_FORM condition_for_xpath =", "m0f0 = factory.new_basic_module('m0', 'frog') factory.form_opens_case(m0f0) m1, m1f0 = factory.new_basic_module('m1', 'frog')", 
"value=\"instance('commcaresession')/session/data/case_id_new_A\"/> # - <datum id=\"case_id_B\" value=\"instance('casedb')/casedb/case[@case_id=instance('commcaresession')/session/data/case_id_A]/index/host\"/> # + <datum id=\"case_id_B\"", "WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath=\"true()\", form_id=m1f0.unique_id), ] m1f0.post_form_workflow = WORKFLOW_FORM", "# # To fix this we need to replace any", "target form correctly factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('child", "\"./entry\") def test_form_workflow_module(self): app = self._build_workflow_app(WORKFLOW_MODULE) self.assertXmlPartialEqual(self.get_xml('suite-workflow-module'), app.create_suite(), \"./entry\") def", "FormDatum(name='case_id_load_visit_0', xpath=\"instance('commcaresession')/session/data/case_id_new_visit_0\"), ]), ] self.assertXmlPartialEqual(self.get_xml('form_link_tdh'), factory.app.create_suite(), \"./entry\") def test_manual_form_link_with_fallback(self): factory", "m2f0 = factory.new_advanced_module('visit history', 'visit', parent_module=m1) factory.form_requires_case(m2f0, 'child') # link", "factory.form_requires_case(m4f2, case_type='patient') factory.form_requires_case(m4f2, case_type='patient') factory.advanced_form_autoloads(m4f2, AUTO_SELECT_RAW, 'case_id') m5, m5f0 =", "TestXmlMixin): file_path = ('data', 'form_workflow') def test_basic(self): factory = AppFactory(build_version='2.9.0')", "= factory.new_form(m2) factory.form_requires_case(m2f0) factory.form_requires_case(m2f1) m3, m3f0 = factory.new_basic_module('m3', 'child') m3f1", "value=\"instance('casedb')/case/[@case_id = {a}]/index/parent\".format(a=session_var('a'))), StackDatum(id='d', value=\"if({c}, {c}, {a}]\".format(a=session_var('a'), c=session_var('c'))) ] clean", "self.assertXmlPartialEqual(self.get_xml('form_link_child_modules'), factory.app.create_suite(), \"./entry[3]\") def test_form_links_submodule(self): # Test that when linking", "parent_case_type='child') 
m1f1 = factory.new_form(m1) factory.form_requires_case(m1f1, 'child') factory.form_requires_case(m1f1, 'visit', parent_case_type='child') m1f0.post_form_workflow", "factory.new_form(m1) factory.form_opens_case(m1f1) m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath=\"a =", "factory.form_requires_case(m2f0, 'child') factory.form_requires_case(m2f0, 'visit', parent_case_type='child') m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links =", "value=\"'m1'\"/> <datum id=\"case_id\" value=\"instance('commcaresession')/session/data/case_id\"/> <datum id=\"case_id_new_visit_0\" value=\"uuid()\"/> </create> </stack> </partial>", "= factory.new_basic_module('child visit', 'child') factory.form_requires_case(m0f0) factory.form_opens_case(m0f0, 'visit', is_subcase=True) m1, m1f0", "factory.new_basic_module('m1', 'frog') factory.form_requires_case(m1f0) m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath='true()',", "factory.form_requires_case(m2f0, 'child') factory.form_requires_case(m2f0, 'visit', parent_case_type='child') m2f0.post_form_workflow = WORKFLOW_MODULE expected =", "factory.form_requires_case(m4f1, case_type='patient') factory.form_requires_case(m4f1, case_type='patient') m4f2 = factory.new_form(m4) factory.form_requires_case(m4f2, case_type='patient') factory.form_requires_case(m4f2,", "] self.assertXmlPartialEqual(self.get_xml('form_link_tdh'), factory.app.create_suite(), \"./entry\") def test_manual_form_link_with_fallback(self): factory = AppFactory(build_version='2.9.0') m0,", "and target form correctly factory = AppFactory(build_version='2.9.0') m0, m0f0 =", "= factory.new_basic_module('m2', 'patient') m2f1 = factory.new_form(m2) factory.form_requires_case(m2f0) factory.form_requires_case(m2f1) m3, m3f0", "\"./entry\") m1f0.post_form_workflow_fallback = WORKFLOW_ROOT self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_root'), factory.app.create_suite(), \"./entry\") def 
test_reference_to_missing_session_variable_in_stack(self): #", "\\ \"instance('commcaresession')/session/data/case_id]/prop = 'value'\" m1f0.form_links = [ FormLink(xpath=\"true()\", form_id=m2f0.unique_id, datums=[", "= {a}]/index/parent\".format(a=session_var('a'))), StackDatum(id='d', value=\"if({c}, {c}, {a}]\".format(a=session_var('a'), c=session_var('c'))) ] clean =", "] self.assertXmlPartialEqual(self.get_xml('form_link_update_case'), factory.app.create_suite(), \"./entry[1]\") def test_with_case_management_create_update(self): factory = AppFactory(build_version='2.9.0') m0,", "m0, m0f0 = factory.new_basic_module('m0', '') factory.new_form(m0) m1, m1f0 = factory.new_basic_module('m1',", "m4, m4f0 = factory.new_advanced_module('m4', 'patient') factory.form_requires_case(m4f0, case_type='patient') factory.form_requires_case(m4f0, case_type='patient') m4f1", "session. # # To fix this we need to replace", "if isinstance(child, CommandId): clean_raw.append(child.id) else: clean_raw.append((child.id, child.value)) new_c = \"instance('casedb')/case/[@case_id", "= factory.new_basic_module('child visit', 'child') factory.form_requires_case(m1f0) factory.form_opens_case(m1f0, case_type='visit', is_subcase=True) m2, m2f0", "factory.form_requires_case(m1f0) m1f1 = factory.new_form(m1) factory.form_opens_case(m1f1) m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links =", "m1f0.form_links = [ FormLink(xpath=\"true()\", form_id=m2f0.unique_id), ] self.assertXmlPartialEqual(self.get_xml('form_link_tdh'), factory.app.create_suite(), \"./entry\") def", "[ FormLink(xpath=\"true()\", form_id=m2f0.unique_id, datums=[ FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id\"), FormDatum(name='case_id_load_visit_0', xpath=\"instance('commcaresession')/session/data/case_id_new_visit_0\"), ]), FormLink(xpath=condition_for_xpath,", "self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_root'), factory.app.create_suite(), \"./entry\") def 
test_reference_to_missing_session_variable_in_stack(self): # http://manage.dimagi.com/default.asp?236750 # # Stack", "factory.form_opens_case(m0f0) m1, m1f0 = factory.new_advanced_module('episode registration', 'episode') factory.form_requires_case(m1f0, case_type='person') factory.form_opens_case(m1f0,", "= factory.new_basic_module('m1', 'frog') m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath=\"(today()", "factory.new_basic_module('m0', 'frog') m1, m1f0 = factory.new_basic_module('m1', 'frog') m0f0.post_form_workflow = WORKFLOW_FORM", "m2f0 = factory.new_advanced_module('visit history', 'visit', parent_module=m1) factory.form_requires_case(m2f0, 'child') factory.form_requires_case(m2f0, 'visit',", "variable references. # # See corehq.apps.app_manager.suite_xml.post_process.workflow._replace_session_references_in_stack factory = AppFactory(build_version='2.9.0') m0,", "xpath=\"instance('commcaresession')/session/data/case_id_new_child_0\") ]), ] m1f0.post_form_workflow = WORKFLOW_FORM m1f0.form_links = [ FormLink(xpath=\"true()\",", "factory.new_form(m4) factory.form_requires_case(m4f1, case_type='patient') factory.form_requires_case(m4f1, case_type='patient') factory.form_requires_case(m4f1, case_type='patient') m4f2 = factory.new_form(m4)", "test_return_to_parent_module(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('enroll child', 'child')", "= WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath=\"true()\", form_id=m1f0.unique_id), ] m1f0.post_form_workflow =", "FormLink from corehq.apps.app_manager.suite_xml.post_process.workflow import _replace_session_references_in_stack, CommandId from corehq.apps.app_manager.suite_xml.xml_models import StackDatum", "= [ CommandId('m0'), StackDatum(id='a', value=session_var('new_a')), StackDatum(id='b', value=session_var('new_b')), StackDatum(id='c', value=\"instance('casedb')/case/[@case_id =", "= factory.new_advanced_module('visit history', 'visit', 
parent_module=m1) factory.form_requires_case(m2f0, 'child') # link to", "_replace_session_references_in_stack(children) clean_raw = [] for child in clean: if isinstance(child,", "session_var('new_a')), ('b', session_var('new_b')), ('c', new_c), ('d', \"if({c}, {c}, {a}]\".format(a=session_var('new_a'), c=new_c))", "factory.new_basic_module('enroll child', 'child') factory.form_opens_case(m0f0) m1, m1f0 = factory.new_basic_module('child visit', 'child')", "m1f0.form_links = [ FormLink(xpath=\"true()\", form_id=m1f1.unique_id), ] self.assertXmlPartialEqual(self.get_xml('form_link_submodule'), factory.app.create_suite(), \"./entry\") def", "m0f0.form_links = [ FormLink(xpath=\"true()\", form_id=m1f0.unique_id, datums=[ FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id_new_child_0\") ]), ]", "m1f0 = factory.new_basic_module('child visit', 'child') factory.form_requires_case(m1f0) factory.form_opens_case(m1f0, case_type='visit', is_subcase=True) m2,", "the source and target form correctly factory = AppFactory(build_version='2.9.0') m0,", "AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('m0', 'frog') factory.form_requires_case(m0f0) m1, m1f0 =", "m1, m1f0 = factory.new_basic_module('m1', 'frog') factory.form_requires_case(m1f0) m1f1 = factory.new_form(m1) factory.form_opens_case(m1f1)", "id=\"case_id_new_visit_0\" value=\"uuid()\"/> </create> </stack> </partial> \"\"\" self.assertXmlPartialEqual(expected, factory.app.create_suite(), \"./entry[3]/stack\") def", "self.assertXmlPartialEqual(self.get_xml('suite-workflow-module'), app.create_suite(), \"./entry\") def test_form_workflow_module_in_root(self): app = self._build_workflow_app(WORKFLOW_PREVIOUS) for m", "factory.app.create_suite(), \"./entry[1]\") def test_with_case_management_multiple_links(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 =", "expected = \"\"\" <partial> <stack> <create> <command value=\"'m1'\"/> <datum id=\"case_id\"", "in [1, 2]: 
module = app.get_module(m) module.put_in_root = True self.assertXmlPartialEqual(self.get_xml('suite-workflow-module-in-root'),", "form in module.get_forms(): form.post_form_workflow = mode return factory.app def test_form_workflow_previous(self):", "value isn't available in the session. # # To fix", "WORKFLOW_FORM m2f0.form_links = [ FormLink(xpath=\"true()\", form_id=m1f0.unique_id), ] self.assertXmlPartialEqual(self.get_xml('form_link_child_modules'), factory.app.create_suite(), \"./entry[3]\")", "factory.app.create_suite(), \"./entry[1]\") def test_link_to_child_module(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 =", "WORKFLOW_FORM, WORKFLOW_MODULE, WORKFLOW_PREVIOUS, WORKFLOW_ROOT, WORKFLOW_PARENT_MODULE, ) from corehq.apps.app_manager.models import FormDatum,", "visit', 'child') factory.form_requires_case(m1f0) m2, m2f0 = factory.new_advanced_module('visit history', 'visit', parent_module=m1)", "id=\"case_id\" value=\"instance('commcaresession')/session/data/case_id\"/> <datum id=\"case_id_new_visit_0\" value=\"uuid()\"/> </create> </stack> </partial> \"\"\" self.assertXmlPartialEqual(expected,", "factory.app.create_suite(), \"./entry\") def test_manual_form_link_with_fallback(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 =", "TestReplaceSessionRefs(SimpleTestCase): def test_replace_session_references_in_stack(self): children = [ CommandId('m0'), StackDatum(id='a', value=session_var('new_a')), StackDatum(id='b',", "self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_previous'), factory.app.create_suite(), \"./entry\") m1f0.post_form_workflow_fallback = WORKFLOW_MODULE self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_module'), factory.app.create_suite(), \"./entry\") m1f0.post_form_workflow_fallback", "= [ FormLink(xpath='true()', form_id=m1f0.unique_id) ] self.assertXmlPartialEqual(self.get_xml('form_link_create_update_case'), factory.app.create_suite(), \"./entry[1]\") def 
test_with_case_management_multiple_links(self):", "factory.form_requires_case(m2f0, 'child') # link to child -> edit child m2f0.post_form_workflow", "{a}]/index/parent\".format(a=session_var('new_a')) self.assertEqual(clean_raw, [ 'm0', ('a', session_var('new_a')), ('b', session_var('new_b')), ('c', new_c),", "However since the session doesn't get updated # the value", "\"./entry\") def test_manual_form_link_with_fallback(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('enroll", "self.assertXmlPartialEqual(self.get_xml('form_link_submodule'), factory.app.create_suite(), \"./entry\") def _build_workflow_app(self, mode): factory = AppFactory(build_version='2.9.0') m0,", "factory.new_basic_module('m2', 'patient') m2f1 = factory.new_form(m2) factory.form_requires_case(m2f0) factory.form_requires_case(m2f1) m3, m3f0 =", "factory.new_advanced_module('episode registration', 'episode') factory.form_requires_case(m1f0, case_type='person') factory.form_opens_case(m1f0, case_type='episode', is_subcase=True, is_extension=True) m2,", "'visit', is_subcase=True) m1, m1f0 = factory.new_advanced_module('visit histroy', 'visit', parent_module=m0) factory.form_requires_case(m1f0,", "import AppFactory from corehq.apps.app_manager.tests.util import TestXmlMixin from corehq.apps.app_manager.xpath import session_var", "m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath=\"a = 1\", form_id=m1f0.unique_id),", "= factory.new_basic_module('child visit', 'child') factory.form_requires_case(m1f0) m2, m2f0 = factory.new_advanced_module('visit history',", "<datum id=\"case_id_B\" value=\"instance('casedb')/casedb/case[@case_id=instance('commcaresession')/session/data/case_id_new_A]/index/host\"/> # # in the above example ``case_id_A``", "between two forms in a submodule we match up the", "</stack> </partial> \"\"\" self.assertXmlPartialEqual(expected, factory.app.create_suite(), \"./entry[3]/stack\") def 
test_link_to_form_in_parent_module(self): factory =", "# Test that when linking between two forms in a", "'visit', parent_module=m1) factory.form_requires_case(m2f0, 'child') factory.form_requires_case(m2f0, 'visit', parent_case_type='child') m2f0.post_form_workflow = WORKFLOW_MODULE", "id=\"case_id_B\" value=\"instance('casedb')/casedb/case[@case_id=instance('commcaresession')/session/data/case_id_new_A]/index/host\"/> # # in the above example ``case_id_A`` is", "# To fix this we need to replace any references", "factory.new_advanced_module('visit histroy', 'visit', parent_module=m0) factory.form_requires_case(m1f0, 'child') factory.form_requires_case(m1f0, 'visit', parent_case_type='child') m1f1", "] self.assertXmlPartialEqual(self.get_xml('form_link_submodule'), factory.app.create_suite(), \"./entry\") def _build_workflow_app(self, mode): factory = AppFactory(build_version='2.9.0')", "xpath=\"instance('commcaresession')/session/data/case_id_new_episode_0\") ]), ] self.assertXmlPartialEqual(self.get_xml('form_link_enikshay'), factory.app.create_suite(), \"./entry\") def test_return_to_parent_module(self): factory =", "any references to previous variables with the full xpath which", "'person') factory.form_opens_case(m0f0) m1, m1f0 = factory.new_advanced_module('episode registration', 'episode') factory.form_requires_case(m1f0, case_type='person')", "m2f0 = factory.new_advanced_module('tests', 'episode') factory.form_requires_case(m2f0, 'episode') factory.advanced_form_autoloads(m2f0, AUTO_SELECT_CASE, 'host', 'load_episode_0')", "test_manual_form_link(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('enroll child', 'child')", "datum # so items put into the session in one", "is_extension=True) m2, m2f0 = factory.new_advanced_module('tests', 'episode') factory.form_requires_case(m2f0, 'episode') factory.advanced_form_autoloads(m2f0, AUTO_SELECT_CASE,", "app = self._build_workflow_app(WORKFLOW_MODULE) 
self.assertXmlPartialEqual(self.get_xml('suite-workflow-module'), app.create_suite(), \"./entry\") def test_form_workflow_module_in_root(self): app =", "app = self._build_workflow_app(WORKFLOW_PREVIOUS) for m in [1, 2]: module =", "the value isn't available in the session. # # To", "value=\"if({c}, {c}, {a}]\".format(a=session_var('a'), c=session_var('c'))) ] clean = _replace_session_references_in_stack(children) clean_raw =", "__future__ import absolute_import from __future__ import unicode_literals from django.test import", "'child') factory.form_requires_case(m1f0) m2, m2f0 = factory.new_advanced_module('visit history', 'visit', parent_module=m1) factory.form_requires_case(m2f0,", "FormLink(xpath=\"true()\", form_id=m2f0.unique_id, datums=[ FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id\"), FormDatum(name='case_id_load_visit_0', xpath=\"instance('commcaresession')/session/data/case_id_new_visit_0\"), ]), ] self.assertXmlPartialEqual(self.get_xml('form_link_tdh'),", "form_id=m1f1.unique_id), ] self.assertXmlPartialEqual(self.get_xml('form_link_submodule'), factory.app.create_suite(), \"./entry\") def _build_workflow_app(self, mode): factory =", "= WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath=\"true()\", form_id=m1f0.unique_id, datums=[ FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id_new_child_0\")", "'child') m3f1 = factory.new_form(m3) factory.form_requires_case(m3f0, parent_case_type='patient') factory.form_requires_case(m3f1) m4, m4f0 =", "WORKFLOW_PREVIOUS, WORKFLOW_ROOT, WORKFLOW_PARENT_MODULE, ) from corehq.apps.app_manager.models import FormDatum, FormLink from", "] self.assertXmlPartialEqual(self.get_xml('form_link_child_modules'), factory.app.create_suite(), \"./entry[3]\") def test_form_links_submodule(self): # Test that when", "'patient') m2f1 = factory.new_form(m2) factory.form_requires_case(m2f0) factory.form_requires_case(m2f1) m3, m3f0 = factory.new_basic_module('m3',", 
"datums=[ FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id\"), FormDatum(name='case_id_load_visit_0', xpath=\"instance('commcaresession')/session/data/case_id_new_visit_0\"), ]), FormLink(xpath=condition_for_xpath, form_id=m2f0.unique_id, datums=[ FormDatum(name='case_id',", "corehq.apps.app_manager.tests.app_factory import AppFactory from corehq.apps.app_manager.tests.util import TestXmlMixin from corehq.apps.app_manager.xpath import", "self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_module'), factory.app.create_suite(), \"./entry\") m1f0.post_form_workflow_fallback = WORKFLOW_ROOT self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_root'), factory.app.create_suite(), \"./entry\") def", "m1f0.form_links = [ FormLink(xpath=\"true()\", form_id=m2f0.unique_id, datums=[ FormDatum(name='case_id_load_episode_0', xpath=\"instance('commcaresession')/session/data/case_id_new_episode_0\") ]), ]", "xpath=\"instance('commcaresession')/session/data/case_id_new_visit_0\"), ]), ] self.assertXmlPartialEqual(self.get_xml('form_link_tdh'), factory.app.create_suite(), \"./entry\") def test_manual_form_link_with_fallback(self): factory =", "factory.app.create_suite(), \"./entry\") def _build_workflow_app(self, mode): factory = AppFactory(build_version='2.9.0') m0, m0f0", "children = [ CommandId('m0'), StackDatum(id='a', value=session_var('new_a')), StackDatum(id='b', value=session_var('new_b')), StackDatum(id='c', value=\"instance('casedb')/case/[@case_id", "1\", form_id=m1f0.unique_id), FormLink(xpath=\"a = 2\", form_id=m1f1.unique_id) ] self.assertXmlPartialEqual(self.get_xml('form_link_multiple'), factory.app.create_suite(), \"./entry[1]\")", "# link to child -> edit child m2f0.post_form_workflow = WORKFLOW_FORM", "<command value=\"'m1'\"/> <datum id=\"case_id\" value=\"instance('commcaresession')/session/data/case_id\"/> <datum id=\"case_id_new_visit_0\" value=\"uuid()\"/> <command value=\"'m2'\"/>", 
"WORKFLOW_PARENT_MODULE expected = \"\"\" <partial> <stack> <create> <command value=\"'m1'\"/> <datum", "SimpleTestCase from corehq.apps.app_manager.const import ( AUTO_SELECT_RAW, AUTO_SELECT_CASE, WORKFLOW_FORM, WORKFLOW_MODULE, WORKFLOW_PREVIOUS,", "clean = _replace_session_references_in_stack(children) clean_raw = [] for child in clean:", "put into the session in one step aren't available later", "AUTO_SELECT_RAW, AUTO_SELECT_CASE, WORKFLOW_FORM, WORKFLOW_MODULE, WORKFLOW_PREVIOUS, WORKFLOW_ROOT, WORKFLOW_PARENT_MODULE, ) from corehq.apps.app_manager.models", "value=\"instance('casedb')/casedb/case[@case_id=instance('commcaresession')/session/data/case_id_new_A]/index/host\"/> # # in the above example ``case_id_A`` is being", "id=\"case_id_B\" value=\"instance('casedb')/casedb/case[@case_id=instance('commcaresession')/session/data/case_id_A]/index/host\"/> # + <datum id=\"case_id_B\" value=\"instance('casedb')/casedb/case[@case_id=instance('commcaresession')/session/data/case_id_new_A]/index/host\"/> # # in", "'patient') factory.form_requires_case(m4f0, case_type='patient') factory.form_requires_case(m4f0, case_type='patient') m4f1 = factory.new_form(m4) factory.form_requires_case(m4f1, case_type='patient')", "module.put_in_root = True self.assertXmlPartialEqual(self.get_xml('suite-workflow-module-in-root'), app.create_suite(), \"./entry\") def test_form_workflow_root(self): app =", "'frog') factory.form_opens_case(m0f0) m1, m1f0 = factory.new_basic_module('m1', 'frog') factory.form_requires_case(m1f0) m0f0.post_form_workflow =", "'child') factory.form_requires_case(m1f0, 'visit', parent_case_type='child') m1f1 = factory.new_form(m1) factory.form_requires_case(m1f1, 'child') factory.form_requires_case(m1f1,", "factory.new_basic_module('m0', 'frog') factory.form_opens_case(m0f0) m1, m1f0 = factory.new_basic_module('m1', 'frog') factory.form_requires_case(m1f0) m1f1", "'child') factory.form_requires_case(m1f0) factory.form_opens_case(m1f0, case_type='visit', 
is_subcase=True) m2, m2f0 = factory.new_advanced_module('visit history',", "# See corehq.apps.app_manager.suite_xml.post_process.workflow._replace_session_references_in_stack factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('person", "the # session variables between the source and target form", "= WORKFLOW_FORM m1f0.form_links = [ FormLink(xpath=\"true()\", form_id=m2f0.unique_id), ] self.assertXmlPartialEqual(self.get_xml('form_link_tdh'), factory.app.create_suite(),", "isinstance(child, CommandId): clean_raw.append(child.id) else: clean_raw.append((child.id, child.value)) new_c = \"instance('casedb')/case/[@case_id =", "factory.new_basic_module('child visit', 'child') factory.form_requires_case(m1f0) factory.form_opens_case(m1f0, case_type='visit', is_subcase=True) m2, m2f0 =", "this we need to replace any references to previous variables", "m4f2 = factory.new_form(m4) factory.form_requires_case(m4f2, case_type='patient') factory.form_requires_case(m4f2, case_type='patient') factory.advanced_form_autoloads(m4f2, AUTO_SELECT_RAW, 'case_id')", "# later referenced. However since the session doesn't get updated", "test_manual_form_link_with_fallback(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('enroll child', 'child')", "form.post_form_workflow = mode return factory.app def test_form_workflow_previous(self): app = self._build_workflow_app(WORKFLOW_PREVIOUS)", "m2, m2f0 = factory.new_basic_module('m2', 'patient') m2f1 = factory.new_form(m2) factory.form_requires_case(m2f0) factory.form_requires_case(m2f1)", "{a}]/index/parent\".format(a=session_var('a'))), StackDatum(id='d', value=\"if({c}, {c}, {a}]\".format(a=session_var('a'), c=session_var('c'))) ] clean = _replace_session_references_in_stack(children)", "which # that session variable references. 
# # See corehq.apps.app_manager.suite_xml.post_process.workflow._replace_session_references_in_stack", "\"./entry\") def test_form_workflow_module_in_root(self): app = self._build_workflow_app(WORKFLOW_PREVIOUS) for m in [1,", "the session. # # To fix this we need to", "= \"instance('casedb')/casedb/case[@case_id = \" \\ \"instance('commcaresession')/session/data/case_id]/prop = 'value'\" m1f0.form_links =", "m1f0.post_form_workflow = WORKFLOW_FORM m1f0.form_links = [ FormLink(xpath=\"true()\", form_id=m2f0.unique_id), ] self.assertXmlPartialEqual(self.get_xml('form_link_tdh'),", "]), ] m1f0.post_form_workflow_fallback = WORKFLOW_PREVIOUS self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_previous'), factory.app.create_suite(), \"./entry\") m1f0.post_form_workflow_fallback =", "factory.app.create_suite(), \"./entry\") def test_return_to_parent_module(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 =", "case_type='person') factory.form_opens_case(m1f0, case_type='episode', is_subcase=True, is_extension=True) m2, m2f0 = factory.new_advanced_module('tests', 'episode')", "session variables between the source and target form correctly factory", "factory.form_requires_case(m4f1, case_type='patient') factory.form_requires_case(m4f1, case_type='patient') factory.form_requires_case(m4f1, case_type='patient') m4f2 = factory.new_form(m4) factory.form_requires_case(m4f2,", "= [ FormLink(xpath=\"true()\", form_id=m2f0.unique_id, datums=[ FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id\"), FormDatum(name='case_id_load_visit_0', xpath=\"instance('commcaresession')/session/data/case_id_new_visit_0\"), ]),", "corehq.apps.app_manager.models import FormDatum, FormLink from corehq.apps.app_manager.suite_xml.post_process.workflow import _replace_session_references_in_stack, CommandId from", "factory.form_opens_case(m0f0) m1, m1f0 = factory.new_basic_module('m1', 'frog') factory.form_requires_case(m1f0) 
m0f0.post_form_workflow = WORKFLOW_FORM", "'visit', parent_case_type='child') m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath=\"true()\", form_id=m1f0.unique_id,", "m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath=\"true()\", form_id=m1f0.unique_id, datums=[ FormDatum(name='case_id',", "WORKFLOW_PREVIOUS self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_previous'), factory.app.create_suite(), \"./entry\") m1f0.post_form_workflow_fallback = WORKFLOW_MODULE self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_module'), factory.app.create_suite(), \"./entry\")", "factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('person registration', 'person') factory.form_opens_case(m0f0)", "the above example ``case_id_A`` is being added to the session", "self.assertXmlPartialEqual(expected, factory.app.create_suite(), \"./entry[3]/stack\") def test_return_to_child_module(self): factory = AppFactory(build_version='2.9.0') m0, m0f0", "edit child m2f0.post_form_workflow = WORKFLOW_FORM m2f0.form_links = [ FormLink(xpath=\"true()\", form_id=m1f0.unique_id),", "= self._build_workflow_app(WORKFLOW_MODULE) self.assertXmlPartialEqual(self.get_xml('suite-workflow-module'), app.create_suite(), \"./entry\") def test_form_workflow_module_in_root(self): app = self._build_workflow_app(WORKFLOW_PREVIOUS)", "m5, m5f0 = factory.new_basic_module('m5', 'patient', parent_module=m1) factory.form_requires_case(m5f0) for module in", "WORKFLOW_ROOT self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_root'), factory.app.create_suite(), \"./entry\") def test_reference_to_missing_session_variable_in_stack(self): # http://manage.dimagi.com/default.asp?236750 # #", "def test_return_to_child_module(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('enroll child',", "test_return_to_child_module(self): factory = 
AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('enroll child', 'child')", "('a', session_var('new_a')), ('b', session_var('new_b')), ('c', new_c), ('d', \"if({c}, {c}, {a}]\".format(a=session_var('new_a'),", "later referenced. However since the session doesn't get updated #", "factory.new_form(m0) m1, m1f0 = factory.new_basic_module('m1', 'patient') m1f1 = factory.new_form(m1) factory.form_opens_case(m1f0)", "m4f0 = factory.new_advanced_module('m4', 'patient') factory.form_requires_case(m4f0, case_type='patient') factory.form_requires_case(m4f0, case_type='patient') m4f1 =", "factory.form_opens_case(m0f0) m1, m1f0 = factory.new_basic_module('m1', 'frog') factory.form_requires_case(m1f0) m1f1 = factory.new_form(m1)", "WORKFLOW_MODULE, WORKFLOW_PREVIOUS, WORKFLOW_ROOT, WORKFLOW_PARENT_MODULE, ) from corehq.apps.app_manager.models import FormDatum, FormLink", "\"\"\" self.assertXmlPartialEqual(expected, factory.app.create_suite(), \"./entry[3]/stack\") def test_return_to_child_module(self): factory = AppFactory(build_version='2.9.0') m0,", "corehq.apps.app_manager.tests.util import TestXmlMixin from corehq.apps.app_manager.xpath import session_var class TestFormWorkflow(SimpleTestCase, TestXmlMixin):", "two forms in a submodule we match up the #", "= AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('m0', '') factory.new_form(m0) m1, m1f0", "> 7\", form_id=m1f0.unique_id) ] self.assertXmlPartialEqual(self.get_xml('form_link_update_case'), factory.app.create_suite(), \"./entry[1]\") def test_with_case_management_create_update(self): factory", "= WORKFLOW_FORM m1f0.form_links = [ FormLink(xpath=\"true()\", form_id=m2f0.unique_id, datums=[ FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id\"),", "[ FormLink(xpath=\"true()\", form_id=m2f0.unique_id, datums=[ FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id\"), FormDatum(name='case_id_load_visit_0', 
xpath=\"instance('commcaresession')/session/data/case_id_new_visit_0\"), ]), ]", "( AUTO_SELECT_RAW, AUTO_SELECT_CASE, WORKFLOW_FORM, WORKFLOW_MODULE, WORKFLOW_PREVIOUS, WORKFLOW_ROOT, WORKFLOW_PARENT_MODULE, ) from", "\"\"\" <partial> <stack> <create> <command value=\"'m1'\"/> <datum id=\"case_id\" value=\"instance('commcaresession')/session/data/case_id\"/> <datum", "To fix this we need to replace any references to", "in the session. # # To fix this we need", "self.assertXmlPartialEqual(self.get_xml('form_link_enikshay'), factory.app.create_suite(), \"./entry\") def test_return_to_parent_module(self): factory = AppFactory(build_version='2.9.0') m0, m0f0", "</stack> </partial> \"\"\" self.assertXmlPartialEqual(expected, factory.app.create_suite(), \"./entry[3]/stack\") def test_return_to_child_module(self): factory =", "= WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath=\"a = 1\", form_id=m1f0.unique_id), FormLink(xpath=\"a", "\"\"\" self.assertXmlPartialEqual(expected, factory.app.create_suite(), \"./entry[3]/stack\") def test_link_to_form_in_parent_module(self): factory = AppFactory(build_version='2.9.0') m0,", "factory.form_requires_case(m1f1, 'visit', parent_case_type='child') m1f0.post_form_workflow = WORKFLOW_FORM m1f0.form_links = [ FormLink(xpath=\"true()\",", "7\", form_id=m1f0.unique_id) ] self.assertXmlPartialEqual(self.get_xml('form_link_update_case'), factory.app.create_suite(), \"./entry[1]\") def test_with_case_management_create_update(self): factory =", "form_id=m1f0.unique_id), FormLink(xpath=\"a = 2\", form_id=m1f1.unique_id) ] self.assertXmlPartialEqual(self.get_xml('form_link_multiple'), factory.app.create_suite(), \"./entry[1]\") def", "\"./entry\") class TestReplaceSessionRefs(SimpleTestCase): def test_replace_session_references_in_stack(self): children = [ CommandId('m0'), StackDatum(id='a',", "for child in clean: if isinstance(child, CommandId): clean_raw.append(child.id) else: clean_raw.append((child.id,", "datums=[ 
FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id_new_child_0\") ]), ] m1f0.post_form_workflow = WORKFLOW_FORM m1f0.form_links =", "]), ] m1f0.post_form_workflow = WORKFLOW_FORM m1f0.form_links = [ FormLink(xpath=\"true()\", form_id=m2f0.unique_id,", "case_type='patient') factory.form_requires_case(m4f1, case_type='patient') m4f2 = factory.new_form(m4) factory.form_requires_case(m4f2, case_type='patient') factory.form_requires_case(m4f2, case_type='patient')", "the session in one step aren't available later steps #", "test_form_links_submodule(self): # Test that when linking between two forms in", "visit', 'child') factory.form_requires_case(m0f0) factory.form_opens_case(m0f0, 'visit', is_subcase=True) m1, m1f0 = factory.new_advanced_module('visit", "form_id=m2f0.unique_id), ] self.assertXmlPartialEqual(self.get_xml('form_link_tdh'), factory.app.create_suite(), \"./entry\") def test_manual_form_link(self): factory = AppFactory(build_version='2.9.0')", "\"./entry\") def test_form_workflow_root(self): app = self._build_workflow_app(WORKFLOW_ROOT) self.assertXmlPartialEqual(self.get_xml('suite-workflow-root'), app.create_suite(), \"./entry\") class", "self.assertXmlPartialEqual(self.get_xml('form_link_update_case'), factory.app.create_suite(), \"./entry[1]\") def test_with_case_management_create_update(self): factory = AppFactory(build_version='2.9.0') m0, m0f0", "FormLink(xpath=\"a = 2\", form_id=m1f1.unique_id) ] self.assertXmlPartialEqual(self.get_xml('form_link_multiple'), factory.app.create_suite(), \"./entry[1]\") def test_link_to_child_module(self):", "m3f1 = factory.new_form(m3) factory.form_requires_case(m3f0, parent_case_type='patient') factory.form_requires_case(m3f1) m4, m4f0 = factory.new_advanced_module('m4',", "form_id=m2f0.unique_id, datums=[ FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id\"), FormDatum(name='case_id_load_visit_0', 
xpath=\"instance('commcaresession')/session/data/case_id_new_visit_0\"), ]), ] self.assertXmlPartialEqual(self.get_xml('form_link_tdh'), factory.app.create_suite(),", "factory.new_advanced_module('visit history', 'visit', parent_module=m1) factory.form_requires_case(m2f0, 'child') factory.form_requires_case(m2f0, 'visit', parent_case_type='child') m2f0.post_form_workflow", "value=session_var('new_b')), StackDatum(id='c', value=\"instance('casedb')/case/[@case_id = {a}]/index/parent\".format(a=session_var('a'))), StackDatum(id='d', value=\"if({c}, {c}, {a}]\".format(a=session_var('a'), c=session_var('c')))", "m2f0 = factory.new_basic_module('m2', 'patient') m2f1 = factory.new_form(m2) factory.form_requires_case(m2f0) factory.form_requires_case(m2f1) m3,", "case_type='patient') m4f2 = factory.new_form(m4) factory.form_requires_case(m4f2, case_type='patient') factory.form_requires_case(m4f2, case_type='patient') factory.advanced_form_autoloads(m4f2, AUTO_SELECT_RAW,", "replace any references to previous variables with the full xpath", "above example ``case_id_A`` is being added to the session and", "def test_manual_form_link_with_fallback(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('enroll child',", "factory.new_basic_module('m0', 'frog') factory.form_opens_case(m0f0) m1, m1f0 = factory.new_basic_module('m1', 'frog') factory.form_requires_case(m1f0) m0f0.post_form_workflow", "WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath=\"(today() - dob) > 7\", form_id=m1f0.unique_id)", "] m1f0.post_form_workflow_fallback = WORKFLOW_PREVIOUS self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_previous'), factory.app.create_suite(), \"./entry\") m1f0.post_form_workflow_fallback = WORKFLOW_MODULE", "= mode return factory.app def test_form_workflow_previous(self): app = self._build_workflow_app(WORKFLOW_PREVIOUS) self.assertXmlPartialEqual(self.get_xml('suite-workflow-previous'),", "'child') factory.form_opens_case(m0f0) m1, 
m1f0 = factory.new_basic_module('child visit', 'child') factory.form_requires_case(m1f0) m2,", "the session doesn't get updated # the value isn't available", "'visit', parent_module=m1) factory.form_requires_case(m2f0, 'child') factory.form_requires_case(m2f0, 'visit', parent_case_type='child') m2f0.post_form_workflow = WORKFLOW_PARENT_MODULE", "history', 'visit', parent_module=m1) factory.form_requires_case(m2f0, 'child') # link to child ->", "references to previous variables with the full xpath which #", "steps # # <datum id=\"case_id_A\" value=\"instance('commcaresession')/session/data/case_id_new_A\"/> # - <datum id=\"case_id_B\"", "factory.form_requires_case(m2f0, 'episode') factory.advanced_form_autoloads(m2f0, AUTO_SELECT_CASE, 'host', 'load_episode_0') m1f0.post_form_workflow = WORKFLOW_FORM m1f0.form_links", "corehq.apps.app_manager.suite_xml.post_process.workflow._replace_session_references_in_stack factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('person registration', 'person')", "self.assertXmlPartialEqual(self.get_xml('suite-workflow-root'), app.create_suite(), \"./entry\") class TestReplaceSessionRefs(SimpleTestCase): def test_replace_session_references_in_stack(self): children = [", "factory.form_requires_case(m5f0) for module in factory.app.get_modules(): for form in module.get_forms(): form.post_form_workflow", "xpath=\"instance('commcaresession')/session/data/case_id\"), FormDatum(name='case_id_load_visit_0', xpath=\"instance('commcaresession')/session/data/case_id_new_visit_0\"), ]), ] m1f0.post_form_workflow_fallback = WORKFLOW_PREVIOUS self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_previous'), factory.app.create_suite(),", "form_id=m1f0.unique_id) ] self.assertXmlPartialEqual(self.get_xml('form_link_create_update_case'), factory.app.create_suite(), \"./entry[1]\") def test_with_case_management_multiple_links(self): factory = AppFactory(build_version='2.9.0')", "in the above example 
``case_id_A`` is being added to the", "m0, m0f0 = factory.new_basic_module('m0', 'frog') factory.form_requires_case(m0f0) m1, m1f0 = factory.new_basic_module('m1',", "the session and then # later referenced. However since the", "in factory.app.get_modules(): for form in module.get_forms(): form.post_form_workflow = mode return", "xpath=\"instance('commcaresession')/session/data/case_id_new_child_0\") ]), ] m1f0.post_form_workflow = WORKFLOW_FORM condition_for_xpath = \"instance('casedb')/casedb/case[@case_id =", "is_subcase=True, is_extension=True) m2, m2f0 = factory.new_advanced_module('tests', 'episode') factory.form_requires_case(m2f0, 'episode') factory.advanced_form_autoloads(m2f0,", "factory.form_opens_case(m0f0) m1, m1f0 = factory.new_basic_module('child visit', 'child') factory.form_requires_case(m1f0) factory.form_opens_case(m1f0, case_type='visit',", "references. # # See corehq.apps.app_manager.suite_xml.post_process.workflow._replace_session_references_in_stack factory = AppFactory(build_version='2.9.0') m0, m0f0", "m1f1 = factory.new_form(m1) factory.form_requires_case(m1f1, 'child') factory.form_requires_case(m1f1, 'visit', parent_case_type='child') m1f0.post_form_workflow =", "factory.form_requires_case(m1f0, case_type='person') factory.form_opens_case(m1f0, case_type='episode', is_subcase=True, is_extension=True) m2, m2f0 = factory.new_advanced_module('tests',", "'frog') factory.form_requires_case(m1f0) m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath=\"(today() -", "in module.get_forms(): form.post_form_workflow = mode return factory.app def test_form_workflow_previous(self): app", "factory.new_advanced_module('visit history', 'visit', parent_module=m1) factory.form_requires_case(m2f0, 'child') factory.form_requires_case(m2f0, 'visit', parent_case_type='child') m0f0.post_form_workflow", "= WORKFLOW_ROOT self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_root'), factory.app.create_suite(), \"./entry\") def 
test_reference_to_missing_session_variable_in_stack(self): # http://manage.dimagi.com/default.asp?236750 #", "is being added to the session and then # later", "value=\"uuid()\"/> </create> </stack> </partial> \"\"\" self.assertXmlPartialEqual(expected, factory.app.create_suite(), \"./entry[3]/stack\") def test_return_to_child_module(self):", "factory.form_requires_case(m2f1) m3, m3f0 = factory.new_basic_module('m3', 'child') m3f1 = factory.new_form(m3) factory.form_requires_case(m3f0,", "\"instance('commcaresession')/session/data/case_id]/prop = 'value'\" m1f0.form_links = [ FormLink(xpath=\"true()\", form_id=m2f0.unique_id, datums=[ FormDatum(name='case_id',", "# so items put into the session in one step", "case_type='episode', is_subcase=True, is_extension=True) m2, m2f0 = factory.new_advanced_module('tests', 'episode') factory.form_requires_case(m2f0, 'episode')", "'child') # link to child -> edit child m2f0.post_form_workflow =", "= factory.new_advanced_module('m4', 'patient') factory.form_requires_case(m4f0, case_type='patient') factory.form_requires_case(m4f0, case_type='patient') m4f1 = factory.new_form(m4)", "<create> <command value=\"'m1'\"/> <datum id=\"case_id\" value=\"instance('commcaresession')/session/data/case_id\"/> <datum id=\"case_id_new_visit_0\" value=\"uuid()\"/> </create>", "'visit', parent_module=m1) factory.form_requires_case(m2f0, 'child') factory.form_requires_case(m2f0, 'visit', parent_case_type='child') m0f0.post_form_workflow = WORKFLOW_FORM", "'visit', parent_case_type='child') m1f1 = factory.new_form(m1) factory.form_requires_case(m1f1, 'child') factory.form_requires_case(m1f1, 'visit', parent_case_type='child')", "app.create_suite(), \"./entry\") def test_form_workflow_module(self): app = self._build_workflow_app(WORKFLOW_MODULE) self.assertXmlPartialEqual(self.get_xml('suite-workflow-module'), app.create_suite(), \"./entry\")", "factory.form_requires_case(m0f0) m1, m1f0 = factory.new_basic_module('m1', 'frog') 
factory.form_requires_case(m1f0) m0f0.post_form_workflow = WORKFLOW_FORM", "CommandId from corehq.apps.app_manager.suite_xml.xml_models import StackDatum from corehq.apps.app_manager.tests.app_factory import AppFactory from", "items put into the session in one step aren't available", "FormLink(xpath=\"true()\", form_id=m1f0.unique_id), ] m1f0.post_form_workflow = WORKFLOW_FORM m1f0.form_links = [ FormLink(xpath=\"true()\",", "FormDatum(name='case_id_load_visit_0', xpath=\"instance('commcaresession')/session/data/case_id_new_visit_0\"), ]), ] m1f0.post_form_workflow_fallback = WORKFLOW_PREVIOUS self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_previous'), factory.app.create_suite(), \"./entry\")", "StackDatum from corehq.apps.app_manager.tests.app_factory import AppFactory from corehq.apps.app_manager.tests.util import TestXmlMixin from", "]), ] m1f0.post_form_workflow = WORKFLOW_FORM condition_for_xpath = \"instance('casedb')/casedb/case[@case_id = \"", "= AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('person registration', 'person') factory.form_opens_case(m0f0) m1,", "= [ FormLink(xpath=\"(today() - dob) &lt; 7\", form_id=m1f0.unique_id) ] self.assertXmlPartialEqual(self.get_xml('form_link_basic'),", "m2, m2f0 = factory.new_advanced_module('visit history', 'visit', parent_module=m1) factory.form_requires_case(m2f0, 'child') #", "parent_case_type='child') m1f0.post_form_workflow = WORKFLOW_FORM m1f0.form_links = [ FormLink(xpath=\"true()\", form_id=m1f1.unique_id), ]", "def test_form_workflow_module(self): app = self._build_workflow_app(WORKFLOW_MODULE) self.assertXmlPartialEqual(self.get_xml('suite-workflow-module'), app.create_suite(), \"./entry\") def test_form_workflow_module_in_root(self):", "need to replace any references to previous variables with the", "AUTO_SELECT_CASE, 'host', 'load_episode_0') m1f0.post_form_workflow = WORKFLOW_FORM m1f0.form_links = [ FormLink(xpath=\"true()\",", "m2f0.post_form_workflow = 
WORKFLOW_MODULE expected = \"\"\" <partial> <stack> <create> <command", "app.get_module(m) module.put_in_root = True self.assertXmlPartialEqual(self.get_xml('suite-workflow-module-in-root'), app.create_suite(), \"./entry\") def test_form_workflow_root(self): app", "- <datum id=\"case_id_B\" value=\"instance('casedb')/casedb/case[@case_id=instance('commcaresession')/session/data/case_id_A]/index/host\"/> # + <datum id=\"case_id_B\" value=\"instance('casedb')/casedb/case[@case_id=instance('commcaresession')/session/data/case_id_new_A]/index/host\"/> #", "session doesn't get updated # the value isn't available in", "case_type='patient') factory.advanced_form_autoloads(m4f2, AUTO_SELECT_RAW, 'case_id') m5, m5f0 = factory.new_basic_module('m5', 'patient', parent_module=m1)", "m0f0.form_links = [ FormLink(xpath=\"true()\", form_id=m1f0.unique_id), ] m1f0.post_form_workflow = WORKFLOW_FORM m1f0.form_links", "self.assertXmlPartialEqual(self.get_xml('form_link_multiple'), factory.app.create_suite(), \"./entry[1]\") def test_link_to_child_module(self): factory = AppFactory(build_version='2.9.0') m0, m0f0", "m1f0 = factory.new_basic_module('child visit', 'child') factory.form_requires_case(m1f0) m2, m2f0 = factory.new_advanced_module('visit", "= factory.new_basic_module('m0', '') factory.new_form(m0) m1, m1f0 = factory.new_basic_module('m1', 'patient') m1f1", "WORKFLOW_PARENT_MODULE, ) from corehq.apps.app_manager.models import FormDatum, FormLink from corehq.apps.app_manager.suite_xml.post_process.workflow import", "m1f0.post_form_workflow = WORKFLOW_FORM m1f0.form_links = [ FormLink(xpath=\"true()\", form_id=m2f0.unique_id, datums=[ FormDatum(name='case_id',", "m1f0.post_form_workflow_fallback = WORKFLOW_MODULE self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_module'), factory.app.create_suite(), \"./entry\") m1f0.post_form_workflow_fallback = WORKFLOW_ROOT self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_root'),", 
"self.assertEqual(clean_raw, [ 'm0', ('a', session_var('new_a')), ('b', session_var('new_b')), ('c', new_c), ('d',", "WORKFLOW_FORM condition_for_xpath = \"instance('casedb')/casedb/case[@case_id = \" \\ \"instance('commcaresession')/session/data/case_id]/prop = 'value'\"", "factory.form_requires_case(m1f0) m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath=\"(today() - dob)", "self._build_workflow_app(WORKFLOW_PREVIOUS) for m in [1, 2]: module = app.get_module(m) module.put_in_root", "</partial> \"\"\" self.assertXmlPartialEqual(expected, factory.app.create_suite(), \"./entry[3]/stack\") def test_return_to_child_module(self): factory = AppFactory(build_version='2.9.0')", "FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id\"), FormDatum(name='case_id_load_visit_0', xpath=\"instance('commcaresession')/session/data/case_id_new_visit_0\"), ]), ] m1f0.post_form_workflow_fallback = WORKFLOW_PREVIOUS self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_previous'),", "into the session in one step aren't available later steps", "FormDatum, FormLink from corehq.apps.app_manager.suite_xml.post_process.workflow import _replace_session_references_in_stack, CommandId from corehq.apps.app_manager.suite_xml.xml_models import", "test_replace_session_references_in_stack(self): children = [ CommandId('m0'), StackDatum(id='a', value=session_var('new_a')), StackDatum(id='b', value=session_var('new_b')), StackDatum(id='c',", "[ FormLink(xpath=\"true()\", form_id=m1f0.unique_id), ] self.assertXmlPartialEqual(self.get_xml('form_link_child_modules'), factory.app.create_suite(), \"./entry[3]\") def test_form_links_submodule(self): #", "clean_raw.append(child.id) else: clean_raw.append((child.id, child.value)) new_c = \"instance('casedb')/case/[@case_id = {a}]/index/parent\".format(a=session_var('new_a')) self.assertEqual(clean_raw,", "import unicode_literals from django.test import SimpleTestCase from 
corehq.apps.app_manager.const import (", "self.assertXmlPartialEqual(self.get_xml('form_link_tdh'), factory.app.create_suite(), \"./entry\") def test_manual_form_link(self): factory = AppFactory(build_version='2.9.0') m0, m0f0", "'child') factory.form_opens_case(m0f0) m1, m1f0 = factory.new_basic_module('child visit', 'child') factory.form_requires_case(m1f0) factory.form_opens_case(m1f0,", "id=\"case_id_new_visit_0\" value=\"uuid()\"/> <command value=\"'m2'\"/> </create> </stack> </partial> \"\"\" self.assertXmlPartialEqual(expected, factory.app.create_suite(),", "case_type='patient') factory.form_requires_case(m4f0, case_type='patient') m4f1 = factory.new_form(m4) factory.form_requires_case(m4f1, case_type='patient') factory.form_requires_case(m4f1, case_type='patient')", "with the full xpath which # that session variable references.", "fix this we need to replace any references to previous", "]), ] self.assertXmlPartialEqual(self.get_xml('form_link_tdh'), factory.app.create_suite(), \"./entry\") def test_manual_form_link_with_fallback(self): factory = AppFactory(build_version='2.9.0')", "m1f0.post_form_workflow = WORKFLOW_FORM m1f0.form_links = [ FormLink(xpath=\"true()\", form_id=m1f1.unique_id), ] self.assertXmlPartialEqual(self.get_xml('form_link_submodule'),", "TestFormWorkflow(SimpleTestCase, TestXmlMixin): file_path = ('data', 'form_workflow') def test_basic(self): factory =", "\"./entry[3]\") def test_form_links_submodule(self): # Test that when linking between two", "value=\"instance('commcaresession')/session/data/case_id\"/> <datum id=\"case_id_new_visit_0\" value=\"uuid()\"/> <command value=\"'m2'\"/> </create> </stack> </partial> \"\"\"", "datums=[ FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id\"), FormDatum(name='case_id_load_visit_0', xpath=\"instance('commcaresession')/session/data/case_id_new_visit_0\"), ]), ] self.assertXmlPartialEqual(self.get_xml('form_link_tdh'), factory.app.create_suite(), \"./entry\")", 
"one step aren't available later steps # # <datum id=\"case_id_A\"", "]), ] self.assertXmlPartialEqual(self.get_xml('form_link_enikshay'), factory.app.create_suite(), \"./entry\") def test_return_to_parent_module(self): factory = AppFactory(build_version='2.9.0')", "factory.form_requires_case(m0f0) factory.form_opens_case(m0f0, 'visit', is_subcase=True) m1, m1f0 = factory.new_advanced_module('visit histroy', 'visit',", "= self._build_workflow_app(WORKFLOW_PREVIOUS) self.assertXmlPartialEqual(self.get_xml('suite-workflow-previous'), app.create_suite(), \"./entry\") def test_form_workflow_module(self): app = self._build_workflow_app(WORKFLOW_MODULE)", "form_id=m2f0.unique_id, datums=[ FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id\"), FormDatum(name='case_id_load_visit_0', xpath=\"instance('commcaresession')/session/data/case_id_new_visit_0\"), ]), ] m1f0.post_form_workflow_fallback =", "[ CommandId('m0'), StackDatum(id='a', value=session_var('new_a')), StackDatum(id='b', value=session_var('new_b')), StackDatum(id='c', value=\"instance('casedb')/case/[@case_id = {a}]/index/parent\".format(a=session_var('a'))),", "aren't available later steps # # <datum id=\"case_id_A\" value=\"instance('commcaresession')/session/data/case_id_new_A\"/> #", "mode): factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('m0', '') factory.new_form(m0)", "'patient', parent_module=m1) factory.form_requires_case(m5f0) for module in factory.app.get_modules(): for form in", "<create> <command value=\"'m1'\"/> <datum id=\"case_id\" value=\"instance('commcaresession')/session/data/case_id\"/> <datum id=\"case_id_new_visit_0\" value=\"uuid()\"/> <command", "factory.app.create_suite(), \"./entry\") def test_manual_form_link(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 =", "= WORKFLOW_FORM condition_for_xpath = \"instance('casedb')/casedb/case[@case_id = \" \\ \"instance('commcaresession')/session/data/case_id]/prop =", "# 
Stack create blocks do not update the session after", "] m1f0.post_form_workflow = WORKFLOW_FORM m1f0.form_links = [ FormLink(xpath=\"true()\", form_id=m2f0.unique_id), ]", "m0, m0f0 = factory.new_basic_module('person registration', 'person') factory.form_opens_case(m0f0) m1, m1f0 =", "m0f0.form_links = [ FormLink(xpath=\"(today() - dob) > 7\", form_id=m1f0.unique_id) ]", "\"./entry\") m1f0.post_form_workflow_fallback = WORKFLOW_MODULE self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_module'), factory.app.create_suite(), \"./entry\") m1f0.post_form_workflow_fallback = WORKFLOW_ROOT", "WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath=\"a = 1\", form_id=m1f0.unique_id), FormLink(xpath=\"a =", "isn't available in the session. # # To fix this", "FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id\"), FormDatum(name='case_id_load_visit_0', xpath=\"instance('commcaresession')/session/data/case_id_new_visit_0\"), ]), FormLink(xpath=condition_for_xpath, form_id=m2f0.unique_id, datums=[ FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id\"),", "[ FormLink(xpath=\"a = 1\", form_id=m1f0.unique_id), FormLink(xpath=\"a = 2\", form_id=m1f1.unique_id) ]", "corehq.apps.app_manager.suite_xml.xml_models import StackDatum from corehq.apps.app_manager.tests.app_factory import AppFactory from corehq.apps.app_manager.tests.util import", "registration', 'episode') factory.form_requires_case(m1f0, case_type='person') factory.form_opens_case(m1f0, case_type='episode', is_subcase=True, is_extension=True) m2, m2f0", "[ FormLink(xpath=\"true()\", form_id=m1f1.unique_id), ] self.assertXmlPartialEqual(self.get_xml('form_link_submodule'), factory.app.create_suite(), \"./entry\") def _build_workflow_app(self, mode):", "m3f0 = factory.new_basic_module('m3', 'child') m3f1 = factory.new_form(m3) factory.form_requires_case(m3f0, parent_case_type='patient') factory.form_requires_case(m3f1)", "m1f0 = 
factory.new_advanced_module('visit histroy', 'visit', parent_module=m0) factory.form_requires_case(m1f0, 'child') factory.form_requires_case(m1f0, 'visit',", "factory.form_requires_case(m1f0, 'child') factory.form_requires_case(m1f0, 'visit', parent_case_type='child') m1f1 = factory.new_form(m1) factory.form_requires_case(m1f1, 'child')", "factory.new_form(m1) factory.form_requires_case(m1f1, 'child') factory.form_requires_case(m1f1, 'visit', parent_case_type='child') m1f0.post_form_workflow = WORKFLOW_FORM m1f0.form_links", "test_reference_to_missing_session_variable_in_stack(self): # http://manage.dimagi.com/default.asp?236750 # # Stack create blocks do not", "module in factory.app.get_modules(): for form in module.get_forms(): form.post_form_workflow = mode", "m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath=\"(today() - dob) >", "[ FormLink(xpath='true()', form_id=m1f0.unique_id) ] self.assertXmlPartialEqual(self.get_xml('form_link_create_update_case'), factory.app.create_suite(), \"./entry[1]\") def test_with_case_management_multiple_links(self): factory", "[1, 2]: module = app.get_module(m) module.put_in_root = True self.assertXmlPartialEqual(self.get_xml('suite-workflow-module-in-root'), app.create_suite(),", "\"./entry\") def test_reference_to_missing_session_variable_in_stack(self): # http://manage.dimagi.com/default.asp?236750 # # Stack create blocks", "StackDatum(id='a', value=session_var('new_a')), StackDatum(id='b', value=session_var('new_b')), StackDatum(id='c', value=\"instance('casedb')/case/[@case_id = {a}]/index/parent\".format(a=session_var('a'))), StackDatum(id='d', value=\"if({c},", "c=session_var('c'))) ] clean = _replace_session_references_in_stack(children) clean_raw = [] for child", "- dob) &lt; 7\", form_id=m1f0.unique_id) ] self.assertXmlPartialEqual(self.get_xml('form_link_basic'), factory.app.create_suite(), \"./entry[1]\") def", "factory.new_basic_module('m1', 'frog') factory.form_requires_case(m1f0) m1f1 = 
factory.new_form(m1) factory.form_opens_case(m1f1) m0f0.post_form_workflow = WORKFLOW_FORM", "AUTO_SELECT_CASE, WORKFLOW_FORM, WORKFLOW_MODULE, WORKFLOW_PREVIOUS, WORKFLOW_ROOT, WORKFLOW_PARENT_MODULE, ) from corehq.apps.app_manager.models import", "factory.app.create_suite(), \"./entry[3]\") def test_form_links_submodule(self): # Test that when linking between", "import absolute_import from __future__ import unicode_literals from django.test import SimpleTestCase", "_replace_session_references_in_stack, CommandId from corehq.apps.app_manager.suite_xml.xml_models import StackDatum from corehq.apps.app_manager.tests.app_factory import AppFactory", "= AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('m0', 'frog') m1, m1f0 =", "id=\"case_id_A\" value=\"instance('commcaresession')/session/data/case_id_new_A\"/> # - <datum id=\"case_id_B\" value=\"instance('casedb')/casedb/case[@case_id=instance('commcaresession')/session/data/case_id_A]/index/host\"/> # + <datum", "doesn't get updated # the value isn't available in the", "= factory.new_basic_module('m0', 'frog') factory.form_requires_case(m0f0) m1, m1f0 = factory.new_basic_module('m1', 'frog') factory.form_requires_case(m1f0)", "factory.new_form(m2) factory.form_requires_case(m2f0) factory.form_requires_case(m2f1) m3, m3f0 = factory.new_basic_module('m3', 'child') m3f1 =", "we need to replace any references to previous variables with", "child in clean: if isinstance(child, CommandId): clean_raw.append(child.id) else: clean_raw.append((child.id, child.value))", "parent_module=m1) factory.form_requires_case(m5f0) for module in factory.app.get_modules(): for form in module.get_forms():", "def test_link_to_form_in_parent_module(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('enroll child',", "# # in the above example ``case_id_A`` is being added", "= factory.new_basic_module('enroll child', 'child') factory.form_opens_case(m0f0) m1, m1f0 = 
factory.new_basic_module('child visit',", "\"instance('casedb')/case/[@case_id = {a}]/index/parent\".format(a=session_var('new_a')) self.assertEqual(clean_raw, [ 'm0', ('a', session_var('new_a')), ('b', session_var('new_b')),", "factory.form_opens_case(m1f1) m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath=\"a = 1\",", "] self.assertXmlPartialEqual(self.get_xml('form_link_tdh'), factory.app.create_suite(), \"./entry\") def test_manual_form_link(self): factory = AppFactory(build_version='2.9.0') m0,", "from corehq.apps.app_manager.tests.util import TestXmlMixin from corehq.apps.app_manager.xpath import session_var class TestFormWorkflow(SimpleTestCase,", "m0f0 = factory.new_basic_module('enroll child', 'child') factory.form_opens_case(m0f0) m1, m1f0 = factory.new_basic_module('child", "FormLink(xpath=\"true()\", form_id=m1f0.unique_id), ] self.assertXmlPartialEqual(self.get_xml('form_link_child_modules'), factory.app.create_suite(), \"./entry[3]\") def test_form_links_submodule(self): # Test", "corehq.apps.app_manager.suite_xml.post_process.workflow import _replace_session_references_in_stack, CommandId from corehq.apps.app_manager.suite_xml.xml_models import StackDatum from corehq.apps.app_manager.tests.app_factory", "self._build_workflow_app(WORKFLOW_PREVIOUS) self.assertXmlPartialEqual(self.get_xml('suite-workflow-previous'), app.create_suite(), \"./entry\") def test_form_workflow_module(self): app = self._build_workflow_app(WORKFLOW_MODULE) self.assertXmlPartialEqual(self.get_xml('suite-workflow-module'),", "= {a}]/index/parent\".format(a=session_var('new_a')) self.assertEqual(clean_raw, [ 'm0', ('a', session_var('new_a')), ('b', session_var('new_b')), ('c',", "from corehq.apps.app_manager.models import FormDatum, FormLink from corehq.apps.app_manager.suite_xml.post_process.workflow import _replace_session_references_in_stack, CommandId", "m1, m1f0 = factory.new_basic_module('m1', 'frog') factory.form_requires_case(m1f0) 
m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links", "xpath=\"instance('commcaresession')/session/data/case_id_new_visit_0\"), ]), FormLink(xpath=condition_for_xpath, form_id=m2f0.unique_id, datums=[ FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id\"), FormDatum(name='case_id_load_visit_0', xpath=\"instance('commcaresession')/session/data/case_id_new_visit_0\"), ]),", "factory.new_form(m3) factory.form_requires_case(m3f0, parent_case_type='patient') factory.form_requires_case(m3f1) m4, m4f0 = factory.new_advanced_module('m4', 'patient') factory.form_requires_case(m4f0,", "m1f1 = factory.new_form(m1) factory.form_opens_case(m1f0) factory.form_requires_case(m1f1) m2, m2f0 = factory.new_basic_module('m2', 'patient')", "module = app.get_module(m) module.put_in_root = True self.assertXmlPartialEqual(self.get_xml('suite-workflow-module-in-root'), app.create_suite(), \"./entry\") def", "variables with the full xpath which # that session variable", "``case_id_A`` is being added to the session and then #", "history', 'visit', parent_module=m1) factory.form_requires_case(m2f0, 'child') factory.form_requires_case(m2f0, 'visit', parent_case_type='child') m0f0.post_form_workflow =", "= factory.new_basic_module('m1', 'frog') factory.form_requires_case(m1f0) m1f1 = factory.new_form(m1) factory.form_opens_case(m1f1) m0f0.post_form_workflow =", "factory.form_requires_case(m2f0) factory.form_requires_case(m2f1) m3, m3f0 = factory.new_basic_module('m3', 'child') m3f1 = factory.new_form(m3)", "from corehq.apps.app_manager.suite_xml.xml_models import StackDatum from corehq.apps.app_manager.tests.app_factory import AppFactory from corehq.apps.app_manager.tests.util", "AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('m0', 'frog') factory.form_opens_case(m0f0) m1, m1f0 =", "] self.assertXmlPartialEqual(self.get_xml('form_link_create_update_case'), factory.app.create_suite(), \"./entry[1]\") def 
test_with_case_management_multiple_links(self): factory = AppFactory(build_version='2.9.0') m0,", "datums=[ FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id\"), FormDatum(name='case_id_load_visit_0', xpath=\"instance('commcaresession')/session/data/case_id_new_visit_0\"), ]), ] m1f0.post_form_workflow_fallback = WORKFLOW_PREVIOUS", "class TestReplaceSessionRefs(SimpleTestCase): def test_replace_session_references_in_stack(self): children = [ CommandId('m0'), StackDatum(id='a', value=session_var('new_a')),", "<datum id=\"case_id_A\" value=\"instance('commcaresession')/session/data/case_id_new_A\"/> # - <datum id=\"case_id_B\" value=\"instance('casedb')/casedb/case[@case_id=instance('commcaresession')/session/data/case_id_A]/index/host\"/> # +", "factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('m0', 'frog') m1, m1f0", "factory.form_requires_case(m4f1, case_type='patient') m4f2 = factory.new_form(m4) factory.form_requires_case(m4f2, case_type='patient') factory.form_requires_case(m4f2, case_type='patient') factory.advanced_form_autoloads(m4f2,", "AppFactory from corehq.apps.app_manager.tests.util import TestXmlMixin from corehq.apps.app_manager.xpath import session_var class", "= factory.new_basic_module('m3', 'child') m3f1 = factory.new_form(m3) factory.form_requires_case(m3f0, parent_case_type='patient') factory.form_requires_case(m3f1) m4,", "factory.form_requires_case(m4f2, case_type='patient') factory.advanced_form_autoloads(m4f2, AUTO_SELECT_RAW, 'case_id') m5, m5f0 = factory.new_basic_module('m5', 'patient',", "WORKFLOW_FORM m1f0.form_links = [ FormLink(xpath=\"true()\", form_id=m2f0.unique_id, datums=[ FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id\"), FormDatum(name='case_id_load_visit_0',", "</partial> \"\"\" self.assertXmlPartialEqual(expected, factory.app.create_suite(), \"./entry[3]/stack\") def test_link_to_form_in_parent_module(self): factory = 
AppFactory(build_version='2.9.0')", "in a submodule we match up the # session variables", "self.assertXmlPartialEqual(self.get_xml('suite-workflow-previous'), app.create_suite(), \"./entry\") def test_form_workflow_module(self): app = self._build_workflow_app(WORKFLOW_MODULE) self.assertXmlPartialEqual(self.get_xml('suite-workflow-module'), app.create_suite(),", "= WORKFLOW_FORM m1f0.form_links = [ FormLink(xpath=\"true()\", form_id=m2f0.unique_id, datums=[ FormDatum(name='case_id_load_episode_0', xpath=\"instance('commcaresession')/session/data/case_id_new_episode_0\")", "from corehq.apps.app_manager.xpath import session_var class TestFormWorkflow(SimpleTestCase, TestXmlMixin): file_path = ('data',", "linking between two forms in a submodule we match up", "def test_with_case_management_multiple_links(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('m0', 'frog')", "= WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath='true()', form_id=m1f0.unique_id) ] self.assertXmlPartialEqual(self.get_xml('form_link_create_update_case'), factory.app.create_suite(),", "referenced. 
However since the session doesn't get updated # the", "form_id=m1f1.unique_id) ] self.assertXmlPartialEqual(self.get_xml('form_link_multiple'), factory.app.create_suite(), \"./entry[1]\") def test_link_to_child_module(self): factory = AppFactory(build_version='2.9.0')", "variables between the source and target form correctly factory =", "\"./entry\") def _build_workflow_app(self, mode): factory = AppFactory(build_version='2.9.0') m0, m0f0 =", "= [ FormLink(xpath=\"true()\", form_id=m1f0.unique_id, datums=[ FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id_new_child_0\") ]), ] m1f0.post_form_workflow", "def test_form_links_submodule(self): # Test that when linking between two forms", "form_id=m1f0.unique_id), ] self.assertXmlPartialEqual(self.get_xml('form_link_child_modules'), factory.app.create_suite(), \"./entry[3]\") def test_form_links_submodule(self): # Test that", "FormLink(xpath='true()', form_id=m1f0.unique_id) ] self.assertXmlPartialEqual(self.get_xml('form_link_create_update_case'), factory.app.create_suite(), \"./entry[1]\") def test_with_case_management_multiple_links(self): factory =", "FormLink(xpath=\"true()\", form_id=m2f0.unique_id), ] self.assertXmlPartialEqual(self.get_xml('form_link_tdh'), factory.app.create_suite(), \"./entry\") def test_manual_form_link(self): factory =", "<partial> <stack> <create> <command value=\"'m1'\"/> <datum id=\"case_id\" value=\"instance('commcaresession')/session/data/case_id\"/> <datum id=\"case_id_new_visit_0\"", "\"./entry\") def test_manual_form_link(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('enroll", "AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('m0', 'frog') m1, m1f0 = factory.new_basic_module('m1',", "do not update the session after each datum # so", "<command value=\"'m1'\"/> <datum id=\"case_id\" value=\"instance('commcaresession')/session/data/case_id\"/> <datum id=\"case_id_new_visit_0\" 
value=\"uuid()\"/> </create> </stack>", "= _replace_session_references_in_stack(children) clean_raw = [] for child in clean: if", "factory.new_basic_module('m3', 'child') m3f1 = factory.new_form(m3) factory.form_requires_case(m3f0, parent_case_type='patient') factory.form_requires_case(m3f1) m4, m4f0", "session after each datum # so items put into the", "FormLink(xpath=\"a = 1\", form_id=m1f0.unique_id), FormLink(xpath=\"a = 2\", form_id=m1f1.unique_id) ] self.assertXmlPartialEqual(self.get_xml('form_link_multiple'),", "m1f0.post_form_workflow = WORKFLOW_FORM condition_for_xpath = \"instance('casedb')/casedb/case[@case_id = \" \\ \"instance('commcaresession')/session/data/case_id]/prop", "\"./entry\") def test_return_to_parent_module(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('enroll", "] clean = _replace_session_references_in_stack(children) clean_raw = [] for child in", "TestXmlMixin from corehq.apps.app_manager.xpath import session_var class TestFormWorkflow(SimpleTestCase, TestXmlMixin): file_path =", "factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('m0', 'frog') factory.form_requires_case(m0f0) m1,", "factory.new_basic_module('child visit', 'child') factory.form_requires_case(m1f0) m2, m2f0 = factory.new_advanced_module('visit history', 'visit',", "= [ FormLink(xpath=\"true()\", form_id=m2f0.unique_id), ] self.assertXmlPartialEqual(self.get_xml('form_link_tdh'), factory.app.create_suite(), \"./entry\") def test_manual_form_link(self):", "factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('child visit', 'child') factory.form_requires_case(m0f0)", "('data', 'form_workflow') def test_basic(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 =", "m1, m1f0 = factory.new_basic_module('m1', 'patient') m1f1 = factory.new_form(m1) factory.form_opens_case(m1f0) factory.form_requires_case(m1f1)", "then # later referenced. 
However since the session doesn't get", "{a}]\".format(a=session_var('a'), c=session_var('c'))) ] clean = _replace_session_references_in_stack(children) clean_raw = [] for", "file_path = ('data', 'form_workflow') def test_basic(self): factory = AppFactory(build_version='2.9.0') m0,", "'child') factory.form_requires_case(m2f0, 'visit', parent_case_type='child') m2f0.post_form_workflow = WORKFLOW_PARENT_MODULE expected = \"\"\"", "= WORKFLOW_FORM m2f0.form_links = [ FormLink(xpath=\"true()\", form_id=m1f0.unique_id), ] self.assertXmlPartialEqual(self.get_xml('form_link_child_modules'), factory.app.create_suite(),", "WORKFLOW_FORM m1f0.form_links = [ FormLink(xpath=\"true()\", form_id=m2f0.unique_id, datums=[ FormDatum(name='case_id_load_episode_0', xpath=\"instance('commcaresession')/session/data/case_id_new_episode_0\") ]),", "7\", form_id=m1f0.unique_id) ] self.assertXmlPartialEqual(self.get_xml('form_link_basic'), factory.app.create_suite(), \"./entry[1]\") def test_with_case_management_both_update(self): factory =", "= factory.new_advanced_module('visit history', 'visit', parent_module=m1) factory.form_requires_case(m2f0, 'child') factory.form_requires_case(m2f0, 'visit', parent_case_type='child')", "= [ FormLink(xpath=\"true()\", form_id=m1f1.unique_id), ] self.assertXmlPartialEqual(self.get_xml('form_link_submodule'), factory.app.create_suite(), \"./entry\") def _build_workflow_app(self,", "for m in [1, 2]: module = app.get_module(m) module.put_in_root =", "self.assertXmlPartialEqual(expected, factory.app.create_suite(), \"./entry[3]/stack\") def test_link_to_form_in_parent_module(self): factory = AppFactory(build_version='2.9.0') m0, m0f0", "example ``case_id_A`` is being added to the session and then", "factory.form_opens_case(m1f0) factory.form_requires_case(m1f1) m2, m2f0 = factory.new_basic_module('m2', 'patient') m2f1 = factory.new_form(m2)", "app = self._build_workflow_app(WORKFLOW_PREVIOUS) 
self.assertXmlPartialEqual(self.get_xml('suite-workflow-previous'), app.create_suite(), \"./entry\") def test_form_workflow_module(self): app =", "the full xpath which # that session variable references. #", "m1, m1f0 = factory.new_basic_module('child visit', 'child') factory.form_requires_case(m1f0) factory.form_opens_case(m1f0, case_type='visit', is_subcase=True)", "factory.form_requires_case(m1f0) factory.form_opens_case(m1f0, case_type='visit', is_subcase=True) m2, m2f0 = factory.new_advanced_module('visit history', 'visit',", "'visit', parent_case_type='child') m2f0.post_form_workflow = WORKFLOW_PARENT_MODULE expected = \"\"\" <partial> <stack>", "form_id=m1f0.unique_id) ] self.assertXmlPartialEqual(self.get_xml('form_link_basic'), factory.app.create_suite(), \"./entry[1]\") def test_with_case_management_both_update(self): factory = AppFactory(build_version='2.9.0')", "m1f0.post_form_workflow_fallback = WORKFLOW_PREVIOUS self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_previous'), factory.app.create_suite(), \"./entry\") m1f0.post_form_workflow_fallback = WORKFLOW_MODULE self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_module'),", "dob) &lt; 7\", form_id=m1f0.unique_id) ] self.assertXmlPartialEqual(self.get_xml('form_link_basic'), factory.app.create_suite(), \"./entry[1]\") def test_with_case_management_both_update(self):", "factory.new_basic_module('m0', 'frog') factory.form_requires_case(m0f0) m1, m1f0 = factory.new_basic_module('m1', 'frog') factory.form_requires_case(m1f0) m0f0.post_form_workflow", "id=\"case_id\" value=\"instance('commcaresession')/session/data/case_id\"/> <datum id=\"case_id_new_visit_0\" value=\"uuid()\"/> <command value=\"'m2'\"/> </create> </stack> </partial>", "m0f0 = factory.new_basic_module('m0', 'frog') factory.form_requires_case(m0f0) m1, m1f0 = factory.new_basic_module('m1', 'frog')", "= 2\", form_id=m1f1.unique_id) ] self.assertXmlPartialEqual(self.get_xml('form_link_multiple'), 
factory.app.create_suite(), \"./entry[1]\") def test_link_to_child_module(self): factory", "m1, m1f0 = factory.new_basic_module('m1', 'frog') m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links =", "'value'\" m1f0.form_links = [ FormLink(xpath=\"true()\", form_id=m2f0.unique_id, datums=[ FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id\"), FormDatum(name='case_id_load_visit_0',", "FormLink(xpath=\"(today() - dob) &lt; 7\", form_id=m1f0.unique_id) ] self.assertXmlPartialEqual(self.get_xml('form_link_basic'), factory.app.create_suite(), \"./entry[1]\")", "'visit', parent_case_type='child') m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath=\"true()\", form_id=m1f0.unique_id),", "m1f0 = factory.new_basic_module('m1', 'patient') m1f1 = factory.new_form(m1) factory.form_opens_case(m1f0) factory.form_requires_case(m1f1) m2,", "app.create_suite(), \"./entry\") def test_form_workflow_module_in_root(self): app = self._build_workflow_app(WORKFLOW_PREVIOUS) for m in", "[ FormLink(xpath=\"(today() - dob) > 7\", form_id=m1f0.unique_id) ] self.assertXmlPartialEqual(self.get_xml('form_link_update_case'), factory.app.create_suite(),", "= factory.new_form(m3) factory.form_requires_case(m3f0, parent_case_type='patient') factory.form_requires_case(m3f1) m4, m4f0 = factory.new_advanced_module('m4', 'patient')", "m1f0.post_form_workflow = WORKFLOW_FORM m1f0.form_links = [ FormLink(xpath=\"true()\", form_id=m2f0.unique_id, datums=[ FormDatum(name='case_id_load_episode_0',", "def test_form_workflow_module_in_root(self): app = self._build_workflow_app(WORKFLOW_PREVIOUS) for m in [1, 2]:", "factory.new_form(m4) factory.form_requires_case(m4f2, case_type='patient') factory.form_requires_case(m4f2, case_type='patient') factory.advanced_form_autoloads(m4f2, AUTO_SELECT_RAW, 'case_id') m5, m5f0", "def test_link_to_child_module(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('enroll child',", 
"between the source and target form correctly factory = AppFactory(build_version='2.9.0')", "previous variables with the full xpath which # that session", "factory.new_advanced_module('m4', 'patient') factory.form_requires_case(m4f0, case_type='patient') factory.form_requires_case(m4f0, case_type='patient') m4f1 = factory.new_form(m4) factory.form_requires_case(m4f1,", "m2f0.post_form_workflow = WORKFLOW_FORM m2f0.form_links = [ FormLink(xpath=\"true()\", form_id=m1f0.unique_id), ] self.assertXmlPartialEqual(self.get_xml('form_link_child_modules'),", "'visit', parent_case_type='child') m1f0.post_form_workflow = WORKFLOW_FORM m1f0.form_links = [ FormLink(xpath=\"true()\", form_id=m1f1.unique_id),", "factory.new_basic_module('child visit', 'child') factory.form_requires_case(m0f0) factory.form_opens_case(m0f0, 'visit', is_subcase=True) m1, m1f0 =", "m0, m0f0 = factory.new_basic_module('m0', 'frog') m1, m1f0 = factory.new_basic_module('m1', 'frog')", "m0f0 = factory.new_basic_module('person registration', 'person') factory.form_opens_case(m0f0) m1, m1f0 = factory.new_advanced_module('episode", "we match up the # session variables between the source", "updated # the value isn't available in the session. 
#", "_build_workflow_app(self, mode): factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('m0', '')", "= factory.new_form(m4) factory.form_requires_case(m4f2, case_type='patient') factory.form_requires_case(m4f2, case_type='patient') factory.advanced_form_autoloads(m4f2, AUTO_SELECT_RAW, 'case_id') m5,", "test_form_workflow_previous(self): app = self._build_workflow_app(WORKFLOW_PREVIOUS) self.assertXmlPartialEqual(self.get_xml('suite-workflow-previous'), app.create_suite(), \"./entry\") def test_form_workflow_module(self): app", "to previous variables with the full xpath which # that", "def _build_workflow_app(self, mode): factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('m0',", "factory.app def test_form_workflow_previous(self): app = self._build_workflow_app(WORKFLOW_PREVIOUS) self.assertXmlPartialEqual(self.get_xml('suite-workflow-previous'), app.create_suite(), \"./entry\") def", "= [ FormLink(xpath=\"a = 1\", form_id=m1f0.unique_id), FormLink(xpath=\"a = 2\", form_id=m1f1.unique_id)", "m1f0.post_form_workflow_fallback = WORKFLOW_ROOT self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_root'), factory.app.create_suite(), \"./entry\") def test_reference_to_missing_session_variable_in_stack(self): # http://manage.dimagi.com/default.asp?236750", "test_basic(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('m0', 'frog') m1,", "= WORKFLOW_MODULE expected = \"\"\" <partial> <stack> <create> <command value=\"'m1'\"/>", "factory.new_basic_module('m0', '') factory.new_form(m0) m1, m1f0 = factory.new_basic_module('m1', 'patient') m1f1 =", "source and target form correctly factory = AppFactory(build_version='2.9.0') m0, m0f0", "new_c = \"instance('casedb')/case/[@case_id = {a}]/index/parent\".format(a=session_var('new_a')) self.assertEqual(clean_raw, [ 'm0', ('a', session_var('new_a')),", "'visit', parent_module=m0) factory.form_requires_case(m1f0, 'child') 
factory.form_requires_case(m1f0, 'visit', parent_case_type='child') m1f1 = factory.new_form(m1)", "m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath=\"(today() - dob) &lt;", "'episode') factory.advanced_form_autoloads(m2f0, AUTO_SELECT_CASE, 'host', 'load_episode_0') m1f0.post_form_workflow = WORKFLOW_FORM m1f0.form_links =", "m0, m0f0 = factory.new_basic_module('child visit', 'child') factory.form_requires_case(m0f0) factory.form_opens_case(m0f0, 'visit', is_subcase=True)", "'frog') factory.form_requires_case(m0f0) m1, m1f0 = factory.new_basic_module('m1', 'frog') factory.form_requires_case(m1f0) m0f0.post_form_workflow =", "'case_id') m5, m5f0 = factory.new_basic_module('m5', 'patient', parent_module=m1) factory.form_requires_case(m5f0) for module", "FormLink(xpath=\"true()\", form_id=m1f1.unique_id), ] self.assertXmlPartialEqual(self.get_xml('form_link_submodule'), factory.app.create_suite(), \"./entry\") def _build_workflow_app(self, mode): factory", "= factory.new_basic_module('m1', 'frog') factory.form_requires_case(m1f0) m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links = [", "factory.new_advanced_module('tests', 'episode') factory.form_requires_case(m2f0, 'episode') factory.advanced_form_autoloads(m2f0, AUTO_SELECT_CASE, 'host', 'load_episode_0') m1f0.post_form_workflow =", "'child') factory.form_requires_case(m0f0) factory.form_opens_case(m0f0, 'visit', is_subcase=True) m1, m1f0 = factory.new_advanced_module('visit histroy',", "def test_form_workflow_root(self): app = self._build_workflow_app(WORKFLOW_ROOT) self.assertXmlPartialEqual(self.get_xml('suite-workflow-root'), app.create_suite(), \"./entry\") class TestReplaceSessionRefs(SimpleTestCase):", "session variable references. 
# # See corehq.apps.app_manager.suite_xml.post_process.workflow._replace_session_references_in_stack factory = AppFactory(build_version='2.9.0')", "module.get_forms(): form.post_form_workflow = mode return factory.app def test_form_workflow_previous(self): app =", "else: clean_raw.append((child.id, child.value)) new_c = \"instance('casedb')/case/[@case_id = {a}]/index/parent\".format(a=session_var('new_a')) self.assertEqual(clean_raw, [", "factory.form_requires_case(m2f0, 'visit', parent_case_type='child') m2f0.post_form_workflow = WORKFLOW_PARENT_MODULE expected = \"\"\" <partial>", "# the value isn't available in the session. # #", "</create> </stack> </partial> \"\"\" self.assertXmlPartialEqual(expected, factory.app.create_suite(), \"./entry[3]/stack\") def test_return_to_child_module(self): factory", "factory.form_requires_case(m1f0) m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath='true()', form_id=m1f0.unique_id) ]", "parent_module=m0) factory.form_requires_case(m1f0, 'child') factory.form_requires_case(m1f0, 'visit', parent_case_type='child') m1f1 = factory.new_form(m1) factory.form_requires_case(m1f1,", "for form in module.get_forms(): form.post_form_workflow = mode return factory.app def", "form_id=m2f0.unique_id, datums=[ FormDatum(name='case_id_load_episode_0', xpath=\"instance('commcaresession')/session/data/case_id_new_episode_0\") ]), ] self.assertXmlPartialEqual(self.get_xml('form_link_enikshay'), factory.app.create_suite(), \"./entry\") def", "m3, m3f0 = factory.new_basic_module('m3', 'child') m3f1 = factory.new_form(m3) factory.form_requires_case(m3f0, parent_case_type='patient')", "FormDatum(name='case_id_load_episode_0', xpath=\"instance('commcaresession')/session/data/case_id_new_episode_0\") ]), ] self.assertXmlPartialEqual(self.get_xml('form_link_enikshay'), factory.app.create_suite(), \"./entry\") def test_return_to_parent_module(self): factory", "get updated # the value isn't available in the session.", 
"parent_module=m1) factory.form_requires_case(m2f0, 'child') # link to child -> edit child", "= \" \\ \"instance('commcaresession')/session/data/case_id]/prop = 'value'\" m1f0.form_links = [ FormLink(xpath=\"true()\",", "self.assertXmlPartialEqual(self.get_xml('form_link_create_update_case'), factory.app.create_suite(), \"./entry[1]\") def test_with_case_management_multiple_links(self): factory = AppFactory(build_version='2.9.0') m0, m0f0", "<datum id=\"case_id_B\" value=\"instance('casedb')/casedb/case[@case_id=instance('commcaresession')/session/data/case_id_A]/index/host\"/> # + <datum id=\"case_id_B\" value=\"instance('casedb')/casedb/case[@case_id=instance('commcaresession')/session/data/case_id_new_A]/index/host\"/> # #", "is_subcase=True) m1, m1f0 = factory.new_advanced_module('visit histroy', 'visit', parent_module=m0) factory.form_requires_case(m1f0, 'child')", "] m1f0.post_form_workflow = WORKFLOW_FORM condition_for_xpath = \"instance('casedb')/casedb/case[@case_id = \" \\", "</create> </stack> </partial> \"\"\" self.assertXmlPartialEqual(expected, factory.app.create_suite(), \"./entry[3]/stack\") def test_link_to_form_in_parent_module(self): factory", "value=\"instance('casedb')/casedb/case[@case_id=instance('commcaresession')/session/data/case_id_A]/index/host\"/> # + <datum id=\"case_id_B\" value=\"instance('casedb')/casedb/case[@case_id=instance('commcaresession')/session/data/case_id_new_A]/index/host\"/> # # in the", "= \"\"\" <partial> <stack> <create> <command value=\"'m1'\"/> <datum id=\"case_id\" value=\"instance('commcaresession')/session/data/case_id\"/>", "'child') factory.form_requires_case(m2f0, 'visit', parent_case_type='child') m2f0.post_form_workflow = WORKFLOW_MODULE expected = \"\"\"", "session_var class TestFormWorkflow(SimpleTestCase, TestXmlMixin): file_path = ('data', 'form_workflow') def test_basic(self):", "submodule we match up the # session variables between the", "factory.new_form(m1) factory.form_opens_case(m1f0) 
factory.form_requires_case(m1f1) m2, m2f0 = factory.new_basic_module('m2', 'patient') m2f1 =", "{c}, {a}]\".format(a=session_var('a'), c=session_var('c'))) ] clean = _replace_session_references_in_stack(children) clean_raw = []", "factory.new_basic_module('m1', 'patient') m1f1 = factory.new_form(m1) factory.form_opens_case(m1f0) factory.form_requires_case(m1f1) m2, m2f0 =", "]), FormLink(xpath=condition_for_xpath, form_id=m2f0.unique_id, datums=[ FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id\"), FormDatum(name='case_id_load_visit_0', xpath=\"instance('commcaresession')/session/data/case_id_new_visit_0\"), ]), ]", "factory.new_basic_module('m1', 'frog') m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath=\"(today() -", "= WORKFLOW_PREVIOUS self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_previous'), factory.app.create_suite(), \"./entry\") m1f0.post_form_workflow_fallback = WORKFLOW_MODULE self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_module'), factory.app.create_suite(),", "m2f1 = factory.new_form(m2) factory.form_requires_case(m2f0) factory.form_requires_case(m2f1) m3, m3f0 = factory.new_basic_module('m3', 'child')", "'frog') factory.form_requires_case(m1f0) m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath='true()', form_id=m1f0.unique_id)", "is_subcase=True) m2, m2f0 = factory.new_advanced_module('visit history', 'visit', parent_module=m1) factory.form_requires_case(m2f0, 'child')", "= WORKFLOW_PARENT_MODULE expected = \"\"\" <partial> <stack> <create> <command value=\"'m1'\"/>", "m1, m1f0 = factory.new_advanced_module('visit histroy', 'visit', parent_module=m0) factory.form_requires_case(m1f0, 'child') factory.form_requires_case(m1f0,", "+ <datum id=\"case_id_B\" value=\"instance('casedb')/casedb/case[@case_id=instance('commcaresession')/session/data/case_id_new_A]/index/host\"/> # # in the above example", 
"value=\"instance('commcaresession')/session/data/case_id\"/> <datum id=\"case_id_new_visit_0\" value=\"uuid()\"/> </create> </stack> </partial> \"\"\" self.assertXmlPartialEqual(expected, factory.app.create_suite(),", "\"./entry[1]\") def test_with_case_management_multiple_links(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('m0',", "not update the session after each datum # so items", "StackDatum(id='d', value=\"if({c}, {c}, {a}]\".format(a=session_var('a'), c=session_var('c'))) ] clean = _replace_session_references_in_stack(children) clean_raw", "form_id=m1f0.unique_id) ] self.assertXmlPartialEqual(self.get_xml('form_link_update_case'), factory.app.create_suite(), \"./entry[1]\") def test_with_case_management_create_update(self): factory = AppFactory(build_version='2.9.0')", "<command value=\"'m2'\"/> </create> </stack> </partial> \"\"\" self.assertXmlPartialEqual(expected, factory.app.create_suite(), \"./entry[3]/stack\") def", "# in the above example ``case_id_A`` is being added to", "# # See corehq.apps.app_manager.suite_xml.post_process.workflow._replace_session_references_in_stack factory = AppFactory(build_version='2.9.0') m0, m0f0 =", "<stack> <create> <command value=\"'m1'\"/> <datum id=\"case_id\" value=\"instance('commcaresession')/session/data/case_id\"/> <datum id=\"case_id_new_visit_0\" value=\"uuid()\"/>", "value=\"uuid()\"/> <command value=\"'m2'\"/> </create> </stack> </partial> \"\"\" self.assertXmlPartialEqual(expected, factory.app.create_suite(), \"./entry[3]/stack\")", "available later steps # # <datum id=\"case_id_A\" value=\"instance('commcaresession')/session/data/case_id_new_A\"/> # -", "= \"instance('casedb')/case/[@case_id = {a}]/index/parent\".format(a=session_var('new_a')) self.assertEqual(clean_raw, [ 'm0', ('a', session_var('new_a')), ('b',", "= factory.new_form(m1) factory.form_opens_case(m1f1) m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath=\"a", 
"test_form_workflow_root(self): app = self._build_workflow_app(WORKFLOW_ROOT) self.assertXmlPartialEqual(self.get_xml('suite-workflow-root'), app.create_suite(), \"./entry\") class TestReplaceSessionRefs(SimpleTestCase): def", "CommandId): clean_raw.append(child.id) else: clean_raw.append((child.id, child.value)) new_c = \"instance('casedb')/case/[@case_id = {a}]/index/parent\".format(a=session_var('new_a'))", "= 'value'\" m1f0.form_links = [ FormLink(xpath=\"true()\", form_id=m2f0.unique_id, datums=[ FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id\"),", "] m1f0.post_form_workflow = WORKFLOW_FORM m1f0.form_links = [ FormLink(xpath=\"true()\", form_id=m2f0.unique_id, datums=[", "return factory.app def test_form_workflow_previous(self): app = self._build_workflow_app(WORKFLOW_PREVIOUS) self.assertXmlPartialEqual(self.get_xml('suite-workflow-previous'), app.create_suite(), \"./entry\")", "factory.form_opens_case(m1f0, case_type='visit', is_subcase=True) m2, m2f0 = factory.new_advanced_module('visit history', 'visit', parent_module=m1)", "and then # later referenced. 
However since the session doesn't", "form correctly factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('child visit',", "[ 'm0', ('a', session_var('new_a')), ('b', session_var('new_b')), ('c', new_c), ('d', \"if({c},", "factory.form_requires_case(m1f1, 'child') factory.form_requires_case(m1f1, 'visit', parent_case_type='child') m1f0.post_form_workflow = WORKFLOW_FORM m1f0.form_links =", "clean: if isinstance(child, CommandId): clean_raw.append(child.id) else: clean_raw.append((child.id, child.value)) new_c =", "[ FormLink(xpath=\"true()\", form_id=m2f0.unique_id, datums=[ FormDatum(name='case_id_load_episode_0', xpath=\"instance('commcaresession')/session/data/case_id_new_episode_0\") ]), ] self.assertXmlPartialEqual(self.get_xml('form_link_enikshay'), factory.app.create_suite(),", "factory.new_basic_module('m5', 'patient', parent_module=m1) factory.form_requires_case(m5f0) for module in factory.app.get_modules(): for form", "full xpath which # that session variable references. 
# #", "# http://manage.dimagi.com/default.asp?236750 # # Stack create blocks do not update", "<datum id=\"case_id_new_visit_0\" value=\"uuid()\"/> </create> </stack> </partial> \"\"\" self.assertXmlPartialEqual(expected, factory.app.create_suite(), \"./entry[3]/stack\")", "factory.form_requires_case(m1f0, 'visit', parent_case_type='child') m1f1 = factory.new_form(m1) factory.form_requires_case(m1f1, 'child') factory.form_requires_case(m1f1, 'visit',", "= self._build_workflow_app(WORKFLOW_ROOT) self.assertXmlPartialEqual(self.get_xml('suite-workflow-root'), app.create_suite(), \"./entry\") class TestReplaceSessionRefs(SimpleTestCase): def test_replace_session_references_in_stack(self): children", "m1f0.form_links = [ FormLink(xpath=\"true()\", form_id=m2f0.unique_id, datums=[ FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id\"), FormDatum(name='case_id_load_visit_0', xpath=\"instance('commcaresession')/session/data/case_id_new_visit_0\"),", "def test_reference_to_missing_session_variable_in_stack(self): # http://manage.dimagi.com/default.asp?236750 # # Stack create blocks do", "import StackDatum from corehq.apps.app_manager.tests.app_factory import AppFactory from corehq.apps.app_manager.tests.util import TestXmlMixin", "import session_var class TestFormWorkflow(SimpleTestCase, TestXmlMixin): file_path = ('data', 'form_workflow') def", "AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('child visit', 'child') factory.form_requires_case(m0f0) factory.form_opens_case(m0f0, 'visit',", "WORKFLOW_FORM m1f0.form_links = [ FormLink(xpath=\"true()\", form_id=m2f0.unique_id), ] self.assertXmlPartialEqual(self.get_xml('form_link_tdh'), factory.app.create_suite(), \"./entry\")", "test_form_workflow_module(self): app = self._build_workflow_app(WORKFLOW_MODULE) self.assertXmlPartialEqual(self.get_xml('suite-workflow-module'), app.create_suite(), \"./entry\") def test_form_workflow_module_in_root(self): app", "session in one 
step aren't available later steps # #", "m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath=\"true()\", form_id=m1f0.unique_id), ] m1f0.post_form_workflow", "value=session_var('new_a')), StackDatum(id='b', value=session_var('new_b')), StackDatum(id='c', value=\"instance('casedb')/case/[@case_id = {a}]/index/parent\".format(a=session_var('a'))), StackDatum(id='d', value=\"if({c}, {c},", "from corehq.apps.app_manager.suite_xml.post_process.workflow import _replace_session_references_in_stack, CommandId from corehq.apps.app_manager.suite_xml.xml_models import StackDatum from", "[ FormLink(xpath=\"true()\", form_id=m1f0.unique_id, datums=[ FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id_new_child_0\") ]), ] m1f0.post_form_workflow =", "<datum id=\"case_id\" value=\"instance('commcaresession')/session/data/case_id\"/> <datum id=\"case_id_new_visit_0\" value=\"uuid()\"/> <command value=\"'m2'\"/> </create> </stack>", "= [ FormLink(xpath=\"true()\", form_id=m1f0.unique_id), ] m1f0.post_form_workflow = WORKFLOW_FORM m1f0.form_links =", "test_link_to_child_module(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('enroll child', 'child')", "= factory.new_basic_module('person registration', 'person') factory.form_opens_case(m0f0) m1, m1f0 = factory.new_advanced_module('episode registration',", "xpath=\"instance('commcaresession')/session/data/case_id\"), FormDatum(name='case_id_load_visit_0', xpath=\"instance('commcaresession')/session/data/case_id_new_visit_0\"), ]), FormLink(xpath=condition_for_xpath, form_id=m2f0.unique_id, datums=[ FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id\"), FormDatum(name='case_id_load_visit_0',", "app = self._build_workflow_app(WORKFLOW_ROOT) self.assertXmlPartialEqual(self.get_xml('suite-workflow-root'), app.create_suite(), \"./entry\") class TestReplaceSessionRefs(SimpleTestCase): def 
test_replace_session_references_in_stack(self):", "child m2f0.post_form_workflow = WORKFLOW_FORM m2f0.form_links = [ FormLink(xpath=\"true()\", form_id=m1f0.unique_id), ]", "factory.app.create_suite(), \"./entry[1]\") def test_with_case_management_both_update(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 =", "later steps # # <datum id=\"case_id_A\" value=\"instance('commcaresession')/session/data/case_id_new_A\"/> # - <datum", "dob) > 7\", form_id=m1f0.unique_id) ] self.assertXmlPartialEqual(self.get_xml('form_link_update_case'), factory.app.create_suite(), \"./entry[1]\") def test_with_case_management_create_update(self):", "factory.app.create_suite(), \"./entry[3]/stack\") def test_return_to_child_module(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 =", "[] for child in clean: if isinstance(child, CommandId): clean_raw.append(child.id) else:", "'visit', parent_case_type='child') m2f0.post_form_workflow = WORKFLOW_MODULE expected = \"\"\" <partial> <stack>", "correctly factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('child visit', 'child')", "'episode') factory.form_requires_case(m2f0, 'episode') factory.advanced_form_autoloads(m2f0, AUTO_SELECT_CASE, 'host', 'load_episode_0') m1f0.post_form_workflow = WORKFLOW_FORM", "<reponame>kkrampa/commcare-hq from __future__ import absolute_import from __future__ import unicode_literals from", "= factory.new_form(m1) factory.form_opens_case(m1f0) factory.form_requires_case(m1f1) m2, m2f0 = factory.new_basic_module('m2', 'patient') m2f1", "mode return factory.app def test_form_workflow_previous(self): app = self._build_workflow_app(WORKFLOW_PREVIOUS) self.assertXmlPartialEqual(self.get_xml('suite-workflow-previous'), app.create_suite(),", "def test_with_case_management_create_update(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('m0', 'frog')", "# + <datum id=\"case_id_B\" 
value=\"instance('casedb')/casedb/case[@case_id=instance('commcaresession')/session/data/case_id_new_A]/index/host\"/> # # in the above", "FormLink(xpath=\"true()\", form_id=m2f0.unique_id, datums=[ FormDatum(name='case_id_load_episode_0', xpath=\"instance('commcaresession')/session/data/case_id_new_episode_0\") ]), ] self.assertXmlPartialEqual(self.get_xml('form_link_enikshay'), factory.app.create_suite(), \"./entry\")", "] self.assertXmlPartialEqual(self.get_xml('form_link_multiple'), factory.app.create_suite(), \"./entry[1]\") def test_link_to_child_module(self): factory = AppFactory(build_version='2.9.0') m0,", "\"./entry[3]/stack\") def test_return_to_child_module(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('enroll", "= AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('m0', 'frog') factory.form_requires_case(m0f0) m1, m1f0", "= factory.new_basic_module('m5', 'patient', parent_module=m1) factory.form_requires_case(m5f0) for module in factory.app.get_modules(): for", "after each datum # so items put into the session", "FormDatum(name='case_id_load_visit_0', xpath=\"instance('commcaresession')/session/data/case_id_new_visit_0\"), ]), FormLink(xpath=condition_for_xpath, form_id=m2f0.unique_id, datums=[ FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id\"), FormDatum(name='case_id_load_visit_0', xpath=\"instance('commcaresession')/session/data/case_id_new_visit_0\"),", "2]: module = app.get_module(m) module.put_in_root = True self.assertXmlPartialEqual(self.get_xml('suite-workflow-module-in-root'), app.create_suite(), \"./entry\")", "factory.form_requires_case(m4f0, case_type='patient') m4f1 = factory.new_form(m4) factory.form_requires_case(m4f1, case_type='patient') factory.form_requires_case(m4f1, case_type='patient') factory.form_requires_case(m4f1,", "blocks do not update the session after each datum #", "added to the session and then # later referenced. 
However", "\"./entry[1]\") def test_with_case_management_both_update(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('m0',", "m2f0.form_links = [ FormLink(xpath=\"true()\", form_id=m1f0.unique_id), ] self.assertXmlPartialEqual(self.get_xml('form_link_child_modules'), factory.app.create_suite(), \"./entry[3]\") def", "visit', 'child') factory.form_requires_case(m1f0) factory.form_opens_case(m1f0, case_type='visit', is_subcase=True) m2, m2f0 = factory.new_advanced_module('visit", "self._build_workflow_app(WORKFLOW_ROOT) self.assertXmlPartialEqual(self.get_xml('suite-workflow-root'), app.create_suite(), \"./entry\") class TestReplaceSessionRefs(SimpleTestCase): def test_replace_session_references_in_stack(self): children =", "parent_module=m1) factory.form_requires_case(m2f0, 'child') factory.form_requires_case(m2f0, 'visit', parent_case_type='child') m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links", "xpath which # that session variable references. 
# # See", "link to child -> edit child m2f0.post_form_workflow = WORKFLOW_FORM m2f0.form_links", "import FormDatum, FormLink from corehq.apps.app_manager.suite_xml.post_process.workflow import _replace_session_references_in_stack, CommandId from corehq.apps.app_manager.suite_xml.xml_models", "FormLink(xpath=condition_for_xpath, form_id=m2f0.unique_id, datums=[ FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id\"), FormDatum(name='case_id_load_visit_0', xpath=\"instance('commcaresession')/session/data/case_id_new_visit_0\"), ]), ] m1f0.post_form_workflow_fallback", "from __future__ import absolute_import from __future__ import unicode_literals from django.test", "= factory.new_form(m1) factory.form_requires_case(m1f1, 'child') factory.form_requires_case(m1f1, 'visit', parent_case_type='child') m1f0.post_form_workflow = WORKFLOW_FORM", "m1f0 = factory.new_basic_module('m1', 'frog') m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links = [", "AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('enroll child', 'child') factory.form_opens_case(m0f0) m1, m1f0", "each datum # so items put into the session in", "'m0', ('a', session_var('new_a')), ('b', session_var('new_b')), ('c', new_c), ('d', \"if({c}, {c},", "case_type='patient') factory.form_requires_case(m4f2, case_type='patient') factory.advanced_form_autoloads(m4f2, AUTO_SELECT_RAW, 'case_id') m5, m5f0 = factory.new_basic_module('m5',", "parent_module=m1) factory.form_requires_case(m2f0, 'child') factory.form_requires_case(m2f0, 'visit', parent_case_type='child') m2f0.post_form_workflow = WORKFLOW_MODULE expected", "def test_replace_session_references_in_stack(self): children = [ CommandId('m0'), StackDatum(id='a', value=session_var('new_a')), StackDatum(id='b', value=session_var('new_b')),", "FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id_new_child_0\") ]), ] m1f0.post_form_workflow = WORKFLOW_FORM condition_for_xpath = 
\"instance('casedb')/casedb/case[@case_id", "m0f0.form_links = [ FormLink(xpath='true()', form_id=m1f0.unique_id) ] self.assertXmlPartialEqual(self.get_xml('form_link_create_update_case'), factory.app.create_suite(), \"./entry[1]\") def", "that when linking between two forms in a submodule we", "factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('m0', '') factory.new_form(m0) m1,", "in one step aren't available later steps # # <datum", "FormLink(xpath=\"(today() - dob) > 7\", form_id=m1f0.unique_id) ] self.assertXmlPartialEqual(self.get_xml('form_link_update_case'), factory.app.create_suite(), \"./entry[1]\")", "since the session doesn't get updated # the value isn't", "test_link_to_form_in_parent_module(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('enroll child', 'child')", "to child -> edit child m2f0.post_form_workflow = WORKFLOW_FORM m2f0.form_links =", "parent_case_type='child') m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath=\"true()\", form_id=m1f0.unique_id, datums=[", "factory.form_requires_case(m3f1) m4, m4f0 = factory.new_advanced_module('m4', 'patient') factory.form_requires_case(m4f0, case_type='patient') factory.form_requires_case(m4f0, case_type='patient')", "= ('data', 'form_workflow') def test_basic(self): factory = AppFactory(build_version='2.9.0') m0, m0f0", "[ FormLink(xpath=\"true()\", form_id=m2f0.unique_id), ] self.assertXmlPartialEqual(self.get_xml('form_link_tdh'), factory.app.create_suite(), \"./entry\") def test_manual_form_link(self): factory", "'child') factory.form_requires_case(m2f0, 'visit', parent_case_type='child') m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links = [", "m0f0 = factory.new_basic_module('child visit', 'child') factory.form_requires_case(m0f0) factory.form_opens_case(m0f0, 'visit', is_subcase=True) m1,", "m0f0.form_links = [ FormLink(xpath=\"(today() - dob) &lt; 7\", form_id=m1f0.unique_id) ]", "from 
corehq.apps.app_manager.tests.app_factory import AppFactory from corehq.apps.app_manager.tests.util import TestXmlMixin from corehq.apps.app_manager.xpath", "child -> edit child m2f0.post_form_workflow = WORKFLOW_FORM m2f0.form_links = [", "See corehq.apps.app_manager.suite_xml.post_process.workflow._replace_session_references_in_stack factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('person registration',", "\"instance('casedb')/casedb/case[@case_id = \" \\ \"instance('commcaresession')/session/data/case_id]/prop = 'value'\" m1f0.form_links = [", "value=\"'m2'\"/> </create> </stack> </partial> \"\"\" self.assertXmlPartialEqual(expected, factory.app.create_suite(), \"./entry[3]/stack\") def test_link_to_form_in_parent_module(self):", ") from corehq.apps.app_manager.models import FormDatum, FormLink from corehq.apps.app_manager.suite_xml.post_process.workflow import _replace_session_references_in_stack,", "WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath='true()', form_id=m1f0.unique_id) ] self.assertXmlPartialEqual(self.get_xml('form_link_create_update_case'), factory.app.create_suite(), \"./entry[1]\")", "test_with_case_management_both_update(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('m0', 'frog') factory.form_requires_case(m0f0)", "= factory.new_advanced_module('visit histroy', 'visit', parent_module=m0) factory.form_requires_case(m1f0, 'child') factory.form_requires_case(m1f0, 'visit', parent_case_type='child')", "parent_case_type='patient') factory.form_requires_case(m3f1) m4, m4f0 = factory.new_advanced_module('m4', 'patient') factory.form_requires_case(m4f0, case_type='patient') factory.form_requires_case(m4f0,", "m in [1, 2]: module = app.get_module(m) module.put_in_root = True", "parent_module=m1) factory.form_requires_case(m2f0, 'child') factory.form_requires_case(m2f0, 'visit', parent_case_type='child') m2f0.post_form_workflow = WORKFLOW_PARENT_MODULE expected", "= WORKFLOW_FORM 
m0f0.form_links = [ FormLink(xpath=\"(today() - dob) &lt; 7\",", "xpath=\"instance('commcaresession')/session/data/case_id_new_visit_0\"), ]), ] m1f0.post_form_workflow_fallback = WORKFLOW_PREVIOUS self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_previous'), factory.app.create_suite(), \"./entry\") m1f0.post_form_workflow_fallback", "= factory.new_advanced_module('episode registration', 'episode') factory.form_requires_case(m1f0, case_type='person') factory.form_opens_case(m1f0, case_type='episode', is_subcase=True, is_extension=True)", "from __future__ import unicode_literals from django.test import SimpleTestCase from corehq.apps.app_manager.const", "\"./entry[1]\") def test_with_case_management_create_update(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('m0',", "'episode') factory.form_requires_case(m1f0, case_type='person') factory.form_opens_case(m1f0, case_type='episode', is_subcase=True, is_extension=True) m2, m2f0 =", "test_form_workflow_module_in_root(self): app = self._build_workflow_app(WORKFLOW_PREVIOUS) for m in [1, 2]: module", "[ FormLink(xpath=\"true()\", form_id=m1f0.unique_id), ] m1f0.post_form_workflow = WORKFLOW_FORM m1f0.form_links = [", "xpath=\"instance('commcaresession')/session/data/case_id\"), FormDatum(name='case_id_load_visit_0', xpath=\"instance('commcaresession')/session/data/case_id_new_visit_0\"), ]), ] self.assertXmlPartialEqual(self.get_xml('form_link_tdh'), factory.app.create_suite(), \"./entry\") def test_manual_form_link_with_fallback(self):", "factory.form_requires_case(m2f0, 'visit', parent_case_type='child') m2f0.post_form_workflow = WORKFLOW_MODULE expected = \"\"\" <partial>", "= WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath=\"(today() - dob) > 7\",", "factory.form_requires_case(m1f1) m2, m2f0 = factory.new_basic_module('m2', 'patient') m2f1 = factory.new_form(m2) factory.form_requires_case(m2f0)", "('b', session_var('new_b')), ('c', new_c), ('d', \"if({c}, 
{c}, {a}]\".format(a=session_var('new_a'), c=new_c)) ])", "m1f1 = factory.new_form(m1) factory.form_opens_case(m1f1) m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links = [", "available in the session. # # To fix this we", "m4f1 = factory.new_form(m4) factory.form_requires_case(m4f1, case_type='patient') factory.form_requires_case(m4f1, case_type='patient') factory.form_requires_case(m4f1, case_type='patient') m4f2", "factory.form_requires_case(m3f0, parent_case_type='patient') factory.form_requires_case(m3f1) m4, m4f0 = factory.new_advanced_module('m4', 'patient') factory.form_requires_case(m4f0, case_type='patient')", "# # Stack create blocks do not update the session", "factory.new_basic_module('person registration', 'person') factory.form_opens_case(m0f0) m1, m1f0 = factory.new_advanced_module('episode registration', 'episode')", "absolute_import from __future__ import unicode_literals from django.test import SimpleTestCase from", "form_id=m1f0.unique_id), ] m1f0.post_form_workflow = WORKFLOW_FORM m1f0.form_links = [ FormLink(xpath=\"true()\", form_id=m2f0.unique_id),", "unicode_literals from django.test import SimpleTestCase from corehq.apps.app_manager.const import ( AUTO_SELECT_RAW,", "import ( AUTO_SELECT_RAW, AUTO_SELECT_CASE, WORKFLOW_FORM, WORKFLOW_MODULE, WORKFLOW_PREVIOUS, WORKFLOW_ROOT, WORKFLOW_PARENT_MODULE, )", "'frog') factory.form_requires_case(m1f0) m1f1 = factory.new_form(m1) factory.form_opens_case(m1f1) m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links", "= factory.new_basic_module('m1', 'patient') m1f1 = factory.new_form(m1) factory.form_opens_case(m1f0) factory.form_requires_case(m1f1) m2, m2f0", "m2f0.post_form_workflow = WORKFLOW_PARENT_MODULE expected = \"\"\" <partial> <stack> <create> <command", "form_id=m1f0.unique_id, datums=[ FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id_new_child_0\") ]), ] m1f0.post_form_workflow = WORKFLOW_FORM condition_for_xpath", "factory.app.create_suite(), 
\"./entry[3]/stack\") def test_link_to_form_in_parent_module(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 =", "factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('m0', 'frog') factory.form_opens_case(m0f0) m1,", "corehq.apps.app_manager.xpath import session_var class TestFormWorkflow(SimpleTestCase, TestXmlMixin): file_path = ('data', 'form_workflow')", "a submodule we match up the # session variables between", "create blocks do not update the session after each datum", "-> edit child m2f0.post_form_workflow = WORKFLOW_FORM m2f0.form_links = [ FormLink(xpath=\"true()\",", "= True self.assertXmlPartialEqual(self.get_xml('suite-workflow-module-in-root'), app.create_suite(), \"./entry\") def test_form_workflow_root(self): app = self._build_workflow_app(WORKFLOW_ROOT)", "True self.assertXmlPartialEqual(self.get_xml('suite-workflow-module-in-root'), app.create_suite(), \"./entry\") def test_form_workflow_root(self): app = self._build_workflow_app(WORKFLOW_ROOT) self.assertXmlPartialEqual(self.get_xml('suite-workflow-root'),", "form_id=m1f0.unique_id, datums=[ FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id_new_child_0\") ]), ] m1f0.post_form_workflow = WORKFLOW_FORM m1f0.form_links", "m0, m0f0 = factory.new_basic_module('enroll child', 'child') factory.form_opens_case(m0f0) m1, m1f0 =", "2\", form_id=m1f1.unique_id) ] self.assertXmlPartialEqual(self.get_xml('form_link_multiple'), factory.app.create_suite(), \"./entry[1]\") def test_link_to_child_module(self): factory =", "session and then # later referenced. 
However since the session", "parent_case_type='child') m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath=\"true()\", form_id=m1f0.unique_id), ]", "to replace any references to previous variables with the full", "<datum id=\"case_id_new_visit_0\" value=\"uuid()\"/> <command value=\"'m2'\"/> </create> </stack> </partial> \"\"\" self.assertXmlPartialEqual(expected,", "StackDatum(id='c', value=\"instance('casedb')/case/[@case_id = {a}]/index/parent\".format(a=session_var('a'))), StackDatum(id='d', value=\"if({c}, {c}, {a}]\".format(a=session_var('a'), c=session_var('c'))) ]", "'child') factory.form_requires_case(m1f1, 'visit', parent_case_type='child') m1f0.post_form_workflow = WORKFLOW_FORM m1f0.form_links = [", "= self._build_workflow_app(WORKFLOW_PREVIOUS) for m in [1, 2]: module = app.get_module(m)", "m0, m0f0 = factory.new_basic_module('m0', 'frog') factory.form_opens_case(m0f0) m1, m1f0 = factory.new_basic_module('m1',", "= app.get_module(m) module.put_in_root = True self.assertXmlPartialEqual(self.get_xml('suite-workflow-module-in-root'), app.create_suite(), \"./entry\") def test_form_workflow_root(self):", "factory.app.create_suite(), \"./entry\") m1f0.post_form_workflow_fallback = WORKFLOW_MODULE self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_module'), factory.app.create_suite(), \"./entry\") m1f0.post_form_workflow_fallback =", "= [ FormLink(xpath=\"true()\", form_id=m2f0.unique_id, datums=[ FormDatum(name='case_id_load_episode_0', xpath=\"instance('commcaresession')/session/data/case_id_new_episode_0\") ]), ] self.assertXmlPartialEqual(self.get_xml('form_link_enikshay'),", "import _replace_session_references_in_stack, CommandId from corehq.apps.app_manager.suite_xml.xml_models import StackDatum from corehq.apps.app_manager.tests.app_factory import", "django.test import SimpleTestCase from corehq.apps.app_manager.const import ( AUTO_SELECT_RAW, AUTO_SELECT_CASE, WORKFLOW_FORM,", "\"./entry[3]/stack\") def 
test_link_to_form_in_parent_module(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('enroll", "'') factory.new_form(m0) m1, m1f0 = factory.new_basic_module('m1', 'patient') m1f1 = factory.new_form(m1)", "] self.assertXmlPartialEqual(self.get_xml('form_link_basic'), factory.app.create_suite(), \"./entry[1]\") def test_with_case_management_both_update(self): factory = AppFactory(build_version='2.9.0') m0,", "# <datum id=\"case_id_A\" value=\"instance('commcaresession')/session/data/case_id_new_A\"/> # - <datum id=\"case_id_B\" value=\"instance('casedb')/casedb/case[@case_id=instance('commcaresession')/session/data/case_id_A]/index/host\"/> #", "factory.form_opens_case(m0f0, 'visit', is_subcase=True) m1, m1f0 = factory.new_advanced_module('visit histroy', 'visit', parent_module=m0)", "AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('m0', '') factory.new_form(m0) m1, m1f0 =", "child.value)) new_c = \"instance('casedb')/case/[@case_id = {a}]/index/parent\".format(a=session_var('new_a')) self.assertEqual(clean_raw, [ 'm0', ('a',", "'frog') m1, m1f0 = factory.new_basic_module('m1', 'frog') m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links", "condition_for_xpath = \"instance('casedb')/casedb/case[@case_id = \" \\ \"instance('commcaresession')/session/data/case_id]/prop = 'value'\" m1f0.form_links", "[ FormLink(xpath=\"(today() - dob) &lt; 7\", form_id=m1f0.unique_id) ] self.assertXmlPartialEqual(self.get_xml('form_link_basic'), factory.app.create_suite(),", "= WORKFLOW_MODULE self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_module'), factory.app.create_suite(), \"./entry\") m1f0.post_form_workflow_fallback = WORKFLOW_ROOT self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_root'), factory.app.create_suite(),", "m1, m1f0 = factory.new_basic_module('child visit', 'child') factory.form_requires_case(m1f0) m2, m2f0 =", "app.create_suite(), \"./entry\") def 
test_form_workflow_root(self): app = self._build_workflow_app(WORKFLOW_ROOT) self.assertXmlPartialEqual(self.get_xml('suite-workflow-root'), app.create_suite(), \"./entry\")", "WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath=\"true()\", form_id=m1f0.unique_id, datums=[ FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id_new_child_0\") ]),", "FormDatum(name='case_id', xpath=\"instance('commcaresession')/session/data/case_id\"), FormDatum(name='case_id_load_visit_0', xpath=\"instance('commcaresession')/session/data/case_id_new_visit_0\"), ]), ] self.assertXmlPartialEqual(self.get_xml('form_link_tdh'), factory.app.create_suite(), \"./entry\") def", "m2, m2f0 = factory.new_advanced_module('tests', 'episode') factory.form_requires_case(m2f0, 'episode') factory.advanced_form_autoloads(m2f0, AUTO_SELECT_CASE, 'host',", "'frog') factory.form_opens_case(m0f0) m1, m1f0 = factory.new_basic_module('m1', 'frog') factory.form_requires_case(m1f0) m1f1 =", "m2, m2f0 = factory.new_advanced_module('visit history', 'visit', parent_module=m1) factory.form_requires_case(m2f0, 'child') factory.form_requires_case(m2f0,", "<datum id=\"case_id\" value=\"instance('commcaresession')/session/data/case_id\"/> <datum id=\"case_id_new_visit_0\" value=\"uuid()\"/> </create> </stack> </partial> \"\"\"", "factory.advanced_form_autoloads(m4f2, AUTO_SELECT_RAW, 'case_id') m5, m5f0 = factory.new_basic_module('m5', 'patient', parent_module=m1) factory.form_requires_case(m5f0)", "= [] for child in clean: if isinstance(child, CommandId): clean_raw.append(child.id)", "'patient') m1f1 = factory.new_form(m1) factory.form_opens_case(m1f0) factory.form_requires_case(m1f1) m2, m2f0 = factory.new_basic_module('m2',", "factory.form_requires_case(m1f0) m2, m2f0 = factory.new_advanced_module('visit history', 'visit', parent_module=m1) factory.form_requires_case(m2f0, 'child')", "'frog') m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath=\"(today() - dob)", 
"class TestFormWorkflow(SimpleTestCase, TestXmlMixin): file_path = ('data', 'form_workflow') def test_basic(self): factory", "m1f0 = factory.new_basic_module('m1', 'frog') factory.form_requires_case(m1f0) m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links =", "self.assertXmlPartialEqual(self.get_xml('suite-workflow-module-in-root'), app.create_suite(), \"./entry\") def test_form_workflow_root(self): app = self._build_workflow_app(WORKFLOW_ROOT) self.assertXmlPartialEqual(self.get_xml('suite-workflow-root'), app.create_suite(),", "# session variables between the source and target form correctly", "for module in factory.app.get_modules(): for form in module.get_forms(): form.post_form_workflow =", "StackDatum(id='b', value=session_var('new_b')), StackDatum(id='c', value=\"instance('casedb')/case/[@case_id = {a}]/index/parent\".format(a=session_var('a'))), StackDatum(id='d', value=\"if({c}, {c}, {a}]\".format(a=session_var('a'),", "histroy', 'visit', parent_module=m0) factory.form_requires_case(m1f0, 'child') factory.form_requires_case(m1f0, 'visit', parent_case_type='child') m1f1 =", "WORKFLOW_ROOT, WORKFLOW_PARENT_MODULE, ) from corehq.apps.app_manager.models import FormDatum, FormLink from corehq.apps.app_manager.suite_xml.post_process.workflow", "WORKFLOW_MODULE self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_module'), factory.app.create_suite(), \"./entry\") m1f0.post_form_workflow_fallback = WORKFLOW_ROOT self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_root'), factory.app.create_suite(), \"./entry\")", "up the # session variables between the source and target", "registration', 'person') factory.form_opens_case(m0f0) m1, m1f0 = factory.new_advanced_module('episode registration', 'episode') factory.form_requires_case(m1f0,", "m0f0 = factory.new_basic_module('m0', '') factory.new_form(m0) m1, m1f0 = factory.new_basic_module('m1', 'patient')", "to the session and then # later referenced. 
However since", "value=\"'m1'\"/> <datum id=\"case_id\" value=\"instance('commcaresession')/session/data/case_id\"/> <datum id=\"case_id_new_visit_0\" value=\"uuid()\"/> <command value=\"'m2'\"/> </create>", "history', 'visit', parent_module=m1) factory.form_requires_case(m2f0, 'child') factory.form_requires_case(m2f0, 'visit', parent_case_type='child') m2f0.post_form_workflow =", "factory.app.create_suite(), \"./entry\") def test_reference_to_missing_session_variable_in_stack(self): # http://manage.dimagi.com/default.asp?236750 # # Stack create", "- dob) > 7\", form_id=m1f0.unique_id) ] self.assertXmlPartialEqual(self.get_xml('form_link_update_case'), factory.app.create_suite(), \"./entry[1]\") def", "def test_form_workflow_previous(self): app = self._build_workflow_app(WORKFLOW_PREVIOUS) self.assertXmlPartialEqual(self.get_xml('suite-workflow-previous'), app.create_suite(), \"./entry\") def test_form_workflow_module(self):", "factory.new_basic_module('m1', 'frog') factory.form_requires_case(m1f0) m0f0.post_form_workflow = WORKFLOW_FORM m0f0.form_links = [ FormLink(xpath=\"(today()", "factory.app.create_suite(), \"./entry[1]\") def test_with_case_management_create_update(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 =", "http://manage.dimagi.com/default.asp?236750 # # Stack create blocks do not update the", "self.assertXmlPartialEqual(self.get_xml('form_link_tdh'), factory.app.create_suite(), \"./entry\") def test_manual_form_link_with_fallback(self): factory = AppFactory(build_version='2.9.0') m0, m0f0", "import TestXmlMixin from corehq.apps.app_manager.xpath import session_var class TestFormWorkflow(SimpleTestCase, TestXmlMixin): file_path", "factory.new_advanced_module('visit history', 'visit', parent_module=m1) factory.form_requires_case(m2f0, 'child') # link to child", "step aren't available later steps # # <datum id=\"case_id_A\" value=\"instance('commcaresession')/session/data/case_id_new_A\"/>", "= AppFactory(build_version='2.9.0') m0, m0f0 = 
factory.new_basic_module('enroll child', 'child') factory.form_opens_case(m0f0) m1,", "= factory.new_basic_module('m0', 'frog') factory.form_opens_case(m0f0) m1, m1f0 = factory.new_basic_module('m1', 'frog') factory.form_requires_case(m1f0)", "def test_basic(self): factory = AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('m0', 'frog')", "= AppFactory(build_version='2.9.0') m0, m0f0 = factory.new_basic_module('m0', 'frog') factory.form_opens_case(m0f0) m1, m1f0", "\" \\ \"instance('commcaresession')/session/data/case_id]/prop = 'value'\" m1f0.form_links = [ FormLink(xpath=\"true()\", form_id=m2f0.unique_id,", "AUTO_SELECT_RAW, 'case_id') m5, m5f0 = factory.new_basic_module('m5', 'patient', parent_module=m1) factory.form_requires_case(m5f0) for", "= [ FormLink(xpath=\"(today() - dob) > 7\", form_id=m1f0.unique_id) ] self.assertXmlPartialEqual(self.get_xml('form_link_update_case'),", "= WORKFLOW_FORM m1f0.form_links = [ FormLink(xpath=\"true()\", form_id=m1f1.unique_id), ] self.assertXmlPartialEqual(self.get_xml('form_link_submodule'), factory.app.create_suite()," ]
[ "= True class TrtModelHandlerV1(_TrtModelHandlerBase, ModelHandlerV1): \"\"\"Converts a TF1 model with", "-> framework_ops.Tensor: \"\"\"Generates a random tensor based on the data", "tensors. Args: inputs: Mapping from names to input ndarrays in", "2.0 (the \"License\"); # you may not use this file", "): super(_TrtModelHandlerBase, self).__init__(model_config) self._trt_convert_params = trt_convert_params self._converter = self._create_converter(trt_convert_params) logging.info(\"Converting", "from typing import Callable, Iterable, List, Mapping, Optional, Sequence, Union", "-> Mapping[str, meta_graph_pb2.TensorInfo]: return self.meta_graph.signature_def[ self.model_config.saved_model_signature_key].inputs @property def output_tensor_info(self) ->", "inputs: Optional[Mapping[str, np.ndarray]] = None, warmup_iterations=10, benchmark_iterations=100, allow_to_use_gpu=False) -> TestResult:", "output has any TensorRT engines.\"\"\" def _check_contains_trt_engine(self, graph_def: graph_pb2.GraphDef): if", "before = time.time() outputs = self.graph_func(*inputs) latency.append(time.time() - before) except", "Union[np.ndarray, framework_ops.Tensor]]: \"\"\"Generates mapping from names to input tensors.\"\"\" @abc.abstractmethod", "ModelHandleV1/ModelHandlerV2.\"\"\" @property @classmethod @abc.abstractmethod def trt_model_handler_cls(cls): \"\"\"The TensorRTmodle handler class.", "from tensorflow.python.ops import random_ops from tensorflow.python.saved_model import load as saved_model_load", "return self._trt_convert_params def save(self, output_saved_model_dir: Optional[str] = None, overwrite=True) ->", "ModelConfig): self._model_config = model_config def __str__(self) -> str: return str(self._model_config)", "model with TensorRT and runs the converted model.\"\"\" def _create_converter(self,", "minimum_segment_size=trt_convert_params.minimum_segment_size, is_dynamic_op=trt_convert_params.is_dynamic_op, maximum_cached_engines=trt_convert_params.maximum_cached_engines, 
use_calibration=trt_convert_params.use_calibration, ) _check_conversion = _TrtModelHandlerBase._check_contains_trt_engine def run(self,", "\"{}({})\".format(self.__class__.__name__, str(self)) @property @classmethod @abc.abstractmethod def model_handler_cls(cls): \"\"\"The modle handler", "in range(benchmark_iterations): before = time.time() outputs = self.graph_func(*inputs) latency.append(time.time() -", "of tensors in TF2. If `None`, ramdomly generated input tensors", "= saved_model_loader.load( sess=sess, export_dir=saved_model_dir, tags=saved_model_tags, ) output_node_names = [ tensor.name.split(\":\")[0]", "\"config\"])): def __new__(cls, config: ModelConfig, results: Sequence[TestResult] = tuple()): return", "signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY), default_batch_size: int = 1): return super(ModelConfig, cls).__new__(cls, saved_model_dir, saved_model_tags,", "ModelHandlers for aggregrated testing/benchmarking in TF2.\"\"\" model_handler_cls = ModelHandlerV2 trt_model_handler_cls", "repeatedly loaded for different TensorRT conversion settings. # Using cache", "provided or randomly generated input tensors. 
Args: inputs: Mapping from", "Mapping, Optional, Sequence, Union from absl import logging import numpy", "from tensorflow.python.saved_model import load as saved_model_load from tensorflow.python.saved_model import loader", "tensorflow.python.saved_model import load as saved_model_load from tensorflow.python.saved_model import loader as", "in self.graph_func.inputs] @property def output_tensor_names(self): return [tensor.name for tensor in", "License for the specific language governing permissions and # limitations", "for running a model.\"\"\" def __init__(self, model_config: ModelConfig): self._model_config =", "dynamic dimensions.\"\"\" if tensor_shape.unknown_rank: raise ValueError(\"Cannot generates random tensors for", "Sequence[str]: return [info.name for info in self.input_tensor_info.values()] @property def output_tensor_names(self)", "model.\"\"\" def _create_converter(self, trt_convert_params: trt.TrtConversionParams): conversion_nodes_denylist = self.output_tensor_names return trt.TrtGraphConverter(", "Reserved. # # Licensed under the Apache License, Version 2.0", "batch_size if any(filter(lambda x: x < 0, shape)): raise ValueError(\"Cannot", "from tensorflow.python.framework import dtypes as tf_dtypes from tensorflow.python.framework import importer", "10, benchmark_iterations: int = 100) -> TestResultCollection: \"\"\"Runs model inference", "reduce I/O. @functools.lru_cache() def load_meta_graph( saved_model_dir: str, saved_model_tags: str, saved_model_signature_key:", "for the corresponding TF version.\"\"\" @abc.abstractmethod def _check_conversion(self, conversion_output): \"\"\"Checks", "batch_size: Optional[int] = None ) -> Mapping[str, np.ndarray]: batch_size =", "= super(_TrtModelHandlerBase, self).__str__() return \"{}, TrtConversionParams: {}\".format(base, str(self._trt_convert_params)) @property def", "@abc.abstractmethod def trt_model_handler_cls(cls): \"\"\"The TensorRTmodle handler class. 
TrtModelHandleV1/TrtModelHandlerV2.\"\"\" @property def", "import dtypes as tf_dtypes from tensorflow.python.framework import importer from tensorflow.python.framework", "@classmethod @abc.abstractmethod def model_handler_cls(cls): \"\"\"The modle handler class. ModelHandleV1/ModelHandlerV2.\"\"\" @property", "self.graph_func(*inputs) latency = [] for _ in range(benchmark_iterations): before =", "= None) -> framework_ops.Tensor: \"\"\"Generates a random tensor based on", "to input ndarrays in TF1. Or a sequence of tensors", "self._model_config = self.model_config._replace( saved_model_dir=output_saved_model_dir) self._conversion_is_saved = True class TrtModelHandlerV1(_TrtModelHandlerBase, ModelHandlerV1):", "input tensors. Args: inputs: Mapping from names to input ndarrays", "Sequence[str]: \"\"\"Names of input tensors.\"\"\" @property def output_tensor_names(self) -> Sequence[str]:", "TensorRT!\") test_result = ModelHandlerV2.run( self, inputs, warmup_iterations, benchmark_iterations, allow_to_use_gpu=True) return", "up the runtime. benchmark_iterations: Number of inferences to measure the", "for different TensorRT conversion settings. 
# Using cache can reduce", "self).__str__() return \"{}, TrtConversionParams: {}\".format(base, str(self._trt_convert_params)) @property def trt_convert_params(self) ->", "TF1.\"\"\" model_handler_cls = ModelHandlerV1 trt_model_handler_cls = TrtModelHandlerV1 class ModelHandlerManagerV2(_ModelHandlerManagerBase): \"\"\"Manages", "tensor in meta_graph.signature_def[saved_model_signature_key].outputs.values() ] graph_def = ( convert_to_constants.convert_variables_to_constants_from_session_graph( sess, meta_graph.graph_def,", "[tensor.name for tensor in self.graph_func.outputs] def generate_random_inputs(self, batch_size: Optional[int] =", "outputs = self.graph_func(*inputs) latency.append(time.time() - before) except Exception as exc:", "TestResult( collections.namedtuple(\"TestResult\", [\"outputs\", \"latency\", \"trt_convert_params\"])): def __new__(cls, outputs: Mapping[str, np.ndarray],", "__new__(cls, outputs: Mapping[str, np.ndarray], latency: List[float], trt_convert_params: trt.TrtConversionParams = None):", "@property def graph_func(self): graph_func = load_graph_func( saved_model_dir=self.model_config.saved_model_dir, saved_model_tags=self.model_config.saved_model_tags, saved_model_signature_key=self.model_config.saved_model_signature_key) return", "self._create_converter(trt_convert_params) logging.info(\"Converting to TensorRT!\") self._check_conversion(self._converter.convert()) self._conversion_is_saved = False @abc.abstractmethod def", "__repr__(self) -> str: return \"{}({})\".format(self.__class__.__name__, str(self)) @property def model_config(self) ->", "`None`, ramdomly generated inputs will be used instead. 
warmup_iterations: Number", "def _create_converter(self, trt_convert_params: trt.TrtConversionParams): \"\"\"Creates a converter for the corresponding", "[] for _ in range(benchmark_iterations): before = time.time() outputs =", "Callable[[trt.TrtConversionParams], Iterable[trt.TrtConversionParams]]): self._ori_model = self.model_handler_cls(model_config) self._trt_models = [] for trt_convert_params", "OF ANY KIND, either express or implied. # See the", "export_dir=saved_model_dir, tags=saved_model_tags) graph_func = imported.signatures[saved_model_signature_key] return convert_to_constants.convert_variables_to_constants_v2(graph_func) ### Test Classes", "See the License for the specific language governing permissions and", "tensor cannot have a rank of 0!\") if shape[0] <", "summarizing timing and numerics information. \"\"\" class ModelHandlerV1(_ModelHandlerBase): \"\"\"Runs a", "time.time() outputs = sess.run(fetches=self.output_tensor_names, feed_dict=inputs) latency.append(time.time() - before) except Exception", "def __repr__(self) -> str: return \"{}({})\".format(self.__class__.__name__, str(self)) @property @classmethod @abc.abstractmethod", "to in writing, software # distributed under the License is", "saved_model_signature_key: str): \"\"\"Loads a graph function in TF2.\"\"\" imported =", "testing/benchmarking.\"\"\" def __init__( self, model_config: ModelConfig, default_trt_convert_params: trt.TrtConversionParams, trt_convert_params_updater: Callable[[trt.TrtConversionParams],", "input_saved_model_dir=self.model_config.saved_model_dir, input_saved_model_tags=self.model_config.saved_model_tags, input_saved_model_signature_key=( self.model_config.saved_model_signature_key), nodes_denylist=conversion_nodes_denylist, max_batch_size=trt_convert_params.max_batch_size, max_workspace_size_bytes=trt_convert_params.max_workspace_size_bytes, precision_mode=trt_convert_params.precision_mode, minimum_segment_size=trt_convert_params.minimum_segment_size, 
is_dynamic_op=trt_convert_params.is_dynamic_op,", "benchmark_iterations=100) -> TestResult: self.save(overwrite=False) logging.info(\"Running with TensorRT!\") test_result = ModelHandlerV1.run(", "sess.run(fetches=self.output_tensor_names, feed_dict=inputs) latency = [] for _ in range(benchmark_iterations): before", "x < 0, shape)): raise ValueError(\"Cannot have dynamic dimensions except", "ModelConfig: return self._model_config @property def input_tensort_names(self) -> Sequence[str]: \"\"\"Names of", "dimensions.\"\"\" if tensor_shape.unknown_rank: raise ValueError(\"Cannot generates random tensors for unknown", "or agreed to in writing, software # distributed under the", "-> str: return \"Input Model: {}\".format(str(self._ori_model)) def __repr__(self) -> str:", "\"\"\" class ModelHandlerV1(_ModelHandlerBase): \"\"\"Runs a model in TF1.\"\"\" @property def", "benchmark_iterations: int = 100) -> TestResultCollection: \"\"\"Runs model inference with", "warmup_iterations, benchmark_iterations) for model in [self._ori_model] + self._trt_models ] return", "ModelHandlerV2): \"\"\"Converts a TF2 model with TensorRT and runs the", "test_result = ModelHandlerV1.run( self, inputs, warmup_iterations, benchmark_iterations, allow_to_use_gpu=True) return test_result._replace(trt_convert_params=self._trt_convert_params)", "Mapping[str, np.ndarray], latency: List[float], trt_convert_params: trt.TrtConversionParams = None): return super(TestResult,", "cache can reduce I/O. 
@functools.lru_cache() def load_meta_graph( saved_model_dir: str, saved_model_tags:", "base = super(_TrtModelHandlerBase, self).__str__() return \"{}, TrtConversionParams: {}\".format(base, str(self._trt_convert_params)) @property", "Mapping[str, meta_graph_pb2.TensorInfo]: return self.meta_graph.signature_def[ self.model_config.saved_model_signature_key].inputs @property def output_tensor_info(self) -> Mapping[str,", "( convert_to_constants.convert_variables_to_constants_from_session_graph( sess, meta_graph.graph_def, output_node_names)) meta_graph.graph_def.CopyFrom(graph_def) return meta_graph @functools.lru_cache() def", "import numpy as np from tensorflow.core.framework import graph_pb2 from tensorflow.core.framework", "compliance with the License. # You may obtain a copy", "All Rights Reserved. # # Licensed under the Apache License,", "= _get_concrete_tensor_shape(tensor.shape.as_proto(), batch_size) return random_ops.random_uniform( shape=shape, dtype=tensor.dtype, name=tensor.name) # Models", "Optional[str] = None, overwrite=True) -> None: \"\"\"Saves a TensorRT converted", "@property def output_tensor_info(self) -> Mapping[str, meta_graph_pb2.TensorInfo]: return self.meta_graph.signature_def[ self.model_config.saved_model_signature_key].outputs @property", "ModelConfig, default_trt_convert_params: trt.TrtConversionParams, trt_convert_params_updater: Callable[[trt.TrtConversionParams], Iterable[trt.TrtConversionParams]]): self._ori_model = self.model_handler_cls(model_config) self._trt_models", "def load_meta_graph( saved_model_dir: str, saved_model_tags: str, saved_model_signature_key: str) -> meta_graph_pb2.MetaGraphDef:", "self._model_config = model_config def __str__(self) -> str: return str(self._model_config) def", "-> None: \"\"\"Saves a TensorRT converted model.\"\"\" if self._conversion_is_saved and", "= 10, benchmark_iterations: int = 100) -> TestResultCollection: \"\"\"Runs model", "not use this file except in compliance with the License.", 
"load_graph_func( saved_model_dir=self.model_config.saved_model_dir, saved_model_tags=self.model_config.saved_model_tags, saved_model_signature_key=self.model_config.saved_model_signature_key) return convert_to_constants.convert_variables_to_constants_v2(graph_func) @property def input_tensor_names(self): return", "tensor in self.graph_func.outputs] def generate_random_inputs(self, batch_size: Optional[int] = None )", "load_graph_func(saved_model_dir: str, saved_model_tags: str, saved_model_signature_key: str): \"\"\"Loads a graph function", "return self._model_config @property def input_tensort_names(self) -> Sequence[str]: \"\"\"Names of input", "as framework_ops from tensorflow.python.ops import random_ops from tensorflow.python.saved_model import load", ") _check_conversion = _TrtModelHandlerBase._check_contains_trt_engine def run(self, inputs: Optional[Mapping[str, np.ndarray]] =", "return \"{}({})\".format(self.__class__.__name__, str(self)) @property @classmethod @abc.abstractmethod def model_handler_cls(cls): \"\"\"The modle", "maximum_cached_engines=trt_convert_params.maximum_cached_engines, use_calibration=trt_convert_params.use_calibration, ) _check_conversion = _TrtModelHandlerBase._check_contains_trt_engine def run(self, inputs: Optional[Mapping[str,", "you may not use this file except in compliance with", "with TensorRT!\") test_result = ModelHandlerV2.run( self, inputs, warmup_iterations, benchmark_iterations, allow_to_use_gpu=True)", "outputs: Mapping[str, np.ndarray], latency: List[float], trt_convert_params: trt.TrtConversionParams = None): return", "= [dim.size for dim in tensor_shape.dim] if not shape: raise", "for different TensorRT conversion settings. \"\"\" inputs = inputs or", "`TestResult` summarizing timing and numerics information. 
\"\"\" class ModelHandlerV1(_ModelHandlerBase): \"\"\"Runs", "None): return self._ori_model.generate_random_inputs(batch_size) def run(self, inputs=None, warmup_iterations: int = 10,", "import config_pb2 from tensorflow.core.protobuf import meta_graph_pb2 from tensorflow.python.client import session", "### Test Classes class TestResult( collections.namedtuple(\"TestResult\", [\"outputs\", \"latency\", \"trt_convert_params\"])): def", "saved_model_dir=output_saved_model_dir) self._conversion_is_saved = True class TrtModelHandlerV1(_TrtModelHandlerBase, ModelHandlerV1): \"\"\"Converts a TF1", "TrtModelHandlerV1 class ModelHandlerManagerV2(_ModelHandlerManagerBase): \"\"\"Manages a series of ModelHandlers for aggregrated", "1, \"GPU\": 0}) with session.Session(config=config_proto) as sess: importer.import_graph_def(self.meta_graph.graph_def) try: for", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "from tensorflow.python.client import session from tensorflow.python.compiler.tensorrt import trt_convert as trt", "convert_to_constants.convert_variables_to_constants_from_session_graph( sess, meta_graph.graph_def, output_node_names)) meta_graph.graph_def.CopyFrom(graph_def) return meta_graph @functools.lru_cache() def load_graph_func(saved_model_dir:", "\"\"\"Generates a random tensor based on the data type and", "-> TestResult: inputs = inputs or self.generate_random_inputs() try: device =", "in TF2. If `None`, ramdomly generated inputs will be used", "[ \"saved_model_dir\", \"saved_model_tags\", \"saved_model_signature_key\", \"default_batch_size\" ])): \"\"\"Configurations for test models.\"\"\"", "TestResult: \"\"\"Runs the model with provided or randomly generated input", "run(self, inputs=None, warmup_iterations: int = 10, benchmark_iterations: int = 100,", "from names to input ndarrays in TF1, or a sequence", "TensorRT! 
\" \"Model Information: {}\".format(str(self))) def __str__(self) -> str: base", "import tag_constants # pylint: disable=bad-whitespace ### Helper Functions def _get_concrete_tensor_shape(", "Args: inputs: Mapping from names to input ndarrays in TF1.", "__new__(cls, config: ModelConfig, results: Sequence[TestResult] = tuple()): return super(TestResultCollection, cls).__new__(cls,", "meta_graph_pb2.MetaGraphDef: \"\"\"Loads a `tf.MetaGraphDef` in TF1.\"\"\" with session.Session() as sess:", "sess: meta_graph = saved_model_loader.load( sess=sess, export_dir=saved_model_dir, tags=saved_model_tags, ) output_node_names =", "tensorflow.python.framework import importer from tensorflow.python.framework import ops as framework_ops from", "rank!\") shape = [dim.size for dim in tensor_shape.dim] if not", "TestResult: self.save(overwrite=False) logging.info(\"Running with TensorRT!\") test_result = ModelHandlerV2.run( self, inputs,", "batch_size or self.model_config.default_batch_size return [ _generate_random_tensor_v2(tensor, batch_size) for tensor in", "in TF2. 
If `None`, ramdomly generated input tensors will be", "None, warmup_iterations=10, benchmark_iterations=100, allow_to_use_gpu=False) -> TestResult: inputs = inputs or", "_ in range(warmup_iterations): sess.run(fetches=self.output_tensor_names, feed_dict=inputs) latency = [] for _", "= self.output_tensor_names return trt.TrtGraphConverter( input_saved_model_dir=self.model_config.saved_model_dir, input_saved_model_tags=self.model_config.saved_model_tags, input_saved_model_signature_key=( self.model_config.saved_model_signature_key), nodes_denylist=conversion_nodes_denylist, max_batch_size=trt_convert_params.max_batch_size,", "str(self)) @property @classmethod @abc.abstractmethod def model_handler_cls(cls): \"\"\"The modle handler class.", "TestResultCollection: \"\"\"Runs model inference with provided or randomly generated input", "super(_TrtModelHandlerBase, self).__str__() return \"{}, TrtConversionParams: {}\".format(base, str(self._trt_convert_params)) @property def trt_convert_params(self)", "GPU or not. Returns: `TestResult` summarizing timing and numerics information.", "logging import numpy as np from tensorflow.core.framework import graph_pb2 from", "inputs else None) class ModelHandlerV2(_ModelHandlerBase): \"\"\"Runs a model in TF2.\"\"\"", "to convert to TensorRT! 
\" \"Model Information: {}\".format(str(self))) def __str__(self)", "int = 10, benchmark_iterations: int = 100, allow_to_use_gpu: bool =", "model in TF2.\"\"\" @property def graph_func(self): graph_func = load_graph_func( saved_model_dir=self.model_config.saved_model_dir,", "as saved_model_load from tensorflow.python.saved_model import loader as saved_model_loader from tensorflow.python.saved_model", "Iterable[trt.TrtConversionParams]]): self._ori_model = self.model_handler_cls(model_config) self._trt_models = [] for trt_convert_params in", "batch_size: Optional[int] = None) -> framework_ops.Tensor: \"\"\"Generates a random tensor", "def output_tensor_names(self) -> Sequence[str]: \"\"\"Names of output tensors.\"\"\" @abc.abstractmethod def", "rank of 0!\") if shape[0] < 0: if batch_size is", "to input ndarrays in TF1, or a sequence of tensors", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "\"\"\"Names of input tensors.\"\"\" @property def output_tensor_names(self) -> Sequence[str]: \"\"\"Names", "timing and numerics information. \"\"\" class ModelHandlerV1(_ModelHandlerBase): \"\"\"Runs a model", "\"\"\"The TensorRTmodle handler class. TrtModelHandleV1/TrtModelHandlerV2.\"\"\" @property def model_config(self): return self._ori_model.model_config", "-> TestResult: self.save(overwrite=False) logging.info(\"Running with TensorRT!\") test_result = ModelHandlerV2.run( self,", "None ) -> Mapping[str, Union[np.ndarray, framework_ops.Tensor]]: \"\"\"Generates mapping from names", "converted model.\"\"\" if self._conversion_is_saved and not overwrite: return output_saved_model_dir =", "the data type and tensor shape.\"\"\" shape = _get_concrete_tensor_shape(tensor.shape.as_proto(), batch_size)", "Optional, Sequence, Union from absl import logging import numpy as", "the latency. 
allow_to_use_gpu: Whether it is allowed to use GPU", "in self.input_tensor_info.values() } def run(self, inputs: Optional[Mapping[str, np.ndarray]] = None,", "[ _generate_random_tensor_v2(tensor, batch_size) for tensor in self.graph_func.inputs ] def run(self,", "input_saved_model_tags=self.model_config.saved_model_tags, input_saved_model_signature_key=( self.model_config.saved_model_signature_key), conversion_params=trt_convert_params) def _check_conversion(self, graph_func): graph_def = graph_func.graph.as_graph_def()", "str(self)) @property def model_config(self) -> ModelConfig: return self._model_config @property def", "{}\".format(str(self))) def __str__(self) -> str: base = super(_TrtModelHandlerBase, self).__str__() return", "file except in compliance with the License. # You may", "disable=bad-whitespace ### Helper Functions def _get_concrete_tensor_shape( tensor_shape: tensor_shape_pb2.TensorShapeProto, batch_size: Optional[int]", "from tensorflow.python.saved_model import tag_constants # pylint: disable=bad-whitespace ### Helper Functions", "batch_size: Optional[int] = None) -> Sequence[int]: \"\"\"Gets a concrete tensor", "aggregrated testing/benchmarking in TF1.\"\"\" model_handler_cls = ModelHandlerV1 trt_model_handler_cls = TrtModelHandlerV1", "trt.TrtConversionParams = None): return super(TestResult, cls).__new__(cls, outputs, latency, trt_convert_params) class", "from tensorflow.core.protobuf import meta_graph_pb2 from tensorflow.python.client import session from tensorflow.python.compiler.tensorrt", "overwrite: return output_saved_model_dir = output_saved_model_dir or tempfile.mkdtemp() logging.info(\"Saving TensorRT model", "numerics information for different TensorRT conversion settings. 
\"\"\" inputs =", "return self._result_collection._replace(results=results) class ModelHandlerManagerV1(_ModelHandlerManagerBase): \"\"\"Manages a series of ModelHandlers for", "shape = [dim.size for dim in tensor_shape.dim] if not shape:", "and tensor shape.\"\"\" shape = _get_concrete_tensor_shape(tensor.shape.as_proto(), batch_size) return random_ops.random_uniform( shape=shape,", "\"\"\"Converts a TF2 model with TensorRT and runs the converted", "handler class. ModelHandleV1/ModelHandlerV2.\"\"\" @property @classmethod @abc.abstractmethod def trt_model_handler_cls(cls): \"\"\"The TensorRTmodle", "type and tensor shape.\"\"\" shape = _get_concrete_tensor_shape(tensor.shape.as_proto(), batch_size) return random_ops.random_uniform(", "# ============================================================================== \"\"\"Loads, converts, and runs sample models.\"\"\" import abc", "TF2.\"\"\" @property def graph_func(self): graph_func = load_graph_func( saved_model_dir=self.model_config.saved_model_dir, saved_model_tags=self.model_config.saved_model_tags, saved_model_signature_key=self.model_config.saved_model_signature_key)", "tensor in self.graph_func.inputs] @property def output_tensor_names(self): return [tensor.name for tensor", "= (tag_constants.SERVING,), saved_model_signature_key: str = ( signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY), default_batch_size: int =", "+ self._trt_models ] return self._result_collection._replace(results=results) class ModelHandlerManagerV1(_ModelHandlerManagerBase): \"\"\"Manages a series", "if allow_to_use_gpu else \"/device:cpu:0\" with framework_ops.device(device): for _ in range(warmup_iterations):", "input_saved_model_signature_key=( self.model_config.saved_model_signature_key), conversion_params=trt_convert_params) def _check_conversion(self, graph_func): graph_def = graph_func.graph.as_graph_def() self._check_contains_trt_engine(graph_def)", "of output tensors.\"\"\" @abc.abstractmethod def generate_random_inputs( 
self, batch_size: Optional[int] =", "self._ori_model.generate_random_inputs(batch_size) def run(self, inputs=None, warmup_iterations: int = 10, benchmark_iterations: int", "str, saved_model_tags: Sequence[str] = (tag_constants.SERVING,), saved_model_signature_key: str = ( signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY),", "of 0!\") if shape[0] < 0: if batch_size is None", "= None ) -> Mapping[str, Union[np.ndarray, framework_ops.Tensor]]: \"\"\"Generates mapping from", "KIND, either express or implied. # See the License for", "`TestResultCollection` summarizing timing and numerics information for different TensorRT conversion", "-> Mapping[str, meta_graph_pb2.TensorInfo]: return self.meta_graph.signature_def[ self.model_config.saved_model_signature_key].outputs @property def input_tensort_names(self) ->", "dynamic dimensions except for batch size!\") return shape def _generate_random_tensor_v1(tensor_info:", "= trt_convert_params self._converter = self._create_converter(trt_convert_params) logging.info(\"Converting to TensorRT!\") self._check_conversion(self._converter.convert()) self._conversion_is_saved", "inputs=None, warmup_iterations: int = 10, benchmark_iterations: int = 100) ->", "TensorRT model to %s!\", output_saved_model_dir) self._converter.save(output_saved_model_dir) self._model_config = self.model_config._replace( saved_model_dir=output_saved_model_dir)", "TensorRT conversion settings. 
\"\"\" inputs = inputs or self.generate_random_inputs() results", "= False @abc.abstractmethod def _create_converter(self, trt_convert_params: trt.TrtConversionParams): \"\"\"Creates a converter", "if \"TRTEngineOp\" not in [node.op for node in graph_def.node]: raise", "(the \"License\"); # you may not use this file except", "{}\".format(base, str(self._trt_convert_params)) @property def trt_convert_params(self) -> trt.TrtConversionParams: return self._trt_convert_params def", "size!\") shape[0] = batch_size if any(filter(lambda x: x < 0,", "-> Sequence[framework_ops.Tensor]: batch_size = batch_size or self.model_config.default_batch_size return [ _generate_random_tensor_v2(tensor,", "feed_dict=inputs) latency = [] for _ in range(benchmark_iterations): before =", "_generate_random_tensor_v2( tensor: framework_ops.Tensor, batch_size: Optional[int] = None) -> framework_ops.Tensor: \"\"\"Generates", "= time.time() outputs = self.graph_func(*inputs) latency.append(time.time() - before) except Exception", "\"\"\"Converts a TF1 model with TensorRT and runs the converted", "Sequence[str] = (tag_constants.SERVING,), saved_model_signature_key: str = ( signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY), default_batch_size: int", "return convert_to_constants.convert_variables_to_constants_v2(graph_func) @property def input_tensor_names(self): return [tensor.name for tensor in", "inputs or self.generate_random_inputs() results = [ model.run(inputs, warmup_iterations, benchmark_iterations) for", "ModelHandlerV1 trt_model_handler_cls = TrtModelHandlerV1 class ModelHandlerManagerV2(_ModelHandlerManagerBase): \"\"\"Manages a series of", "have dynamic dimensions except for batch size!\") return shape def", "sess: importer.import_graph_def(self.meta_graph.graph_def) try: for _ in range(warmup_iterations): sess.run(fetches=self.output_tensor_names, feed_dict=inputs) latency", "random_ops.random_uniform( shape=shape, dtype=tensor.dtype, name=tensor.name) # Models are repeatedly 
loaded for", "outputs = sess.run(fetches=self.output_tensor_names, feed_dict=inputs) latency.append(time.time() - before) except Exception as", "# # Unless required by applicable law or agreed to", "= sess.run(fetches=self.output_tensor_names, feed_dict=inputs) latency.append(time.time() - before) except Exception as exc:", "# Models are repeatedly loaded for different TensorRT conversion settings.", "TestResult(latency=latency, outputs=outputs if inputs else None) class ModelHandlerV2(_ModelHandlerBase): \"\"\"Runs a", "] graph_def = ( convert_to_constants.convert_variables_to_constants_from_session_graph( sess, meta_graph.graph_def, output_node_names)) meta_graph.graph_def.CopyFrom(graph_def) return", "models.\"\"\" def __new__(cls, saved_model_dir: str, saved_model_tags: Sequence[str] = (tag_constants.SERVING,), saved_model_signature_key:", "and running a model.\"\"\" def __init__( self, model_config: ModelConfig, trt_convert_params:", "tensorflow.core.framework import tensor_shape_pb2 from tensorflow.core.protobuf import config_pb2 from tensorflow.core.protobuf import", "-> Sequence[str]: \"\"\"Names of output tensors.\"\"\" @abc.abstractmethod def generate_random_inputs( self,", "TrtConversionParams: {}\".format(base, str(self._trt_convert_params)) @property def trt_convert_params(self) -> trt.TrtConversionParams: return self._trt_convert_params", "logging.info(\"Saving TensorRT model to %s!\", output_saved_model_dir) self._converter.save(output_saved_model_dir) self._model_config = self.model_config._replace(", "tf_dtypes.as_dtype(tensor_info.dtype) shape = _get_concrete_tensor_shape(tensor_info.tensor_shape, batch_size) with session.Session(): return random_ops.random_uniform( shape=shape,", "model to %s!\", output_saved_model_dir) self._converter.save(output_saved_model_dir) self._model_config = self.model_config._replace( saved_model_dir=output_saved_model_dir) self._conversion_is_saved", "[ model.run(inputs, warmup_iterations, benchmark_iterations) for 
model in [self._ori_model] + self._trt_models", "implied. # See the License for the specific language governing", "for model in [self._ori_model] + self._trt_models ] return self._result_collection._replace(results=results) class", "loader as saved_model_loader from tensorflow.python.saved_model import signature_constants from tensorflow.python.saved_model import", "def __str__(self) -> str: return str(self._model_config) def __repr__(self) -> str:", "TF1, or a sequence of tensors in TF2. If `None`,", "Optional[Mapping[str, np.ndarray]] = None, warmup_iterations=10, benchmark_iterations=100) -> TestResult: self.save(overwrite=False) logging.info(\"Running", "different TensorRT conversion settings. \"\"\" inputs = inputs or self.generate_random_inputs()", "load_meta_graph( saved_model_dir=self.model_config.saved_model_dir, saved_model_tags=self.model_config.saved_model_tags, saved_model_signature_key=self.model_config.saved_model_signature_key) @property def input_tensor_info(self) -> Mapping[str, meta_graph_pb2.TensorInfo]:", "ramdomly generated input tensors will be used instead. warmup_iterations: Number", "a graph function in TF2.\"\"\" imported = saved_model_load.load( export_dir=saved_model_dir, tags=saved_model_tags)", "-> Sequence[str]: \"\"\"Names of input tensors.\"\"\" @property def output_tensor_names(self) ->", "if tensor_shape.unknown_rank: raise ValueError(\"Cannot generates random tensors for unknown rank!\")", "import random_ops from tensorflow.python.saved_model import load as saved_model_load from tensorflow.python.saved_model", "sess, meta_graph.graph_def, output_node_names)) meta_graph.graph_def.CopyFrom(graph_def) return meta_graph @functools.lru_cache() def load_graph_func(saved_model_dir: str,", "permissions and # limitations under the License. 
# ============================================================================== \"\"\"Loads,", "0}) with session.Session(config=config_proto) as sess: importer.import_graph_def(self.meta_graph.graph_def) try: for _ in", "as saved_model_loader from tensorflow.python.saved_model import signature_constants from tensorflow.python.saved_model import tag_constants", "in TF1.\"\"\" model_handler_cls = ModelHandlerV1 trt_model_handler_cls = TrtModelHandlerV1 class ModelHandlerManagerV2(_ModelHandlerManagerBase):", "in meta_graph.signature_def[saved_model_signature_key].outputs.values() ] graph_def = ( convert_to_constants.convert_variables_to_constants_from_session_graph( sess, meta_graph.graph_def, output_node_names))", "and runs sample models.\"\"\" import abc import collections import functools", "model_config: ModelConfig, default_trt_convert_params: trt.TrtConversionParams, trt_convert_params_updater: Callable[[trt.TrtConversionParams], Iterable[trt.TrtConversionParams]]): self._ori_model = self.model_handler_cls(model_config)", "logging.info(\"Running with TensorRT!\") test_result = ModelHandlerV2.run( self, inputs, warmup_iterations, benchmark_iterations,", "with framework_ops.device(device): for _ in range(warmup_iterations): self.graph_func(*inputs) latency = []", "for unknown rank!\") shape = [dim.size for dim in tensor_shape.dim]", "graph_func.graph.as_graph_def() self._check_contains_trt_engine(graph_def) def run(self, inputs: Optional[Sequence[framework_ops.Tensor]] = None, warmup_iterations=10, benchmark_iterations=100)", "__init__( self, model_config: ModelConfig, default_trt_convert_params: trt.TrtConversionParams, trt_convert_params_updater: Callable[[trt.TrtConversionParams], Iterable[trt.TrtConversionParams]]): self._ori_model", "benchmark_iterations) for model in [self._ori_model] + self._trt_models ] return self._result_collection._replace(results=results)", "Unless required by applicable law or agreed to in writing,", 
"_ModelHandlerBase(metaclass=abc.ABCMeta): \"\"\"Base class for running a model.\"\"\" def __init__(self, model_config:", "benchmark_iterations, allow_to_use_gpu=True) return test_result._replace(trt_convert_params=self._trt_convert_params) class _ModelHandlerManagerBase(metaclass=abc.ABCMeta): \"\"\"Manages a series of", "logging.info(\"Running with TensorRT!\") test_result = ModelHandlerV1.run( self, inputs, warmup_iterations, benchmark_iterations,", "the specific language governing permissions and # limitations under the", "or a sequence of tensors in TF2. If `None`, ramdomly", "sequence of tensors in TF2. If `None`, ramdomly generated input", "None) class _TrtModelHandlerBase(_ModelHandlerBase): \"\"\"Base class for converting and running a", "unknown rank!\") shape = [dim.size for dim in tensor_shape.dim] if", "name=tensor_info.name.split(\":\")[0]).eval() def _generate_random_tensor_v2( tensor: framework_ops.Tensor, batch_size: Optional[int] = None) ->", "to TensorRT!\") self._check_conversion(self._converter.convert()) self._conversion_is_saved = False @abc.abstractmethod def _create_converter(self, trt_convert_params:", "saved_model_signature_key=self.model_config.saved_model_signature_key) return convert_to_constants.convert_variables_to_constants_v2(graph_func) @property def input_tensor_names(self): return [tensor.name for tensor", "def _get_concrete_tensor_shape( tensor_shape: tensor_shape_pb2.TensorShapeProto, batch_size: Optional[int] = None) -> Sequence[int]:", "of input tensors.\"\"\" @property def output_tensor_names(self) -> Sequence[str]: \"\"\"Names of", "a TensorRT converted model.\"\"\" if self._conversion_is_saved and not overwrite: return", "} def run(self, inputs: Optional[Mapping[str, np.ndarray]] = None, warmup_iterations=10, benchmark_iterations=100,", "= None if not allow_to_use_gpu: config_proto = config_pb2.ConfigProto(device_count={\"CPU\": 1, \"GPU\":", "_check_conversion = _TrtModelHandlerBase._check_contains_trt_engine def 
run(self, inputs: Optional[Mapping[str, np.ndarray]] = None,", "# limitations under the License. # ============================================================================== \"\"\"Loads, converts, and", "meta_graph.signature_def[saved_model_signature_key].outputs.values() ] graph_def = ( convert_to_constants.convert_variables_to_constants_from_session_graph( sess, meta_graph.graph_def, output_node_names)) meta_graph.graph_def.CopyFrom(graph_def)", "or self.generate_random_inputs() results = [ model.run(inputs, warmup_iterations, benchmark_iterations) for model", "ModelHandlerV1.run( self, inputs, warmup_iterations, benchmark_iterations, allow_to_use_gpu=True) return test_result._replace(trt_convert_params=self._trt_convert_params) class TrtModelHandlerV2(_TrtModelHandlerBase,", "inputs will be used instead. warmup_iterations: Number of inferences to", "warm up the runtime. benchmark_iterations: Number of inferences to measure", "and numerics information for different TensorRT conversion settings. 
\"\"\" inputs", "default_batch_size: int = 1): return super(ModelConfig, cls).__new__(cls, saved_model_dir, saved_model_tags, saved_model_signature_key,", "tempfile.mkdtemp() logging.info(\"Saving TensorRT model to %s!\", output_saved_model_dir) self._converter.save(output_saved_model_dir) self._model_config =", "a concrete tensor shape without dynamic dimensions.\"\"\" if tensor_shape.unknown_rank: raise", "= graph_func.graph.as_graph_def() self._check_contains_trt_engine(graph_def) def run(self, inputs: Optional[Sequence[framework_ops.Tensor]] = None, warmup_iterations=10,", "_ in range(benchmark_iterations): before = time.time() outputs = self.graph_func(*inputs) latency.append(time.time()", "shape.\"\"\" shape = _get_concrete_tensor_shape(tensor.shape.as_proto(), batch_size) return random_ops.random_uniform( shape=shape, dtype=tensor.dtype, name=tensor.name)", "trt_convert_params=trt_convert_params) self._trt_models.append(trt_model) self._result_collection = TestResultCollection( results=[], config=model_config) def __str__(self) ->", "latency = [] for _ in range(benchmark_iterations): before = time.time()", "============================================================================== \"\"\"Loads, converts, and runs sample models.\"\"\" import abc import", "dimensions except for batch size!\") return shape def _generate_random_tensor_v1(tensor_info: meta_graph_pb2.TensorInfo,", "%s!\", output_saved_model_dir) self._converter.save(output_saved_model_dir) self._model_config = self.model_config._replace( saved_model_dir=output_saved_model_dir) self._conversion_is_saved = True", "collections.namedtuple(\"TestResult\", [\"outputs\", \"latency\", \"trt_convert_params\"])): def __new__(cls, outputs: Mapping[str, np.ndarray], latency:", "saved_model_load.load( export_dir=saved_model_dir, tags=saved_model_tags) graph_func = imported.signatures[saved_model_signature_key] return convert_to_constants.convert_variables_to_constants_v2(graph_func) ### Test", 
"allow_to_use_gpu=True) return test_result._replace(trt_convert_params=self._trt_convert_params) class _ModelHandlerManagerBase(metaclass=abc.ABCMeta): \"\"\"Manages a series of ModelHandlers", "generated input tensors. Args: inputs: Mapping from names to input", "TF1.\"\"\" @property def meta_graph(self) -> meta_graph_pb2.MetaGraphDef: return load_meta_graph( saved_model_dir=self.model_config.saved_model_dir, saved_model_tags=self.model_config.saved_model_tags,", "sess=sess, export_dir=saved_model_dir, tags=saved_model_tags, ) output_node_names = [ tensor.name.split(\":\")[0] for tensor", "a model in TF1.\"\"\" @property def meta_graph(self) -> meta_graph_pb2.MetaGraphDef: return", "Optional[int] = None ) -> Sequence[framework_ops.Tensor]: batch_size = batch_size or", "str = ( signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY), default_batch_size: int = 1): return super(ModelConfig,", "self.graph_func.inputs] @property def output_tensor_names(self): return [tensor.name for tensor in self.graph_func.outputs]", "TestResultCollection( collections.namedtuple(\"TestResultCollection\", [\"results\", \"config\"])): def __new__(cls, config: ModelConfig, results: Sequence[TestResult]", "ModelConfig( collections.namedtuple(\"ModelConfig\", [ \"saved_model_dir\", \"saved_model_tags\", \"saved_model_signature_key\", \"default_batch_size\" ])): \"\"\"Configurations for", "\"saved_model_tags\", \"saved_model_signature_key\", \"default_batch_size\" ])): \"\"\"Configurations for test models.\"\"\" def __new__(cls,", "-> np.ndarray: \"\"\"Generates a random tensor based on the data", "the data type and tensor shape.\"\"\" dtype = tf_dtypes.as_dtype(tensor_info.dtype) shape", "x: x < 0, shape)): raise ValueError(\"Cannot have dynamic dimensions", "[ tensor.name.split(\":\")[0] for tensor in meta_graph.signature_def[saved_model_signature_key].outputs.values() ] graph_def = (", "{}\".format(str(self._ori_model)) def __repr__(self) -> str: return 
\"{}({})\".format(self.__class__.__name__, str(self)) @property @classmethod", "int = 100) -> TestResultCollection: \"\"\"Runs model inference with provided", "import session from tensorflow.python.compiler.tensorrt import trt_convert as trt from tensorflow.python.framework", "from tensorflow.python.framework import ops as framework_ops from tensorflow.python.ops import random_ops", "tensor_shape_pb2 from tensorflow.core.protobuf import config_pb2 from tensorflow.core.protobuf import meta_graph_pb2 from", "warmup_iterations: Number of inferences to warm up the runtime. benchmark_iterations:", "Callable, Iterable, List, Mapping, Optional, Sequence, Union from absl import", "handler class. TrtModelHandleV1/TrtModelHandlerV2.\"\"\" @property def model_config(self): return self._ori_model.model_config def generate_random_inputs(self,", "= imported.signatures[saved_model_signature_key] return convert_to_constants.convert_variables_to_constants_v2(graph_func) ### Test Classes class TestResult( collections.namedtuple(\"TestResult\",", "and tensor shape.\"\"\" dtype = tf_dtypes.as_dtype(tensor_info.dtype) shape = _get_concrete_tensor_shape(tensor_info.tensor_shape, batch_size)", "trt_model_handler_cls = TrtModelHandlerV1 class ModelHandlerManagerV2(_ModelHandlerManagerBase): \"\"\"Manages a series of ModelHandlers", "function in TF2.\"\"\" imported = saved_model_load.load( export_dir=saved_model_dir, tags=saved_model_tags) graph_func =", "self.model_config.saved_model_signature_key].inputs @property def output_tensor_info(self) -> Mapping[str, meta_graph_pb2.TensorInfo]: return self.meta_graph.signature_def[ self.model_config.saved_model_signature_key].outputs", "= None, warmup_iterations=10, benchmark_iterations=100) -> TestResult: self.save(overwrite=False) logging.info(\"Running with TensorRT!\")", "generate_random_inputs( self, batch_size: Optional[int] = None ) -> Mapping[str, Union[np.ndarray,", "def trt_convert_params(self) -> trt.TrtConversionParams: return 
self._trt_convert_params def save(self, output_saved_model_dir: Optional[str]", "self.model_config.saved_model_signature_key), nodes_denylist=conversion_nodes_denylist, max_batch_size=trt_convert_params.max_batch_size, max_workspace_size_bytes=trt_convert_params.max_workspace_size_bytes, precision_mode=trt_convert_params.precision_mode, minimum_segment_size=trt_convert_params.minimum_segment_size, is_dynamic_op=trt_convert_params.is_dynamic_op, maximum_cached_engines=trt_convert_params.maximum_cached_engines, use_calibration=trt_convert_params.use_calibration, )", "to measure the latency. allow_to_use_gpu: Whether it is allowed to", "graph_func(self): graph_func = load_graph_func( saved_model_dir=self.model_config.saved_model_dir, saved_model_tags=self.model_config.saved_model_tags, saved_model_signature_key=self.model_config.saved_model_signature_key) return convert_to_constants.convert_variables_to_constants_v2(graph_func) @property", "the License. # ============================================================================== \"\"\"Loads, converts, and runs sample models.\"\"\"", "\"\"\"Base class for converting and running a model.\"\"\" def __init__(", "Optional[int] = None): return self._ori_model.generate_random_inputs(batch_size) def run(self, inputs=None, warmup_iterations: int", "If `None`, ramdomly generated input tensors will be used instead.", "allow_to_use_gpu=False) -> TestResult: inputs = inputs or self.generate_random_inputs() try: device", "tensor.name.split(\":\")[0] for tensor in meta_graph.signature_def[saved_model_signature_key].outputs.values() ] graph_def = ( convert_to_constants.convert_variables_to_constants_from_session_graph(", "generate_random_inputs(self, batch_size: Optional[int] = None ) -> Mapping[str, np.ndarray]: batch_size", "if inputs else None) class _TrtModelHandlerBase(_ModelHandlerBase): \"\"\"Base class for converting", "\"trt_convert_params\"])): def __new__(cls, outputs: Mapping[str, np.ndarray], latency: List[float], 
trt_convert_params: trt.TrtConversionParams", "_check_conversion(self, graph_func): graph_def = graph_func.graph.as_graph_def() self._check_contains_trt_engine(graph_def) def run(self, inputs: Optional[Sequence[framework_ops.Tensor]]", "= [] for trt_convert_params in trt_convert_params_updater( default_trt_convert_params): trt_model = self.trt_model_handler_cls(", "tags=saved_model_tags, ) output_node_names = [ tensor.name.split(\":\")[0] for tensor in meta_graph.signature_def[saved_model_signature_key].outputs.values()", "TrtModelHandleV1/TrtModelHandlerV2.\"\"\" @property def model_config(self): return self._ori_model.model_config def generate_random_inputs(self, batch_size: Optional[int]", "input_tensort_names(self) -> Sequence[str]: return [info.name for info in self.input_tensor_info.values()] @property", "_get_concrete_tensor_shape(tensor_info.tensor_shape, batch_size) with session.Session(): return random_ops.random_uniform( shape=shape, dtype=dtype, name=tensor_info.name.split(\":\")[0]).eval() def", "You may obtain a copy of the License at #", "_check_conversion(self, conversion_output): \"\"\"Checks if conversion output has any TensorRT engines.\"\"\"", "aggregrated testing/benchmarking.\"\"\" def __init__( self, model_config: ModelConfig, default_trt_convert_params: trt.TrtConversionParams, trt_convert_params_updater:", "TestResult(latency=latency, outputs=outputs if inputs else None) class _TrtModelHandlerBase(_ModelHandlerBase): \"\"\"Base class", "shape: raise ValueError(\"The tensor cannot have a rank of 0!\")", "Or a sequence of tensors in TF2. 
If `None`, ramdomly", "range(warmup_iterations): self.graph_func(*inputs) latency = [] for _ in range(benchmark_iterations): before", "ValueError(\"Cannot have dynamic dimensions except for batch size!\") return shape", "= inputs or self.generate_random_inputs() results = [ model.run(inputs, warmup_iterations, benchmark_iterations)", "return self.meta_graph.signature_def[ self.model_config.saved_model_signature_key].inputs @property def output_tensor_info(self) -> Mapping[str, meta_graph_pb2.TensorInfo]: return", "try: device = \"/device:gpu:0\" if allow_to_use_gpu else \"/device:cpu:0\" with framework_ops.device(device):", "for info in self.output_tensor_info.values()] def generate_random_inputs(self, batch_size: Optional[int] = None", "def __new__(cls, saved_model_dir: str, saved_model_tags: Sequence[str] = (tag_constants.SERVING,), saved_model_signature_key: str", "batch size!\") return shape def _generate_random_tensor_v1(tensor_info: meta_graph_pb2.TensorInfo, batch_size: Optional[int] =", "runtime. benchmark_iterations: Number of inferences to measure the latency. 
allow_to_use_gpu:", "self.meta_graph.signature_def[ self.model_config.saved_model_signature_key].outputs @property def input_tensort_names(self) -> Sequence[str]: return [info.name for", "inputs: Optional[Sequence[framework_ops.Tensor]] = None, warmup_iterations=10, benchmark_iterations=100) -> TestResult: self.save(overwrite=False) logging.info(\"Running", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "from tensorflow.core.framework import graph_pb2 from tensorflow.core.framework import tensor_shape_pb2 from tensorflow.core.protobuf", "if any(filter(lambda x: x < 0, shape)): raise ValueError(\"Cannot have", "return random_ops.random_uniform( shape=shape, dtype=dtype, name=tensor_info.name.split(\":\")[0]).eval() def _generate_random_tensor_v2( tensor: framework_ops.Tensor, batch_size:", "and not overwrite: return output_saved_model_dir = output_saved_model_dir or tempfile.mkdtemp() logging.info(\"Saving", "for _ in range(warmup_iterations): self.graph_func(*inputs) latency = [] for _", "before = time.time() outputs = sess.run(fetches=self.output_tensor_names, feed_dict=inputs) latency.append(time.time() - before)", "results = [ model.run(inputs, warmup_iterations, benchmark_iterations) for model in [self._ori_model]", "any TensorRT engines.\"\"\" def _check_contains_trt_engine(self, graph_def: graph_pb2.GraphDef): if \"TRTEngineOp\" not", "self._check_contains_trt_engine(graph_def) def run(self, inputs: Optional[Sequence[framework_ops.Tensor]] = None, warmup_iterations=10, benchmark_iterations=100) ->", "str, saved_model_signature_key: str) -> meta_graph_pb2.MetaGraphDef: \"\"\"Loads a `tf.MetaGraphDef` in TF1.\"\"\"", "self.meta_graph.signature_def[ self.model_config.saved_model_signature_key].inputs @property def output_tensor_info(self) -> Mapping[str, meta_graph_pb2.TensorInfo]: return self.meta_graph.signature_def[", "str: return \"Input Model: {}\".format(str(self._ori_model)) def __repr__(self) -> str: return", "0: raise 
ValueError(\"Must provide a valid batch size \" \"as", "None) -> np.ndarray: \"\"\"Generates a random tensor based on the", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "tensors will be used instead. warmup_iterations: Number of inferences to", "License. # You may obtain a copy of the License", "input tensors.\"\"\" @property def output_tensor_names(self) -> Sequence[str]: \"\"\"Names of output", "as exc: raise RuntimeError(\"Failed to run model inference! \" \"Model", "exc outputs = dict(zip(self.output_tensor_names, outputs)) return TestResult(latency=latency, outputs=outputs if inputs", "version.\"\"\" @abc.abstractmethod def _check_conversion(self, conversion_output): \"\"\"Checks if conversion output has", "data type and tensor shape.\"\"\" shape = _get_concrete_tensor_shape(tensor.shape.as_proto(), batch_size) return", "model.\"\"\" def __init__( self, model_config: ModelConfig, trt_convert_params: trt.TrtConversionParams, ): super(_TrtModelHandlerBase,", "TF2 model with TensorRT and runs the converted model.\"\"\" def", "ModelHandlers for aggregrated testing/benchmarking in TF1.\"\"\" model_handler_cls = ModelHandlerV1 trt_model_handler_cls", "overwrite=True) -> None: \"\"\"Saves a TensorRT converted model.\"\"\" if self._conversion_is_saved", "governing permissions and # limitations under the License. # ==============================================================================", "TensorFlow Authors. All Rights Reserved. 
# # Licensed under the", "class ModelHandlerV1(_ModelHandlerBase): \"\"\"Runs a model in TF1.\"\"\" @property def meta_graph(self)", "# pylint: disable=bad-whitespace ### Helper Functions def _get_concrete_tensor_shape( tensor_shape: tensor_shape_pb2.TensorShapeProto,", "= ModelHandlerV2.run( self, inputs, warmup_iterations, benchmark_iterations, allow_to_use_gpu=True) return test_result._replace(trt_convert_params=self._trt_convert_params) class", "@classmethod @abc.abstractmethod def trt_model_handler_cls(cls): \"\"\"The TensorRTmodle handler class. TrtModelHandleV1/TrtModelHandlerV2.\"\"\" @property", "allow_to_use_gpu: config_proto = config_pb2.ConfigProto(device_count={\"CPU\": 1, \"GPU\": 0}) with session.Session(config=config_proto) as", "2020 The TensorFlow Authors. All Rights Reserved. # # Licensed", "type and tensor shape.\"\"\" dtype = tf_dtypes.as_dtype(tensor_info.dtype) shape = _get_concrete_tensor_shape(tensor_info.tensor_shape,", "on the data type and tensor shape.\"\"\" shape = _get_concrete_tensor_shape(tensor.shape.as_proto(),", "cls).__new__(cls, config, results) class _ModelHandlerBase(metaclass=abc.ABCMeta): \"\"\"Base class for running a", "output_node_names = [ tensor.name.split(\":\")[0] for tensor in meta_graph.signature_def[saved_model_signature_key].outputs.values() ] graph_def", "def output_tensor_names(self) -> Sequence[str]: return [info.name for info in self.output_tensor_info.values()]", "self._converter = self._create_converter(trt_convert_params) logging.info(\"Converting to TensorRT!\") self._check_conversion(self._converter.convert()) self._conversion_is_saved = False", "self, model_config: ModelConfig, default_trt_convert_params: trt.TrtConversionParams, trt_convert_params_updater: Callable[[trt.TrtConversionParams], Iterable[trt.TrtConversionParams]]): self._ori_model =", "] def run(self, inputs: Optional[Sequence[framework_ops.Tensor]] = None, warmup_iterations=10, benchmark_iterations=100, allow_to_use_gpu=False)", 
"default_trt_convert_params): trt_model = self.trt_model_handler_cls( model_config, trt_convert_params=trt_convert_params) self._trt_models.append(trt_model) self._result_collection = TestResultCollection(", "return TestResult(latency=latency, outputs=outputs if inputs else None) class ModelHandlerV2(_ModelHandlerBase): \"\"\"Runs", "Optional[Sequence[framework_ops.Tensor]] = None, warmup_iterations=10, benchmark_iterations=100, allow_to_use_gpu=False) -> TestResult: inputs =", "Mapping from names to input ndarrays in TF1. Or a", "benchmark_iterations=100, allow_to_use_gpu=False) -> TestResult: inputs = inputs or self.generate_random_inputs() config_proto", "have a rank of 0!\") if shape[0] < 0: if", "inferences to measure the latency. allow_to_use_gpu: Whether it is allowed", "\"saved_model_dir\", \"saved_model_tags\", \"saved_model_signature_key\", \"default_batch_size\" ])): \"\"\"Configurations for test models.\"\"\" def", "outputs=outputs if inputs else None) class ModelHandlerV2(_ModelHandlerBase): \"\"\"Runs a model", "shape = _get_concrete_tensor_shape(tensor.shape.as_proto(), batch_size) return random_ops.random_uniform( shape=shape, dtype=tensor.dtype, name=tensor.name) #", "self._ori_model.model_config def generate_random_inputs(self, batch_size: Optional[int] = None): return self._ori_model.generate_random_inputs(batch_size) def", "use_calibration=trt_convert_params.use_calibration, ) _check_conversion = _TrtModelHandlerBase._check_contains_trt_engine def run(self, inputs: Optional[Mapping[str, np.ndarray]]", "from tensorflow.python.framework import convert_to_constants from tensorflow.python.framework import dtypes as tf_dtypes", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "input tensors.\"\"\" @abc.abstractmethod def run(self, inputs=None, warmup_iterations: int = 10,", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations 
under", "model.\"\"\" if self._conversion_is_saved and not overwrite: return output_saved_model_dir = output_saved_model_dir", "return meta_graph @functools.lru_cache() def load_graph_func(saved_model_dir: str, saved_model_tags: str, saved_model_signature_key: str):", "language governing permissions and # limitations under the License. #", "convert_to_constants.convert_variables_to_constants_v2(graph_func) @property def input_tensor_names(self): return [tensor.name for tensor in self.graph_func.inputs]", "required by applicable law or agreed to in writing, software", "graph_def: graph_pb2.GraphDef): if \"TRTEngineOp\" not in [node.op for node in", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "generates random tensors for unknown rank!\") shape = [dim.size for", "] return self._result_collection._replace(results=results) class ModelHandlerManagerV1(_ModelHandlerManagerBase): \"\"\"Manages a series of ModelHandlers", "\"{}({})\".format(self.__class__.__name__, str(self)) @property def model_config(self) -> ModelConfig: return self._model_config @property", "License. 
# ============================================================================== \"\"\"Loads, converts, and runs sample models.\"\"\" import", "running a model.\"\"\" def __init__( self, model_config: ModelConfig, trt_convert_params: trt.TrtConversionParams,", "agreed to in writing, software # distributed under the License", "meta_graph_pb2.TensorInfo]: return self.meta_graph.signature_def[ self.model_config.saved_model_signature_key].inputs @property def output_tensor_info(self) -> Mapping[str, meta_graph_pb2.TensorInfo]:", "( signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY), default_batch_size: int = 1): return super(ModelConfig, cls).__new__(cls, saved_model_dir,", "distributed under the License is distributed on an \"AS IS\"", "dim in tensor_shape.dim] if not shape: raise ValueError(\"The tensor cannot", "batch_size: Optional[int] = None ) -> Mapping[str, Union[np.ndarray, framework_ops.Tensor]]: \"\"\"Generates", "warmup_iterations=10, benchmark_iterations=100, allow_to_use_gpu=False) -> TestResult: inputs = inputs or self.generate_random_inputs()", "Mapping[str, np.ndarray]: batch_size = batch_size or self.model_config.default_batch_size return { tensor_info.name:", "is_dynamic_op=trt_convert_params.is_dynamic_op, maximum_cached_engines=trt_convert_params.maximum_cached_engines, use_calibration=trt_convert_params.use_calibration, ) _check_conversion = _TrtModelHandlerBase._check_contains_trt_engine def run(self, inputs:", "conversion settings. # Using cache can reduce I/O. 
@functools.lru_cache() def", "from tensorflow.python.saved_model import loader as saved_model_loader from tensorflow.python.saved_model import signature_constants", "return [tensor.name for tensor in self.graph_func.inputs] @property def output_tensor_names(self): return", "trt.TrtConversionParams): conversion_nodes_denylist = self.output_tensor_names return trt.TrtGraphConverter( input_saved_model_dir=self.model_config.saved_model_dir, input_saved_model_tags=self.model_config.saved_model_tags, input_saved_model_signature_key=( self.model_config.saved_model_signature_key),", "\"\"\"Names of output tensors.\"\"\" @abc.abstractmethod def generate_random_inputs( self, batch_size: Optional[int]", "benchmark_iterations=100, allow_to_use_gpu=False) -> TestResult: inputs = inputs or self.generate_random_inputs() try:", "__init__(self, model_config: ModelConfig): self._model_config = model_config def __str__(self) -> str:", "self.model_config._replace( saved_model_dir=output_saved_model_dir) self._conversion_is_saved = True class TrtModelHandlerV1(_TrtModelHandlerBase, ModelHandlerV1): \"\"\"Converts a", "def _generate_random_tensor_v2( tensor: framework_ops.Tensor, batch_size: Optional[int] = None) -> framework_ops.Tensor:", "output_tensor_names(self): return [tensor.name for tensor in self.graph_func.outputs] def generate_random_inputs(self, batch_size:", "meta_graph @functools.lru_cache() def load_graph_func(saved_model_dir: str, saved_model_tags: str, saved_model_signature_key: str): \"\"\"Loads", "def model_config(self): return self._ori_model.model_config def generate_random_inputs(self, batch_size: Optional[int] = None):", "-> TestResultCollection: \"\"\"Runs model inference with provided or randomly generated", "meta_graph.graph_def, output_node_names)) meta_graph.graph_def.CopyFrom(graph_def) return meta_graph @functools.lru_cache() def load_graph_func(saved_model_dir: str, saved_model_tags:", "self._trt_convert_params = trt_convert_params self._converter = 
self._create_converter(trt_convert_params) logging.info(\"Converting to TensorRT!\") self._check_conversion(self._converter.convert())", "range(benchmark_iterations): before = time.time() outputs = sess.run(fetches=self.output_tensor_names, feed_dict=inputs) latency.append(time.time() -", "not shape: raise ValueError(\"The tensor cannot have a rank of", "class TestResultCollection( collections.namedtuple(\"TestResultCollection\", [\"results\", \"config\"])): def __new__(cls, config: ModelConfig, results:", "for dim in tensor_shape.dim] if not shape: raise ValueError(\"The tensor", "input_tensort_names(self) -> Sequence[str]: \"\"\"Names of input tensors.\"\"\" @property def output_tensor_names(self)", "\"\"\"Checks if conversion output has any TensorRT engines.\"\"\" def _check_contains_trt_engine(self,", "_generate_random_tensor_v2(tensor, batch_size) for tensor in self.graph_func.inputs ] def run(self, inputs:", "is None or batch_size <= 0: raise ValueError(\"Must provide a", "not in [node.op for node in graph_def.node]: raise RuntimeError(\"Failed to", "default_trt_convert_params: trt.TrtConversionParams, trt_convert_params_updater: Callable[[trt.TrtConversionParams], Iterable[trt.TrtConversionParams]]): self._ori_model = self.model_handler_cls(model_config) self._trt_models =", "= ModelHandlerV1.run( self, inputs, warmup_iterations, benchmark_iterations, allow_to_use_gpu=True) return test_result._replace(trt_convert_params=self._trt_convert_params) class", "class _TrtModelHandlerBase(_ModelHandlerBase): \"\"\"Base class for converting and running a model.\"\"\"", "converted model.\"\"\" def _create_converter(self, trt_convert_params: trt.TrtConversionParams): conversion_nodes_denylist = self.output_tensor_names return", "saved_model_tags: Sequence[str] = (tag_constants.SERVING,), saved_model_signature_key: str = ( signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY), default_batch_size:", "data type and tensor shape.\"\"\" dtype = 
tf_dtypes.as_dtype(tensor_info.dtype) shape =", "@abc.abstractmethod def _check_conversion(self, conversion_output): \"\"\"Checks if conversion output has any", "Iterable, List, Mapping, Optional, Sequence, Union from absl import logging", "OR CONDITIONS OF ANY KIND, either express or implied. #", "True class TrtModelHandlerV1(_TrtModelHandlerBase, ModelHandlerV1): \"\"\"Converts a TF1 model with TensorRT", "the License is distributed on an \"AS IS\" BASIS, #", "\" \"Model Information: {}\".format(str(self))) def __str__(self) -> str: base =", "str, saved_model_signature_key: str): \"\"\"Loads a graph function in TF2.\"\"\" imported", "self.model_config.default_batch_size return { tensor_info.name: _generate_random_tensor_v1(tensor_info, batch_size) for tensor_info in self.input_tensor_info.values()", "Sequence[str]: return [info.name for info in self.output_tensor_info.values()] def generate_random_inputs(self, batch_size:", "input_tensor_info(self) -> Mapping[str, meta_graph_pb2.TensorInfo]: return self.meta_graph.signature_def[ self.model_config.saved_model_signature_key].inputs @property def output_tensor_info(self)", "<= 0: raise ValueError(\"Must provide a valid batch size \"", "numerics information. 
\"\"\" class ModelHandlerV1(_ModelHandlerBase): \"\"\"Runs a model in TF1.\"\"\"", "trt.TrtConversionParams: return self._trt_convert_params def save(self, output_saved_model_dir: Optional[str] = None, overwrite=True)", "run(self, inputs: Optional[Sequence[framework_ops.Tensor]] = None, warmup_iterations=10, benchmark_iterations=100) -> TestResult: self.save(overwrite=False)", "None ) -> Sequence[framework_ops.Tensor]: batch_size = batch_size or self.model_config.default_batch_size return", "law or agreed to in writing, software # distributed under", "cannot have a rank of 0!\") if shape[0] < 0:", "Optional[int] = None ) -> Mapping[str, np.ndarray]: batch_size = batch_size", "meta_graph.graph_def.CopyFrom(graph_def) return meta_graph @functools.lru_cache() def load_graph_func(saved_model_dir: str, saved_model_tags: str, saved_model_signature_key:", "graph function in TF2.\"\"\" imported = saved_model_load.load( export_dir=saved_model_dir, tags=saved_model_tags) graph_func", "def __init__(self, model_config: ModelConfig): self._model_config = model_config def __str__(self) ->", "class. 
TrtModelHandleV1/TrtModelHandlerV2.\"\"\" @property def model_config(self): return self._ori_model.model_config def generate_random_inputs(self, batch_size:", "def _create_converter(self, trt_convert_params: trt.TrtConversionParams): conversion_nodes_denylist = self.output_tensor_names return trt.TrtGraphConverter( input_saved_model_dir=self.model_config.saved_model_dir,", "@property def model_config(self): return self._ori_model.model_config def generate_random_inputs(self, batch_size: Optional[int] =", "def __init__( self, model_config: ModelConfig, trt_convert_params: trt.TrtConversionParams, ): super(_TrtModelHandlerBase, self).__init__(model_config)", "model_config(self) -> ModelConfig: return self._model_config @property def input_tensort_names(self) -> Sequence[str]:", "may obtain a copy of the License at # #", "Mapping from names to input ndarrays in TF1, or a", "def run(self, inputs: Optional[Sequence[framework_ops.Tensor]] = None, warmup_iterations=10, benchmark_iterations=100, allow_to_use_gpu=False) ->", "a sequence of tensors in TF2. If `None`, ramdomly generated", "[info.name for info in self.output_tensor_info.values()] def generate_random_inputs(self, batch_size: Optional[int] =", "of ModelHandlers for aggregrated testing/benchmarking.\"\"\" def __init__( self, model_config: ModelConfig,", "may not use this file except in compliance with the", "signature_constants from tensorflow.python.saved_model import tag_constants # pylint: disable=bad-whitespace ### Helper", "self.graph_func.inputs ] def run(self, inputs: Optional[Sequence[framework_ops.Tensor]] = None, warmup_iterations=10, benchmark_iterations=100,", "and runs the converted model.\"\"\" def _create_converter(self, trt_convert_params: trt.TrtConversionParams): conversion_nodes_denylist", "the runtime. benchmark_iterations: Number of inferences to measure the latency.", "this file except in compliance with the License. 
# You", "self.save(overwrite=False) logging.info(\"Running with TensorRT!\") test_result = ModelHandlerV1.run( self, inputs, warmup_iterations,", "based on the data type and tensor shape.\"\"\" dtype =", "allow_to_use_gpu=True) return test_result._replace(trt_convert_params=self._trt_convert_params) class TrtModelHandlerV2(_TrtModelHandlerBase, ModelHandlerV2): \"\"\"Converts a TF2 model", "\"\"\"Runs a model in TF2.\"\"\" @property def graph_func(self): graph_func =", "# # Licensed under the Apache License, Version 2.0 (the", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "to run model inference! \" \"Model information: {}\".format(str(self))) from exc", "inputs, warmup_iterations, benchmark_iterations, allow_to_use_gpu=True) return test_result._replace(trt_convert_params=self._trt_convert_params) class _ModelHandlerManagerBase(metaclass=abc.ABCMeta): \"\"\"Manages a", "for tensor in self.graph_func.inputs] @property def output_tensor_names(self): return [tensor.name for", "config: ModelConfig, results: Sequence[TestResult] = tuple()): return super(TestResultCollection, cls).__new__(cls, config,", "batch_size = batch_size or self.model_config.default_batch_size return { tensor_info.name: _generate_random_tensor_v1(tensor_info, batch_size)", "to measure the latency. Returns: `TestResultCollection` summarizing timing and numerics", "timing and numerics information for different TensorRT conversion settings. 
\"\"\"", "for tensor in self.graph_func.inputs ] def run(self, inputs: Optional[Sequence[framework_ops.Tensor]] =", "def input_tensort_names(self) -> Sequence[str]: return [info.name for info in self.input_tensor_info.values()]", "tensors.\"\"\" @property def output_tensor_names(self) -> Sequence[str]: \"\"\"Names of output tensors.\"\"\"", "TF1.\"\"\" with session.Session() as sess: meta_graph = saved_model_loader.load( sess=sess, export_dir=saved_model_dir,", "= None ) -> Mapping[str, np.ndarray]: batch_size = batch_size or", "meta_graph_pb2.TensorInfo]: return self.meta_graph.signature_def[ self.model_config.saved_model_signature_key].outputs @property def input_tensort_names(self) -> Sequence[str]: return", "= model_config def __str__(self) -> str: return str(self._model_config) def __repr__(self)", "If `None`, ramdomly generated inputs will be used instead. warmup_iterations:", "inputs: Optional[Sequence[framework_ops.Tensor]] = None, warmup_iterations=10, benchmark_iterations=100, allow_to_use_gpu=False) -> TestResult: inputs", "for node in graph_def.node]: raise RuntimeError(\"Failed to convert to TensorRT!", "ModelHandlerV2.run( self, inputs, warmup_iterations, benchmark_iterations, allow_to_use_gpu=True) return test_result._replace(trt_convert_params=self._trt_convert_params) class _ModelHandlerManagerBase(metaclass=abc.ABCMeta):", "Copyright 2020 The TensorFlow Authors. All Rights Reserved. # #", "self.model_config.saved_model_signature_key), conversion_params=trt_convert_params) def _check_conversion(self, graph_func): graph_def = graph_func.graph.as_graph_def() self._check_contains_trt_engine(graph_def) def", "measure the latency. 
allow_to_use_gpu: Whether it is allowed to use", "in range(benchmark_iterations): before = time.time() outputs = sess.run(fetches=self.output_tensor_names, feed_dict=inputs) latency.append(time.time()", "shape[0] = batch_size if any(filter(lambda x: x < 0, shape)):", "\"\"\"Creates a converter for the corresponding TF version.\"\"\" @abc.abstractmethod def", "series of ModelHandlers for aggregrated testing/benchmarking.\"\"\" def __init__( self, model_config:", "shape[0] < 0: if batch_size is None or batch_size <=", "inputs or self.generate_random_inputs() config_proto = None if not allow_to_use_gpu: config_proto", "__new__(cls, saved_model_dir: str, saved_model_tags: Sequence[str] = (tag_constants.SERVING,), saved_model_signature_key: str =", "= time.time() outputs = sess.run(fetches=self.output_tensor_names, feed_dict=inputs) latency.append(time.time() - before) except", "or implied. # See the License for the specific language", "return self.meta_graph.signature_def[ self.model_config.saved_model_signature_key].outputs @property def input_tensort_names(self) -> Sequence[str]: return [info.name", "@property def input_tensor_names(self): return [tensor.name for tensor in self.graph_func.inputs] @property", "_ in range(benchmark_iterations): before = time.time() outputs = sess.run(fetches=self.output_tensor_names, feed_dict=inputs)", "@property @classmethod @abc.abstractmethod def model_handler_cls(cls): \"\"\"The modle handler class. ModelHandleV1/ModelHandlerV2.\"\"\"", "test_result._replace(trt_convert_params=self._trt_convert_params) class _ModelHandlerManagerBase(metaclass=abc.ABCMeta): \"\"\"Manages a series of ModelHandlers for aggregrated", "graph_pb2.GraphDef): if \"TRTEngineOp\" not in [node.op for node in graph_def.node]:", "\"\"\"Manages a series of ModelHandlers for aggregrated testing/benchmarking in TF2.\"\"\"", "the model with provided or randomly generated input tensors. 
Args:", "import logging import numpy as np from tensorflow.core.framework import graph_pb2", "= self.model_config._replace( saved_model_dir=output_saved_model_dir) self._conversion_is_saved = True class TrtModelHandlerV1(_TrtModelHandlerBase, ModelHandlerV1): \"\"\"Converts", "name=tensor.name) # Models are repeatedly loaded for different TensorRT conversion", "Test Classes class TestResult( collections.namedtuple(\"TestResult\", [\"outputs\", \"latency\", \"trt_convert_params\"])): def __new__(cls,", "meta_graph_pb2.TensorInfo, batch_size: Optional[int] = None) -> np.ndarray: \"\"\"Generates a random", "in TF1.\"\"\" @property def meta_graph(self) -> meta_graph_pb2.MetaGraphDef: return load_meta_graph( saved_model_dir=self.model_config.saved_model_dir,", "if self._conversion_is_saved and not overwrite: return output_saved_model_dir = output_saved_model_dir or", "\"default_batch_size\" ])): \"\"\"Configurations for test models.\"\"\" def __new__(cls, saved_model_dir: str,", "def __str__(self) -> str: base = super(_TrtModelHandlerBase, self).__str__() return \"{},", "10, benchmark_iterations: int = 100, allow_to_use_gpu: bool = False) ->", "test models.\"\"\" def __new__(cls, saved_model_dir: str, saved_model_tags: Sequence[str] = (tag_constants.SERVING,),", "return test_result._replace(trt_convert_params=self._trt_convert_params) class _ModelHandlerManagerBase(metaclass=abc.ABCMeta): \"\"\"Manages a series of ModelHandlers for", "tensorflow.core.framework import graph_pb2 from tensorflow.core.framework import tensor_shape_pb2 from tensorflow.core.protobuf import", "to use GPU or not. 
Returns: `TestResult` summarizing timing and", "Union from absl import logging import numpy as np from", "or tempfile.mkdtemp() logging.info(\"Saving TensorRT model to %s!\", output_saved_model_dir) self._converter.save(output_saved_model_dir) self._model_config", "results: Sequence[TestResult] = tuple()): return super(TestResultCollection, cls).__new__(cls, config, results) class", "information: {}\".format(str(self))) from exc outputs = dict(zip(self.output_tensor_names, outputs)) return TestResult(latency=latency,", "a converter for the corresponding TF version.\"\"\" @abc.abstractmethod def _check_conversion(self,", "class ModelConfig( collections.namedtuple(\"ModelConfig\", [ \"saved_model_dir\", \"saved_model_tags\", \"saved_model_signature_key\", \"default_batch_size\" ])): \"\"\"Configurations", "\"\"\"Loads a graph function in TF2.\"\"\" imported = saved_model_load.load( export_dir=saved_model_dir,", "self.trt_model_handler_cls( model_config, trt_convert_params=trt_convert_params) self._trt_models.append(trt_model) self._result_collection = TestResultCollection( results=[], config=model_config) def", "str(self._model_config) def __repr__(self) -> str: return \"{}({})\".format(self.__class__.__name__, str(self)) @property def", "class _ModelHandlerManagerBase(metaclass=abc.ABCMeta): \"\"\"Manages a series of ModelHandlers for aggregrated testing/benchmarking.\"\"\"", "_generate_random_tensor_v1(tensor_info, batch_size) for tensor_info in self.input_tensor_info.values() } def run(self, inputs:", "trt_convert_params) class ModelConfig( collections.namedtuple(\"ModelConfig\", [ \"saved_model_dir\", \"saved_model_tags\", \"saved_model_signature_key\", \"default_batch_size\" ])):", "class TrtModelHandlerV1(_TrtModelHandlerBase, ModelHandlerV1): \"\"\"Converts a TF1 model with TensorRT and", "return load_meta_graph( saved_model_dir=self.model_config.saved_model_dir, saved_model_tags=self.model_config.saved_model_tags, 
saved_model_signature_key=self.model_config.saved_model_signature_key) @property def input_tensor_info(self) -> Mapping[str,", "Optional[int] = None ) -> Mapping[str, Union[np.ndarray, framework_ops.Tensor]]: \"\"\"Generates mapping", "model_handler_cls(cls): \"\"\"The modle handler class. ModelHandleV1/ModelHandlerV2.\"\"\" @property @classmethod @abc.abstractmethod def", "self._result_collection = TestResultCollection( results=[], config=model_config) def __str__(self) -> str: return", "100, allow_to_use_gpu: bool = False) -> TestResult: \"\"\"Runs the model", "= None): return super(TestResult, cls).__new__(cls, outputs, latency, trt_convert_params) class ModelConfig(", "\"TRTEngineOp\" not in [node.op for node in graph_def.node]: raise RuntimeError(\"Failed", "-> meta_graph_pb2.MetaGraphDef: \"\"\"Loads a `tf.MetaGraphDef` in TF1.\"\"\" with session.Session() as", "output tensors.\"\"\" @abc.abstractmethod def generate_random_inputs( self, batch_size: Optional[int] = None", "to TensorRT! 
\" \"Model Information: {}\".format(str(self))) def __str__(self) -> str:", "= inputs or self.generate_random_inputs() config_proto = None if not allow_to_use_gpu:", "= None, warmup_iterations=10, benchmark_iterations=100, allow_to_use_gpu=False) -> TestResult: inputs = inputs", "return convert_to_constants.convert_variables_to_constants_v2(graph_func) ### Test Classes class TestResult( collections.namedtuple(\"TestResult\", [\"outputs\", \"latency\",", "= None ) -> Sequence[framework_ops.Tensor]: batch_size = batch_size or self.model_config.default_batch_size", "Helper Functions def _get_concrete_tensor_shape( tensor_shape: tensor_shape_pb2.TensorShapeProto, batch_size: Optional[int] = None)", "\"\"\"Runs the model with provided or randomly generated input tensors.", "def _create_converter(self, trt_convert_params: trt.TrtConversionParams): return trt.TrtGraphConverterV2( input_saved_model_dir=self.model_config.saved_model_dir, input_saved_model_tags=self.model_config.saved_model_tags, input_saved_model_signature_key=( self.model_config.saved_model_signature_key),", "ModelHandlerV2(_ModelHandlerBase): \"\"\"Runs a model in TF2.\"\"\" @property def graph_func(self): graph_func", "framework_ops.device(device): for _ in range(warmup_iterations): self.graph_func(*inputs) latency = [] for", "graph_def = ( convert_to_constants.convert_variables_to_constants_from_session_graph( sess, meta_graph.graph_def, output_node_names)) meta_graph.graph_def.CopyFrom(graph_def) return meta_graph", "= tf_dtypes.as_dtype(tensor_info.dtype) shape = _get_concrete_tensor_shape(tensor_info.tensor_shape, batch_size) with session.Session(): return random_ops.random_uniform(", "output_tensor_names(self) -> Sequence[str]: return [info.name for info in self.output_tensor_info.values()] def", "shape = _get_concrete_tensor_shape(tensor_info.tensor_shape, batch_size) with session.Session(): return random_ops.random_uniform( shape=shape, dtype=dtype,", "raise RuntimeError(\"Failed to run model 
inference! \" \"Model information: {}\".format(str(self)))", "[\"outputs\", \"latency\", \"trt_convert_params\"])): def __new__(cls, outputs: Mapping[str, np.ndarray], latency: List[float],", "in TF1. Or a sequence of tensors in TF2. If", "saved_model_signature_key, default_batch_size) class TestResultCollection( collections.namedtuple(\"TestResultCollection\", [\"results\", \"config\"])): def __new__(cls, config:", "importer from tensorflow.python.framework import ops as framework_ops from tensorflow.python.ops import", "class for converting and running a model.\"\"\" def __init__( self,", "TF2.\"\"\" imported = saved_model_load.load( export_dir=saved_model_dir, tags=saved_model_tags) graph_func = imported.signatures[saved_model_signature_key] return", "dtype=tensor.dtype, name=tensor.name) # Models are repeatedly loaded for different TensorRT", "trt_convert_params_updater( default_trt_convert_params): trt_model = self.trt_model_handler_cls( model_config, trt_convert_params=trt_convert_params) self._trt_models.append(trt_model) self._result_collection =", "in writing, software # distributed under the License is distributed", "return self._ori_model.model_config def generate_random_inputs(self, batch_size: Optional[int] = None): return self._ori_model.generate_random_inputs(batch_size)", "super(ModelConfig, cls).__new__(cls, saved_model_dir, saved_model_tags, saved_model_signature_key, default_batch_size) class TestResultCollection( collections.namedtuple(\"TestResultCollection\", [\"results\",", "trt_convert_params: trt.TrtConversionParams, ): super(_TrtModelHandlerBase, self).__init__(model_config) self._trt_convert_params = trt_convert_params self._converter =", "in range(warmup_iterations): self.graph_func(*inputs) latency = [] for _ in range(benchmark_iterations):", "self, inputs, warmup_iterations, benchmark_iterations, allow_to_use_gpu=True) return test_result._replace(trt_convert_params=self._trt_convert_params) class TrtModelHandlerV2(_TrtModelHandlerBase, 
ModelHandlerV2):", "tensor_shape.dim] if not shape: raise ValueError(\"The tensor cannot have a", "trt_convert_params(self) -> trt.TrtConversionParams: return self._trt_convert_params def save(self, output_saved_model_dir: Optional[str] =", "a model in TF2.\"\"\" @property def graph_func(self): graph_func = load_graph_func(", "framework_ops.Tensor]]: \"\"\"Generates mapping from names to input tensors.\"\"\" @abc.abstractmethod def", "as tf_dtypes from tensorflow.python.framework import importer from tensorflow.python.framework import ops", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "import tensor_shape_pb2 from tensorflow.core.protobuf import config_pb2 from tensorflow.core.protobuf import meta_graph_pb2", "a series of ModelHandlers for aggregrated testing/benchmarking.\"\"\" def __init__( self,", "tag_constants # pylint: disable=bad-whitespace ### Helper Functions def _get_concrete_tensor_shape( tensor_shape:", "batch_size is None or batch_size <= 0: raise ValueError(\"Must provide", "License, Version 2.0 (the \"License\"); # you may not use", "outputs=outputs if inputs else None) class _TrtModelHandlerBase(_ModelHandlerBase): \"\"\"Base class for", "\"\"\"Saves a TensorRT converted model.\"\"\" if self._conversion_is_saved and not overwrite:", "def __str__(self) -> str: return \"Input Model: {}\".format(str(self._ori_model)) def __repr__(self)", "None, overwrite=True) -> None: \"\"\"Saves a TensorRT converted model.\"\"\" if", "trt_convert_params self._converter = self._create_converter(trt_convert_params) logging.info(\"Converting to TensorRT!\") self._check_conversion(self._converter.convert()) self._conversion_is_saved =", "summarizing timing and numerics information for different TensorRT conversion settings.", "super(TestResult, cls).__new__(cls, outputs, latency, trt_convert_params) class ModelConfig( collections.namedtuple(\"ModelConfig\", [ \"saved_model_dir\",", "shape)): raise ValueError(\"Cannot have dynamic dimensions except for batch 
size!\")", "def _generate_random_tensor_v1(tensor_info: meta_graph_pb2.TensorInfo, batch_size: Optional[int] = None) -> np.ndarray: \"\"\"Generates", "Model: {}\".format(str(self._ori_model)) def __repr__(self) -> str: return \"{}({})\".format(self.__class__.__name__, str(self)) @property", "[info.name for info in self.input_tensor_info.values()] @property def output_tensor_names(self) -> Sequence[str]:", "it is allowed to use GPU or not. Returns: `TestResult`", "the License for the specific language governing permissions and #", "with provided or randomly generated input tensors. Args: inputs: Mapping", "input_saved_model_tags=self.model_config.saved_model_tags, input_saved_model_signature_key=( self.model_config.saved_model_signature_key), nodes_denylist=conversion_nodes_denylist, max_batch_size=trt_convert_params.max_batch_size, max_workspace_size_bytes=trt_convert_params.max_workspace_size_bytes, precision_mode=trt_convert_params.precision_mode, minimum_segment_size=trt_convert_params.minimum_segment_size, is_dynamic_op=trt_convert_params.is_dynamic_op, maximum_cached_engines=trt_convert_params.maximum_cached_engines,", "ModelHandlerManagerV1(_ModelHandlerManagerBase): \"\"\"Manages a series of ModelHandlers for aggregrated testing/benchmarking in", "in tensor_shape.dim] if not shape: raise ValueError(\"The tensor cannot have", "warmup_iterations=10, benchmark_iterations=100) -> TestResult: self.save(overwrite=False) logging.info(\"Running with TensorRT!\") test_result =", "@property def input_tensor_info(self) -> Mapping[str, meta_graph_pb2.TensorInfo]: return self.meta_graph.signature_def[ self.model_config.saved_model_signature_key].inputs @property", "RuntimeError(\"Failed to run model inference! 
\" \"Model information: {}\".format(str(self))) from", "from tensorflow.python.framework import importer from tensorflow.python.framework import ops as framework_ops", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "model.run(inputs, warmup_iterations, benchmark_iterations) for model in [self._ori_model] + self._trt_models ]", "= _TrtModelHandlerBase._check_contains_trt_engine def run(self, inputs: Optional[Mapping[str, np.ndarray]] = None, warmup_iterations=10,", "output_saved_model_dir or tempfile.mkdtemp() logging.info(\"Saving TensorRT model to %s!\", output_saved_model_dir) self._converter.save(output_saved_model_dir)", "Optional[int] = None) -> np.ndarray: \"\"\"Generates a random tensor based", "= \"/device:gpu:0\" if allow_to_use_gpu else \"/device:cpu:0\" with framework_ops.device(device): for _", "random tensors for unknown rank!\") shape = [dim.size for dim", "\"saved_model_signature_key\", \"default_batch_size\" ])): \"\"\"Configurations for test models.\"\"\" def __new__(cls, saved_model_dir:", "with TensorRT and runs the converted model.\"\"\" def _create_converter(self, trt_convert_params:", "@abc.abstractmethod def _create_converter(self, trt_convert_params: trt.TrtConversionParams): \"\"\"Creates a converter for the", "runs sample models.\"\"\" import abc import collections import functools import", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "batch_size) for tensor in self.graph_func.inputs ] def run(self, inputs: Optional[Sequence[framework_ops.Tensor]]", "import functools import tempfile import time from typing import Callable,", "def run(self, inputs=None, warmup_iterations: int = 10, benchmark_iterations: int =", "a rank of 0!\") if shape[0] < 0: if batch_size", "trt.TrtConversionParams, trt_convert_params_updater: Callable[[trt.TrtConversionParams], Iterable[trt.TrtConversionParams]]): self._ori_model = self.model_handler_cls(model_config) self._trt_models = []", "from 
tensorflow.core.framework import tensor_shape_pb2 from tensorflow.core.protobuf import config_pb2 from tensorflow.core.protobuf", "aggregrated testing/benchmarking in TF2.\"\"\" model_handler_cls = ModelHandlerV2 trt_model_handler_cls = TrtModelHandlerV2", "self).__init__(model_config) self._trt_convert_params = trt_convert_params self._converter = self._create_converter(trt_convert_params) logging.info(\"Converting to TensorRT!\")", "meta_graph_pb2 from tensorflow.python.client import session from tensorflow.python.compiler.tensorrt import trt_convert as", "batch_size) with session.Session(): return random_ops.random_uniform( shape=shape, dtype=dtype, name=tensor_info.name.split(\":\")[0]).eval() def _generate_random_tensor_v2(", "allowed to use GPU or not. Returns: `TestResult` summarizing timing", "str, saved_model_tags: str, saved_model_signature_key: str) -> meta_graph_pb2.MetaGraphDef: \"\"\"Loads a `tf.MetaGraphDef`", "meta_graph(self) -> meta_graph_pb2.MetaGraphDef: return load_meta_graph( saved_model_dir=self.model_config.saved_model_dir, saved_model_tags=self.model_config.saved_model_tags, saved_model_signature_key=self.model_config.saved_model_signature_key) @property def", "in trt_convert_params_updater( default_trt_convert_params): trt_model = self.trt_model_handler_cls( model_config, trt_convert_params=trt_convert_params) self._trt_models.append(trt_model) self._result_collection", "\"{}, TrtConversionParams: {}\".format(base, str(self._trt_convert_params)) @property def trt_convert_params(self) -> trt.TrtConversionParams: return", "config=model_config) def __str__(self) -> str: return \"Input Model: {}\".format(str(self._ori_model)) def", "str): \"\"\"Loads a graph function in TF2.\"\"\" imported = saved_model_load.load(", "# distributed under the License is distributed on an \"AS", "# Unless required by applicable law or agreed to in", "warmup_iterations, benchmark_iterations, allow_to_use_gpu=True) return 
test_result._replace(trt_convert_params=self._trt_convert_params) class _ModelHandlerManagerBase(metaclass=abc.ABCMeta): \"\"\"Manages a series", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "[] for trt_convert_params in trt_convert_params_updater( default_trt_convert_params): trt_model = self.trt_model_handler_cls( model_config,", "\"as the tensor has a dynamic batch size!\") shape[0] =", "_create_converter(self, trt_convert_params: trt.TrtConversionParams): return trt.TrtGraphConverterV2( input_saved_model_dir=self.model_config.saved_model_dir, input_saved_model_tags=self.model_config.saved_model_tags, input_saved_model_signature_key=( self.model_config.saved_model_signature_key), conversion_params=trt_convert_params)", "if shape[0] < 0: if batch_size is None or batch_size", "info in self.output_tensor_info.values()] def generate_random_inputs(self, batch_size: Optional[int] = None )", "= None, overwrite=True) -> None: \"\"\"Saves a TensorRT converted model.\"\"\"", "framework_ops from tensorflow.python.ops import random_ops from tensorflow.python.saved_model import load as", "Classes class TestResult( collections.namedtuple(\"TestResult\", [\"outputs\", \"latency\", \"trt_convert_params\"])): def __new__(cls, outputs:", "raise ValueError(\"Cannot have dynamic dimensions except for batch size!\") return", "Functions def _get_concrete_tensor_shape( tensor_shape: tensor_shape_pb2.TensorShapeProto, batch_size: Optional[int] = None) ->", "run(self, inputs: Optional[Mapping[str, np.ndarray]] = None, warmup_iterations=10, benchmark_iterations=100, allow_to_use_gpu=False) ->", "the Apache License, Version 2.0 (the \"License\"); # you may", "converted model.\"\"\" def _create_converter(self, trt_convert_params: trt.TrtConversionParams): return trt.TrtGraphConverterV2( input_saved_model_dir=self.model_config.saved_model_dir, input_saved_model_tags=self.model_config.saved_model_tags,", "return super(ModelConfig, cls).__new__(cls, saved_model_dir, 
saved_model_tags, saved_model_signature_key, default_batch_size) class TestResultCollection( collections.namedtuple(\"TestResultCollection\",", "as trt from tensorflow.python.framework import convert_to_constants from tensorflow.python.framework import dtypes", "bool = False) -> TestResult: \"\"\"Runs the model with provided", "raise ValueError(\"Cannot generates random tensors for unknown rank!\") shape =", "batch_size: Optional[int] = None ) -> Sequence[framework_ops.Tensor]: batch_size = batch_size", "for info in self.input_tensor_info.values()] @property def output_tensor_names(self) -> Sequence[str]: return", "meta_graph = saved_model_loader.load( sess=sess, export_dir=saved_model_dir, tags=saved_model_tags, ) output_node_names = [", "for _ in range(benchmark_iterations): before = time.time() outputs = sess.run(fetches=self.output_tensor_names,", "under the License. # ============================================================================== \"\"\"Loads, converts, and runs sample", "tensors in TF2. If `None`, ramdomly generated input tensors will", "batch_size) return random_ops.random_uniform( shape=shape, dtype=tensor.dtype, name=tensor.name) # Models are repeatedly", "different TensorRT conversion settings. # Using cache can reduce I/O.", "= TrtModelHandlerV1 class ModelHandlerManagerV2(_ModelHandlerManagerBase): \"\"\"Manages a series of ModelHandlers for", "TensorRT conversion settings. # Using cache can reduce I/O. 
@functools.lru_cache()", "@property def output_tensor_names(self) -> Sequence[str]: \"\"\"Names of output tensors.\"\"\" @abc.abstractmethod", "ndarrays in TF1, or a sequence of tensors in TF2.", "dict(zip(self.output_tensor_names, outputs)) return TestResult(latency=latency, outputs=outputs if inputs else None) class", "logging.info(\"Converting to TensorRT!\") self._check_conversion(self._converter.convert()) self._conversion_is_saved = False @abc.abstractmethod def _create_converter(self,", "conversion output has any TensorRT engines.\"\"\" def _check_contains_trt_engine(self, graph_def: graph_pb2.GraphDef):", "Whether it is allowed to use GPU or not. Returns:", "return \"{}, TrtConversionParams: {}\".format(base, str(self._trt_convert_params)) @property def trt_convert_params(self) -> trt.TrtConversionParams:", "model with provided or randomly generated input tensors. Args: inputs:", "raise RuntimeError(\"Failed to convert to TensorRT! \" \"Model Information: {}\".format(str(self)))", "precision_mode=trt_convert_params.precision_mode, minimum_segment_size=trt_convert_params.minimum_segment_size, is_dynamic_op=trt_convert_params.is_dynamic_op, maximum_cached_engines=trt_convert_params.maximum_cached_engines, use_calibration=trt_convert_params.use_calibration, ) _check_conversion = _TrtModelHandlerBase._check_contains_trt_engine def", "for _ in range(benchmark_iterations): before = time.time() outputs = self.graph_func(*inputs)", "self.model_handler_cls(model_config) self._trt_models = [] for trt_convert_params in trt_convert_params_updater( default_trt_convert_params): trt_model", "None) class ModelHandlerV2(_ModelHandlerBase): \"\"\"Runs a model in TF2.\"\"\" @property def", "output_saved_model_dir = output_saved_model_dir or tempfile.mkdtemp() logging.info(\"Saving TensorRT model to %s!\",", "Number of inferences to warm up the runtime. 
benchmark_iterations: Number", "latency.append(time.time() - before) except Exception as exc: raise RuntimeError(\"Failed to", "under the License is distributed on an \"AS IS\" BASIS,", "-> TestResult: self.save(overwrite=False) logging.info(\"Running with TensorRT!\") test_result = ModelHandlerV1.run( self,", "[tensor.name for tensor in self.graph_func.inputs] @property def output_tensor_names(self): return [tensor.name", "with TensorRT!\") test_result = ModelHandlerV1.run( self, inputs, warmup_iterations, benchmark_iterations, allow_to_use_gpu=True)", "ramdomly generated inputs will be used instead. warmup_iterations: Number of", "- before) except Exception as exc: raise RuntimeError(\"Failed to run", "importer.import_graph_def(self.meta_graph.graph_def) try: for _ in range(warmup_iterations): sess.run(fetches=self.output_tensor_names, feed_dict=inputs) latency =", "`None`, ramdomly generated input tensors will be used instead. warmup_iterations:", "@abc.abstractmethod def run(self, inputs=None, warmup_iterations: int = 10, benchmark_iterations: int", "str: base = super(_TrtModelHandlerBase, self).__str__() return \"{}, TrtConversionParams: {}\".format(base, str(self._trt_convert_params))", "import convert_to_constants from tensorflow.python.framework import dtypes as tf_dtypes from tensorflow.python.framework", "else \"/device:cpu:0\" with framework_ops.device(device): for _ in range(warmup_iterations): self.graph_func(*inputs) latency", "def _check_contains_trt_engine(self, graph_def: graph_pb2.GraphDef): if \"TRTEngineOp\" not in [node.op for", "functools import tempfile import time from typing import Callable, Iterable,", "self._check_conversion(self._converter.convert()) self._conversion_is_saved = False @abc.abstractmethod def _create_converter(self, trt_convert_params: trt.TrtConversionParams): \"\"\"Creates", "\"\"\"Manages a series of ModelHandlers for aggregrated testing/benchmarking in TF1.\"\"\"", "Number of inferences to measure the latency. 
Returns: `TestResultCollection` summarizing", "allow_to_use_gpu=False) -> TestResult: inputs = inputs or self.generate_random_inputs() config_proto =", "batch size \" \"as the tensor has a dynamic batch", "def generate_random_inputs(self, batch_size: Optional[int] = None): return self._ori_model.generate_random_inputs(batch_size) def run(self,", "will be used instead. warmup_iterations: Number of inferences to warm", "else None) class _TrtModelHandlerBase(_ModelHandlerBase): \"\"\"Base class for converting and running", "batch_size or self.model_config.default_batch_size return { tensor_info.name: _generate_random_tensor_v1(tensor_info, batch_size) for tensor_info", "model inference with provided or randomly generated input tensors. Args:", "@property def output_tensor_names(self): return [tensor.name for tensor in self.graph_func.outputs] def", "self.generate_random_inputs() config_proto = None if not allow_to_use_gpu: config_proto = config_pb2.ConfigProto(device_count={\"CPU\":", "tensors.\"\"\" @abc.abstractmethod def generate_random_inputs( self, batch_size: Optional[int] = None )", "-> meta_graph_pb2.MetaGraphDef: return load_meta_graph( saved_model_dir=self.model_config.saved_model_dir, saved_model_tags=self.model_config.saved_model_tags, saved_model_signature_key=self.model_config.saved_model_signature_key) @property def input_tensor_info(self)", "output_saved_model_dir) self._converter.save(output_saved_model_dir) self._model_config = self.model_config._replace( saved_model_dir=output_saved_model_dir) self._conversion_is_saved = True class", "Exception as exc: raise RuntimeError(\"Failed to run model inference! \"", "outputs)) return TestResult(latency=latency, outputs=outputs if inputs else None) class ModelHandlerV2(_ModelHandlerBase):", "tensor has a dynamic batch size!\") shape[0] = batch_size if", "0!\") if shape[0] < 0: if batch_size is None or", "ANY KIND, either express or implied. 
# See the License", "batch_size) for tensor_info in self.input_tensor_info.values() } def run(self, inputs: Optional[Mapping[str,", "\"\"\" inputs = inputs or self.generate_random_inputs() results = [ model.run(inputs,", "def _check_conversion(self, conversion_output): \"\"\"Checks if conversion output has any TensorRT", "the License. # You may obtain a copy of the", "self.model_config.saved_model_signature_key].outputs @property def input_tensort_names(self) -> Sequence[str]: return [info.name for info", "List, Mapping, Optional, Sequence, Union from absl import logging import", "graph_func = load_graph_func( saved_model_dir=self.model_config.saved_model_dir, saved_model_tags=self.model_config.saved_model_tags, saved_model_signature_key=self.model_config.saved_model_signature_key) return convert_to_constants.convert_variables_to_constants_v2(graph_func) @property def", "TrtModelHandlerV1(_TrtModelHandlerBase, ModelHandlerV1): \"\"\"Converts a TF1 model with TensorRT and runs", "# See the License for the specific language governing permissions", "based on the data type and tensor shape.\"\"\" shape =", "-> Sequence[str]: return [info.name for info in self.input_tensor_info.values()] @property def", "latency. allow_to_use_gpu: Whether it is allowed to use GPU or", "I/O. 
@functools.lru_cache() def load_meta_graph( saved_model_dir: str, saved_model_tags: str, saved_model_signature_key: str)", "or self.generate_random_inputs() config_proto = None if not allow_to_use_gpu: config_proto =", "self, model_config: ModelConfig, trt_convert_params: trt.TrtConversionParams, ): super(_TrtModelHandlerBase, self).__init__(model_config) self._trt_convert_params =", "return super(TestResultCollection, cls).__new__(cls, config, results) class _ModelHandlerBase(metaclass=abc.ABCMeta): \"\"\"Base class for", "or self.model_config.default_batch_size return [ _generate_random_tensor_v2(tensor, batch_size) for tensor in self.graph_func.inputs", "self._ori_model = self.model_handler_cls(model_config) self._trt_models = [] for trt_convert_params in trt_convert_params_updater(", "model in [self._ori_model] + self._trt_models ] return self._result_collection._replace(results=results) class ModelHandlerManagerV1(_ModelHandlerManagerBase):", "tensor shape without dynamic dimensions.\"\"\" if tensor_shape.unknown_rank: raise ValueError(\"Cannot generates", "\"Model Information: {}\".format(str(self))) def __str__(self) -> str: base = super(_TrtModelHandlerBase,", "run model inference! 
\" \"Model information: {}\".format(str(self))) from exc outputs", "str(self._trt_convert_params)) @property def trt_convert_params(self) -> trt.TrtConversionParams: return self._trt_convert_params def save(self,", "test_result._replace(trt_convert_params=self._trt_convert_params) class TrtModelHandlerV2(_TrtModelHandlerBase, ModelHandlerV2): \"\"\"Converts a TF2 model with TensorRT", "converter for the corresponding TF version.\"\"\" @abc.abstractmethod def _check_conversion(self, conversion_output):", "size \" \"as the tensor has a dynamic batch size!\")", "a `tf.MetaGraphDef` in TF1.\"\"\" with session.Session() as sess: meta_graph =", "latency: List[float], trt_convert_params: trt.TrtConversionParams = None): return super(TestResult, cls).__new__(cls, outputs,", "= 100, allow_to_use_gpu: bool = False) -> TestResult: \"\"\"Runs the", "default_batch_size) class TestResultCollection( collections.namedtuple(\"TestResultCollection\", [\"results\", \"config\"])): def __new__(cls, config: ModelConfig,", "None, warmup_iterations=10, benchmark_iterations=100) -> TestResult: self.save(overwrite=False) logging.info(\"Running with TensorRT!\") test_result", "as sess: importer.import_graph_def(self.meta_graph.graph_def) try: for _ in range(warmup_iterations): sess.run(fetches=self.output_tensor_names, feed_dict=inputs)", "ModelHandlers for aggregrated testing/benchmarking.\"\"\" def __init__( self, model_config: ModelConfig, default_trt_convert_params:", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "inputs = inputs or self.generate_random_inputs() try: device = \"/device:gpu:0\" if", "writing, software # distributed under the License is distributed on", "to input tensors.\"\"\" @abc.abstractmethod def run(self, inputs=None, warmup_iterations: int =", "return str(self._model_config) def __repr__(self) -> str: return \"{}({})\".format(self.__class__.__name__, str(self)) @property", "__str__(self) -> str: base = super(_TrtModelHandlerBase, 
self).__str__() return \"{}, TrtConversionParams:", "for _ in range(warmup_iterations): sess.run(fetches=self.output_tensor_names, feed_dict=inputs) latency = [] for", "tensorflow.python.saved_model import signature_constants from tensorflow.python.saved_model import tag_constants # pylint: disable=bad-whitespace", "def __repr__(self) -> str: return \"{}({})\".format(self.__class__.__name__, str(self)) @property def model_config(self)", "tensors in TF2. If `None`, ramdomly generated inputs will be", "None or batch_size <= 0: raise ValueError(\"Must provide a valid", "\"\"\"Loads, converts, and runs sample models.\"\"\" import abc import collections", "int = 1): return super(ModelConfig, cls).__new__(cls, saved_model_dir, saved_model_tags, saved_model_signature_key, default_batch_size)", "class. ModelHandleV1/ModelHandlerV2.\"\"\" @property @classmethod @abc.abstractmethod def trt_model_handler_cls(cls): \"\"\"The TensorRTmodle handler", "and runs the converted model.\"\"\" def _create_converter(self, trt_convert_params: trt.TrtConversionParams): return", "batch_size <= 0: raise ValueError(\"Must provide a valid batch size", "config, results) class _ModelHandlerBase(metaclass=abc.ABCMeta): \"\"\"Base class for running a model.\"\"\"", "in TF2.\"\"\" @property def graph_func(self): graph_func = load_graph_func( saved_model_dir=self.model_config.saved_model_dir, saved_model_tags=self.model_config.saved_model_tags,", "framework_ops.Tensor, batch_size: Optional[int] = None) -> framework_ops.Tensor: \"\"\"Generates a random", "size!\") return shape def _generate_random_tensor_v1(tensor_info: meta_graph_pb2.TensorInfo, batch_size: Optional[int] = None)", "inputs=None, warmup_iterations: int = 10, benchmark_iterations: int = 100, allow_to_use_gpu:", "time.time() outputs = self.graph_func(*inputs) latency.append(time.time() - before) except Exception as", "with session.Session() as sess: meta_graph = saved_model_loader.load( sess=sess, export_dir=saved_model_dir, 
tags=saved_model_tags,", "__repr__(self) -> str: return \"{}({})\".format(self.__class__.__name__, str(self)) @property @classmethod @abc.abstractmethod def", "tags=saved_model_tags) graph_func = imported.signatures[saved_model_signature_key] return convert_to_constants.convert_variables_to_constants_v2(graph_func) ### Test Classes class", "any(filter(lambda x: x < 0, shape)): raise ValueError(\"Cannot have dynamic", "= [ tensor.name.split(\":\")[0] for tensor in meta_graph.signature_def[saved_model_signature_key].outputs.values() ] graph_def =", "session.Session(): return random_ops.random_uniform( shape=shape, dtype=dtype, name=tensor_info.name.split(\":\")[0]).eval() def _generate_random_tensor_v2( tensor: framework_ops.Tensor,", "self.generate_random_inputs() try: device = \"/device:gpu:0\" if allow_to_use_gpu else \"/device:cpu:0\" with", "shape.\"\"\" dtype = tf_dtypes.as_dtype(tensor_info.dtype) shape = _get_concrete_tensor_shape(tensor_info.tensor_shape, batch_size) with session.Session():", "= False) -> TestResult: \"\"\"Runs the model with provided or", "graph_def.node]: raise RuntimeError(\"Failed to convert to TensorRT! 
\" \"Model Information:", "self._trt_models.append(trt_model) self._result_collection = TestResultCollection( results=[], config=model_config) def __str__(self) -> str:", "saved_model_dir=self.model_config.saved_model_dir, saved_model_tags=self.model_config.saved_model_tags, saved_model_signature_key=self.model_config.saved_model_signature_key) @property def input_tensor_info(self) -> Mapping[str, meta_graph_pb2.TensorInfo]: return", "return [ _generate_random_tensor_v2(tensor, batch_size) for tensor in self.graph_func.inputs ] def", "@abc.abstractmethod def generate_random_inputs( self, batch_size: Optional[int] = None ) ->", "TF1 model with TensorRT and runs the converted model.\"\"\" def", "= 10, benchmark_iterations: int = 100, allow_to_use_gpu: bool = False)", "imported.signatures[saved_model_signature_key] return convert_to_constants.convert_variables_to_constants_v2(graph_func) ### Test Classes class TestResult( collections.namedtuple(\"TestResult\", [\"outputs\",", "from exc outputs = dict(zip(self.output_tensor_names, outputs)) return TestResult(latency=latency, outputs=outputs if", "\"\"\"Runs a model in TF1.\"\"\" @property def meta_graph(self) -> meta_graph_pb2.MetaGraphDef:", "inputs else None) class _TrtModelHandlerBase(_ModelHandlerBase): \"\"\"Base class for converting and", "the corresponding TF version.\"\"\" @abc.abstractmethod def _check_conversion(self, conversion_output): \"\"\"Checks if", "model.\"\"\" def __init__(self, model_config: ModelConfig): self._model_config = model_config def __str__(self)", "max_batch_size=trt_convert_params.max_batch_size, max_workspace_size_bytes=trt_convert_params.max_workspace_size_bytes, precision_mode=trt_convert_params.precision_mode, minimum_segment_size=trt_convert_params.minimum_segment_size, is_dynamic_op=trt_convert_params.is_dynamic_op, maximum_cached_engines=trt_convert_params.maximum_cached_engines, use_calibration=trt_convert_params.use_calibration, ) _check_conversion =", "results=[], 
config=model_config) def __str__(self) -> str: return \"Input Model: {}\".format(str(self._ori_model))", "self.graph_func(*inputs) latency.append(time.time() - before) except Exception as exc: raise RuntimeError(\"Failed", "-> TestResult: inputs = inputs or self.generate_random_inputs() config_proto = None", "conversion_nodes_denylist = self.output_tensor_names return trt.TrtGraphConverter( input_saved_model_dir=self.model_config.saved_model_dir, input_saved_model_tags=self.model_config.saved_model_tags, input_saved_model_signature_key=( self.model_config.saved_model_signature_key), nodes_denylist=conversion_nodes_denylist,", "__init__( self, model_config: ModelConfig, trt_convert_params: trt.TrtConversionParams, ): super(_TrtModelHandlerBase, self).__init__(model_config) self._trt_convert_params", "model inference! \" \"Model information: {}\".format(str(self))) from exc outputs =", "_generate_random_tensor_v1(tensor_info: meta_graph_pb2.TensorInfo, batch_size: Optional[int] = None) -> np.ndarray: \"\"\"Generates a", "\"\"\"Configurations for test models.\"\"\" def __new__(cls, saved_model_dir: str, saved_model_tags: Sequence[str]", "not allow_to_use_gpu: config_proto = config_pb2.ConfigProto(device_count={\"CPU\": 1, \"GPU\": 0}) with session.Session(config=config_proto)", "latency. Returns: `TestResultCollection` summarizing timing and numerics information for different", "# Using cache can reduce I/O. 
@functools.lru_cache() def load_meta_graph( saved_model_dir:", "in TF2.\"\"\" imported = saved_model_load.load( export_dir=saved_model_dir, tags=saved_model_tags) graph_func = imported.signatures[saved_model_signature_key]", "def run(self, inputs: Optional[Sequence[framework_ops.Tensor]] = None, warmup_iterations=10, benchmark_iterations=100) -> TestResult:", "np.ndarray]: batch_size = batch_size or self.model_config.default_batch_size return { tensor_info.name: _generate_random_tensor_v1(tensor_info,", "False) -> TestResult: \"\"\"Runs the model with provided or randomly", "ModelConfig, trt_convert_params: trt.TrtConversionParams, ): super(_TrtModelHandlerBase, self).__init__(model_config) self._trt_convert_params = trt_convert_params self._converter", "raise ValueError(\"The tensor cannot have a rank of 0!\") if", "inputs = inputs or self.generate_random_inputs() config_proto = None if not", "`tf.MetaGraphDef` in TF1.\"\"\" with session.Session() as sess: meta_graph = saved_model_loader.load(", "= batch_size or self.model_config.default_batch_size return { tensor_info.name: _generate_random_tensor_v1(tensor_info, batch_size) for", "tensorflow.python.client import session from tensorflow.python.compiler.tensorrt import trt_convert as trt from", "= load_graph_func( saved_model_dir=self.model_config.saved_model_dir, saved_model_tags=self.model_config.saved_model_tags, saved_model_signature_key=self.model_config.saved_model_signature_key) return convert_to_constants.convert_variables_to_constants_v2(graph_func) @property def input_tensor_names(self):", "saved_model_dir=self.model_config.saved_model_dir, saved_model_tags=self.model_config.saved_model_tags, saved_model_signature_key=self.model_config.saved_model_signature_key) return convert_to_constants.convert_variables_to_constants_v2(graph_func) @property def input_tensor_names(self): return [tensor.name", "# Copyright 2020 The TensorFlow Authors. All Rights Reserved. 
#", "conversion_params=trt_convert_params) def _check_conversion(self, graph_func): graph_def = graph_func.graph.as_graph_def() self._check_contains_trt_engine(graph_def) def run(self,", "None: \"\"\"Saves a TensorRT converted model.\"\"\" if self._conversion_is_saved and not", "_create_converter(self, trt_convert_params: trt.TrtConversionParams): conversion_nodes_denylist = self.output_tensor_names return trt.TrtGraphConverter( input_saved_model_dir=self.model_config.saved_model_dir, input_saved_model_tags=self.model_config.saved_model_tags,", "return shape def _generate_random_tensor_v1(tensor_info: meta_graph_pb2.TensorInfo, batch_size: Optional[int] = None) ->", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "saved_model_dir: str, saved_model_tags: Sequence[str] = (tag_constants.SERVING,), saved_model_signature_key: str = (", "trt.TrtConversionParams): \"\"\"Creates a converter for the corresponding TF version.\"\"\" @abc.abstractmethod", "return [info.name for info in self.input_tensor_info.values()] @property def output_tensor_names(self) ->", "limitations under the License. # ============================================================================== \"\"\"Loads, converts, and runs", "modle handler class. ModelHandleV1/ModelHandlerV2.\"\"\" @property @classmethod @abc.abstractmethod def trt_model_handler_cls(cls): \"\"\"The", "outputs, latency, trt_convert_params) class ModelConfig( collections.namedtuple(\"ModelConfig\", [ \"saved_model_dir\", \"saved_model_tags\", \"saved_model_signature_key\",", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "benchmark_iterations=100) -> TestResult: self.save(overwrite=False) logging.info(\"Running with TensorRT!\") test_result = ModelHandlerV2.run(", "or self.model_config.default_batch_size return { tensor_info.name: _generate_random_tensor_v1(tensor_info, batch_size) for tensor_info in", "exc: raise RuntimeError(\"Failed to run model inference! 
\" \"Model information:", "for batch size!\") return shape def _generate_random_tensor_v1(tensor_info: meta_graph_pb2.TensorInfo, batch_size: Optional[int]", "Sequence, Union from absl import logging import numpy as np", "collections import functools import tempfile import time from typing import", "tensor_info in self.input_tensor_info.values() } def run(self, inputs: Optional[Mapping[str, np.ndarray]] =", "of inferences to measure the latency. Returns: `TestResultCollection` summarizing timing", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "100) -> TestResultCollection: \"\"\"Runs model inference with provided or randomly", "\" \"Model information: {}\".format(str(self))) from exc outputs = dict(zip(self.output_tensor_names, outputs))", "Mapping[str, Union[np.ndarray, framework_ops.Tensor]]: \"\"\"Generates mapping from names to input tensors.\"\"\"", "inputs: Optional[Mapping[str, np.ndarray]] = None, warmup_iterations=10, benchmark_iterations=100) -> TestResult: self.save(overwrite=False)", "tensor_shape_pb2.TensorShapeProto, batch_size: Optional[int] = None) -> Sequence[int]: \"\"\"Gets a concrete", "the tensor has a dynamic batch size!\") shape[0] = batch_size", "max_workspace_size_bytes=trt_convert_params.max_workspace_size_bytes, precision_mode=trt_convert_params.precision_mode, minimum_segment_size=trt_convert_params.minimum_segment_size, is_dynamic_op=trt_convert_params.is_dynamic_op, maximum_cached_engines=trt_convert_params.maximum_cached_engines, use_calibration=trt_convert_params.use_calibration, ) _check_conversion = _TrtModelHandlerBase._check_contains_trt_engine", "abc import collections import functools import tempfile import time from", "of tensors in TF2. 
If `None`, ramdomly generated inputs will", "tensor shape.\"\"\" shape = _get_concrete_tensor_shape(tensor.shape.as_proto(), batch_size) return random_ops.random_uniform( shape=shape, dtype=tensor.dtype,", "else None) class ModelHandlerV2(_ModelHandlerBase): \"\"\"Runs a model in TF2.\"\"\" @property", "allow_to_use_gpu: Whether it is allowed to use GPU or not.", "randomly generated input tensors. Args: inputs: Mapping from names to", "generate_random_inputs(self, batch_size: Optional[int] = None): return self._ori_model.generate_random_inputs(batch_size) def run(self, inputs=None,", "inputs = inputs or self.generate_random_inputs() results = [ model.run(inputs, warmup_iterations,", "benchmark_iterations, allow_to_use_gpu=True) return test_result._replace(trt_convert_params=self._trt_convert_params) class TrtModelHandlerV2(_TrtModelHandlerBase, ModelHandlerV2): \"\"\"Converts a TF2", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "input_saved_model_signature_key=( self.model_config.saved_model_signature_key), nodes_denylist=conversion_nodes_denylist, max_batch_size=trt_convert_params.max_batch_size, max_workspace_size_bytes=trt_convert_params.max_workspace_size_bytes, precision_mode=trt_convert_params.precision_mode, minimum_segment_size=trt_convert_params.minimum_segment_size, is_dynamic_op=trt_convert_params.is_dynamic_op, maximum_cached_engines=trt_convert_params.maximum_cached_engines, use_calibration=trt_convert_params.use_calibration,", "< 0, shape)): raise ValueError(\"Cannot have dynamic dimensions except for", "str: return str(self._model_config) def __repr__(self) -> str: return \"{}({})\".format(self.__class__.__name__, str(self))", "converts, and runs sample models.\"\"\" import abc import collections import", "Rights Reserved. 
# # Licensed under the Apache License, Version", "for tensor in self.graph_func.outputs] def generate_random_inputs(self, batch_size: Optional[int] = None", "try: for _ in range(warmup_iterations): sess.run(fetches=self.output_tensor_names, feed_dict=inputs) latency = []", "specific language governing permissions and # limitations under the License.", "_ModelHandlerManagerBase(metaclass=abc.ABCMeta): \"\"\"Manages a series of ModelHandlers for aggregrated testing/benchmarking.\"\"\" def", "class TrtModelHandlerV2(_TrtModelHandlerBase, ModelHandlerV2): \"\"\"Converts a TF2 model with TensorRT and", "return test_result._replace(trt_convert_params=self._trt_convert_params) class TrtModelHandlerV2(_TrtModelHandlerBase, ModelHandlerV2): \"\"\"Converts a TF2 model with", "\"\"\"Base class for running a model.\"\"\" def __init__(self, model_config: ModelConfig):", "= self.model_handler_cls(model_config) self._trt_models = [] for trt_convert_params in trt_convert_params_updater( default_trt_convert_params):", "the converted model.\"\"\" def _create_converter(self, trt_convert_params: trt.TrtConversionParams): return trt.TrtGraphConverterV2( input_saved_model_dir=self.model_config.saved_model_dir,", "of ModelHandlers for aggregrated testing/benchmarking in TF1.\"\"\" model_handler_cls = ModelHandlerV1", "import trt_convert as trt from tensorflow.python.framework import convert_to_constants from tensorflow.python.framework", "inference! \" \"Model information: {}\".format(str(self))) from exc outputs = dict(zip(self.output_tensor_names,", "self.output_tensor_info.values()] def generate_random_inputs(self, batch_size: Optional[int] = None ) -> Mapping[str,", "# you may not use this file except in compliance", "import abc import collections import functools import tempfile import time", "class for running a model.\"\"\" def __init__(self, model_config: ModelConfig): self._model_config", "TensorRTmodle handler class. 
TrtModelHandleV1/TrtModelHandlerV2.\"\"\" @property def model_config(self): return self._ori_model.model_config def", "Optional[Sequence[framework_ops.Tensor]] = None, warmup_iterations=10, benchmark_iterations=100) -> TestResult: self.save(overwrite=False) logging.info(\"Running with", "TF2. If `None`, ramdomly generated inputs will be used instead.", "trt_convert_params: trt.TrtConversionParams): conversion_nodes_denylist = self.output_tensor_names return trt.TrtGraphConverter( input_saved_model_dir=self.model_config.saved_model_dir, input_saved_model_tags=self.model_config.saved_model_tags, input_saved_model_signature_key=(", "from absl import logging import numpy as np from tensorflow.core.framework", "def model_handler_cls(cls): \"\"\"The modle handler class. ModelHandleV1/ModelHandlerV2.\"\"\" @property @classmethod @abc.abstractmethod", "to warm up the runtime. benchmark_iterations: Number of inferences to", "def _check_conversion(self, graph_func): graph_def = graph_func.graph.as_graph_def() self._check_contains_trt_engine(graph_def) def run(self, inputs:", "self._converter.save(output_saved_model_dir) self._model_config = self.model_config._replace( saved_model_dir=output_saved_model_dir) self._conversion_is_saved = True class TrtModelHandlerV1(_TrtModelHandlerBase,", "-> str: base = super(_TrtModelHandlerBase, self).__str__() return \"{}, TrtConversionParams: {}\".format(base,", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "be used instead. 
warmup_iterations: Number of inferences to warm up", "feed_dict=inputs) latency.append(time.time() - before) except Exception as exc: raise RuntimeError(\"Failed", "running a model.\"\"\" def __init__(self, model_config: ModelConfig): self._model_config = model_config", "tensorflow.python.compiler.tensorrt import trt_convert as trt from tensorflow.python.framework import convert_to_constants from", "concrete tensor shape without dynamic dimensions.\"\"\" if tensor_shape.unknown_rank: raise ValueError(\"Cannot", "meta_graph_pb2.MetaGraphDef: return load_meta_graph( saved_model_dir=self.model_config.saved_model_dir, saved_model_tags=self.model_config.saved_model_tags, saved_model_signature_key=self.model_config.saved_model_signature_key) @property def input_tensor_info(self) ->", "None ) -> Mapping[str, np.ndarray]: batch_size = batch_size or self.model_config.default_batch_size", "= dict(zip(self.output_tensor_names, outputs)) return TestResult(latency=latency, outputs=outputs if inputs else None)", "-> str: return \"{}({})\".format(self.__class__.__name__, str(self)) @property def model_config(self) -> ModelConfig:", "np.ndarray]] = None, warmup_iterations=10, benchmark_iterations=100, allow_to_use_gpu=False) -> TestResult: inputs =", "under the Apache License, Version 2.0 (the \"License\"); # you", "[self._ori_model] + self._trt_models ] return self._result_collection._replace(results=results) class ModelHandlerManagerV1(_ModelHandlerManagerBase): \"\"\"Manages a", "import Callable, Iterable, List, Mapping, Optional, Sequence, Union from absl", "inferences to warm up the runtime. benchmark_iterations: Number of inferences", "batch_size = batch_size or self.model_config.default_batch_size return [ _generate_random_tensor_v2(tensor, batch_size) for", "conversion settings. 
\"\"\" inputs = inputs or self.generate_random_inputs() results =", "tensorflow.python.framework import dtypes as tf_dtypes from tensorflow.python.framework import importer from", "tensorflow.python.framework import ops as framework_ops from tensorflow.python.ops import random_ops from", "inference with provided or randomly generated input tensors. Args: inputs:", "self.input_tensor_info.values()] @property def output_tensor_names(self) -> Sequence[str]: return [info.name for info", "self._trt_models = [] for trt_convert_params in trt_convert_params_updater( default_trt_convert_params): trt_model =", "loaded for different TensorRT conversion settings. # Using cache can", "_TrtModelHandlerBase._check_contains_trt_engine def run(self, inputs: Optional[Mapping[str, np.ndarray]] = None, warmup_iterations=10, benchmark_iterations=100)", "inputs or self.generate_random_inputs() try: device = \"/device:gpu:0\" if allow_to_use_gpu else", "= 1): return super(ModelConfig, cls).__new__(cls, saved_model_dir, saved_model_tags, saved_model_signature_key, default_batch_size) class", "str) -> meta_graph_pb2.MetaGraphDef: \"\"\"Loads a `tf.MetaGraphDef` in TF1.\"\"\" with session.Session()", "convert to TensorRT! \" \"Model Information: {}\".format(str(self))) def __str__(self) ->", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "tf_dtypes from tensorflow.python.framework import importer from tensorflow.python.framework import ops as", "Sequence[str]: \"\"\"Names of output tensors.\"\"\" @abc.abstractmethod def generate_random_inputs( self, batch_size:", "a valid batch size \" \"as the tensor has a", "Number of inferences to measure the latency. allow_to_use_gpu: Whether it", "measure the latency. 
Returns: `TestResultCollection` summarizing timing and numerics information", "allow_to_use_gpu else \"/device:cpu:0\" with framework_ops.device(device): for _ in range(warmup_iterations): self.graph_func(*inputs)", "= tuple()): return super(TestResultCollection, cls).__new__(cls, config, results) class _ModelHandlerBase(metaclass=abc.ABCMeta): \"\"\"Base", "return { tensor_info.name: _generate_random_tensor_v1(tensor_info, batch_size) for tensor_info in self.input_tensor_info.values() }", "0: if batch_size is None or batch_size <= 0: raise", "model_config(self): return self._ori_model.model_config def generate_random_inputs(self, batch_size: Optional[int] = None): return", "corresponding TF version.\"\"\" @abc.abstractmethod def _check_conversion(self, conversion_output): \"\"\"Checks if conversion", "config_pb2 from tensorflow.core.protobuf import meta_graph_pb2 from tensorflow.python.client import session from", "in self.output_tensor_info.values()] def generate_random_inputs(self, batch_size: Optional[int] = None ) ->", "tensor_shape: tensor_shape_pb2.TensorShapeProto, batch_size: Optional[int] = None) -> Sequence[int]: \"\"\"Gets a", "= [ model.run(inputs, warmup_iterations, benchmark_iterations) for model in [self._ori_model] +", "TensorRT engines.\"\"\" def _check_contains_trt_engine(self, graph_def: graph_pb2.GraphDef): if \"TRTEngineOp\" not in", "-> str: return \"{}({})\".format(self.__class__.__name__, str(self)) @property @classmethod @abc.abstractmethod def model_handler_cls(cls):", "typing import Callable, Iterable, List, Mapping, Optional, Sequence, Union from", "in range(warmup_iterations): sess.run(fetches=self.output_tensor_names, feed_dict=inputs) latency = [] for _ in", "(tag_constants.SERVING,), saved_model_signature_key: str = ( signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY), default_batch_size: int = 1):", "@property def meta_graph(self) -> meta_graph_pb2.MetaGraphDef: return load_meta_graph( 
saved_model_dir=self.model_config.saved_model_dir, saved_model_tags=self.model_config.saved_model_tags, saved_model_signature_key=self.model_config.saved_model_signature_key)", "self._conversion_is_saved = True class TrtModelHandlerV1(_TrtModelHandlerBase, ModelHandlerV1): \"\"\"Converts a TF1 model", "tensor_info.name: _generate_random_tensor_v1(tensor_info, batch_size) for tensor_info in self.input_tensor_info.values() } def run(self,", "from tensorflow.core.protobuf import config_pb2 from tensorflow.core.protobuf import meta_graph_pb2 from tensorflow.python.client", "[node.op for node in graph_def.node]: raise RuntimeError(\"Failed to convert to", "= None) -> Sequence[int]: \"\"\"Gets a concrete tensor shape without", "or batch_size <= 0: raise ValueError(\"Must provide a valid batch", "not overwrite: return output_saved_model_dir = output_saved_model_dir or tempfile.mkdtemp() logging.info(\"Saving TensorRT", "a TF1 model with TensorRT and runs the converted model.\"\"\"", "for trt_convert_params in trt_convert_params_updater( default_trt_convert_params): trt_model = self.trt_model_handler_cls( model_config, trt_convert_params=trt_convert_params)", "return super(TestResult, cls).__new__(cls, outputs, latency, trt_convert_params) class ModelConfig( collections.namedtuple(\"ModelConfig\", [", "import graph_pb2 from tensorflow.core.framework import tensor_shape_pb2 from tensorflow.core.protobuf import config_pb2", "str: return \"{}({})\".format(self.__class__.__name__, str(self)) @property def model_config(self) -> ModelConfig: return", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "nodes_denylist=conversion_nodes_denylist, max_batch_size=trt_convert_params.max_batch_size, max_workspace_size_bytes=trt_convert_params.max_workspace_size_bytes, precision_mode=trt_convert_params.precision_mode, minimum_segment_size=trt_convert_params.minimum_segment_size, is_dynamic_op=trt_convert_params.is_dynamic_op, 
maximum_cached_engines=trt_convert_params.maximum_cached_engines, use_calibration=trt_convert_params.use_calibration, ) _check_conversion", "from names to input tensors.\"\"\" @abc.abstractmethod def run(self, inputs=None, warmup_iterations:", "config_proto = config_pb2.ConfigProto(device_count={\"CPU\": 1, \"GPU\": 0}) with session.Session(config=config_proto) as sess:", "Authors. All Rights Reserved. # # Licensed under the Apache", "conversion_output): \"\"\"Checks if conversion output has any TensorRT engines.\"\"\" def", "input_saved_model_dir=self.model_config.saved_model_dir, input_saved_model_tags=self.model_config.saved_model_tags, input_saved_model_signature_key=( self.model_config.saved_model_signature_key), conversion_params=trt_convert_params) def _check_conversion(self, graph_func): graph_def =", "saved_model_signature_key=self.model_config.saved_model_signature_key) @property def input_tensor_info(self) -> Mapping[str, meta_graph_pb2.TensorInfo]: return self.meta_graph.signature_def[ self.model_config.saved_model_signature_key].inputs", "is allowed to use GPU or not. Returns: `TestResult` summarizing", "TestResult: inputs = inputs or self.generate_random_inputs() try: device = \"/device:gpu:0\"", "a series of ModelHandlers for aggregrated testing/benchmarking in TF2.\"\"\" model_handler_cls", "sample models.\"\"\" import abc import collections import functools import tempfile", "Apache License, Version 2.0 (the \"License\"); # you may not", "### Helper Functions def _get_concrete_tensor_shape( tensor_shape: tensor_shape_pb2.TensorShapeProto, batch_size: Optional[int] =", "either express or implied. 
# See the License for the", "0, shape)): raise ValueError(\"Cannot have dynamic dimensions except for batch", "class ModelHandlerManagerV1(_ModelHandlerManagerBase): \"\"\"Manages a series of ModelHandlers for aggregrated testing/benchmarking", "trt_convert_params: trt.TrtConversionParams): return trt.TrtGraphConverterV2( input_saved_model_dir=self.model_config.saved_model_dir, input_saved_model_tags=self.model_config.saved_model_tags, input_saved_model_signature_key=( self.model_config.saved_model_signature_key), conversion_params=trt_convert_params) def", "dynamic batch size!\") shape[0] = batch_size if any(filter(lambda x: x", "the latency. Returns: `TestResultCollection` summarizing timing and numerics information for", "if batch_size is None or batch_size <= 0: raise ValueError(\"Must", "shape=shape, dtype=dtype, name=tensor_info.name.split(\":\")[0]).eval() def _generate_random_tensor_v2( tensor: framework_ops.Tensor, batch_size: Optional[int] =", "batch_size: Optional[int] = None): return self._ori_model.generate_random_inputs(batch_size) def run(self, inputs=None, warmup_iterations:", "Mapping[str, meta_graph_pb2.TensorInfo]: return self.meta_graph.signature_def[ self.model_config.saved_model_signature_key].outputs @property def input_tensort_names(self) -> Sequence[str]:", "are repeatedly loaded for different TensorRT conversion settings. # Using", "outputs = dict(zip(self.output_tensor_names, outputs)) return TestResult(latency=latency, outputs=outputs if inputs else", "load as saved_model_load from tensorflow.python.saved_model import loader as saved_model_loader from", "def output_tensor_names(self): return [tensor.name for tensor in self.graph_func.outputs] def generate_random_inputs(self,", "instead. 
warmup_iterations: Number of inferences to warm up the runtime.", "run(self, inputs=None, warmup_iterations: int = 10, benchmark_iterations: int = 100)", "test_result = ModelHandlerV2.run( self, inputs, warmup_iterations, benchmark_iterations, allow_to_use_gpu=True) return test_result._replace(trt_convert_params=self._trt_convert_params)", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "shape def _generate_random_tensor_v1(tensor_info: meta_graph_pb2.TensorInfo, batch_size: Optional[int] = None) -> np.ndarray:", "\"/device:gpu:0\" if allow_to_use_gpu else \"/device:cpu:0\" with framework_ops.device(device): for _ in", "Optional[int] = None) -> Sequence[int]: \"\"\"Gets a concrete tensor shape", "ModelConfig, results: Sequence[TestResult] = tuple()): return super(TestResultCollection, cls).__new__(cls, config, results)", "random_ops from tensorflow.python.saved_model import load as saved_model_load from tensorflow.python.saved_model import", "self.model_config.default_batch_size return [ _generate_random_tensor_v2(tensor, batch_size) for tensor in self.graph_func.inputs ]", "import tempfile import time from typing import Callable, Iterable, List,", "or not. Returns: `TestResult` summarizing timing and numerics information. \"\"\"", "in TF1, or a sequence of tensors in TF2. 
If", "saved_model_loader from tensorflow.python.saved_model import signature_constants from tensorflow.python.saved_model import tag_constants #", "= output_saved_model_dir or tempfile.mkdtemp() logging.info(\"Saving TensorRT model to %s!\", output_saved_model_dir)", "a series of ModelHandlers for aggregrated testing/benchmarking in TF1.\"\"\" model_handler_cls", "session.Session() as sess: meta_graph = saved_model_loader.load( sess=sess, export_dir=saved_model_dir, tags=saved_model_tags, )", "has any TensorRT engines.\"\"\" def _check_contains_trt_engine(self, graph_def: graph_pb2.GraphDef): if \"TRTEngineOp\"", "return output_saved_model_dir = output_saved_model_dir or tempfile.mkdtemp() logging.info(\"Saving TensorRT model to", "run(self, inputs: Optional[Mapping[str, np.ndarray]] = None, warmup_iterations=10, benchmark_iterations=100) -> TestResult:", "def generate_random_inputs(self, batch_size: Optional[int] = None ) -> Sequence[framework_ops.Tensor]: batch_size", "tensorflow.python.saved_model import tag_constants # pylint: disable=bad-whitespace ### Helper Functions def", "= inputs or self.generate_random_inputs() try: device = \"/device:gpu:0\" if allow_to_use_gpu", "None): return super(TestResult, cls).__new__(cls, outputs, latency, trt_convert_params) class ModelConfig( collections.namedtuple(\"ModelConfig\",", "runtime. benchmark_iterations: Number of inferences to measure the latency. Returns:", "= saved_model_load.load( export_dir=saved_model_dir, tags=saved_model_tags) graph_func = imported.signatures[saved_model_signature_key] return convert_to_constants.convert_variables_to_constants_v2(graph_func) ###", "def model_config(self) -> ModelConfig: return self._model_config @property def input_tensort_names(self) ->", "not. Returns: `TestResult` summarizing timing and numerics information. 
\"\"\" class", "return TestResult(latency=latency, outputs=outputs if inputs else None) class _TrtModelHandlerBase(_ModelHandlerBase): \"\"\"Base", "generate_random_inputs(self, batch_size: Optional[int] = None ) -> Sequence[framework_ops.Tensor]: batch_size =", "if inputs else None) class ModelHandlerV2(_ModelHandlerBase): \"\"\"Runs a model in", "return [info.name for info in self.output_tensor_info.values()] def generate_random_inputs(self, batch_size: Optional[int]", "with session.Session(): return random_ops.random_uniform( shape=shape, dtype=dtype, name=tensor_info.name.split(\":\")[0]).eval() def _generate_random_tensor_v2( tensor:", "names to input ndarrays in TF1, or a sequence of", "= None) -> np.ndarray: \"\"\"Generates a random tensor based on", "input ndarrays in TF1, or a sequence of tensors in", "ValueError(\"Must provide a valid batch size \" \"as the tensor", "a model.\"\"\" def __init__( self, model_config: ModelConfig, trt_convert_params: trt.TrtConversionParams, ):", "class ModelHandlerV2(_ModelHandlerBase): \"\"\"Runs a model in TF2.\"\"\" @property def graph_func(self):", "tensorflow.core.protobuf import meta_graph_pb2 from tensorflow.python.client import session from tensorflow.python.compiler.tensorrt import", "TensorRT!\") self._check_conversion(self._converter.convert()) self._conversion_is_saved = False @abc.abstractmethod def _create_converter(self, trt_convert_params: trt.TrtConversionParams):", "ValueError(\"Cannot generates random tensors for unknown rank!\") shape = [dim.size", "and # limitations under the License. 
# ============================================================================== \"\"\"Loads, converts,", "@functools.lru_cache() def load_meta_graph( saved_model_dir: str, saved_model_tags: str, saved_model_signature_key: str) ->", "export_dir=saved_model_dir, tags=saved_model_tags, ) output_node_names = [ tensor.name.split(\":\")[0] for tensor in", ") -> Mapping[str, np.ndarray]: batch_size = batch_size or self.model_config.default_batch_size return", "graph_def = graph_func.graph.as_graph_def() self._check_contains_trt_engine(graph_def) def run(self, inputs: Optional[Sequence[framework_ops.Tensor]] = None,", "Optional[Mapping[str, np.ndarray]] = None, warmup_iterations=10, benchmark_iterations=100, allow_to_use_gpu=False) -> TestResult: inputs", "model_config, trt_convert_params=trt_convert_params) self._trt_models.append(trt_model) self._result_collection = TestResultCollection( results=[], config=model_config) def __str__(self)", "Returns: `TestResult` summarizing timing and numerics information. \"\"\" class ModelHandlerV1(_ModelHandlerBase):", "imported = saved_model_load.load( export_dir=saved_model_dir, tags=saved_model_tags) graph_func = imported.signatures[saved_model_signature_key] return convert_to_constants.convert_variables_to_constants_v2(graph_func)", "def generate_random_inputs(self, batch_size: Optional[int] = None ) -> Mapping[str, np.ndarray]:", "def output_tensor_info(self) -> Mapping[str, meta_graph_pb2.TensorInfo]: return self.meta_graph.signature_def[ self.model_config.saved_model_signature_key].outputs @property def", "class ModelHandlerManagerV2(_ModelHandlerManagerBase): \"\"\"Manages a series of ModelHandlers for aggregrated testing/benchmarking", "-> trt.TrtConversionParams: return self._trt_convert_params def save(self, output_saved_model_dir: Optional[str] = None,", "on the data type and tensor shape.\"\"\" dtype = tf_dtypes.as_dtype(tensor_info.dtype)", "use this file except in compliance with the License. 
#", "np from tensorflow.core.framework import graph_pb2 from tensorflow.core.framework import tensor_shape_pb2 from", "model.\"\"\" def _create_converter(self, trt_convert_params: trt.TrtConversionParams): return trt.TrtGraphConverterV2( input_saved_model_dir=self.model_config.saved_model_dir, input_saved_model_tags=self.model_config.saved_model_tags, input_saved_model_signature_key=(", "model in TF1.\"\"\" @property def meta_graph(self) -> meta_graph_pb2.MetaGraphDef: return load_meta_graph(", "\"\"\"The modle handler class. ModelHandleV1/ModelHandlerV2.\"\"\" @property @classmethod @abc.abstractmethod def trt_model_handler_cls(cls):", "< 0: if batch_size is None or batch_size <= 0:", "self, batch_size: Optional[int] = None ) -> Mapping[str, Union[np.ndarray, framework_ops.Tensor]]:", "convert_to_constants.convert_variables_to_constants_v2(graph_func) ### Test Classes class TestResult( collections.namedtuple(\"TestResult\", [\"outputs\", \"latency\", \"trt_convert_params\"])):", "with session.Session(config=config_proto) as sess: importer.import_graph_def(self.meta_graph.graph_def) try: for _ in range(warmup_iterations):", "tensor in self.graph_func.inputs ] def run(self, inputs: Optional[Sequence[framework_ops.Tensor]] = None,", "of inferences to warm up the runtime. benchmark_iterations: Number of", "output_tensor_info(self) -> Mapping[str, meta_graph_pb2.TensorInfo]: return self.meta_graph.signature_def[ self.model_config.saved_model_signature_key].outputs @property def input_tensort_names(self)", "The TensorFlow Authors. All Rights Reserved. # # Licensed under", "= 100) -> TestResultCollection: \"\"\"Runs model inference with provided or", "the converted model.\"\"\" def _create_converter(self, trt_convert_params: trt.TrtConversionParams): conversion_nodes_denylist = self.output_tensor_names", "@property @classmethod @abc.abstractmethod def trt_model_handler_cls(cls): \"\"\"The TensorRTmodle handler class. 
TrtModelHandleV1/TrtModelHandlerV2.\"\"\"", "trt_convert_params: trt.TrtConversionParams): \"\"\"Creates a converter for the corresponding TF version.\"\"\"", "tensors.\"\"\" @abc.abstractmethod def run(self, inputs=None, warmup_iterations: int = 10, benchmark_iterations:", "a TF2 model with TensorRT and runs the converted model.\"\"\"", "cls).__new__(cls, outputs, latency, trt_convert_params) class ModelConfig( collections.namedtuple(\"ModelConfig\", [ \"saved_model_dir\", \"saved_model_tags\",", "import time from typing import Callable, Iterable, List, Mapping, Optional,", "1): return super(ModelConfig, cls).__new__(cls, saved_model_dir, saved_model_tags, saved_model_signature_key, default_batch_size) class TestResultCollection(", "\"GPU\": 0}) with session.Session(config=config_proto) as sess: importer.import_graph_def(self.meta_graph.graph_def) try: for _", "= None): return self._ori_model.generate_random_inputs(batch_size) def run(self, inputs=None, warmup_iterations: int =", "-> Sequence[str]: return [info.name for info in self.output_tensor_info.values()] def generate_random_inputs(self,", "batch size!\") shape[0] = batch_size if any(filter(lambda x: x <", "absl import logging import numpy as np from tensorflow.core.framework import", "TestResultCollection( results=[], config=model_config) def __str__(self) -> str: return \"Input Model:", "valid batch size \" \"as the tensor has a dynamic", "tensor_shape.unknown_rank: raise ValueError(\"Cannot generates random tensors for unknown rank!\") shape", "@functools.lru_cache() def load_graph_func(saved_model_dir: str, saved_model_tags: str, saved_model_signature_key: str): \"\"\"Loads a", "return \"{}({})\".format(self.__class__.__name__, str(self)) @property def model_config(self) -> ModelConfig: return self._model_config", "= batch_size if any(filter(lambda x: x < 0, shape)): raise", "in compliance with the License. 
# You may obtain a", "provide a valid batch size \" \"as the tensor has", "__str__(self) -> str: return str(self._model_config) def __repr__(self) -> str: return", "saved_model_tags=self.model_config.saved_model_tags, saved_model_signature_key=self.model_config.saved_model_signature_key) return convert_to_constants.convert_variables_to_constants_v2(graph_func) @property def input_tensor_names(self): return [tensor.name for", "software # distributed under the License is distributed on an", "in TF1.\"\"\" with session.Session() as sess: meta_graph = saved_model_loader.load( sess=sess,", "tensorflow.python.ops import random_ops from tensorflow.python.saved_model import load as saved_model_load from", "def __new__(cls, outputs: Mapping[str, np.ndarray], latency: List[float], trt_convert_params: trt.TrtConversionParams =", "-> TestResult: \"\"\"Runs the model with provided or randomly generated", "used instead. warmup_iterations: Number of inferences to warm up the", "TensorRT!\") test_result = ModelHandlerV1.run( self, inputs, warmup_iterations, benchmark_iterations, allow_to_use_gpu=True) return", "\"\"\"Runs model inference with provided or randomly generated input tensors.", "trt.TrtConversionParams): return trt.TrtGraphConverterV2( input_saved_model_dir=self.model_config.saved_model_dir, input_saved_model_tags=self.model_config.saved_model_tags, input_saved_model_signature_key=( self.model_config.saved_model_signature_key), conversion_params=trt_convert_params) def _check_conversion(self,", "generated input tensors will be used instead. warmup_iterations: Number of", "@abc.abstractmethod def model_handler_cls(cls): \"\"\"The modle handler class. 
ModelHandleV1/ModelHandlerV2.\"\"\" @property @classmethod", "@property def model_config(self) -> ModelConfig: return self._model_config @property def input_tensort_names(self)", "str, saved_model_tags: str, saved_model_signature_key: str): \"\"\"Loads a graph function in", "super(_TrtModelHandlerBase, self).__init__(model_config) self._trt_convert_params = trt_convert_params self._converter = self._create_converter(trt_convert_params) logging.info(\"Converting to", "return [tensor.name for tensor in self.graph_func.outputs] def generate_random_inputs(self, batch_size: Optional[int]", "names to input tensors.\"\"\" @abc.abstractmethod def run(self, inputs=None, warmup_iterations: int", "import signature_constants from tensorflow.python.saved_model import tag_constants # pylint: disable=bad-whitespace ###", "= batch_size or self.model_config.default_batch_size return [ _generate_random_tensor_v2(tensor, batch_size) for tensor", "allow_to_use_gpu: bool = False) -> TestResult: \"\"\"Runs the model with", "without dynamic dimensions.\"\"\" if tensor_shape.unknown_rank: raise ValueError(\"Cannot generates random tensors", "self.graph_func.outputs] def generate_random_inputs(self, batch_size: Optional[int] = None ) -> Sequence[framework_ops.Tensor]:", "trt.TrtGraphConverter( input_saved_model_dir=self.model_config.saved_model_dir, input_saved_model_tags=self.model_config.saved_model_tags, input_saved_model_signature_key=( self.model_config.saved_model_signature_key), nodes_denylist=conversion_nodes_denylist, max_batch_size=trt_convert_params.max_batch_size, max_workspace_size_bytes=trt_convert_params.max_workspace_size_bytes, precision_mode=trt_convert_params.precision_mode, minimum_segment_size=trt_convert_params.minimum_segment_size,", "random_ops.random_uniform( shape=shape, dtype=dtype, name=tensor_info.name.split(\":\")[0]).eval() def _generate_random_tensor_v2( tensor: framework_ops.Tensor, batch_size: Optional[int]", "except Exception as exc: raise 
RuntimeError(\"Failed to run model inference!", "sequence of tensors in TF2. If `None`, ramdomly generated inputs", "self._model_config @property def input_tensort_names(self) -> Sequence[str]: \"\"\"Names of input tensors.\"\"\"", "with the License. # You may obtain a copy of", "trt_model_handler_cls(cls): \"\"\"The TensorRTmodle handler class. TrtModelHandleV1/TrtModelHandlerV2.\"\"\" @property def model_config(self): return", "def generate_random_inputs( self, batch_size: Optional[int] = None ) -> Mapping[str,", "in self.input_tensor_info.values()] @property def output_tensor_names(self) -> Sequence[str]: return [info.name for", "in [self._ori_model] + self._trt_models ] return self._result_collection._replace(results=results) class ModelHandlerManagerV1(_ModelHandlerManagerBase): \"\"\"Manages", "= self.graph_func(*inputs) latency.append(time.time() - before) except Exception as exc: raise", "-> Mapping[str, Union[np.ndarray, framework_ops.Tensor]]: \"\"\"Generates mapping from names to input", "numpy as np from tensorflow.core.framework import graph_pb2 from tensorflow.core.framework import", "tensorflow.core.protobuf import config_pb2 from tensorflow.core.protobuf import meta_graph_pb2 from tensorflow.python.client import", "range(warmup_iterations): sess.run(fetches=self.output_tensor_names, feed_dict=inputs) latency = [] for _ in range(benchmark_iterations):", "tensorflow.python.framework import convert_to_constants from tensorflow.python.framework import dtypes as tf_dtypes from", "a random tensor based on the data type and tensor", "saved_model_dir, saved_model_tags, saved_model_signature_key, default_batch_size) class TestResultCollection( collections.namedtuple(\"TestResultCollection\", [\"results\", \"config\"])): def", "outputs)) return TestResult(latency=latency, outputs=outputs if inputs else None) class _TrtModelHandlerBase(_ModelHandlerBase):", "_create_converter(self, trt_convert_params: trt.TrtConversionParams): \"\"\"Creates a converter for the 
corresponding TF", "int = 10, benchmark_iterations: int = 100) -> TestResultCollection: \"\"\"Runs", "RuntimeError(\"Failed to convert to TensorRT! \" \"Model Information: {}\".format(str(self))) def", "def run(self, inputs: Optional[Mapping[str, np.ndarray]] = None, warmup_iterations=10, benchmark_iterations=100) ->", "express or implied. # See the License for the specific", "for tensor_info in self.input_tensor_info.values() } def run(self, inputs: Optional[Mapping[str, np.ndarray]]", "except in compliance with the License. # You may obtain", "trt.TrtConversionParams, ): super(_TrtModelHandlerBase, self).__init__(model_config) self._trt_convert_params = trt_convert_params self._converter = self._create_converter(trt_convert_params)", "ValueError(\"The tensor cannot have a rank of 0!\") if shape[0]", "tensor based on the data type and tensor shape.\"\"\" shape", "\" \"as the tensor has a dynamic batch size!\") shape[0]", "@property def input_tensort_names(self) -> Sequence[str]: return [info.name for info in", "self.output_tensor_names return trt.TrtGraphConverter( input_saved_model_dir=self.model_config.saved_model_dir, input_saved_model_tags=self.model_config.saved_model_tags, input_saved_model_signature_key=( self.model_config.saved_model_signature_key), nodes_denylist=conversion_nodes_denylist, max_batch_size=trt_convert_params.max_batch_size, max_workspace_size_bytes=trt_convert_params.max_workspace_size_bytes,", "def load_graph_func(saved_model_dir: str, saved_model_tags: str, saved_model_signature_key: str): \"\"\"Loads a graph", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "to %s!\", output_saved_model_dir) self._converter.save(output_saved_model_dir) self._model_config = self.model_config._replace( saved_model_dir=output_saved_model_dir) self._conversion_is_saved =", "TF version.\"\"\" @abc.abstractmethod def _check_conversion(self, conversion_output): \"\"\"Checks if conversion output", "Sequence[framework_ops.Tensor]: 
batch_size = batch_size or self.model_config.default_batch_size return [ _generate_random_tensor_v2(tensor, batch_size)", "np.ndarray]] = None, warmup_iterations=10, benchmark_iterations=100) -> TestResult: self.save(overwrite=False) logging.info(\"Running with", "dtype=dtype, name=tensor_info.name.split(\":\")[0]).eval() def _generate_random_tensor_v2( tensor: framework_ops.Tensor, batch_size: Optional[int] = None)", "or randomly generated input tensors. Args: inputs: Mapping from names", "if not allow_to_use_gpu: config_proto = config_pb2.ConfigProto(device_count={\"CPU\": 1, \"GPU\": 0}) with", "CONDITIONS OF ANY KIND, either express or implied. # See", "TF2. If `None`, ramdomly generated input tensors will be used", "latency, trt_convert_params) class ModelConfig( collections.namedtuple(\"ModelConfig\", [ \"saved_model_dir\", \"saved_model_tags\", \"saved_model_signature_key\", \"default_batch_size\"", "time from typing import Callable, Iterable, List, Mapping, Optional, Sequence,", "in graph_def.node]: raise RuntimeError(\"Failed to convert to TensorRT! \" \"Model", "= ModelHandlerV1 trt_model_handler_cls = TrtModelHandlerV1 class ModelHandlerManagerV2(_ModelHandlerManagerBase): \"\"\"Manages a series", "mapping from names to input tensors.\"\"\" @abc.abstractmethod def run(self, inputs=None,", "load_meta_graph( saved_model_dir: str, saved_model_tags: str, saved_model_signature_key: str) -> meta_graph_pb2.MetaGraphDef: \"\"\"Loads", "__str__(self) -> str: return \"Input Model: {}\".format(str(self._ori_model)) def __repr__(self) ->", "in [node.op for node in graph_def.node]: raise RuntimeError(\"Failed to convert", "use GPU or not. 
Returns: `TestResult` summarizing timing and numerics", "shape=shape, dtype=tensor.dtype, name=tensor.name) # Models are repeatedly loaded for different", "random tensor based on the data type and tensor shape.\"\"\"", "-> str: return str(self._model_config) def __repr__(self) -> str: return \"{}({})\".format(self.__class__.__name__,", "ndarrays in TF1. Or a sequence of tensors in TF2.", "= _get_concrete_tensor_shape(tensor_info.tensor_shape, batch_size) with session.Session(): return random_ops.random_uniform( shape=shape, dtype=dtype, name=tensor_info.name.split(\":\")[0]).eval()", "class TestResult( collections.namedtuple(\"TestResult\", [\"outputs\", \"latency\", \"trt_convert_params\"])): def __new__(cls, outputs: Mapping[str,", "model_config: ModelConfig, trt_convert_params: trt.TrtConversionParams, ): super(_TrtModelHandlerBase, self).__init__(model_config) self._trt_convert_params = trt_convert_params", "pylint: disable=bad-whitespace ### Helper Functions def _get_concrete_tensor_shape( tensor_shape: tensor_shape_pb2.TensorShapeProto, batch_size:", "List[float], trt_convert_params: trt.TrtConversionParams = None): return super(TestResult, cls).__new__(cls, outputs, latency,", "shape without dynamic dimensions.\"\"\" if tensor_shape.unknown_rank: raise ValueError(\"Cannot generates random", "run(self, inputs: Optional[Sequence[framework_ops.Tensor]] = None, warmup_iterations=10, benchmark_iterations=100, allow_to_use_gpu=False) -> TestResult:", "dtype = tf_dtypes.as_dtype(tensor_info.dtype) shape = _get_concrete_tensor_shape(tensor_info.tensor_shape, batch_size) with session.Session(): return", "def __init__( self, model_config: ModelConfig, default_trt_convert_params: trt.TrtConversionParams, trt_convert_params_updater: Callable[[trt.TrtConversionParams], Iterable[trt.TrtConversionParams]]):", "benchmark_iterations: int = 100, allow_to_use_gpu: bool = False) -> TestResult:", "settings. 
\"\"\" inputs = inputs or self.generate_random_inputs() results = [", "self.input_tensor_info.values() } def run(self, inputs: Optional[Mapping[str, np.ndarray]] = None, warmup_iterations=10,", "inputs: Mapping from names to input ndarrays in TF1. Or", "ModelHandlerV1(_ModelHandlerBase): \"\"\"Runs a model in TF1.\"\"\" @property def meta_graph(self) ->", "ModelHandlerManagerV2(_ModelHandlerManagerBase): \"\"\"Manages a series of ModelHandlers for aggregrated testing/benchmarking in", "def input_tensor_names(self): return [tensor.name for tensor in self.graph_func.inputs] @property def", "{ tensor_info.name: _generate_random_tensor_v1(tensor_info, batch_size) for tensor_info in self.input_tensor_info.values() } def", "sess.run(fetches=self.output_tensor_names, feed_dict=inputs) latency.append(time.time() - before) except Exception as exc: raise", "from tensorflow.python.saved_model import signature_constants from tensorflow.python.saved_model import tag_constants # pylint:", "self._conversion_is_saved and not overwrite: return output_saved_model_dir = output_saved_model_dir or tempfile.mkdtemp()", "series of ModelHandlers for aggregrated testing/benchmarking in TF2.\"\"\" model_handler_cls =", "_ in range(warmup_iterations): self.graph_func(*inputs) latency = [] for _ in", "TrtModelHandlerV2(_TrtModelHandlerBase, ModelHandlerV2): \"\"\"Converts a TF2 model with TensorRT and runs", "models.\"\"\" import abc import collections import functools import tempfile import", "tempfile import time from typing import Callable, Iterable, List, Mapping,", "names to input ndarrays in TF1. Or a sequence of", "import load as saved_model_load from tensorflow.python.saved_model import loader as saved_model_loader", "None) -> Sequence[int]: \"\"\"Gets a concrete tensor shape without dynamic", "config_proto = None if not allow_to_use_gpu: config_proto = config_pb2.ConfigProto(device_count={\"CPU\": 1,", "def trt_model_handler_cls(cls): \"\"\"The TensorRTmodle handler class. 
TrtModelHandleV1/TrtModelHandlerV2.\"\"\" @property def model_config(self):", "_get_concrete_tensor_shape( tensor_shape: tensor_shape_pb2.TensorShapeProto, batch_size: Optional[int] = None) -> Sequence[int]: \"\"\"Gets", "framework_ops.Tensor: \"\"\"Generates a random tensor based on the data type", "output_node_names)) meta_graph.graph_def.CopyFrom(graph_def) return meta_graph @functools.lru_cache() def load_graph_func(saved_model_dir: str, saved_model_tags: str,", "int = 100, allow_to_use_gpu: bool = False) -> TestResult: \"\"\"Runs", "_check_contains_trt_engine(self, graph_def: graph_pb2.GraphDef): if \"TRTEngineOp\" not in [node.op for node", "tensorflow.python.saved_model import loader as saved_model_loader from tensorflow.python.saved_model import signature_constants from", "\"\"\"Loads a `tf.MetaGraphDef` in TF1.\"\"\" with session.Session() as sess: meta_graph", "graph_func = imported.signatures[saved_model_signature_key] return convert_to_constants.convert_variables_to_constants_v2(graph_func) ### Test Classes class TestResult(", "and numerics information. 
\"\"\" class ModelHandlerV1(_ModelHandlerBase): \"\"\"Runs a model in", "or self.generate_random_inputs() try: device = \"/device:gpu:0\" if allow_to_use_gpu else \"/device:cpu:0\"", "\"Model information: {}\".format(str(self))) from exc outputs = dict(zip(self.output_tensor_names, outputs)) return", "{}\".format(str(self))) from exc outputs = dict(zip(self.output_tensor_names, outputs)) return TestResult(latency=latency, outputs=outputs", "= ( convert_to_constants.convert_variables_to_constants_from_session_graph( sess, meta_graph.graph_def, output_node_names)) meta_graph.graph_def.CopyFrom(graph_def) return meta_graph @functools.lru_cache()", "if conversion output has any TensorRT engines.\"\"\" def _check_contains_trt_engine(self, graph_def:", "= self.trt_model_handler_cls( model_config, trt_convert_params=trt_convert_params) self._trt_models.append(trt_model) self._result_collection = TestResultCollection( results=[], config=model_config)", "\"/device:cpu:0\" with framework_ops.device(device): for _ in range(warmup_iterations): self.graph_func(*inputs) latency =", "super(TestResultCollection, cls).__new__(cls, config, results) class _ModelHandlerBase(metaclass=abc.ABCMeta): \"\"\"Base class for running", "trt.TrtGraphConverterV2( input_saved_model_dir=self.model_config.saved_model_dir, input_saved_model_tags=self.model_config.saved_model_tags, input_saved_model_signature_key=( self.model_config.saved_model_signature_key), conversion_params=trt_convert_params) def _check_conversion(self, graph_func): graph_def", "= self._create_converter(trt_convert_params) logging.info(\"Converting to TensorRT!\") self._check_conversion(self._converter.convert()) self._conversion_is_saved = False @abc.abstractmethod", "[dim.size for dim in tensor_shape.dim] if not shape: raise ValueError(\"The", "self.generate_random_inputs() results = [ model.run(inputs, warmup_iterations, benchmark_iterations) for model in", "return trt.TrtGraphConverter( 
input_saved_model_dir=self.model_config.saved_model_dir, input_saved_model_tags=self.model_config.saved_model_tags, input_saved_model_signature_key=( self.model_config.saved_model_signature_key), nodes_denylist=conversion_nodes_denylist, max_batch_size=trt_convert_params.max_batch_size, max_workspace_size_bytes=trt_convert_params.max_workspace_size_bytes, precision_mode=trt_convert_params.precision_mode,", "collections.namedtuple(\"TestResultCollection\", [\"results\", \"config\"])): def __new__(cls, config: ModelConfig, results: Sequence[TestResult] =", "as sess: meta_graph = saved_model_loader.load( sess=sess, export_dir=saved_model_dir, tags=saved_model_tags, ) output_node_names", "saved_model_tags, saved_model_signature_key, default_batch_size) class TestResultCollection( collections.namedtuple(\"TestResultCollection\", [\"results\", \"config\"])): def __new__(cls,", "generated inputs will be used instead. warmup_iterations: Number of inferences", "False @abc.abstractmethod def _create_converter(self, trt_convert_params: trt.TrtConversionParams): \"\"\"Creates a converter for", "Returns: `TestResultCollection` summarizing timing and numerics information for different TensorRT", "= TestResultCollection( results=[], config=model_config) def __str__(self) -> str: return \"Input", "-> Sequence[int]: \"\"\"Gets a concrete tensor shape without dynamic dimensions.\"\"\"", "import loader as saved_model_loader from tensorflow.python.saved_model import signature_constants from tensorflow.python.saved_model", "as np from tensorflow.core.framework import graph_pb2 from tensorflow.core.framework import tensor_shape_pb2", "def save(self, output_saved_model_dir: Optional[str] = None, overwrite=True) -> None: \"\"\"Saves", "_get_concrete_tensor_shape(tensor.shape.as_proto(), batch_size) return random_ops.random_uniform( shape=shape, dtype=tensor.dtype, name=tensor.name) # Models are", "inputs: Mapping from names to input ndarrays in TF1, or", "warmup_iterations: int = 10, 
benchmark_iterations: int = 100) -> TestResultCollection:", "return \"Input Model: {}\".format(str(self._ori_model)) def __repr__(self) -> str: return \"{}({})\".format(self.__class__.__name__,", "return self._ori_model.generate_random_inputs(batch_size) def run(self, inputs=None, warmup_iterations: int = 10, benchmark_iterations:", "])): \"\"\"Configurations for test models.\"\"\" def __new__(cls, saved_model_dir: str, saved_model_tags:", "from names to input ndarrays in TF1. Or a sequence", "can reduce I/O. @functools.lru_cache() def load_meta_graph( saved_model_dir: str, saved_model_tags: str,", "= config_pb2.ConfigProto(device_count={\"CPU\": 1, \"GPU\": 0}) with session.Session(config=config_proto) as sess: importer.import_graph_def(self.meta_graph.graph_def)", "collections.namedtuple(\"ModelConfig\", [ \"saved_model_dir\", \"saved_model_tags\", \"saved_model_signature_key\", \"default_batch_size\" ])): \"\"\"Configurations for test", "session.Session(config=config_proto) as sess: importer.import_graph_def(self.meta_graph.graph_def) try: for _ in range(warmup_iterations): sess.run(fetches=self.output_tensor_names,", "config_pb2.ConfigProto(device_count={\"CPU\": 1, \"GPU\": 0}) with session.Session(config=config_proto) as sess: importer.import_graph_def(self.meta_graph.graph_def) try:", "from tensorflow.python.compiler.tensorrt import trt_convert as trt from tensorflow.python.framework import convert_to_constants", "ModelHandlerV1): \"\"\"Converts a TF1 model with TensorRT and runs the", "a dynamic batch size!\") shape[0] = batch_size if any(filter(lambda x:", "@property def input_tensort_names(self) -> Sequence[str]: \"\"\"Names of input tensors.\"\"\" @property", "= [] for _ in range(benchmark_iterations): before = time.time() outputs", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "self._conversion_is_saved = False @abc.abstractmethod def _create_converter(self, trt_convert_params: trt.TrtConversionParams): 
\"\"\"Creates a", "@property def trt_convert_params(self) -> trt.TrtConversionParams: return self._trt_convert_params def save(self, output_saved_model_dir:", "saved_model_signature_key: str) -> meta_graph_pb2.MetaGraphDef: \"\"\"Loads a `tf.MetaGraphDef` in TF1.\"\"\" with", "return random_ops.random_uniform( shape=shape, dtype=tensor.dtype, name=tensor.name) # Models are repeatedly loaded", "convert_to_constants from tensorflow.python.framework import dtypes as tf_dtypes from tensorflow.python.framework import", "trt_convert as trt from tensorflow.python.framework import convert_to_constants from tensorflow.python.framework import", "warmup_iterations: int = 10, benchmark_iterations: int = 100, allow_to_use_gpu: bool", "\"Input Model: {}\".format(str(self._ori_model)) def __repr__(self) -> str: return \"{}({})\".format(self.__class__.__name__, str(self))", "information. \"\"\" class ModelHandlerV1(_ModelHandlerBase): \"\"\"Runs a model in TF1.\"\"\" @property", "dtypes as tf_dtypes from tensorflow.python.framework import importer from tensorflow.python.framework import", "TensorRT and runs the converted model.\"\"\" def _create_converter(self, trt_convert_params: trt.TrtConversionParams):", "import importer from tensorflow.python.framework import ops as framework_ops from tensorflow.python.ops", "in self.graph_func.inputs ] def run(self, inputs: Optional[Sequence[framework_ops.Tensor]] = None, warmup_iterations=10,", "graph_func): graph_def = graph_func.graph.as_graph_def() self._check_contains_trt_engine(graph_def) def run(self, inputs: Optional[Sequence[framework_ops.Tensor]] =", "model_config def __str__(self) -> str: return str(self._model_config) def __repr__(self) ->", "if not shape: raise ValueError(\"The tensor cannot have a rank", "tensor: framework_ops.Tensor, batch_size: Optional[int] = None) -> framework_ops.Tensor: \"\"\"Generates a", "Version 2.0 (the \"License\"); # you may not use this", "None if not allow_to_use_gpu: config_proto = 
config_pb2.ConfigProto(device_count={\"CPU\": 1, \"GPU\": 0})", "TestResult: inputs = inputs or self.generate_random_inputs() config_proto = None if", "self._trt_models ] return self._result_collection._replace(results=results) class ModelHandlerManagerV1(_ModelHandlerManagerBase): \"\"\"Manages a series of", "for aggregrated testing/benchmarking.\"\"\" def __init__( self, model_config: ModelConfig, default_trt_convert_params: trt.TrtConversionParams,", "for tensor in meta_graph.signature_def[saved_model_signature_key].outputs.values() ] graph_def = ( convert_to_constants.convert_variables_to_constants_from_session_graph( sess,", "trt_model = self.trt_model_handler_cls( model_config, trt_convert_params=trt_convert_params) self._trt_models.append(trt_model) self._result_collection = TestResultCollection( results=[],", "model_handler_cls = ModelHandlerV1 trt_model_handler_cls = TrtModelHandlerV1 class ModelHandlerManagerV2(_ModelHandlerManagerBase): \"\"\"Manages a", "TF1. Or a sequence of tensors in TF2. 
If `None`,", "has a dynamic batch size!\") shape[0] = batch_size if any(filter(lambda", "@property def output_tensor_names(self) -> Sequence[str]: return [info.name for info in", "str: return \"{}({})\".format(self.__class__.__name__, str(self)) @property @classmethod @abc.abstractmethod def model_handler_cls(cls): \"\"\"The", "import meta_graph_pb2 from tensorflow.python.client import session from tensorflow.python.compiler.tensorrt import trt_convert", "except for batch size!\") return shape def _generate_random_tensor_v1(tensor_info: meta_graph_pb2.TensorInfo, batch_size:", "by applicable law or agreed to in writing, software #", "raise ValueError(\"Must provide a valid batch size \" \"as the", "-> ModelConfig: return self._model_config @property def input_tensort_names(self) -> Sequence[str]: \"\"\"Names", "graph_pb2 from tensorflow.core.framework import tensor_shape_pb2 from tensorflow.core.protobuf import config_pb2 from", "= ( signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY), default_batch_size: int = 1): return super(ModelConfig, cls).__new__(cls,", "saved_model_tags: str, saved_model_signature_key: str) -> meta_graph_pb2.MetaGraphDef: \"\"\"Loads a `tf.MetaGraphDef` in", "self.save(overwrite=False) logging.info(\"Running with TensorRT!\") test_result = ModelHandlerV2.run( self, inputs, warmup_iterations,", "of inferences to measure the latency. allow_to_use_gpu: Whether it is", "warmup_iterations, benchmark_iterations, allow_to_use_gpu=True) return test_result._replace(trt_convert_params=self._trt_convert_params) class TrtModelHandlerV2(_TrtModelHandlerBase, ModelHandlerV2): \"\"\"Converts a", "None) -> framework_ops.Tensor: \"\"\"Generates a random tensor based on the", "information for different TensorRT conversion settings. 
\"\"\" inputs = inputs", "ops as framework_ops from tensorflow.python.ops import random_ops from tensorflow.python.saved_model import", "info in self.input_tensor_info.values()] @property def output_tensor_names(self) -> Sequence[str]: return [info.name", "def graph_func(self): graph_func = load_graph_func( saved_model_dir=self.model_config.saved_model_dir, saved_model_tags=self.model_config.saved_model_tags, saved_model_signature_key=self.model_config.saved_model_signature_key) return convert_to_constants.convert_variables_to_constants_v2(graph_func)", "_TrtModelHandlerBase(_ModelHandlerBase): \"\"\"Base class for converting and running a model.\"\"\" def", "def input_tensor_info(self) -> Mapping[str, meta_graph_pb2.TensorInfo]: return self.meta_graph.signature_def[ self.model_config.saved_model_signature_key].inputs @property def", "saved_model_loader.load( sess=sess, export_dir=saved_model_dir, tags=saved_model_tags, ) output_node_names = [ tensor.name.split(\":\")[0] for", "tensor shape.\"\"\" dtype = tf_dtypes.as_dtype(tensor_info.dtype) shape = _get_concrete_tensor_shape(tensor_info.tensor_shape, batch_size) with", "Args: inputs: Mapping from names to input ndarrays in TF1,", "trt_convert_params in trt_convert_params_updater( default_trt_convert_params): trt_model = self.trt_model_handler_cls( model_config, trt_convert_params=trt_convert_params) self._trt_models.append(trt_model)", "tensor based on the data type and tensor shape.\"\"\" dtype", "trt_convert_params_updater: Callable[[trt.TrtConversionParams], Iterable[trt.TrtConversionParams]]): self._ori_model = self.model_handler_cls(model_config) self._trt_models = [] for", "input tensors will be used instead. 
warmup_iterations: Number of inferences", "-> Mapping[str, np.ndarray]: batch_size = batch_size or self.model_config.default_batch_size return {", "\"\"\"Generates mapping from names to input tensors.\"\"\" @abc.abstractmethod def run(self,", "range(benchmark_iterations): before = time.time() outputs = self.graph_func(*inputs) latency.append(time.time() - before)", "applicable law or agreed to in writing, software # distributed", "saved_model_signature_key: str = ( signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY), default_batch_size: int = 1): return", "output_tensor_names(self) -> Sequence[str]: \"\"\"Names of output tensors.\"\"\" @abc.abstractmethod def generate_random_inputs(", "\"\"\"Manages a series of ModelHandlers for aggregrated testing/benchmarking.\"\"\" def __init__(", "a model.\"\"\" def __init__(self, model_config: ModelConfig): self._model_config = model_config def", "testing/benchmarking in TF1.\"\"\" model_handler_cls = ModelHandlerV1 trt_model_handler_cls = TrtModelHandlerV1 class", "Using cache can reduce I/O. @functools.lru_cache() def load_meta_graph( saved_model_dir: str,", "node in graph_def.node]: raise RuntimeError(\"Failed to convert to TensorRT! 
\"", "save(self, output_saved_model_dir: Optional[str] = None, overwrite=True) -> None: \"\"\"Saves a", "self._trt_convert_params def save(self, output_saved_model_dir: Optional[str] = None, overwrite=True) -> None:", "Optional[int] = None) -> framework_ops.Tensor: \"\"\"Generates a random tensor based", "series of ModelHandlers for aggregrated testing/benchmarking in TF1.\"\"\" model_handler_cls =", "def __new__(cls, config: ModelConfig, results: Sequence[TestResult] = tuple()): return super(TestResultCollection,", "[\"results\", \"config\"])): def __new__(cls, config: ModelConfig, results: Sequence[TestResult] = tuple()):", "def input_tensort_names(self) -> Sequence[str]: \"\"\"Names of input tensors.\"\"\" @property def", "self._result_collection._replace(results=results) class ModelHandlerManagerV1(_ModelHandlerManagerBase): \"\"\"Manages a series of ModelHandlers for aggregrated", "class _ModelHandlerBase(metaclass=abc.ABCMeta): \"\"\"Base class for running a model.\"\"\" def __init__(self,", ") output_node_names = [ tensor.name.split(\":\")[0] for tensor in meta_graph.signature_def[saved_model_signature_key].outputs.values() ]", "cls).__new__(cls, saved_model_dir, saved_model_tags, saved_model_signature_key, default_batch_size) class TestResultCollection( collections.namedtuple(\"TestResultCollection\", [\"results\", \"config\"])):", "benchmark_iterations: Number of inferences to measure the latency. 
Returns: `TestResultCollection`", "def meta_graph(self) -> meta_graph_pb2.MetaGraphDef: return load_meta_graph( saved_model_dir=self.model_config.saved_model_dir, saved_model_tags=self.model_config.saved_model_tags, saved_model_signature_key=self.model_config.saved_model_signature_key) @property", "for test models.\"\"\" def __new__(cls, saved_model_dir: str, saved_model_tags: Sequence[str] =", "# You may obtain a copy of the License at", "saved_model_dir: str, saved_model_tags: str, saved_model_signature_key: str) -> meta_graph_pb2.MetaGraphDef: \"\"\"Loads a", "np.ndarray: \"\"\"Generates a random tensor based on the data type", "input ndarrays in TF1. Or a sequence of tensors in", "tuple()): return super(TestResultCollection, cls).__new__(cls, config, results) class _ModelHandlerBase(metaclass=abc.ABCMeta): \"\"\"Base class", ") -> Sequence[framework_ops.Tensor]: batch_size = batch_size or self.model_config.default_batch_size return [", "model_config: ModelConfig): self._model_config = model_config def __str__(self) -> str: return", "np.ndarray], latency: List[float], trt_convert_params: trt.TrtConversionParams = None): return super(TestResult, cls).__new__(cls,", "device = \"/device:gpu:0\" if allow_to_use_gpu else \"/device:cpu:0\" with framework_ops.device(device): for", "for aggregrated testing/benchmarking in TF1.\"\"\" model_handler_cls = ModelHandlerV1 trt_model_handler_cls =", "self, inputs, warmup_iterations, benchmark_iterations, allow_to_use_gpu=True) return test_result._replace(trt_convert_params=self._trt_convert_params) class _ModelHandlerManagerBase(metaclass=abc.ABCMeta): \"\"\"Manages", "Models are repeatedly loaded for different TensorRT conversion settings. 
#", "Information: {}\".format(str(self))) def __str__(self) -> str: base = super(_TrtModelHandlerBase, self).__str__()", "Sequence[TestResult] = tuple()): return super(TestResultCollection, cls).__new__(cls, config, results) class _ModelHandlerBase(metaclass=abc.ABCMeta):", "\"\"\"Gets a concrete tensor shape without dynamic dimensions.\"\"\" if tensor_shape.unknown_rank:", "for converting and running a model.\"\"\" def __init__( self, model_config:", "\"latency\", \"trt_convert_params\"])): def __new__(cls, outputs: Mapping[str, np.ndarray], latency: List[float], trt_convert_params:", "session from tensorflow.python.compiler.tensorrt import trt_convert as trt from tensorflow.python.framework import", "converting and running a model.\"\"\" def __init__( self, model_config: ModelConfig,", "results) class _ModelHandlerBase(metaclass=abc.ABCMeta): \"\"\"Base class for running a model.\"\"\" def", "runs the converted model.\"\"\" def _create_converter(self, trt_convert_params: trt.TrtConversionParams): conversion_nodes_denylist =", "import collections import functools import tempfile import time from typing", "return trt.TrtGraphConverterV2( input_saved_model_dir=self.model_config.saved_model_dir, input_saved_model_tags=self.model_config.saved_model_tags, input_saved_model_signature_key=( self.model_config.saved_model_signature_key), conversion_params=trt_convert_params) def _check_conversion(self, graph_func):", "batch_size: Optional[int] = None) -> np.ndarray: \"\"\"Generates a random tensor", "benchmark_iterations: Number of inferences to measure the latency. 
allow_to_use_gpu: Whether", "of ModelHandlers for aggregrated testing/benchmarking in TF2.\"\"\" model_handler_cls = ModelHandlerV2", "trt from tensorflow.python.framework import convert_to_constants from tensorflow.python.framework import dtypes as", "import ops as framework_ops from tensorflow.python.ops import random_ops from tensorflow.python.saved_model", "before) except Exception as exc: raise RuntimeError(\"Failed to run model", "in self.graph_func.outputs] def generate_random_inputs(self, batch_size: Optional[int] = None ) ->", "trt_convert_params: trt.TrtConversionParams = None): return super(TestResult, cls).__new__(cls, outputs, latency, trt_convert_params)", "Sequence[int]: \"\"\"Gets a concrete tensor shape without dynamic dimensions.\"\"\" if", "inferences to measure the latency. Returns: `TestResultCollection` summarizing timing and", "\"License\"); # you may not use this file except in", "def run(self, inputs: Optional[Mapping[str, np.ndarray]] = None, warmup_iterations=10, benchmark_iterations=100, allow_to_use_gpu=False)", "saved_model_tags=self.model_config.saved_model_tags, saved_model_signature_key=self.model_config.saved_model_signature_key) @property def input_tensor_info(self) -> Mapping[str, meta_graph_pb2.TensorInfo]: return self.meta_graph.signature_def[", "settings. # Using cache can reduce I/O. 
@functools.lru_cache() def load_meta_graph(", "saved_model_tags: str, saved_model_signature_key: str): \"\"\"Loads a graph function in TF2.\"\"\"", ") -> Mapping[str, Union[np.ndarray, framework_ops.Tensor]]: \"\"\"Generates mapping from names to", "runs the converted model.\"\"\" def _create_converter(self, trt_convert_params: trt.TrtConversionParams): return trt.TrtGraphConverterV2(", "output_saved_model_dir: Optional[str] = None, overwrite=True) -> None: \"\"\"Saves a TensorRT", "saved_model_load from tensorflow.python.saved_model import loader as saved_model_loader from tensorflow.python.saved_model import", "engines.\"\"\" def _check_contains_trt_engine(self, graph_def: graph_pb2.GraphDef): if \"TRTEngineOp\" not in [node.op", "TensorRT converted model.\"\"\" if self._conversion_is_saved and not overwrite: return output_saved_model_dir", "for aggregrated testing/benchmarking in TF2.\"\"\" model_handler_cls = ModelHandlerV2 trt_model_handler_cls =", "TestResult: self.save(overwrite=False) logging.info(\"Running with TensorRT!\") test_result = ModelHandlerV1.run( self, inputs,", "tensors for unknown rank!\") shape = [dim.size for dim in", "input_tensor_names(self): return [tensor.name for tensor in self.graph_func.inputs] @property def output_tensor_names(self):", "inputs, warmup_iterations, benchmark_iterations, allow_to_use_gpu=True) return test_result._replace(trt_convert_params=self._trt_convert_params) class TrtModelHandlerV2(_TrtModelHandlerBase, ModelHandlerV2): \"\"\"Converts" ]
[]
[ "#---------------------- n=list(ev*100) pc=[] for i in range(len(n)): n[i]=round(n[i],4) pc.append('PC-'+str(i+1)+'('+str(n[i])+')') #----------------------", "Variance Ratio ------------ plt.bar([1,2,3,4],list(ev*100),label='Principal Components',color='b') plt.legend() plt.xlabel('Principal Components ') #----------------------", "= pd.read_csv(\"D:\\Python_programs\\ML\\Iris Data\\KPCA\\iris.csv\") #df.to_csv('iris.csv') from sklearn.preprocessing import StandardScaler features =", "# Standardizing the features x = StandardScaler().fit_transform(x) from sklearn.decomposition import", "Ratio of IRIS Dataset using kernel:'+str(ker)) plt.show() #--------------------------------------------------- # *Since", "into Pandas DataFrame df = pd.read_csv(\"D:\\Python_programs\\ML\\Iris Data\\KPCA\\iris.csv\") #df.to_csv('iris.csv') from sklearn.preprocessing", "out the features x = df.loc[:, features].values # Separating out", "2 principal components have high variance. # so, we select", "= (8,8)) ax = fig.add_subplot(1,1,1) ax.set_xlabel('PC-1', fontsize = 15) ax.set_ylabel('PC-2',", "= 15) ax.set_ylabel('PC-2', fontsize = 15) ax.set_title('KPCA on IRIS Dataset", "pandas as pd # load dataset into Pandas DataFrame df", "plt.show() # FOR SHOWING THE PLOT #------------------- SAVING DATA INTO", "/ np.sum(explained_variance) #--------- Bar Graph for Explained Variance Ratio ------------", "components have high variance. 
# so, we select pc-1 and", "linear,rbf,poly # def Kernel_Pca(ker): kpca = KernelPCA(n_components=4, kernel=ker, gamma=15) x_kpca", "plt.bar([1,2,3,4],list(ev*100),label='Principal Components',color='b') plt.legend() plt.xlabel('Principal Components ') #---------------------- n=list(ev*100) pc=[] for", "THE PLOT #------------------- SAVING DATA INTO CSV FILE ------------ finalDf.to_csv('iris_after_KPCA_using_'+str(ker)+'.csv')", "from sklearn.preprocessing import StandardScaler features = ['sepal length', 'sepal width',", "= pd.concat([principalDf, df[['target']]], axis = 1) # Plotting pc1 &", "plt.xticks([1,2,3,4],pc, fontsize=7, rotation=30) plt.ylabel('Variance Ratio') plt.title('Variance Ratio of IRIS Dataset", "principalComponents = kpca.fit_transform(x) principalDf = pd.DataFrame(data = principalComponents , columns", "['sepal length', 'sepal width', 'petal length', 'petal width'] # Separating", "pd.DataFrame(data = principalComponents , columns = ['PC-1', 'PC-2']) # Adding", "import numpy as np import matplotlib.pyplot as plt import pandas", "length', 'petal width'] # Separating out the features x =", "ev = explained_variance / np.sum(explained_variance) #--------- Bar Graph for Explained", "StandardScaler features = ['sepal length', 'sepal width', 'petal length', 'petal", "= 20) targets = ['Iris-setosa', 'Iris-versicolor', 'Iris-virginica'] colors = ['r',", "plt.title('Variance Ratio of IRIS Dataset using kernel:'+str(ker)) plt.show() #--------------------------------------------------- #", "fig.add_subplot(1,1,1) ax.set_xlabel('PC-1', fontsize = 15) ax.set_ylabel('PC-2', fontsize = 15) ax.set_title('KPCA", "finalDf.loc[indicesToKeep, 'PC-2'] , c = color , s = 30)", "components # KERNELS : linear,rbf,poly # def Kernel_Pca(ker): kpca =", "initial 2 principal components have high variance. 
# so, we", "df.loc[:, features].values # Separating out the target y = df.loc[:,['target']].values", "= principalComponents , columns = ['PC-1', 'PC-2']) # Adding lables", "= df.loc[:,['target']].values # Standardizing the features x = StandardScaler().fit_transform(x) from", "the features x = df.loc[:, features].values # Separating out the", "pd # load dataset into Pandas DataFrame df = pd.read_csv(\"D:\\Python_programs\\ML\\Iris", "15) ax.set_title('KPCA on IRIS Dataset using kernel:'+str(ker), fontsize = 20)", "plt.show() #--------------------------------------------------- # *Since the initial 2 principal components have", "the principle components # KERNELS : linear,rbf,poly # def Kernel_Pca(ker):", "'PC-1'] , finalDf.loc[indicesToKeep, 'PC-2'] , c = color , s", "colors = ['r', 'g', 'b'] for target, color in zip(targets,colors):", "x_kpca = kpca.fit_transform(x) principalComponents = kpca.fit_transform(x) principalDf = pd.DataFrame(data =", "DataFrame df = pd.read_csv(\"D:\\Python_programs\\ML\\Iris Data\\KPCA\\iris.csv\") #df.to_csv('iris.csv') from sklearn.preprocessing import StandardScaler", "= df.loc[:, features].values # Separating out the target y =", "kernel=ker, gamma=15) x_kpca = kpca.fit_transform(x) kpca_transform = kpca.fit_transform(x) explained_variance =", "= plt.figure(figsize = (8,8)) ax = fig.add_subplot(1,1,1) ax.set_xlabel('PC-1', fontsize =", "ax.set_title('KPCA on IRIS Dataset using kernel:'+str(ker), fontsize = 20) targets", "plt.figure(figsize = (8,8)) ax = fig.add_subplot(1,1,1) ax.set_xlabel('PC-1', fontsize = 15)", "explained_variance / np.sum(explained_variance) #--------- Bar Graph for Explained Variance Ratio", "n[i]=round(n[i],4) pc.append('PC-'+str(i+1)+'('+str(n[i])+')') #---------------------- plt.xticks([1,2,3,4],pc, fontsize=7, rotation=30) plt.ylabel('Variance Ratio') plt.title('Variance Ratio", "Plotting pc1 & pc2 fig = plt.figure(figsize = (8,8)) ax", "IRIS Dataset using kernel:'+str(ker)) plt.show() 
#--------------------------------------------------- # *Since the initial", "lables finalDf = pd.concat([principalDf, df[['target']]], axis = 1) # Plotting", "width', 'petal length', 'petal width'] # Separating out the features", "= ['sepal length', 'sepal width', 'petal length', 'petal width'] #", "Graph for Explained Variance Ratio ------------ plt.bar([1,2,3,4],list(ev*100),label='Principal Components',color='b') plt.legend() plt.xlabel('Principal", "------------ plt.bar([1,2,3,4],list(ev*100),label='Principal Components',color='b') plt.legend() plt.xlabel('Principal Components ') #---------------------- n=list(ev*100) pc=[]", "ax.set_ylabel('PC-2', fontsize = 15) ax.set_title('KPCA on IRIS Dataset using kernel:'+str(ker),", "kpca_transform = kpca.fit_transform(x) explained_variance = np.var(kpca_transform, axis=0) ev = explained_variance", "KernelPCA(n_components=4, kernel=ker, gamma=15) x_kpca = kpca.fit_transform(x) kpca_transform = kpca.fit_transform(x) explained_variance", "FILE ------------ finalDf.to_csv('iris_after_KPCA_using_'+str(ker)+'.csv') #------------------------------------------------------ k=['linear','rbf','poly'] for i in k: Kernel_Pca(i)", "ax.scatter(finalDf.loc[indicesToKeep, 'PC-1'] , finalDf.loc[indicesToKeep, 'PC-2'] , c = color ,", "= ['PC-1', 'PC-2']) # Adding lables finalDf = pd.concat([principalDf, df[['target']]],", "#------------------- SAVING DATA INTO CSV FILE ------------ finalDf.to_csv('iris_after_KPCA_using_'+str(ker)+'.csv') #------------------------------------------------------ k=['linear','rbf','poly']", "dataset into Pandas DataFrame df = pd.read_csv(\"D:\\Python_programs\\ML\\Iris Data\\KPCA\\iris.csv\") #df.to_csv('iris.csv') from", "kpca.fit_transform(x) principalComponents = kpca.fit_transform(x) principalDf = pd.DataFrame(data = principalComponents ,", "fontsize = 15) ax.set_title('KPCA on IRIS Dataset using kernel:'+str(ker), fontsize", "def Kernel_Pca(ker): kpca = KernelPCA(n_components=4, kernel=ker, gamma=15) 
x_kpca = kpca.fit_transform(x)", "high variance. # so, we select pc-1 and pc-2. #---------------------------------------------------", "Separating out the features x = df.loc[:, features].values # Separating", "load dataset into Pandas DataFrame df = pd.read_csv(\"D:\\Python_programs\\ML\\Iris Data\\KPCA\\iris.csv\") #df.to_csv('iris.csv')", "pc-2. #--------------------------------------------------- kpca = KernelPCA(n_components=2, kernel=ker, gamma=15) x_kpca = kpca.fit_transform(x)", "= KernelPCA(n_components=2, kernel=ker, gamma=15) x_kpca = kpca.fit_transform(x) principalComponents = kpca.fit_transform(x)", "'Iris-virginica'] colors = ['r', 'g', 'b'] for target, color in", "from sklearn.decomposition import KernelPCA ## Finding the principle components #", "sklearn.preprocessing import StandardScaler features = ['sepal length', 'sepal width', 'petal", "as pd # load dataset into Pandas DataFrame df =", "df.loc[:,['target']].values # Standardizing the features x = StandardScaler().fit_transform(x) from sklearn.decomposition", "kpca.fit_transform(x) explained_variance = np.var(kpca_transform, axis=0) ev = explained_variance / np.sum(explained_variance)", "and pc-2. 
#--------------------------------------------------- kpca = KernelPCA(n_components=2, kernel=ker, gamma=15) x_kpca =", "#--------------------------------------------------- kpca = KernelPCA(n_components=2, kernel=ker, gamma=15) x_kpca = kpca.fit_transform(x) principalComponents", "import matplotlib.pyplot as plt import pandas as pd # load", "= kpca.fit_transform(x) principalDf = pd.DataFrame(data = principalComponents , columns =", "Dataset using kernel:'+str(ker)) plt.show() #--------------------------------------------------- # *Since the initial 2", "length', 'sepal width', 'petal length', 'petal width'] # Separating out", "pc.append('PC-'+str(i+1)+'('+str(n[i])+')') #---------------------- plt.xticks([1,2,3,4],pc, fontsize=7, rotation=30) plt.ylabel('Variance Ratio') plt.title('Variance Ratio of", "plt.legend() plt.xlabel('Principal Components ') #---------------------- n=list(ev*100) pc=[] for i in", "'petal length', 'petal width'] # Separating out the features x", "the initial 2 principal components have high variance. # so,", "using kernel:'+str(ker), fontsize = 20) targets = ['Iris-setosa', 'Iris-versicolor', 'Iris-virginica']", "#---------------------- plt.xticks([1,2,3,4],pc, fontsize=7, rotation=30) plt.ylabel('Variance Ratio') plt.title('Variance Ratio of IRIS", "the features x = StandardScaler().fit_transform(x) from sklearn.decomposition import KernelPCA ##", "principal components have high variance. 
# so, we select pc-1", "= fig.add_subplot(1,1,1) ax.set_xlabel('PC-1', fontsize = 15) ax.set_ylabel('PC-2', fontsize = 15)", "plt import pandas as pd # load dataset into Pandas", "pd.concat([principalDf, df[['target']]], axis = 1) # Plotting pc1 & pc2", "width'] # Separating out the features x = df.loc[:, features].values", "1) # Plotting pc1 & pc2 fig = plt.figure(figsize =", "= color , s = 30) ax.legend(targets) ax.grid() plt.show() #", "for Explained Variance Ratio ------------ plt.bar([1,2,3,4],list(ev*100),label='Principal Components',color='b') plt.legend() plt.xlabel('Principal Components", "KERNELS : linear,rbf,poly # def Kernel_Pca(ker): kpca = KernelPCA(n_components=4, kernel=ker,", "np import matplotlib.pyplot as plt import pandas as pd #", "= np.var(kpca_transform, axis=0) ev = explained_variance / np.sum(explained_variance) #--------- Bar", "on IRIS Dataset using kernel:'+str(ker), fontsize = 20) targets =", "features x = StandardScaler().fit_transform(x) from sklearn.decomposition import KernelPCA ## Finding", "SHOWING THE PLOT #------------------- SAVING DATA INTO CSV FILE ------------", "Separating out the target y = df.loc[:,['target']].values # Standardizing the", "of IRIS Dataset using kernel:'+str(ker)) plt.show() #--------------------------------------------------- # *Since the", "target y = df.loc[:,['target']].values # Standardizing the features x =", "kernel=ker, gamma=15) x_kpca = kpca.fit_transform(x) principalComponents = kpca.fit_transform(x) principalDf =", "= KernelPCA(n_components=4, kernel=ker, gamma=15) x_kpca = kpca.fit_transform(x) kpca_transform = kpca.fit_transform(x)", "gamma=15) x_kpca = kpca.fit_transform(x) principalComponents = kpca.fit_transform(x) principalDf = pd.DataFrame(data", "= ['r', 'g', 'b'] for target, color in zip(targets,colors): indicesToKeep", "indicesToKeep = finalDf['target'] == target ax.scatter(finalDf.loc[indicesToKeep, 'PC-1'] , finalDf.loc[indicesToKeep, 'PC-2']", "pc2 fig = plt.figure(figsize = (8,8)) 
ax = fig.add_subplot(1,1,1) ax.set_xlabel('PC-1',", "range(len(n)): n[i]=round(n[i],4) pc.append('PC-'+str(i+1)+'('+str(n[i])+')') #---------------------- plt.xticks([1,2,3,4],pc, fontsize=7, rotation=30) plt.ylabel('Variance Ratio') plt.title('Variance", "SAVING DATA INTO CSV FILE ------------ finalDf.to_csv('iris_after_KPCA_using_'+str(ker)+'.csv') #------------------------------------------------------ k=['linear','rbf','poly'] for", "# Separating out the features x = df.loc[:, features].values #", "= 1) # Plotting pc1 & pc2 fig = plt.figure(figsize", "import KernelPCA ## Finding the principle components # KERNELS :", "pc1 & pc2 fig = plt.figure(figsize = (8,8)) ax =", "# KERNELS : linear,rbf,poly # def Kernel_Pca(ker): kpca = KernelPCA(n_components=4,", "columns = ['PC-1', 'PC-2']) # Adding lables finalDf = pd.concat([principalDf,", "matplotlib.pyplot as plt import pandas as pd # load dataset", "['Iris-setosa', 'Iris-versicolor', 'Iris-virginica'] colors = ['r', 'g', 'b'] for target,", "c = color , s = 30) ax.legend(targets) ax.grid() plt.show()", "INTO CSV FILE ------------ finalDf.to_csv('iris_after_KPCA_using_'+str(ker)+'.csv') #------------------------------------------------------ k=['linear','rbf','poly'] for i in", "numpy as np import matplotlib.pyplot as plt import pandas as", "Adding lables finalDf = pd.concat([principalDf, df[['target']]], axis = 1) #", "'sepal width', 'petal length', 'petal width'] # Separating out the", "StandardScaler().fit_transform(x) from sklearn.decomposition import KernelPCA ## Finding the principle components", "Explained Variance Ratio ------------ plt.bar([1,2,3,4],list(ev*100),label='Principal Components',color='b') plt.legend() plt.xlabel('Principal Components ')", "Standardizing the features x = StandardScaler().fit_transform(x) from sklearn.decomposition import KernelPCA", "kpca = KernelPCA(n_components=2, kernel=ker, gamma=15) x_kpca = kpca.fit_transform(x) principalComponents =", "principalComponents , columns = 
['PC-1', 'PC-2']) # Adding lables finalDf", "kernel:'+str(ker), fontsize = 20) targets = ['Iris-setosa', 'Iris-versicolor', 'Iris-virginica'] colors", "principle components # KERNELS : linear,rbf,poly # def Kernel_Pca(ker): kpca", "df[['target']]], axis = 1) # Plotting pc1 & pc2 fig", "ax.grid() plt.show() # FOR SHOWING THE PLOT #------------------- SAVING DATA", "features x = df.loc[:, features].values # Separating out the target", "# def Kernel_Pca(ker): kpca = KernelPCA(n_components=4, kernel=ker, gamma=15) x_kpca =", "*Since the initial 2 principal components have high variance. #", "KernelPCA ## Finding the principle components # KERNELS : linear,rbf,poly", "'PC-2'] , c = color , s = 30) ax.legend(targets)", "pd.read_csv(\"D:\\Python_programs\\ML\\Iris Data\\KPCA\\iris.csv\") #df.to_csv('iris.csv') from sklearn.preprocessing import StandardScaler features = ['sepal", "IRIS Dataset using kernel:'+str(ker), fontsize = 20) targets = ['Iris-setosa',", "axis=0) ev = explained_variance / np.sum(explained_variance) #--------- Bar Graph for", "= 30) ax.legend(targets) ax.grid() plt.show() # FOR SHOWING THE PLOT", "#--------------------------------------------------- # *Since the initial 2 principal components have high", "= StandardScaler().fit_transform(x) from sklearn.decomposition import KernelPCA ## Finding the principle", "sklearn.decomposition import KernelPCA ## Finding the principle components # KERNELS", "# FOR SHOWING THE PLOT #------------------- SAVING DATA INTO CSV", "# load dataset into Pandas DataFrame df = pd.read_csv(\"D:\\Python_programs\\ML\\Iris Data\\KPCA\\iris.csv\")", "') #---------------------- n=list(ev*100) pc=[] for i in range(len(n)): n[i]=round(n[i],4) pc.append('PC-'+str(i+1)+'('+str(n[i])+')')", "#--------- Bar Graph for Explained Variance Ratio ------------ plt.bar([1,2,3,4],list(ev*100),label='Principal Components',color='b')", "# Plotting pc1 & pc2 fig = plt.figure(figsize = (8,8))", "target ax.scatter(finalDf.loc[indicesToKeep, 
'PC-1'] , finalDf.loc[indicesToKeep, 'PC-2'] , c = color", "so, we select pc-1 and pc-2. #--------------------------------------------------- kpca = KernelPCA(n_components=2,", "'g', 'b'] for target, color in zip(targets,colors): indicesToKeep = finalDf['target']", "['PC-1', 'PC-2']) # Adding lables finalDf = pd.concat([principalDf, df[['target']]], axis", "gamma=15) x_kpca = kpca.fit_transform(x) kpca_transform = kpca.fit_transform(x) explained_variance = np.var(kpca_transform,", "as np import matplotlib.pyplot as plt import pandas as pd", "np.sum(explained_variance) #--------- Bar Graph for Explained Variance Ratio ------------ plt.bar([1,2,3,4],list(ev*100),label='Principal", "Ratio ------------ plt.bar([1,2,3,4],list(ev*100),label='Principal Components',color='b') plt.legend() plt.xlabel('Principal Components ') #---------------------- n=list(ev*100)", "n=list(ev*100) pc=[] for i in range(len(n)): n[i]=round(n[i],4) pc.append('PC-'+str(i+1)+'('+str(n[i])+')') #---------------------- plt.xticks([1,2,3,4],pc,", "we select pc-1 and pc-2. 
#--------------------------------------------------- kpca = KernelPCA(n_components=2, kernel=ker,", "y = df.loc[:,['target']].values # Standardizing the features x = StandardScaler().fit_transform(x)", "i in range(len(n)): n[i]=round(n[i],4) pc.append('PC-'+str(i+1)+'('+str(n[i])+')') #---------------------- plt.xticks([1,2,3,4],pc, fontsize=7, rotation=30) plt.ylabel('Variance", "for i in range(len(n)): n[i]=round(n[i],4) pc.append('PC-'+str(i+1)+'('+str(n[i])+')') #---------------------- plt.xticks([1,2,3,4],pc, fontsize=7, rotation=30)", "Ratio') plt.title('Variance Ratio of IRIS Dataset using kernel:'+str(ker)) plt.show() #---------------------------------------------------", "targets = ['Iris-setosa', 'Iris-versicolor', 'Iris-virginica'] colors = ['r', 'g', 'b']", "#df.to_csv('iris.csv') from sklearn.preprocessing import StandardScaler features = ['sepal length', 'sepal", "for target, color in zip(targets,colors): indicesToKeep = finalDf['target'] == target", "ax.set_xlabel('PC-1', fontsize = 15) ax.set_ylabel('PC-2', fontsize = 15) ax.set_title('KPCA on", "Pandas DataFrame df = pd.read_csv(\"D:\\Python_programs\\ML\\Iris Data\\KPCA\\iris.csv\") #df.to_csv('iris.csv') from sklearn.preprocessing import", "= kpca.fit_transform(x) explained_variance = np.var(kpca_transform, axis=0) ev = explained_variance /", "pc=[] for i in range(len(n)): n[i]=round(n[i],4) pc.append('PC-'+str(i+1)+'('+str(n[i])+')') #---------------------- plt.xticks([1,2,3,4],pc, fontsize=7,", "Dataset using kernel:'+str(ker), fontsize = 20) targets = ['Iris-setosa', 'Iris-versicolor',", "fontsize = 20) targets = ['Iris-setosa', 'Iris-versicolor', 'Iris-virginica'] colors =", "20) targets = ['Iris-setosa', 'Iris-versicolor', 'Iris-virginica'] colors = ['r', 'g',", "import StandardScaler features = ['sepal length', 'sepal width', 'petal length',", "principalDf = pd.DataFrame(data = principalComponents , columns = ['PC-1', 'PC-2'])", "kpca = KernelPCA(n_components=4, kernel=ker, gamma=15) x_kpca 
= kpca.fit_transform(x) kpca_transform =", "have high variance. # so, we select pc-1 and pc-2.", "Finding the principle components # KERNELS : linear,rbf,poly # def", ": linear,rbf,poly # def Kernel_Pca(ker): kpca = KernelPCA(n_components=4, kernel=ker, gamma=15)", "rotation=30) plt.ylabel('Variance Ratio') plt.title('Variance Ratio of IRIS Dataset using kernel:'+str(ker))", "plt.ylabel('Variance Ratio') plt.title('Variance Ratio of IRIS Dataset using kernel:'+str(ker)) plt.show()", "kernel:'+str(ker)) plt.show() #--------------------------------------------------- # *Since the initial 2 principal components", "in zip(targets,colors): indicesToKeep = finalDf['target'] == target ax.scatter(finalDf.loc[indicesToKeep, 'PC-1'] ,", "target, color in zip(targets,colors): indicesToKeep = finalDf['target'] == target ax.scatter(finalDf.loc[indicesToKeep,", "= finalDf['target'] == target ax.scatter(finalDf.loc[indicesToKeep, 'PC-1'] , finalDf.loc[indicesToKeep, 'PC-2'] ,", ", finalDf.loc[indicesToKeep, 'PC-2'] , c = color , s =", "kpca.fit_transform(x) principalDf = pd.DataFrame(data = principalComponents , columns = ['PC-1',", "CSV FILE ------------ finalDf.to_csv('iris_after_KPCA_using_'+str(ker)+'.csv') #------------------------------------------------------ k=['linear','rbf','poly'] for i in k:", ", columns = ['PC-1', 'PC-2']) # Adding lables finalDf =", "DATA INTO CSV FILE ------------ finalDf.to_csv('iris_after_KPCA_using_'+str(ker)+'.csv') #------------------------------------------------------ k=['linear','rbf','poly'] for i", "in range(len(n)): n[i]=round(n[i],4) pc.append('PC-'+str(i+1)+'('+str(n[i])+')') #---------------------- plt.xticks([1,2,3,4],pc, fontsize=7, rotation=30) plt.ylabel('Variance Ratio')", "15) ax.set_ylabel('PC-2', fontsize = 15) ax.set_title('KPCA on IRIS Dataset using", "= kpca.fit_transform(x) principalComponents = kpca.fit_transform(x) principalDf = pd.DataFrame(data = principalComponents", "# so, we select pc-1 and pc-2. 
#--------------------------------------------------- kpca =", "kpca.fit_transform(x) kpca_transform = kpca.fit_transform(x) explained_variance = np.var(kpca_transform, axis=0) ev =", "'petal width'] # Separating out the features x = df.loc[:,", "Data\\KPCA\\iris.csv\") #df.to_csv('iris.csv') from sklearn.preprocessing import StandardScaler features = ['sepal length',", "features].values # Separating out the target y = df.loc[:,['target']].values #", "(8,8)) ax = fig.add_subplot(1,1,1) ax.set_xlabel('PC-1', fontsize = 15) ax.set_ylabel('PC-2', fontsize", "KernelPCA(n_components=2, kernel=ker, gamma=15) x_kpca = kpca.fit_transform(x) principalComponents = kpca.fit_transform(x) principalDf", "Components ') #---------------------- n=list(ev*100) pc=[] for i in range(len(n)): n[i]=round(n[i],4)", "zip(targets,colors): indicesToKeep = finalDf['target'] == target ax.scatter(finalDf.loc[indicesToKeep, 'PC-1'] , finalDf.loc[indicesToKeep,", "'PC-2']) # Adding lables finalDf = pd.concat([principalDf, df[['target']]], axis =", "'b'] for target, color in zip(targets,colors): indicesToKeep = finalDf['target'] ==", "= pd.DataFrame(data = principalComponents , columns = ['PC-1', 'PC-2']) #", "['r', 'g', 'b'] for target, color in zip(targets,colors): indicesToKeep =", "plt.xlabel('Principal Components ') #---------------------- n=list(ev*100) pc=[] for i in range(len(n)):", "the target y = df.loc[:,['target']].values # Standardizing the features x", "s = 30) ax.legend(targets) ax.grid() plt.show() # FOR SHOWING THE", "= ['Iris-setosa', 'Iris-versicolor', 'Iris-virginica'] colors = ['r', 'g', 'b'] for", "== target ax.scatter(finalDf.loc[indicesToKeep, 'PC-1'] , finalDf.loc[indicesToKeep, 'PC-2'] , c =", "= 15) ax.set_title('KPCA on IRIS Dataset using kernel:'+str(ker), fontsize =", "axis = 1) # Plotting pc1 & pc2 fig =", "pc-1 and pc-2. 
#--------------------------------------------------- kpca = KernelPCA(n_components=2, kernel=ker, gamma=15) x_kpca", ", c = color , s = 30) ax.legend(targets) ax.grid()", "fig = plt.figure(figsize = (8,8)) ax = fig.add_subplot(1,1,1) ax.set_xlabel('PC-1', fontsize", "np.var(kpca_transform, axis=0) ev = explained_variance / np.sum(explained_variance) #--------- Bar Graph", "& pc2 fig = plt.figure(figsize = (8,8)) ax = fig.add_subplot(1,1,1)", "out the target y = df.loc[:,['target']].values # Standardizing the features", "finalDf['target'] == target ax.scatter(finalDf.loc[indicesToKeep, 'PC-1'] , finalDf.loc[indicesToKeep, 'PC-2'] , c", "fontsize=7, rotation=30) plt.ylabel('Variance Ratio') plt.title('Variance Ratio of IRIS Dataset using", "PLOT #------------------- SAVING DATA INTO CSV FILE ------------ finalDf.to_csv('iris_after_KPCA_using_'+str(ker)+'.csv') #------------------------------------------------------", "variance. # so, we select pc-1 and pc-2. #--------------------------------------------------- kpca", "Kernel_Pca(ker): kpca = KernelPCA(n_components=4, kernel=ker, gamma=15) x_kpca = kpca.fit_transform(x) kpca_transform", "fontsize = 15) ax.set_ylabel('PC-2', fontsize = 15) ax.set_title('KPCA on IRIS", "'Iris-versicolor', 'Iris-virginica'] colors = ['r', 'g', 'b'] for target, color", "x = StandardScaler().fit_transform(x) from sklearn.decomposition import KernelPCA ## Finding the", "df = pd.read_csv(\"D:\\Python_programs\\ML\\Iris Data\\KPCA\\iris.csv\") #df.to_csv('iris.csv') from sklearn.preprocessing import StandardScaler features", "## Finding the principle components # KERNELS : linear,rbf,poly #", "x_kpca = kpca.fit_transform(x) kpca_transform = kpca.fit_transform(x) explained_variance = np.var(kpca_transform, axis=0)", "import pandas as pd # load dataset into Pandas DataFrame", "using kernel:'+str(ker)) plt.show() #--------------------------------------------------- # *Since the initial 2 principal", "FOR SHOWING THE PLOT #------------------- 
SAVING DATA INTO CSV FILE", ", s = 30) ax.legend(targets) ax.grid() plt.show() # FOR SHOWING", "explained_variance = np.var(kpca_transform, axis=0) ev = explained_variance / np.sum(explained_variance) #---------", "ax = fig.add_subplot(1,1,1) ax.set_xlabel('PC-1', fontsize = 15) ax.set_ylabel('PC-2', fontsize =", "as plt import pandas as pd # load dataset into", "Components',color='b') plt.legend() plt.xlabel('Principal Components ') #---------------------- n=list(ev*100) pc=[] for i", "30) ax.legend(targets) ax.grid() plt.show() # FOR SHOWING THE PLOT #-------------------", "# *Since the initial 2 principal components have high variance.", "= kpca.fit_transform(x) kpca_transform = kpca.fit_transform(x) explained_variance = np.var(kpca_transform, axis=0) ev", "# Separating out the target y = df.loc[:,['target']].values # Standardizing", "features = ['sepal length', 'sepal width', 'petal length', 'petal width']", "Bar Graph for Explained Variance Ratio ------------ plt.bar([1,2,3,4],list(ev*100),label='Principal Components',color='b') plt.legend()", "# Adding lables finalDf = pd.concat([principalDf, df[['target']]], axis = 1)", "finalDf = pd.concat([principalDf, df[['target']]], axis = 1) # Plotting pc1", "= explained_variance / np.sum(explained_variance) #--------- Bar Graph for Explained Variance", "color , s = 30) ax.legend(targets) ax.grid() plt.show() # FOR", "color in zip(targets,colors): indicesToKeep = finalDf['target'] == target ax.scatter(finalDf.loc[indicesToKeep, 'PC-1']", "<gh_stars>1-10 import numpy as np import matplotlib.pyplot as plt import", "x = df.loc[:, features].values # Separating out the target y", "ax.legend(targets) ax.grid() plt.show() # FOR SHOWING THE PLOT #------------------- SAVING", "select pc-1 and pc-2. #--------------------------------------------------- kpca = KernelPCA(n_components=2, kernel=ker, gamma=15)" ]
[ "@property def decade_with_century_regex(self) -> Pattern: return self._decade_with_century_regex @property def future_regex(self)", "BaseNumberParser from recognizers_number.number.italian.extractors import ItalianIntegerExtractor, ItalianCardinalExtractor from recognizers_number.number.italian.parsers import ItalianNumberParserConfiguration", "return self._cardinal_extractor @property def time_unit_regex(self) -> Pattern: return self._time_unit_regex @property", "from .date_extractor_config import ItalianDateExtractorConfiguration from recognizers_text.extractor import Extractor from recognizers_number", "return self._integer_extractor @property def number_parser(self) -> BaseNumberParser: return self._number_parser @property", "RegExpUtility.get_safe_reg_exp( ItalianDateTime.PastSuffixRegex) self._check_both_before_after = ItalianDateTime.CheckBothBeforeAfter self._simple_cases_regexes = [ RegExpUtility.get_safe_reg_exp(ItalianDateTime.SimpleCasesRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.BetweenRegex),", "return self._month_num_regex @property def century_suffix_regex(self) -> Pattern: return self._century_suffix_regex @property", "class ItalianDatePeriodExtractorConfiguration(DatePeriodExtractorConfiguration): @property def previous_prefix_regex(self) -> Pattern: return self._previous_prefix_regex @property", "self._range_unit_regex @property def date_point_extractor(self) -> DateTimeExtractor: return self._date_point_extractor @property def", "= self.before_regex.search(source) if match: return MatchedIndex(True, match.start()) return MatchedIndex(False, -1)", "ItalianDateTime.YearPeriodRegex ) self._month_num_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthNumRegex ) self._century_suffix_regex = RegExpUtility.get_safe_reg_exp(", "month_num_regex(self) -> Pattern: return self._month_num_regex @property def century_suffix_regex(self) -> Pattern:", "-> Pattern: return self._time_unit_regex @property def within_next_prefix_regex(self) 
-> Pattern: return", ") self._more_than_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.MoreThanRegex ) self._duration_date_restrictions = ItalianDateTime.DurationDateRestrictions self._year_period_regex", "@property def past_regex(self) -> Pattern: return self._past_regex @property def decade_with_century_regex(self)", "self.from_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.FromRegex) self.connector_and_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.ConnectorAndRegex) self.before_regex =", "time_unit_regex(self) -> Pattern: return self._time_unit_regex @property def within_next_prefix_regex(self) -> Pattern:", "@property def range_unit_regex(self) -> Pattern: return self._range_unit_regex @property def date_point_extractor(self)", "-> Pattern: return self._range_unit_regex @property def date_point_extractor(self) -> DateTimeExtractor: return", "# Licensed under the MIT License. from typing import List,", "-> Pattern: return self._day_regex @property def week_day_regex(self) -> Pattern: return", "= RegExpUtility.get_safe_reg_exp( ItalianDateTime.MoreThanRegex ) self._duration_date_restrictions = ItalianDateTime.DurationDateRestrictions self._year_period_regex = RegExpUtility.get_safe_reg_exp(", "@property def week_day_of_month_regex(self) -> Pattern: return self._week_day_of_month_regex @property def all_half_year_regex(self)", "till_regex(self) -> Pattern: return self._till_regex @property def followed_unit(self) -> Pattern:", "@property def month_of_regex(self) -> Pattern: return self._month_of_regex @property def date_unit_regex(self)", "..base_date import BaseDateExtractor from ..base_dateperiod import DatePeriodExtractorConfiguration, MatchedIndex from .duration_extractor_config", "= ItalianCardinalExtractor() self._previous_prefix_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.PreviousPrefixRegex ) self._cardinal_extractor = ItalianCardinalExtractor()", "[ RegExpUtility.get_safe_reg_exp(ItalianDateTime.SimpleCasesRegex), 
RegExpUtility.get_safe_reg_exp(ItalianDateTime.BetweenRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.OneWordPeriodRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthWithYear), RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthNumWithYear), RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearPeriodRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekOfYearRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayOfMonthRegex),", "def illegal_year_regex(self) -> Pattern: return self._illegal_year_regex @property def year_regex(self) ->", "from ..base_date import BaseDateExtractor from ..base_dateperiod import DatePeriodExtractorConfiguration, MatchedIndex from", "range_unit_regex(self) -> Pattern: return self._range_unit_regex @property def date_point_extractor(self) -> DateTimeExtractor:", "RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthFrontBetweenRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthFrontSimpleCasesRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.QuarterRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.QuarterRegexYearFront), RegExpUtility.get_safe_reg_exp(ItalianDateTime.SeasonRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.LaterEarlyPeriodRegex),", "self._range_unit_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.RangeUnitRegex) self.from_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.FromRegex) self.connector_and_regex =", "return self._illegal_year_regex @property def year_regex(self) -> Pattern: return self._year_regex @property", "self._week_of_regex @property def month_of_regex(self) -> Pattern: return self._month_of_regex @property def", "Pattern: return self._future_suffix_regex @property def ago_regex(self) -> Pattern: return self._ago_regex", "MIT License. 
from typing import List, Pattern from recognizers_text.utilities import", "implementation for these properties is added, change the None values", "@property def cardinal_extractor(self) -> Extractor: return self._cardinal_extractor @property def time_unit_regex(self)", "Pattern: return self._in_connector_regex @property def range_unit_regex(self) -> Pattern: return self._range_unit_regex", "= RegExpUtility.get_safe_reg_exp(ItalianDateTime.NextSuffixRegex) self._past_prefix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.PastSuffixRegex) self._month_suffix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthSuffixRegex) self._relative_month_regex =", "Pattern: return self._month_num_regex @property def century_suffix_regex(self) -> Pattern: return self._century_suffix_regex", "recognizers_number.number.italian.extractors import ItalianIntegerExtractor, ItalianCardinalExtractor from recognizers_number.number.italian.parsers import ItalianNumberParserConfiguration from ...resources.base_date_time", "self._month_suffix_regex @property def past_prefix_regex(self) -> Pattern: return self._past_prefix_regex @property def", "ordinal_extractor(self) -> BaseNumberExtractor: return self._ordinal_extractor @property def cardinal_extractor(self) -> Extractor:", "ItalianDateTime.MonthFrontBetweenRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthFrontSimpleCasesRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.QuarterRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.QuarterRegexYearFront), RegExpUtility.get_safe_reg_exp(ItalianDateTime.SeasonRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.LaterEarlyPeriodRegex), RegExpUtility.get_safe_reg_exp(", "ItalianDateTime.AgoRegex ) self._later_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.LaterRegex ) self._less_than_regex = RegExpUtility.get_safe_reg_exp(", "self._within_next_prefix_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.WithinNextPrefixRegex) 
self._in_connector_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.InConnectorRegex) self._range_unit_regex =", "-> Pattern: return self._month_of_regex @property def date_unit_regex(self) -> Pattern: return", "return self._duration_date_restrictions @property def year_period_regex(self) -> Pattern: return self._year_period_regex @property", "import BaseNumberParser from recognizers_number.number.italian.extractors import ItalianIntegerExtractor, ItalianCardinalExtractor from recognizers_number.number.italian.parsers import", "BaseDateExtractor( ItalianDateExtractorConfiguration()) self._integer_extractor = ItalianIntegerExtractor() self._number_parser = BaseNumberParser( ItalianNumberParserConfiguration()) self._duration_extractor", "-> Pattern: return self._more_than_regex @property def duration_date_restrictions(self) -> [str]: return", "self._date_point_extractor @property def integer_extractor(self) -> BaseNumberExtractor: return self._integer_extractor @property def", "= RegExpUtility.get_safe_reg_exp(ItalianDateTime.RestOfDateRegex) self._which_week_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.WhichWeekRegex) self._this_prefix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.ThisPrefixRegex) self._next_prefix_regex =", "def future_suffix_regex(self) -> Pattern: return self._future_suffix_regex @property def ago_regex(self) ->", "DateTimeExtractor: return self._date_point_extractor @property def integer_extractor(self) -> BaseNumberExtractor: return self._integer_extractor", "-> Pattern: return self._week_of_regex @property def month_of_regex(self) -> Pattern: return", "@property def simple_cases_regexes(self) -> List[Pattern]: return self._simple_cases_regexes @property def illegal_year_regex(self)", "-> DateTimeExtractor: return self._date_point_extractor @property def integer_extractor(self) -> BaseNumberExtractor: return", "RegExpUtility.get_safe_reg_exp( ItalianDateTime.WeekOfRegex) self._month_of_regex = 
RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthOfRegex) self._date_unit_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.DateUnitRegex)", "@property def day_regex(self) -> Pattern: return self._day_regex @property def week_day_regex(self)", "get_from_token_index(self, source: str) -> MatchedIndex: match = self.from_regex.search(source) if match:", "def this_prefix_regex(self) -> Pattern: return self._this_prefix_regex @property def which_week_regex(self) ->", "RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthSuffixRegex) self._relative_month_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.RelativeMonthRegex) self._week_day_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayRegex) self._day_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.DayRegex)", "= RegExpUtility.get_safe_reg_exp( ItalianDateTime.LaterRegex ) self._less_than_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.LessThanRegex ) self._more_than_regex", "from ..extractors import DateTimeExtractor from ..base_duration import BaseDurationExtractor from ..base_date", "@property def now_regex(self) -> Pattern: return self._now_regex @property def future_suffix_regex(self)", "def past_prefix_regex(self) -> Pattern: return self._past_prefix_regex @property def next_prefix_regex(self) ->", "duration_extractor(self) -> DateTimeExtractor: return self._duration_extractor @property def now_regex(self) -> Pattern:", "MatchedIndex(False, -1) def get_between_token_index(self, source: str) -> MatchedIndex: match =", "return self._year_regex @property def till_regex(self) -> Pattern: return self._till_regex @property", "self._within_next_prefix_regex @property def range_connector_regex(self) -> Pattern: return self._range_connector_regex @property def", "RegExpUtility.get_safe_reg_exp(ItalianDateTime.SimpleCasesRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.BetweenRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.OneWordPeriodRegex), 
RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthWithYear), RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthNumWithYear), RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearPeriodRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekOfYearRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayOfMonthRegex), RegExpUtility.get_safe_reg_exp(", "BaseNumberExtractor: return self._ordinal_extractor @property def cardinal_extractor(self) -> Extractor: return self._cardinal_extractor", "self._week_of_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.WeekOfRegex) self._month_of_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthOfRegex) self._date_unit_regex =", "= RegExpUtility.get_safe_reg_exp(ItalianDateTime.RelativeMonthRegex) self._week_day_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayRegex) self._day_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.DayRegex) self._range_connector_regex =", "= BaseDateExtractor( ItalianDateExtractorConfiguration()) self._integer_extractor = ItalianIntegerExtractor() self._number_parser = BaseNumberParser( ItalianNumberParserConfiguration())", "-> Pattern: return self._now_regex @property def future_suffix_regex(self) -> Pattern: return", "if match: return MatchedIndex(True, match.start()) return MatchedIndex(False, -1) def get_between_token_index(self,", "RegExpUtility.get_safe_reg_exp(ItalianDateTime.ComplexDatePeriodRegex) self._rest_of_date_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.RestOfDateRegex) self._which_week_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.WhichWeekRegex) self._this_prefix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.ThisPrefixRegex)", "match: return MatchedIndex(True, match.start()) return MatchedIndex(False, -1) def get_between_token_index(self, source:", "Pattern: return self._illegal_year_regex @property def year_regex(self) -> Pattern: return self._year_regex", 
"Pattern: return self._week_day_of_month_regex @property def all_half_year_regex(self) -> Pattern: return self._all_half_year_regex", "import DatePeriodExtractorConfiguration, MatchedIndex from .duration_extractor_config import ItalianDurationExtractorConfiguration from .date_extractor_config import", "Pattern: return self._month_suffix_regex @property def past_prefix_regex(self) -> Pattern: return self._past_prefix_regex", "BaseNumberParser( ItalianNumberParserConfiguration()) self._duration_extractor = BaseDurationExtractor( ItalianDurationExtractorConfiguration()) self._now_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.NowRegex)", "return self._followed_unit @property def number_combined_with_unit(self) -> Pattern: return self._number_combined_with_unit @property", "import ItalianOrdinalExtractor, BaseNumberExtractor, ItalianCardinalExtractor class ItalianDatePeriodExtractorConfiguration(DatePeriodExtractorConfiguration): @property def previous_prefix_regex(self) ->", "future_regex(self) -> Pattern: return self._future_regex @property def week_of_regex(self) -> Pattern:", "= RegExpUtility.get_safe_reg_exp( ItalianDateTime.PastSuffixRegex) self._check_both_before_after = ItalianDateTime.CheckBothBeforeAfter self._simple_cases_regexes = [ RegExpUtility.get_safe_reg_exp(ItalianDateTime.SimpleCasesRegex),", "@property def year_regex(self) -> Pattern: return self._year_regex @property def till_regex(self)", "-> Pattern: return self._ago_regex @property def later_regex(self) -> Pattern: return", "-> Pattern: return self._year_regex @property def till_regex(self) -> Pattern: return", "@property def number_parser(self) -> BaseNumberParser: return self._number_parser @property def duration_extractor(self)", "-> bool: return self._check_both_before_after @property def simple_cases_regexes(self) -> List[Pattern]: return", "= RegExpUtility.get_safe_reg_exp( ItalianDateTime.YearRegex) self._till_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.TillRegex) 
self._followed_unit = RegExpUtility.get_safe_reg_exp(", "= RegExpUtility.get_safe_reg_exp( ItalianDateTime.YearPeriodRegex ) self._month_num_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthNumRegex ) self._century_suffix_regex", "RegExpUtility.get_safe_reg_exp( ItalianDateTime.FutureSuffixRegex ) self._ago_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.AgoRegex ) self._later_regex =", "self._time_unit_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.TimeUnitRegex) self._previous_prefix_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.PastSuffixRegex) self._check_both_before_after = ItalianDateTime.CheckBothBeforeAfter", "@property def future_suffix_regex(self) -> Pattern: return self._future_suffix_regex @property def ago_regex(self)", "RegExpUtility.get_safe_reg_exp(ItalianDateTime.AllHalfYearRegex) self._week_day_of_month_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayOfMonthRegex) self._complex_date_period_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.ComplexDatePeriodRegex) self._rest_of_date_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.RestOfDateRegex)", "ItalianDateTime.PastSuffixRegex) self._check_both_before_after = ItalianDateTime.CheckBothBeforeAfter self._simple_cases_regexes = [ RegExpUtility.get_safe_reg_exp(ItalianDateTime.SimpleCasesRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.BetweenRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.OneWordPeriodRegex),", "RegExpUtility.get_safe_reg_exp(ItalianDateTime.QuarterRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.QuarterRegexYearFront), RegExpUtility.get_safe_reg_exp(ItalianDateTime.SeasonRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.LaterEarlyPeriodRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.WeekWithWeekDayRangeRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearPlusNumberRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.DecadeWithCenturyRegex),", "MatchedIndex: match = 
self.before_regex.search(source) if match: return MatchedIndex(True, match.start()) return", "RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearPeriodRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekOfYearRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayOfMonthRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthFrontBetweenRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthFrontSimpleCasesRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.QuarterRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.QuarterRegexYearFront),", "Pattern: return self._century_suffix_regex @property def ordinal_extractor(self) -> BaseNumberExtractor: return self._ordinal_extractor", "self._range_connector_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.RangeConnectorRegex) self._time_unit_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.TimeUnitRegex) self._previous_prefix_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.PastSuffixRegex)", "self._which_week_regex @property def rest_of_date_regex(self) -> Pattern: return self._rest_of_date_regex @property def", "RegExpUtility.get_safe_reg_exp(ItalianDateTime.ThisPrefixRegex) self._next_prefix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.NextSuffixRegex) self._past_prefix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.PastSuffixRegex) self._month_suffix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthSuffixRegex)", "RegExpUtility.get_safe_reg_exp( ItalianDateTime.NextSuffixRegex) self._week_of_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.WeekOfRegex) self._month_of_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthOfRegex)", "match = self.before_regex.search(source) if match: return MatchedIndex(True, match.start()) return MatchedIndex(False,", "RegExpUtility.get_safe_reg_exp( ItalianDateTime.InConnectorRegex) self._range_unit_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.RangeUnitRegex) self.from_regex = 
RegExpUtility.get_safe_reg_exp( ItalianDateTime.FromRegex)", "return self._simple_cases_regexes @property def illegal_year_regex(self) -> Pattern: return self._illegal_year_regex @property", "= RegExpUtility.get_safe_reg_exp( ItalianDateTime.WeekOfRegex) self._month_of_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthOfRegex) self._date_unit_regex = RegExpUtility.get_safe_reg_exp(", "-> [str]: return self._duration_date_restrictions @property def year_period_regex(self) -> Pattern: return", "properties is added, change the None values to their respective", "self._duration_date_restrictions @property def year_period_regex(self) -> Pattern: return self._year_period_regex @property def", "RegExpUtility.get_safe_reg_exp(ItalianDateTime.RelativeMonthRegex) self._week_day_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayRegex) self._day_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.DayRegex) self._range_connector_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.RangeConnectorRegex)", "self._illegal_year_regex = RegExpUtility.get_safe_reg_exp( BaseDateTime.IllegalYearRegex) self._year_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.YearRegex) self._till_regex =", "RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthOfRegex) self._date_unit_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.DateUnitRegex) self._within_next_prefix_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.WithinNextPrefixRegex)", "BaseDateExtractor from ..base_dateperiod import DatePeriodExtractorConfiguration, MatchedIndex from .duration_extractor_config import ItalianDurationExtractorConfiguration", "which_week_regex(self) -> Pattern: return self._which_week_regex @property def rest_of_date_regex(self) -> Pattern:", "return self._future_regex @property def week_of_regex(self) -> Pattern: return self._week_of_regex @property", "if match: return MatchedIndex(True, match.start()) return MatchedIndex(False, -1) def has_connector_token(self,", 
"Pattern: return self._range_connector_regex @property def day_regex(self) -> Pattern: return self._day_regex", "the MIT License. from typing import List, Pattern from recognizers_text.utilities", "self._week_day_of_month_regex @property def all_half_year_regex(self) -> Pattern: return self._all_half_year_regex def __init__(self):", "self._duration_extractor = BaseDurationExtractor( ItalianDurationExtractorConfiguration()) self._now_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.NowRegex) self._future_suffix_regex =", "Pattern: return self._week_day_regex @property def relative_month_regex(self) -> Pattern: return self._relative_month_regex", "self._check_both_before_after = ItalianDateTime.CheckBothBeforeAfter self._simple_cases_regexes = [ RegExpUtility.get_safe_reg_exp(ItalianDateTime.SimpleCasesRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.BetweenRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.OneWordPeriodRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthWithYear),", "List[Pattern]: return self._simple_cases_regexes @property def illegal_year_regex(self) -> Pattern: return self._illegal_year_regex", "def cardinal_extractor(self) -> Extractor: return self._cardinal_extractor @property def time_unit_regex(self) ->", "from ..base_duration import BaseDurationExtractor from ..base_date import BaseDateExtractor from ..base_dateperiod", "ItalianCardinalExtractor class ItalianDatePeriodExtractorConfiguration(DatePeriodExtractorConfiguration): @property def previous_prefix_regex(self) -> Pattern: return self._previous_prefix_regex", "values to their respective Regexps self._time_unit_regex = None def get_from_token_index(self,", "return MatchedIndex(False, -1) def has_connector_token(self, source: str) -> bool: return", "in_connector_regex(self) -> Pattern: return self._in_connector_regex @property def range_unit_regex(self) -> Pattern:", "return self._next_prefix_regex @property def this_prefix_regex(self) -> Pattern: return 
self._this_prefix_regex @property", "self._this_prefix_regex @property def which_week_regex(self) -> Pattern: return self._which_week_regex @property def", "match.start()) return MatchedIndex(False, -1) def has_connector_token(self, source: str) -> bool:", "@property def illegal_year_regex(self) -> Pattern: return self._illegal_year_regex @property def year_regex(self)", "self._illegal_year_regex @property def year_regex(self) -> Pattern: return self._year_regex @property def", "ItalianDateTime.MonthOfRegex) self._date_unit_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.DateUnitRegex) self._within_next_prefix_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.WithinNextPrefixRegex) self._in_connector_regex", "self._week_day_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayRegex) self._day_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.DayRegex) self._range_connector_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.RangeConnectorRegex) self._time_unit_regex", "integer_extractor(self) -> BaseNumberExtractor: return self._integer_extractor @property def number_parser(self) -> BaseNumberParser:", "= [ RegExpUtility.get_safe_reg_exp(ItalianDateTime.SimpleCasesRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.BetweenRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.OneWordPeriodRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthWithYear), RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthNumWithYear), RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearPeriodRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekOfYearRegex),", "BaseDateTime from ...resources.italian_date_time import ItalianDateTime from ..extractors import DateTimeExtractor from", ".date_extractor_config import ItalianDateExtractorConfiguration from recognizers_text.extractor import Extractor from recognizers_number import", "= ItalianDateTime.CheckBothBeforeAfter 
self._illegal_year_regex = RegExpUtility.get_safe_reg_exp( BaseDateTime.IllegalYearRegex) self._year_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.YearRegex)", "self._ordinal_extractor = ItalianOrdinalExtractor() self._cardinal_extractor = ItalianCardinalExtractor() self._previous_prefix_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.PreviousPrefixRegex", "def less_than_regex(self) -> Pattern: return self._less_than_regex @property def more_than_regex(self) ->", "-> Pattern: return self._past_regex @property def decade_with_century_regex(self) -> Pattern: return", "def check_both_before_after(self) -> bool: return self._check_both_before_after @property def simple_cases_regexes(self) ->", "def duration_date_restrictions(self) -> [str]: return self._duration_date_restrictions @property def year_period_regex(self) ->", "return self._till_regex @property def followed_unit(self) -> Pattern: return self._followed_unit @property", "RegExpUtility.get_safe_reg_exp( ItalianDateTime.BeforeRegex2) self._date_point_extractor = BaseDateExtractor( ItalianDateExtractorConfiguration()) self._integer_extractor = ItalianIntegerExtractor() self._number_parser", "self._time_unit_regex @property def within_next_prefix_regex(self) -> Pattern: return self._within_next_prefix_regex @property def", "def past_regex(self) -> Pattern: return self._past_regex @property def decade_with_century_regex(self) ->", "self._number_combined_with_unit = RegExpUtility.get_safe_reg_exp( ItalianDateTime.NumberCombinedWithDateUnit) self._past_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.PastSuffixRegex) self._future_regex =", "from ...resources.base_date_time import BaseDateTime from ...resources.italian_date_time import ItalianDateTime from ..extractors", "@property def next_prefix_regex(self) -> Pattern: return self._next_prefix_regex @property def this_prefix_regex(self)", "-> Pattern: return self._month_suffix_regex @property def past_prefix_regex(self) -> Pattern: return", 
"# TODO When the implementation for these properties is added,", "return MatchedIndex(True, match.start()) return MatchedIndex(False, -1) def has_connector_token(self, source: str)", "later_regex(self) -> Pattern: return self._later_regex @property def less_than_regex(self) -> Pattern:", "ago_regex(self) -> Pattern: return self._ago_regex @property def later_regex(self) -> Pattern:", "= RegExpUtility.get_safe_reg_exp( ItalianDateTime.NowRegex) self._future_suffix_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.FutureSuffixRegex ) self._ago_regex =", "] self._check_both_before_after = ItalianDateTime.CheckBothBeforeAfter self._illegal_year_regex = RegExpUtility.get_safe_reg_exp( BaseDateTime.IllegalYearRegex) self._year_regex =", "RegExpUtility.get_safe_reg_exp( ItalianDateTime.FollowedDateUnit) self._number_combined_with_unit = RegExpUtility.get_safe_reg_exp( ItalianDateTime.NumberCombinedWithDateUnit) self._past_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.PastSuffixRegex)", "RegExpUtility.get_safe_reg_exp(ItalianDateTime.RelativeDecadeRegex) ] self._check_both_before_after = ItalianDateTime.CheckBothBeforeAfter self._illegal_year_regex = RegExpUtility.get_safe_reg_exp( BaseDateTime.IllegalYearRegex) self._year_regex", "return self._number_combined_with_unit @property def past_regex(self) -> Pattern: return self._past_regex @property", "return self._number_parser @property def duration_extractor(self) -> DateTimeExtractor: return self._duration_extractor @property", "self._ordinal_extractor @property def cardinal_extractor(self) -> Extractor: return self._cardinal_extractor @property def", "from recognizers_number.number.italian.extractors import ItalianIntegerExtractor, ItalianCardinalExtractor from recognizers_number.number.italian.parsers import ItalianNumberParserConfiguration from", "def in_connector_regex(self) -> Pattern: return self._in_connector_regex @property def range_unit_regex(self) ->", "= 
RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayRegex) self._day_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.DayRegex) self._range_connector_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.RangeConnectorRegex) self._time_unit_regex =", "def time_unit_regex(self) -> Pattern: return self._time_unit_regex @property def within_next_prefix_regex(self) ->", "self._till_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.TillRegex) self._followed_unit = RegExpUtility.get_safe_reg_exp( ItalianDateTime.FollowedDateUnit) self._number_combined_with_unit =", "self._past_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.PastSuffixRegex) self._future_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.NextSuffixRegex) self._week_of_regex =", "typing import List, Pattern from recognizers_text.utilities import RegExpUtility from recognizers_number.number", "self._date_point_extractor = BaseDateExtractor( ItalianDateExtractorConfiguration()) self._integer_extractor = ItalianIntegerExtractor() self._number_parser = BaseNumberParser(", "RegExpUtility.get_safe_reg_exp( ItalianDateTime.WithinNextPrefixRegex) self._in_connector_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.InConnectorRegex) self._range_unit_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.RangeUnitRegex)", "@property def week_of_regex(self) -> Pattern: return self._week_of_regex @property def month_of_regex(self)", "List, Pattern from recognizers_text.utilities import RegExpUtility from recognizers_number.number import BaseNumberParser", "def month_num_regex(self) -> Pattern: return self._month_num_regex @property def century_suffix_regex(self) ->", "= RegExpUtility.get_safe_reg_exp(ItalianDateTime.AllHalfYearRegex) self._week_day_of_month_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayOfMonthRegex) self._complex_date_period_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.ComplexDatePeriodRegex) self._rest_of_date_regex =", 
"ItalianDatePeriodExtractorConfiguration(DatePeriodExtractorConfiguration): @property def previous_prefix_regex(self) -> Pattern: return self._previous_prefix_regex @property def", "import ItalianDateTime from ..extractors import DateTimeExtractor from ..base_duration import BaseDurationExtractor", "return self._check_both_before_after @property def simple_cases_regexes(self) -> List[Pattern]: return self._simple_cases_regexes @property", "self._date_unit_regex @property def in_connector_regex(self) -> Pattern: return self._in_connector_regex @property def", "these properties is added, change the None values to their", "date_point_extractor(self) -> DateTimeExtractor: return self._date_point_extractor @property def integer_extractor(self) -> BaseNumberExtractor:", "rights reserved. # Licensed under the MIT License. from typing", "ItalianIntegerExtractor() self._number_parser = BaseNumberParser( ItalianNumberParserConfiguration()) self._duration_extractor = BaseDurationExtractor( ItalianDurationExtractorConfiguration()) self._now_regex", "RegExpUtility.get_safe_reg_exp( BaseDateTime.IllegalYearRegex) self._year_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.YearRegex) self._till_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.TillRegex)", "under the MIT License. 
from typing import List, Pattern from", "range_connector_regex(self) -> Pattern: return self._range_connector_regex @property def day_regex(self) -> Pattern:", "RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthFrontSimpleCasesRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.QuarterRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.QuarterRegexYearFront), RegExpUtility.get_safe_reg_exp(ItalianDateTime.SeasonRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.LaterEarlyPeriodRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.WeekWithWeekDayRangeRegex),", "def week_day_of_month_regex(self) -> Pattern: return self._week_day_of_month_regex @property def all_half_year_regex(self) ->", "@property def relative_month_regex(self) -> Pattern: return self._relative_month_regex @property def month_suffix_regex(self)", "from .duration_extractor_config import ItalianDurationExtractorConfiguration from .date_extractor_config import ItalianDateExtractorConfiguration from recognizers_text.extractor", "Pattern: return self._year_regex @property def till_regex(self) -> Pattern: return self._till_regex", "self._now_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.NowRegex) self._future_suffix_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.FutureSuffixRegex ) self._ago_regex", "@property def previous_prefix_regex(self) -> Pattern: return self._previous_prefix_regex @property def check_both_before_after(self)", "recognizers_text.utilities import RegExpUtility from recognizers_number.number import BaseNumberParser from recognizers_number.number.italian.extractors import", "-> Pattern: return self._week_day_regex @property def relative_month_regex(self) -> Pattern: return", "= RegExpUtility.get_safe_reg_exp(ItalianDateTime.ThisPrefixRegex) self._next_prefix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.NextSuffixRegex) self._past_prefix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.PastSuffixRegex) self._month_suffix_regex =", 
"previous_prefix_regex(self) -> Pattern: return self._previous_prefix_regex @property def check_both_before_after(self) -> bool:", "Pattern: return self._date_unit_regex @property def in_connector_regex(self) -> Pattern: return self._in_connector_regex", "self._day_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.DayRegex) self._range_connector_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.RangeConnectorRegex) self._time_unit_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.TimeUnitRegex) self._previous_prefix_regex", "RegExpUtility.get_safe_reg_exp(ItalianDateTime.TimeUnitRegex) self._previous_prefix_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.PastSuffixRegex) self._check_both_before_after = ItalianDateTime.CheckBothBeforeAfter self._simple_cases_regexes =", "ItalianDurationExtractorConfiguration from .date_extractor_config import ItalianDateExtractorConfiguration from recognizers_text.extractor import Extractor from", "@property def range_connector_regex(self) -> Pattern: return self._range_connector_regex @property def day_regex(self)", "ItalianDateTime.LaterRegex ) self._less_than_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.LessThanRegex ) self._more_than_regex = RegExpUtility.get_safe_reg_exp(", "from ...resources.italian_date_time import ItalianDateTime from ..extractors import DateTimeExtractor from ..base_duration", "Pattern: return self._more_than_regex @property def duration_date_restrictions(self) -> [str]: return self._duration_date_restrictions", "self._date_unit_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.DateUnitRegex) self._within_next_prefix_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.WithinNextPrefixRegex) self._in_connector_regex =", "-> Pattern: return self._all_half_year_regex def __init__(self): self._all_half_year_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.AllHalfYearRegex) self._week_day_of_month_regex", "@property def date_point_extractor(self) -> 
DateTimeExtractor: return self._date_point_extractor @property def integer_extractor(self)", "-> Pattern: return self._number_combined_with_unit @property def past_regex(self) -> Pattern: return", "def has_connector_token(self, source: str) -> bool: return not self.connector_and_regex.search(source) is", "def simple_cases_regexes(self) -> List[Pattern]: return self._simple_cases_regexes @property def illegal_year_regex(self) ->", "self._in_connector_regex @property def range_unit_regex(self) -> Pattern: return self._range_unit_regex @property def", "Pattern: return self._past_prefix_regex @property def next_prefix_regex(self) -> Pattern: return self._next_prefix_regex", "Pattern: return self._later_regex @property def less_than_regex(self) -> Pattern: return self._less_than_regex", "import ItalianNumberParserConfiguration from ...resources.base_date_time import BaseDateTime from ...resources.italian_date_time import ItalianDateTime", "self._week_day_of_month_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayOfMonthRegex) self._complex_date_period_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.ComplexDatePeriodRegex) self._rest_of_date_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.RestOfDateRegex) self._which_week_regex", "RegExpUtility.get_safe_reg_exp( ItalianDateTime.LaterEarlyPeriodRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.WeekWithWeekDayRangeRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearPlusNumberRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.DecadeWithCenturyRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.RelativeDecadeRegex) ] self._check_both_before_after =", "ItalianOrdinalExtractor, BaseNumberExtractor, ItalianCardinalExtractor class ItalianDatePeriodExtractorConfiguration(DatePeriodExtractorConfiguration): @property def previous_prefix_regex(self) -> Pattern:", "Pattern from recognizers_text.utilities import RegExpUtility from recognizers_number.number import BaseNumberParser from", 
"return self._relative_month_regex @property def month_suffix_regex(self) -> Pattern: return self._month_suffix_regex @property", "self._cardinal_extractor @property def time_unit_regex(self) -> Pattern: return self._time_unit_regex @property def", "self._year_regex @property def till_regex(self) -> Pattern: return self._till_regex @property def", "self._in_connector_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.InConnectorRegex) self._range_unit_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.RangeUnitRegex) self.from_regex =", "@property def less_than_regex(self) -> Pattern: return self._less_than_regex @property def more_than_regex(self)", "@property def year_period_regex(self) -> Pattern: return self._year_period_regex @property def month_num_regex(self)", "return MatchedIndex(False, -1) def get_between_token_index(self, source: str) -> MatchedIndex: match", "def relative_month_regex(self) -> Pattern: return self._relative_month_regex @property def month_suffix_regex(self) ->", "self._rest_of_date_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.RestOfDateRegex) self._which_week_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.WhichWeekRegex) self._this_prefix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.ThisPrefixRegex) self._next_prefix_regex", "return self._duration_extractor @property def now_regex(self) -> Pattern: return self._now_regex @property", "ItalianDateTime.YearRegex) self._till_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.TillRegex) self._followed_unit = RegExpUtility.get_safe_reg_exp( ItalianDateTime.FollowedDateUnit) self._number_combined_with_unit", "def __init__(self): self._all_half_year_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.AllHalfYearRegex) self._week_day_of_month_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayOfMonthRegex) self._complex_date_period_regex =", "ItalianDateExtractorConfiguration from recognizers_text.extractor import Extractor from 
recognizers_number import ItalianOrdinalExtractor, BaseNumberExtractor,", "Pattern: return self._number_combined_with_unit @property def past_regex(self) -> Pattern: return self._past_regex", "-> BaseNumberParser: return self._number_parser @property def duration_extractor(self) -> DateTimeExtractor: return", "RegExpUtility.get_safe_reg_exp( ItalianDateTime.ConnectorAndRegex) self.before_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.BeforeRegex2) self._date_point_extractor = BaseDateExtractor( ItalianDateExtractorConfiguration())", ") self._ordinal_extractor = ItalianOrdinalExtractor() self._cardinal_extractor = ItalianCardinalExtractor() self._previous_prefix_regex = RegExpUtility.get_safe_reg_exp(", "DatePeriodExtractorConfiguration, MatchedIndex from .duration_extractor_config import ItalianDurationExtractorConfiguration from .date_extractor_config import ItalianDateExtractorConfiguration", "BaseDurationExtractor( ItalianDurationExtractorConfiguration()) self._now_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.NowRegex) self._future_suffix_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.FutureSuffixRegex", "return self._date_point_extractor @property def integer_extractor(self) -> BaseNumberExtractor: return self._integer_extractor @property", "Pattern: return self._ago_regex @property def later_regex(self) -> Pattern: return self._later_regex", "self.from_regex.search(source) if match: return MatchedIndex(True, match.start()) return MatchedIndex(False, -1) def", "-> DateTimeExtractor: return self._duration_extractor @property def now_regex(self) -> Pattern: return", "number_parser(self) -> BaseNumberParser: return self._number_parser @property def duration_extractor(self) -> DateTimeExtractor:", "ItalianDateTime.ConnectorAndRegex) self.before_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.BeforeRegex2) self._date_point_extractor = BaseDateExtractor( ItalianDateExtractorConfiguration()) self._integer_extractor", "Pattern: 
return self._decade_with_century_regex @property def future_regex(self) -> Pattern: return self._future_regex", "return self._date_unit_regex @property def in_connector_regex(self) -> Pattern: return self._in_connector_regex @property", "year_period_regex(self) -> Pattern: return self._year_period_regex @property def month_num_regex(self) -> Pattern:", "Pattern: return self._time_unit_regex @property def within_next_prefix_regex(self) -> Pattern: return self._within_next_prefix_regex", "return MatchedIndex(True, match.start()) return MatchedIndex(False, -1) def get_between_token_index(self, source: str)", "RegExpUtility.get_safe_reg_exp( ItalianDateTime.MoreThanRegex ) self._duration_date_restrictions = ItalianDateTime.DurationDateRestrictions self._year_period_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.YearPeriodRegex", "= RegExpUtility.get_safe_reg_exp( ItalianDateTime.NextSuffixRegex) self._week_of_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.WeekOfRegex) self._month_of_regex = RegExpUtility.get_safe_reg_exp(", "Pattern: return self._now_regex @property def future_suffix_regex(self) -> Pattern: return self._future_suffix_regex", "ItalianDateTime from ..extractors import DateTimeExtractor from ..base_duration import BaseDurationExtractor from", "@property def number_combined_with_unit(self) -> Pattern: return self._number_combined_with_unit @property def past_regex(self)", "@property def month_num_regex(self) -> Pattern: return self._month_num_regex @property def century_suffix_regex(self)", "past_regex(self) -> Pattern: return self._past_regex @property def decade_with_century_regex(self) -> Pattern:", "date_unit_regex(self) -> Pattern: return self._date_unit_regex @property def in_connector_regex(self) -> Pattern:", "= RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthNumRegex ) self._century_suffix_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.CenturySuffixRegex ) self._ordinal_extractor", "@property def future_regex(self) 
-> Pattern: return self._future_regex @property def week_of_regex(self)", "@property def in_connector_regex(self) -> Pattern: return self._in_connector_regex @property def range_unit_regex(self)", "Pattern: return self._this_prefix_regex @property def which_week_regex(self) -> Pattern: return self._which_week_regex", "def century_suffix_regex(self) -> Pattern: return self._century_suffix_regex @property def ordinal_extractor(self) ->", "self._past_prefix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.PastSuffixRegex) self._month_suffix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthSuffixRegex) self._relative_month_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.RelativeMonthRegex) self._week_day_regex", "def date_unit_regex(self) -> Pattern: return self._date_unit_regex @property def in_connector_regex(self) ->", "def year_period_regex(self) -> Pattern: return self._year_period_regex @property def month_num_regex(self) ->", "= RegExpUtility.get_safe_reg_exp( ItalianDateTime.LessThanRegex ) self._more_than_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.MoreThanRegex ) self._duration_date_restrictions", "self.connector_and_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.ConnectorAndRegex) self.before_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.BeforeRegex2) self._date_point_extractor =", "@property def century_suffix_regex(self) -> Pattern: return self._century_suffix_regex @property def ordinal_extractor(self)", "to their respective Regexps self._time_unit_regex = None def get_from_token_index(self, source:", "= RegExpUtility.get_safe_reg_exp(ItalianDateTime.WhichWeekRegex) self._this_prefix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.ThisPrefixRegex) self._next_prefix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.NextSuffixRegex) self._past_prefix_regex =", "RegExpUtility.get_safe_reg_exp( ItalianDateTime.DateUnitRegex) self._within_next_prefix_regex = RegExpUtility.get_safe_reg_exp( 
ItalianDateTime.WithinNextPrefixRegex) self._in_connector_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.InConnectorRegex)", "RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekOfYearRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayOfMonthRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthFrontBetweenRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthFrontSimpleCasesRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.QuarterRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.QuarterRegexYearFront), RegExpUtility.get_safe_reg_exp(ItalianDateTime.SeasonRegex),", "= RegExpUtility.get_safe_reg_exp(ItalianDateTime.TimeUnitRegex) self._previous_prefix_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.PastSuffixRegex) self._check_both_before_after = ItalianDateTime.CheckBothBeforeAfter self._simple_cases_regexes", "self.before_regex.search(source) if match: return MatchedIndex(True, match.start()) return MatchedIndex(False, -1) def", "RegExpUtility.get_safe_reg_exp( ItalianDateTime.PastSuffixRegex) self._future_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.NextSuffixRegex) self._week_of_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.WeekOfRegex)", "more_than_regex(self) -> Pattern: return self._more_than_regex @property def duration_date_restrictions(self) -> [str]:", "def within_next_prefix_regex(self) -> Pattern: return self._within_next_prefix_regex @property def range_connector_regex(self) ->", "= RegExpUtility.get_safe_reg_exp( ItalianDateTime.RangeUnitRegex) self.from_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.FromRegex) self.connector_and_regex = RegExpUtility.get_safe_reg_exp(", "ItalianDurationExtractorConfiguration()) self._now_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.NowRegex) self._future_suffix_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.FutureSuffixRegex )", "= RegExpUtility.get_safe_reg_exp( ItalianDateTime.WithinNextPrefixRegex) 
self._in_connector_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.InConnectorRegex) self._range_unit_regex = RegExpUtility.get_safe_reg_exp(", ") self._ago_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.AgoRegex ) self._later_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.LaterRegex", "= ItalianIntegerExtractor() self._number_parser = BaseNumberParser( ItalianNumberParserConfiguration()) self._duration_extractor = BaseDurationExtractor( ItalianDurationExtractorConfiguration())", "MatchedIndex(False, -1) def has_connector_token(self, source: str) -> bool: return not", "def all_half_year_regex(self) -> Pattern: return self._all_half_year_regex def __init__(self): self._all_half_year_regex =", "ItalianDateTime.NowRegex) self._future_suffix_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.FutureSuffixRegex ) self._ago_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.AgoRegex", "BaseDateTime.IllegalYearRegex) self._year_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.YearRegex) self._till_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.TillRegex) self._followed_unit", "When the implementation for these properties is added, change the", "self._century_suffix_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.CenturySuffixRegex ) self._ordinal_extractor = ItalianOrdinalExtractor() self._cardinal_extractor =", "self._next_prefix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.NextSuffixRegex) self._past_prefix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.PastSuffixRegex) self._month_suffix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthSuffixRegex) self._relative_month_regex", "def year_regex(self) -> Pattern: return self._year_regex @property def till_regex(self) ->", "self._number_combined_with_unit @property def past_regex(self) -> Pattern: return self._past_regex @property def", "(c) Microsoft Corporation. All rights reserved. 
# Licensed under the", "self._number_parser @property def duration_extractor(self) -> DateTimeExtractor: return self._duration_extractor @property def", "match.start()) return MatchedIndex(False, -1) def get_between_token_index(self, source: str) -> MatchedIndex:", "ItalianDateTime.MoreThanRegex ) self._duration_date_restrictions = ItalianDateTime.DurationDateRestrictions self._year_period_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.YearPeriodRegex )", "def future_regex(self) -> Pattern: return self._future_regex @property def week_of_regex(self) ->", "self._more_than_regex @property def duration_date_restrictions(self) -> [str]: return self._duration_date_restrictions @property def", "def rest_of_date_regex(self) -> Pattern: return self._rest_of_date_regex @property def complex_date_period_regex(self) ->", "within_next_prefix_regex(self) -> Pattern: return self._within_next_prefix_regex @property def range_connector_regex(self) -> Pattern:", "RegExpUtility.get_safe_reg_exp(ItalianDateTime.PastSuffixRegex) self._month_suffix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthSuffixRegex) self._relative_month_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.RelativeMonthRegex) self._week_day_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayRegex)", "= RegExpUtility.get_safe_reg_exp(ItalianDateTime.RangeConnectorRegex) self._time_unit_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.TimeUnitRegex) self._previous_prefix_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.PastSuffixRegex) self._check_both_before_after", "BaseNumberExtractor: return self._integer_extractor @property def number_parser(self) -> BaseNumberParser: return self._number_parser", "-> Pattern: return self._year_period_regex @property def month_num_regex(self) -> Pattern: return", "= RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayOfMonthRegex) self._complex_date_period_regex = 
RegExpUtility.get_safe_reg_exp(ItalianDateTime.ComplexDatePeriodRegex) self._rest_of_date_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.RestOfDateRegex) self._which_week_regex =", "ItalianDateTime.FromRegex) self.connector_and_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.ConnectorAndRegex) self.before_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.BeforeRegex2) self._date_point_extractor", "MatchedIndex: match = self.from_regex.search(source) if match: return MatchedIndex(True, match.start()) return", "import RegExpUtility from recognizers_number.number import BaseNumberParser from recognizers_number.number.italian.extractors import ItalianIntegerExtractor,", "followed_unit(self) -> Pattern: return self._followed_unit @property def number_combined_with_unit(self) -> Pattern:", "Pattern: return self._till_regex @property def followed_unit(self) -> Pattern: return self._followed_unit", "self._all_half_year_regex def __init__(self): self._all_half_year_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.AllHalfYearRegex) self._week_day_of_month_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayOfMonthRegex) self._complex_date_period_regex", "RegExpUtility.get_safe_reg_exp( ItalianDateTime.YearRegex) self._till_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.TillRegex) self._followed_unit = RegExpUtility.get_safe_reg_exp( ItalianDateTime.FollowedDateUnit)", "return self._in_connector_regex @property def range_unit_regex(self) -> Pattern: return self._range_unit_regex @property", "month_suffix_regex(self) -> Pattern: return self._month_suffix_regex @property def past_prefix_regex(self) -> Pattern:", "...resources.italian_date_time import ItalianDateTime from ..extractors import DateTimeExtractor from ..base_duration import", "def get_between_token_index(self, source: str) -> MatchedIndex: match = self.before_regex.search(source) if", ") self._later_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.LaterRegex ) 
self._less_than_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.LessThanRegex", "import ItalianDurationExtractorConfiguration from .date_extractor_config import ItalianDateExtractorConfiguration from recognizers_text.extractor import Extractor", "self._month_of_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthOfRegex) self._date_unit_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.DateUnitRegex) self._within_next_prefix_regex =", "ItalianDateTime.QuarterRegexYearFront), RegExpUtility.get_safe_reg_exp(ItalianDateTime.SeasonRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.LaterEarlyPeriodRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.WeekWithWeekDayRangeRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearPlusNumberRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.DecadeWithCenturyRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.RelativeDecadeRegex) ]", "from ..base_dateperiod import DatePeriodExtractorConfiguration, MatchedIndex from .duration_extractor_config import ItalianDurationExtractorConfiguration from", "their respective Regexps self._time_unit_regex = None def get_from_token_index(self, source: str)", "Pattern: return self._previous_prefix_regex @property def check_both_before_after(self) -> bool: return self._check_both_before_after", "Pattern: return self._rest_of_date_regex @property def complex_date_period_regex(self) -> Pattern: return self._complex_date_period_regex", "-> Pattern: return self._which_week_regex @property def rest_of_date_regex(self) -> Pattern: return", "past_prefix_regex(self) -> Pattern: return self._past_prefix_regex @property def next_prefix_regex(self) -> Pattern:", "def previous_prefix_regex(self) -> Pattern: return self._previous_prefix_regex @property def check_both_before_after(self) ->", "def next_prefix_regex(self) -> Pattern: return self._next_prefix_regex @property def this_prefix_regex(self) ->", "Licensed under the MIT License. 
from typing import List, Pattern", "return self._month_suffix_regex @property def past_prefix_regex(self) -> Pattern: return self._past_prefix_regex @property", "respective Regexps self._time_unit_regex = None def get_from_token_index(self, source: str) ->", "..base_duration import BaseDurationExtractor from ..base_date import BaseDateExtractor from ..base_dateperiod import", "Pattern: return self._complex_date_period_regex @property def week_day_of_month_regex(self) -> Pattern: return self._week_day_of_month_regex", "return self._rest_of_date_regex @property def complex_date_period_regex(self) -> Pattern: return self._complex_date_period_regex @property", "from recognizers_number.number.italian.parsers import ItalianNumberParserConfiguration from ...resources.base_date_time import BaseDateTime from ...resources.italian_date_time", "ItalianCardinalExtractor() self._previous_prefix_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.PreviousPrefixRegex ) self._cardinal_extractor = ItalianCardinalExtractor() #", "-> BaseNumberExtractor: return self._integer_extractor @property def number_parser(self) -> BaseNumberParser: return", "-> Pattern: return self._rest_of_date_regex @property def complex_date_period_regex(self) -> Pattern: return", "@property def complex_date_period_regex(self) -> Pattern: return self._complex_date_period_regex @property def week_day_of_month_regex(self)", "self._century_suffix_regex @property def ordinal_extractor(self) -> BaseNumberExtractor: return self._ordinal_extractor @property def", "return self._later_regex @property def less_than_regex(self) -> Pattern: return self._less_than_regex @property", "ItalianDateTime.NextSuffixRegex) self._week_of_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.WeekOfRegex) self._month_of_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthOfRegex) self._date_unit_regex", "added, change the None values to their respective Regexps self._time_unit_regex", "return self._week_day_regex 
@property def relative_month_regex(self) -> Pattern: return self._relative_month_regex @property", "def which_week_regex(self) -> Pattern: return self._which_week_regex @property def rest_of_date_regex(self) ->", "import DateTimeExtractor from ..base_duration import BaseDurationExtractor from ..base_date import BaseDateExtractor", "self._previous_prefix_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.PreviousPrefixRegex ) self._cardinal_extractor = ItalianCardinalExtractor() # TODO", "Extractor: return self._cardinal_extractor @property def time_unit_regex(self) -> Pattern: return self._time_unit_regex", "self._cardinal_extractor = ItalianCardinalExtractor() # TODO When the implementation for these", "def number_combined_with_unit(self) -> Pattern: return self._number_combined_with_unit @property def past_regex(self) ->", "self._integer_extractor @property def number_parser(self) -> BaseNumberParser: return self._number_parser @property def", "__init__(self): self._all_half_year_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.AllHalfYearRegex) self._week_day_of_month_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayOfMonthRegex) self._complex_date_period_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.ComplexDatePeriodRegex)", "Pattern: return self._month_of_regex @property def date_unit_regex(self) -> Pattern: return self._date_unit_regex", "-> Pattern: return self._month_num_regex @property def century_suffix_regex(self) -> Pattern: return", "@property def this_prefix_regex(self) -> Pattern: return self._this_prefix_regex @property def which_week_regex(self)", "self._check_both_before_after @property def simple_cases_regexes(self) -> List[Pattern]: return self._simple_cases_regexes @property def", "@property def week_day_regex(self) -> Pattern: return self._week_day_regex @property def relative_month_regex(self)", "from recognizers_number.number import BaseNumberParser from recognizers_number.number.italian.extractors import 
ItalianIntegerExtractor, ItalianCardinalExtractor from", "self._followed_unit @property def number_combined_with_unit(self) -> Pattern: return self._number_combined_with_unit @property def", "BaseNumberExtractor, ItalianCardinalExtractor class ItalianDatePeriodExtractorConfiguration(DatePeriodExtractorConfiguration): @property def previous_prefix_regex(self) -> Pattern: return", "self._year_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.YearRegex) self._till_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.TillRegex) self._followed_unit =", "None values to their respective Regexps self._time_unit_regex = None def", "self._later_regex @property def less_than_regex(self) -> Pattern: return self._less_than_regex @property def", "@property def ordinal_extractor(self) -> BaseNumberExtractor: return self._ordinal_extractor @property def cardinal_extractor(self)", "self._day_regex @property def week_day_regex(self) -> Pattern: return self._week_day_regex @property def", "match = self.from_regex.search(source) if match: return MatchedIndex(True, match.start()) return MatchedIndex(False,", "return self._more_than_regex @property def duration_date_restrictions(self) -> [str]: return self._duration_date_restrictions @property", "RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayRegex) self._day_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.DayRegex) self._range_connector_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.RangeConnectorRegex) self._time_unit_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.TimeUnitRegex)", "self.before_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.BeforeRegex2) self._date_point_extractor = BaseDateExtractor( ItalianDateExtractorConfiguration()) self._integer_extractor =", "RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayOfMonthRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthFrontBetweenRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthFrontSimpleCasesRegex), 
RegExpUtility.get_safe_reg_exp(ItalianDateTime.QuarterRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.QuarterRegexYearFront), RegExpUtility.get_safe_reg_exp(ItalianDateTime.SeasonRegex), RegExpUtility.get_safe_reg_exp(", "def ordinal_extractor(self) -> BaseNumberExtractor: return self._ordinal_extractor @property def cardinal_extractor(self) ->", "def month_suffix_regex(self) -> Pattern: return self._month_suffix_regex @property def past_prefix_regex(self) ->", "self._complex_date_period_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.ComplexDatePeriodRegex) self._rest_of_date_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.RestOfDateRegex) self._which_week_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.WhichWeekRegex) self._this_prefix_regex", "= RegExpUtility.get_safe_reg_exp( BaseDateTime.IllegalYearRegex) self._year_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.YearRegex) self._till_regex = RegExpUtility.get_safe_reg_exp(", "RegExpUtility.get_safe_reg_exp( ItalianDateTime.NumberCombinedWithDateUnit) self._past_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.PastSuffixRegex) self._future_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.NextSuffixRegex)", "self._year_period_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.YearPeriodRegex ) self._month_num_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthNumRegex )", "Pattern: return self._which_week_regex @property def rest_of_date_regex(self) -> Pattern: return self._rest_of_date_regex", "ItalianDateTime.CenturySuffixRegex ) self._ordinal_extractor = ItalianOrdinalExtractor() self._cardinal_extractor = ItalianCardinalExtractor() self._previous_prefix_regex =", "return self._which_week_regex @property def rest_of_date_regex(self) -> Pattern: return self._rest_of_date_regex @property", "the None values to their respective Regexps self._time_unit_regex = None", "ItalianDateTime.DateUnitRegex) self._within_next_prefix_regex = 
RegExpUtility.get_safe_reg_exp( ItalianDateTime.WithinNextPrefixRegex) self._in_connector_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.InConnectorRegex) self._range_unit_regex", "return self._range_unit_regex @property def date_point_extractor(self) -> DateTimeExtractor: return self._date_point_extractor @property", "RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayOfMonthRegex) self._complex_date_period_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.ComplexDatePeriodRegex) self._rest_of_date_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.RestOfDateRegex) self._which_week_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.WhichWeekRegex)", "self._now_regex @property def future_suffix_regex(self) -> Pattern: return self._future_suffix_regex @property def", "= RegExpUtility.get_safe_reg_exp( ItalianDateTime.NumberCombinedWithDateUnit) self._past_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.PastSuffixRegex) self._future_regex = RegExpUtility.get_safe_reg_exp(", "# Copyright (c) Microsoft Corporation. All rights reserved. 
# Licensed", "return self._complex_date_period_regex @property def week_day_of_month_regex(self) -> Pattern: return self._week_day_of_month_regex @property", "= RegExpUtility.get_safe_reg_exp( ItalianDateTime.FutureSuffixRegex ) self._ago_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.AgoRegex ) self._later_regex", "Pattern: return self._within_next_prefix_regex @property def range_connector_regex(self) -> Pattern: return self._range_connector_regex", "self._range_connector_regex @property def day_regex(self) -> Pattern: return self._day_regex @property def", "@property def later_regex(self) -> Pattern: return self._later_regex @property def less_than_regex(self)", "= RegExpUtility.get_safe_reg_exp( ItalianDateTime.CenturySuffixRegex ) self._ordinal_extractor = ItalianOrdinalExtractor() self._cardinal_extractor = ItalianCardinalExtractor()", "@property def rest_of_date_regex(self) -> Pattern: return self._rest_of_date_regex @property def complex_date_period_regex(self)", "-> Pattern: return self._date_unit_regex @property def in_connector_regex(self) -> Pattern: return", "source: str) -> MatchedIndex: match = self.from_regex.search(source) if match: return", "def week_day_regex(self) -> Pattern: return self._week_day_regex @property def relative_month_regex(self) ->", "Regexps self._time_unit_regex = None def get_from_token_index(self, source: str) -> MatchedIndex:", "RegExpUtility.get_safe_reg_exp( ItalianDateTime.LaterRegex ) self._less_than_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.LessThanRegex ) self._more_than_regex =", "self._simple_cases_regexes = [ RegExpUtility.get_safe_reg_exp(ItalianDateTime.SimpleCasesRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.BetweenRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.OneWordPeriodRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthWithYear), RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthNumWithYear), RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearRegex), 
RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearPeriodRegex),", "def duration_extractor(self) -> DateTimeExtractor: return self._duration_extractor @property def now_regex(self) ->", "ItalianDateTime.LaterEarlyPeriodRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.WeekWithWeekDayRangeRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearPlusNumberRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.DecadeWithCenturyRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.RelativeDecadeRegex) ] self._check_both_before_after = ItalianDateTime.CheckBothBeforeAfter", "RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthNumRegex ) self._century_suffix_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.CenturySuffixRegex ) self._ordinal_extractor =", "ItalianDateTime.MonthNumRegex ) self._century_suffix_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.CenturySuffixRegex ) self._ordinal_extractor = ItalianOrdinalExtractor()", "-> Pattern: return self._followed_unit @property def number_combined_with_unit(self) -> Pattern: return", "import BaseDurationExtractor from ..base_date import BaseDateExtractor from ..base_dateperiod import DatePeriodExtractorConfiguration,", "self._which_week_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.WhichWeekRegex) self._this_prefix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.ThisPrefixRegex) self._next_prefix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.NextSuffixRegex) self._past_prefix_regex", "ItalianDateTime.LessThanRegex ) self._more_than_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.MoreThanRegex ) self._duration_date_restrictions = ItalianDateTime.DurationDateRestrictions", ") self._duration_date_restrictions = ItalianDateTime.DurationDateRestrictions self._year_period_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.YearPeriodRegex ) self._month_num_regex", "return self._decade_with_century_regex @property def future_regex(self) -> Pattern: return 
self._future_regex @property", ") self._less_than_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.LessThanRegex ) self._more_than_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.MoreThanRegex", "self._duration_extractor @property def now_regex(self) -> Pattern: return self._now_regex @property def", "Corporation. All rights reserved. # Licensed under the MIT License.", "DateTimeExtractor from ..base_duration import BaseDurationExtractor from ..base_date import BaseDateExtractor from", "now_regex(self) -> Pattern: return self._now_regex @property def future_suffix_regex(self) -> Pattern:", "= RegExpUtility.get_safe_reg_exp( ItalianDateTime.PreviousPrefixRegex ) self._cardinal_extractor = ItalianCardinalExtractor() # TODO When", "self._less_than_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.LessThanRegex ) self._more_than_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.MoreThanRegex )", "recognizers_number.number.italian.parsers import ItalianNumberParserConfiguration from ...resources.base_date_time import BaseDateTime from ...resources.italian_date_time import", ") self._cardinal_extractor = ItalianCardinalExtractor() # TODO When the implementation for", "ItalianDateTime.PreviousPrefixRegex ) self._cardinal_extractor = ItalianCardinalExtractor() # TODO When the implementation", "ItalianIntegerExtractor, ItalianCardinalExtractor from recognizers_number.number.italian.parsers import ItalianNumberParserConfiguration from ...resources.base_date_time import BaseDateTime", "RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearPlusNumberRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.DecadeWithCenturyRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.RelativeDecadeRegex) ] self._check_both_before_after = ItalianDateTime.CheckBothBeforeAfter self._illegal_year_regex = RegExpUtility.get_safe_reg_exp(", "-> Pattern: return self._past_prefix_regex @property def next_prefix_regex(self) -> Pattern: return", "@property def 
which_week_regex(self) -> Pattern: return self._which_week_regex @property def rest_of_date_regex(self)", "self._month_of_regex @property def date_unit_regex(self) -> Pattern: return self._date_unit_regex @property def", "RegExpUtility.get_safe_reg_exp( ItalianDateTime.LessThanRegex ) self._more_than_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.MoreThanRegex ) self._duration_date_restrictions =", "MatchedIndex from .duration_extractor_config import ItalianDurationExtractorConfiguration from .date_extractor_config import ItalianDateExtractorConfiguration from", "-> Pattern: return self._century_suffix_regex @property def ordinal_extractor(self) -> BaseNumberExtractor: return", "return self._week_day_of_month_regex @property def all_half_year_regex(self) -> Pattern: return self._all_half_year_regex def", "week_day_of_month_regex(self) -> Pattern: return self._week_day_of_month_regex @property def all_half_year_regex(self) -> Pattern:", "for these properties is added, change the None values to", "return self._ordinal_extractor @property def cardinal_extractor(self) -> Extractor: return self._cardinal_extractor @property", "= RegExpUtility.get_safe_reg_exp( ItalianDateTime.AgoRegex ) self._later_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.LaterRegex ) self._less_than_regex", "-> Pattern: return self._complex_date_period_regex @property def week_day_of_month_regex(self) -> Pattern: return", "def get_from_token_index(self, source: str) -> MatchedIndex: match = self.from_regex.search(source) if", "-> Pattern: return self._decade_with_century_regex @property def future_regex(self) -> Pattern: return", "@property def ago_regex(self) -> Pattern: return self._ago_regex @property def later_regex(self)", "= BaseDurationExtractor( ItalianDurationExtractorConfiguration()) self._now_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.NowRegex) self._future_suffix_regex = RegExpUtility.get_safe_reg_exp(", "ItalianCardinalExtractor from 
recognizers_number.number.italian.parsers import ItalianNumberParserConfiguration from ...resources.base_date_time import BaseDateTime from", "def ago_regex(self) -> Pattern: return self._ago_regex @property def later_regex(self) ->", "import Extractor from recognizers_number import ItalianOrdinalExtractor, BaseNumberExtractor, ItalianCardinalExtractor class ItalianDatePeriodExtractorConfiguration(DatePeriodExtractorConfiguration):", "number_combined_with_unit(self) -> Pattern: return self._number_combined_with_unit @property def past_regex(self) -> Pattern:", "return self._day_regex @property def week_day_regex(self) -> Pattern: return self._week_day_regex @property", "ItalianDateTime.WeekOfRegex) self._month_of_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthOfRegex) self._date_unit_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.DateUnitRegex) self._within_next_prefix_regex", "ItalianCardinalExtractor() # TODO When the implementation for these properties is", "Pattern: return self._past_regex @property def decade_with_century_regex(self) -> Pattern: return self._decade_with_century_regex", "self._relative_month_regex @property def month_suffix_regex(self) -> Pattern: return self._month_suffix_regex @property def", "def decade_with_century_regex(self) -> Pattern: return self._decade_with_century_regex @property def future_regex(self) ->", "@property def integer_extractor(self) -> BaseNumberExtractor: return self._integer_extractor @property def number_parser(self)", "check_both_before_after(self) -> bool: return self._check_both_before_after @property def simple_cases_regexes(self) -> List[Pattern]:", "self._next_prefix_regex @property def this_prefix_regex(self) -> Pattern: return self._this_prefix_regex @property def", "ItalianDateTime.InConnectorRegex) self._range_unit_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.RangeUnitRegex) self.from_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.FromRegex) 
self.connector_and_regex", "get_between_token_index(self, source: str) -> MatchedIndex: match = self.before_regex.search(source) if match:", "= self.from_regex.search(source) if match: return MatchedIndex(True, match.start()) return MatchedIndex(False, -1)", "Pattern: return self._relative_month_regex @property def month_suffix_regex(self) -> Pattern: return self._month_suffix_regex", "Pattern: return self._all_half_year_regex def __init__(self): self._all_half_year_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.AllHalfYearRegex) self._week_day_of_month_regex =", "RegExpUtility.get_safe_reg_exp( ItalianDateTime.QuarterRegexYearFront), RegExpUtility.get_safe_reg_exp(ItalianDateTime.SeasonRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.LaterEarlyPeriodRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.WeekWithWeekDayRangeRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearPlusNumberRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.DecadeWithCenturyRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.RelativeDecadeRegex)", "ItalianDateExtractorConfiguration()) self._integer_extractor = ItalianIntegerExtractor() self._number_parser = BaseNumberParser( ItalianNumberParserConfiguration()) self._duration_extractor =", "@property def more_than_regex(self) -> Pattern: return self._more_than_regex @property def duration_date_restrictions(self)", "-> Pattern: return self._this_prefix_regex @property def which_week_regex(self) -> Pattern: return", "= RegExpUtility.get_safe_reg_exp( ItalianDateTime.InConnectorRegex) self._range_unit_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.RangeUnitRegex) self.from_regex = RegExpUtility.get_safe_reg_exp(", "-> Pattern: return self._within_next_prefix_regex @property def range_connector_regex(self) -> Pattern: return", "Pattern: return self._followed_unit @property def number_combined_with_unit(self) -> Pattern: return self._number_combined_with_unit", "= ItalianOrdinalExtractor() 
self._cardinal_extractor = ItalianCardinalExtractor() self._previous_prefix_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.PreviousPrefixRegex )", "-> Pattern: return self._range_connector_regex @property def day_regex(self) -> Pattern: return", "self._decade_with_century_regex @property def future_regex(self) -> Pattern: return self._future_regex @property def", "self._more_than_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.MoreThanRegex ) self._duration_date_restrictions = ItalianDateTime.DurationDateRestrictions self._year_period_regex =", "-> Pattern: return self._future_regex @property def week_of_regex(self) -> Pattern: return", "ItalianDateTime.RangeUnitRegex) self.from_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.FromRegex) self.connector_and_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.ConnectorAndRegex) self.before_regex", "return self._future_suffix_regex @property def ago_regex(self) -> Pattern: return self._ago_regex @property", "self._future_suffix_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.FutureSuffixRegex ) self._ago_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.AgoRegex )", "ItalianDateTime.NumberCombinedWithDateUnit) self._past_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.PastSuffixRegex) self._future_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.NextSuffixRegex) self._week_of_regex", "this_prefix_regex(self) -> Pattern: return self._this_prefix_regex @property def which_week_regex(self) -> Pattern:", "self._month_num_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthNumRegex ) self._century_suffix_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.CenturySuffixRegex )", "= RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthOfRegex) self._date_unit_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.DateUnitRegex) self._within_next_prefix_regex = RegExpUtility.get_safe_reg_exp(", "def integer_extractor(self) -> BaseNumberExtractor: return 
self._integer_extractor @property def number_parser(self) ->", "rest_of_date_regex(self) -> Pattern: return self._rest_of_date_regex @property def complex_date_period_regex(self) -> Pattern:", "-1) def get_between_token_index(self, source: str) -> MatchedIndex: match = self.before_regex.search(source)", "RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthNumWithYear), RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearPeriodRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekOfYearRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayOfMonthRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthFrontBetweenRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthFrontSimpleCasesRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.QuarterRegex),", "Microsoft Corporation. All rights reserved. # Licensed under the MIT", "RegExpUtility.get_safe_reg_exp(ItalianDateTime.DecadeWithCenturyRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.RelativeDecadeRegex) ] self._check_both_before_after = ItalianDateTime.CheckBothBeforeAfter self._illegal_year_regex = RegExpUtility.get_safe_reg_exp( BaseDateTime.IllegalYearRegex)", "def range_unit_regex(self) -> Pattern: return self._range_unit_regex @property def date_point_extractor(self) ->", "ItalianDateTime.FutureSuffixRegex ) self._ago_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.AgoRegex ) self._later_regex = RegExpUtility.get_safe_reg_exp(", "from recognizers_text.utilities import RegExpUtility from recognizers_number.number import BaseNumberParser from recognizers_number.number.italian.extractors", "RegExpUtility.get_safe_reg_exp(ItalianDateTime.BetweenRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.OneWordPeriodRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthWithYear), RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthNumWithYear), RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearRegex), 
RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearPeriodRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekOfYearRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayOfMonthRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthFrontBetweenRegex),", "= RegExpUtility.get_safe_reg_exp( ItalianDateTime.FollowedDateUnit) self._number_combined_with_unit = RegExpUtility.get_safe_reg_exp( ItalianDateTime.NumberCombinedWithDateUnit) self._past_regex = RegExpUtility.get_safe_reg_exp(", "ItalianDateTime.WithinNextPrefixRegex) self._in_connector_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.InConnectorRegex) self._range_unit_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.RangeUnitRegex) self.from_regex", "import ItalianIntegerExtractor, ItalianCardinalExtractor from recognizers_number.number.italian.parsers import ItalianNumberParserConfiguration from ...resources.base_date_time import", "License. from typing import List, Pattern from recognizers_text.utilities import RegExpUtility", "import List, Pattern from recognizers_text.utilities import RegExpUtility from recognizers_number.number import", "self._previous_prefix_regex @property def check_both_before_after(self) -> bool: return self._check_both_before_after @property def", "def month_of_regex(self) -> Pattern: return self._month_of_regex @property def date_unit_regex(self) ->", "RegExpUtility.get_safe_reg_exp(ItalianDateTime.RangeConnectorRegex) self._time_unit_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.TimeUnitRegex) self._previous_prefix_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.PastSuffixRegex) self._check_both_before_after =", "RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearPeriodRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekOfYearRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayOfMonthRegex), RegExpUtility.get_safe_reg_exp( 
ItalianDateTime.MonthFrontBetweenRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthFrontSimpleCasesRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.QuarterRegex), RegExpUtility.get_safe_reg_exp(", "self._time_unit_regex = None def get_from_token_index(self, source: str) -> MatchedIndex: match", "RegExpUtility.get_safe_reg_exp( ItalianDateTime.NowRegex) self._future_suffix_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.FutureSuffixRegex ) self._ago_regex = RegExpUtility.get_safe_reg_exp(", "import BaseDateTime from ...resources.italian_date_time import ItalianDateTime from ..extractors import DateTimeExtractor", "ItalianDateTime.TillRegex) self._followed_unit = RegExpUtility.get_safe_reg_exp( ItalianDateTime.FollowedDateUnit) self._number_combined_with_unit = RegExpUtility.get_safe_reg_exp( ItalianDateTime.NumberCombinedWithDateUnit) self._past_regex", "self._month_suffix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthSuffixRegex) self._relative_month_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.RelativeMonthRegex) self._week_day_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayRegex) self._day_regex", "relative_month_regex(self) -> Pattern: return self._relative_month_regex @property def month_suffix_regex(self) -> Pattern:", "duration_date_restrictions(self) -> [str]: return self._duration_date_restrictions @property def year_period_regex(self) -> Pattern:", "self._ago_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.AgoRegex ) self._later_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.LaterRegex )", "-> List[Pattern]: return self._simple_cases_regexes @property def illegal_year_regex(self) -> Pattern: return", "self._this_prefix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.ThisPrefixRegex) self._next_prefix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.NextSuffixRegex) self._past_prefix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.PastSuffixRegex) 
self._month_suffix_regex", "self._cardinal_extractor = ItalianCardinalExtractor() self._previous_prefix_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.PreviousPrefixRegex ) self._cardinal_extractor =", "..extractors import DateTimeExtractor from ..base_duration import BaseDurationExtractor from ..base_date import", "less_than_regex(self) -> Pattern: return self._less_than_regex @property def more_than_regex(self) -> Pattern:", "RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthWithYear), RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthNumWithYear), RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearPeriodRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekOfYearRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayOfMonthRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthFrontBetweenRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthFrontSimpleCasesRegex),", "@property def followed_unit(self) -> Pattern: return self._followed_unit @property def number_combined_with_unit(self)", "Copyright (c) Microsoft Corporation. All rights reserved. 
# Licensed under", "RegExpUtility.get_safe_reg_exp(ItalianDateTime.NextSuffixRegex) self._past_prefix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.PastSuffixRegex) self._month_suffix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthSuffixRegex) self._relative_month_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.RelativeMonthRegex)", "BaseDurationExtractor from ..base_date import BaseDateExtractor from ..base_dateperiod import DatePeriodExtractorConfiguration, MatchedIndex", "def more_than_regex(self) -> Pattern: return self._more_than_regex @property def duration_date_restrictions(self) ->", "..base_dateperiod import DatePeriodExtractorConfiguration, MatchedIndex from .duration_extractor_config import ItalianDurationExtractorConfiguration from .date_extractor_config", "month_of_regex(self) -> Pattern: return self._month_of_regex @property def date_unit_regex(self) -> Pattern:", "= RegExpUtility.get_safe_reg_exp( ItalianDateTime.PastSuffixRegex) self._future_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.NextSuffixRegex) self._week_of_regex = RegExpUtility.get_safe_reg_exp(", "RegExpUtility.get_safe_reg_exp( ItalianDateTime.FromRegex) self.connector_and_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.ConnectorAndRegex) self.before_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.BeforeRegex2)", "...resources.base_date_time import BaseDateTime from ...resources.italian_date_time import ItalianDateTime from ..extractors import", "BaseNumberParser: return self._number_parser @property def duration_extractor(self) -> DateTimeExtractor: return self._duration_extractor", "= RegExpUtility.get_safe_reg_exp( ItalianDateTime.DateUnitRegex) self._within_next_prefix_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.WithinNextPrefixRegex) self._in_connector_regex = RegExpUtility.get_safe_reg_exp(", "def till_regex(self) -> Pattern: return self._till_regex @property def followed_unit(self) ->", 
"self._previous_prefix_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.PastSuffixRegex) self._check_both_before_after = ItalianDateTime.CheckBothBeforeAfter self._simple_cases_regexes = [", "@property def all_half_year_regex(self) -> Pattern: return self._all_half_year_regex def __init__(self): self._all_half_year_regex", "all_half_year_regex(self) -> Pattern: return self._all_half_year_regex def __init__(self): self._all_half_year_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.AllHalfYearRegex)", "return self._month_of_regex @property def date_unit_regex(self) -> Pattern: return self._date_unit_regex @property", "@property def past_prefix_regex(self) -> Pattern: return self._past_prefix_regex @property def next_prefix_regex(self)", "@property def month_suffix_regex(self) -> Pattern: return self._month_suffix_regex @property def past_prefix_regex(self)", "Pattern: return self._less_than_regex @property def more_than_regex(self) -> Pattern: return self._more_than_regex", "-> Pattern: return self._less_than_regex @property def more_than_regex(self) -> Pattern: return", "cardinal_extractor(self) -> Extractor: return self._cardinal_extractor @property def time_unit_regex(self) -> Pattern:", "-> Pattern: return self._relative_month_regex @property def month_suffix_regex(self) -> Pattern: return", "self._relative_month_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.RelativeMonthRegex) self._week_day_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayRegex) self._day_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.DayRegex) self._range_connector_regex", "RegExpUtility.get_safe_reg_exp( ItalianDateTime.WeekWithWeekDayRangeRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearPlusNumberRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.DecadeWithCenturyRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.RelativeDecadeRegex) ] self._check_both_before_after = ItalianDateTime.CheckBothBeforeAfter self._illegal_year_regex", 
"bool: return self._check_both_before_after @property def simple_cases_regexes(self) -> List[Pattern]: return self._simple_cases_regexes", ") self._month_num_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthNumRegex ) self._century_suffix_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.CenturySuffixRegex", "def later_regex(self) -> Pattern: return self._later_regex @property def less_than_regex(self) ->", "@property def within_next_prefix_regex(self) -> Pattern: return self._within_next_prefix_regex @property def range_connector_regex(self)", "future_suffix_regex(self) -> Pattern: return self._future_suffix_regex @property def ago_regex(self) -> Pattern:", "self._past_regex @property def decade_with_century_regex(self) -> Pattern: return self._decade_with_century_regex @property def", "self._simple_cases_regexes @property def illegal_year_regex(self) -> Pattern: return self._illegal_year_regex @property def", "-> Pattern: return self._in_connector_regex @property def range_unit_regex(self) -> Pattern: return", "return self._all_half_year_regex def __init__(self): self._all_half_year_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.AllHalfYearRegex) self._week_day_of_month_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayOfMonthRegex)", "self._future_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.NextSuffixRegex) self._week_of_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.WeekOfRegex) self._month_of_regex =", "= None def get_from_token_index(self, source: str) -> MatchedIndex: match =", "illegal_year_regex(self) -> Pattern: return self._illegal_year_regex @property def year_regex(self) -> Pattern:", "MatchedIndex(True, match.start()) return MatchedIndex(False, -1) def has_connector_token(self, source: str) ->", "= RegExpUtility.get_safe_reg_exp( ItalianDateTime.FromRegex) self.connector_and_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.ConnectorAndRegex) self.before_regex = 
RegExpUtility.get_safe_reg_exp(", "def complex_date_period_regex(self) -> Pattern: return self._complex_date_period_regex @property def week_day_of_month_regex(self) ->", "import BaseDateExtractor from ..base_dateperiod import DatePeriodExtractorConfiguration, MatchedIndex from .duration_extractor_config import", "= RegExpUtility.get_safe_reg_exp( ItalianDateTime.BeforeRegex2) self._date_point_extractor = BaseDateExtractor( ItalianDateExtractorConfiguration()) self._integer_extractor = ItalianIntegerExtractor()", "= BaseNumberParser( ItalianNumberParserConfiguration()) self._duration_extractor = BaseDurationExtractor( ItalianDurationExtractorConfiguration()) self._now_regex = RegExpUtility.get_safe_reg_exp(", "self._month_num_regex @property def century_suffix_regex(self) -> Pattern: return self._century_suffix_regex @property def", "RegExpUtility.get_safe_reg_exp( ItalianDateTime.RangeUnitRegex) self.from_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.FromRegex) self.connector_and_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.ConnectorAndRegex)", "TODO When the implementation for these properties is added, change", "ItalianNumberParserConfiguration from ...resources.base_date_time import BaseDateTime from ...resources.italian_date_time import ItalianDateTime from", "simple_cases_regexes(self) -> List[Pattern]: return self._simple_cases_regexes @property def illegal_year_regex(self) -> Pattern:", "-> Pattern: return self._next_prefix_regex @property def this_prefix_regex(self) -> Pattern: return", "self._rest_of_date_regex @property def complex_date_period_regex(self) -> Pattern: return self._complex_date_period_regex @property def", "decade_with_century_regex(self) -> Pattern: return self._decade_with_century_regex @property def future_regex(self) -> Pattern:", "-> Extractor: return self._cardinal_extractor @property def time_unit_regex(self) -> Pattern: return", "ItalianDateTime.DurationDateRestrictions self._year_period_regex = 
RegExpUtility.get_safe_reg_exp( ItalianDateTime.YearPeriodRegex ) self._month_num_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthNumRegex", "self._all_half_year_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.AllHalfYearRegex) self._week_day_of_month_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayOfMonthRegex) self._complex_date_period_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.ComplexDatePeriodRegex) self._rest_of_date_regex", "from recognizers_text.extractor import Extractor from recognizers_number import ItalianOrdinalExtractor, BaseNumberExtractor, ItalianCardinalExtractor", "RegExpUtility.get_safe_reg_exp( ItalianDateTime.AgoRegex ) self._later_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.LaterRegex ) self._less_than_regex =", "match: return MatchedIndex(True, match.start()) return MatchedIndex(False, -1) def has_connector_token(self, source:", "return self._within_next_prefix_regex @property def range_connector_regex(self) -> Pattern: return self._range_connector_regex @property", "self._week_day_regex @property def relative_month_regex(self) -> Pattern: return self._relative_month_regex @property def", "recognizers_text.extractor import Extractor from recognizers_number import ItalianOrdinalExtractor, BaseNumberExtractor, ItalianCardinalExtractor class", "from recognizers_number import ItalianOrdinalExtractor, BaseNumberExtractor, ItalianCardinalExtractor class ItalianDatePeriodExtractorConfiguration(DatePeriodExtractorConfiguration): @property def", "is added, change the None values to their respective Regexps", "@property def duration_extractor(self) -> DateTimeExtractor: return self._duration_extractor @property def now_regex(self)", "return self._year_period_regex @property def month_num_regex(self) -> Pattern: return self._month_num_regex @property", "year_regex(self) -> Pattern: return self._year_regex @property def till_regex(self) -> Pattern:", 
"RegExpUtility.get_safe_reg_exp(ItalianDateTime.RestOfDateRegex) self._which_week_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.WhichWeekRegex) self._this_prefix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.ThisPrefixRegex) self._next_prefix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.NextSuffixRegex)", "return self._now_regex @property def future_suffix_regex(self) -> Pattern: return self._future_suffix_regex @property", "self._less_than_regex @property def more_than_regex(self) -> Pattern: return self._more_than_regex @property def", "week_day_regex(self) -> Pattern: return self._week_day_regex @property def relative_month_regex(self) -> Pattern:", "= RegExpUtility.get_safe_reg_exp(ItalianDateTime.ComplexDatePeriodRegex) self._rest_of_date_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.RestOfDateRegex) self._which_week_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.WhichWeekRegex) self._this_prefix_regex =", "self._complex_date_period_regex @property def week_day_of_month_regex(self) -> Pattern: return self._week_day_of_month_regex @property def", "None def get_from_token_index(self, source: str) -> MatchedIndex: match = self.from_regex.search(source)", "return self._time_unit_regex @property def within_next_prefix_regex(self) -> Pattern: return self._within_next_prefix_regex @property", "[str]: return self._duration_date_restrictions @property def year_period_regex(self) -> Pattern: return self._year_period_regex", "self._past_prefix_regex @property def next_prefix_regex(self) -> Pattern: return self._next_prefix_regex @property def", "= RegExpUtility.get_safe_reg_exp(ItalianDateTime.DayRegex) self._range_connector_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.RangeConnectorRegex) self._time_unit_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.TimeUnitRegex) self._previous_prefix_regex =", "def day_regex(self) -> Pattern: return self._day_regex @property def week_day_regex(self) ->", "Pattern: return 
self._week_of_regex @property def month_of_regex(self) -> Pattern: return self._month_of_regex", "= RegExpUtility.get_safe_reg_exp( ItalianDateTime.TillRegex) self._followed_unit = RegExpUtility.get_safe_reg_exp( ItalianDateTime.FollowedDateUnit) self._number_combined_with_unit = RegExpUtility.get_safe_reg_exp(", "def week_of_regex(self) -> Pattern: return self._week_of_regex @property def month_of_regex(self) ->", "from typing import List, Pattern from recognizers_text.utilities import RegExpUtility from", "Pattern: return self._range_unit_regex @property def date_point_extractor(self) -> DateTimeExtractor: return self._date_point_extractor", "All rights reserved. # Licensed under the MIT License. from", "= ItalianCardinalExtractor() # TODO When the implementation for these properties", "@property def time_unit_regex(self) -> Pattern: return self._time_unit_regex @property def within_next_prefix_regex(self)", "= ItalianDateTime.DurationDateRestrictions self._year_period_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.YearPeriodRegex ) self._month_num_regex = RegExpUtility.get_safe_reg_exp(", "return self._previous_prefix_regex @property def check_both_before_after(self) -> bool: return self._check_both_before_after @property", "-1) def has_connector_token(self, source: str) -> bool: return not self.connector_and_regex.search(source)", "-> MatchedIndex: match = self.before_regex.search(source) if match: return MatchedIndex(True, match.start())", "day_regex(self) -> Pattern: return self._day_regex @property def week_day_regex(self) -> Pattern:", "import ItalianDateExtractorConfiguration from recognizers_text.extractor import Extractor from recognizers_number import ItalianOrdinalExtractor,", "return self._ago_regex @property def later_regex(self) -> Pattern: return self._later_regex @property", "-> BaseNumberExtractor: return self._ordinal_extractor @property def cardinal_extractor(self) -> Extractor: return", "-> Pattern: return self._later_regex @property 
def less_than_regex(self) -> Pattern: return", "return self._range_connector_regex @property def day_regex(self) -> Pattern: return self._day_regex @property", "def number_parser(self) -> BaseNumberParser: return self._number_parser @property def duration_extractor(self) ->", "self._duration_date_restrictions = ItalianDateTime.DurationDateRestrictions self._year_period_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.YearPeriodRegex ) self._month_num_regex =", "Pattern: return self._year_period_regex @property def month_num_regex(self) -> Pattern: return self._month_num_regex", "ItalianNumberParserConfiguration()) self._duration_extractor = BaseDurationExtractor( ItalianDurationExtractorConfiguration()) self._now_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.NowRegex) self._future_suffix_regex", "return self._past_regex @property def decade_with_century_regex(self) -> Pattern: return self._decade_with_century_regex @property", "@property def till_regex(self) -> Pattern: return self._till_regex @property def followed_unit(self)", "century_suffix_regex(self) -> Pattern: return self._century_suffix_regex @property def ordinal_extractor(self) -> BaseNumberExtractor:", "= RegExpUtility.get_safe_reg_exp( ItalianDateTime.ConnectorAndRegex) self.before_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.BeforeRegex2) self._date_point_extractor = BaseDateExtractor(", "-> Pattern: return self._week_day_of_month_regex @property def all_half_year_regex(self) -> Pattern: return", "self._till_regex @property def followed_unit(self) -> Pattern: return self._followed_unit @property def", "RegExpUtility.get_safe_reg_exp( ItalianDateTime.CenturySuffixRegex ) self._ordinal_extractor = ItalianOrdinalExtractor() self._cardinal_extractor = ItalianCardinalExtractor() self._previous_prefix_regex", "self._followed_unit = RegExpUtility.get_safe_reg_exp( ItalianDateTime.FollowedDateUnit) self._number_combined_with_unit = RegExpUtility.get_safe_reg_exp( 
ItalianDateTime.NumberCombinedWithDateUnit) self._past_regex =", "complex_date_period_regex(self) -> Pattern: return self._complex_date_period_regex @property def week_day_of_month_regex(self) -> Pattern:", "self._check_both_before_after = ItalianDateTime.CheckBothBeforeAfter self._illegal_year_regex = RegExpUtility.get_safe_reg_exp( BaseDateTime.IllegalYearRegex) self._year_regex = RegExpUtility.get_safe_reg_exp(", "RegExpUtility.get_safe_reg_exp( ItalianDateTime.YearPeriodRegex ) self._month_num_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthNumRegex ) self._century_suffix_regex =", "-> Pattern: return self._illegal_year_regex @property def year_regex(self) -> Pattern: return", "change the None values to their respective Regexps self._time_unit_regex =", "return self._this_prefix_regex @property def which_week_regex(self) -> Pattern: return self._which_week_regex @property", "recognizers_number import ItalianOrdinalExtractor, BaseNumberExtractor, ItalianCardinalExtractor class ItalianDatePeriodExtractorConfiguration(DatePeriodExtractorConfiguration): @property def previous_prefix_regex(self)", "return self._century_suffix_regex @property def ordinal_extractor(self) -> BaseNumberExtractor: return self._ordinal_extractor @property", "RegExpUtility.get_safe_reg_exp(ItalianDateTime.DayRegex) self._range_connector_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.RangeConnectorRegex) self._time_unit_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.TimeUnitRegex) self._previous_prefix_regex = RegExpUtility.get_safe_reg_exp(", "= ItalianDateTime.CheckBothBeforeAfter self._simple_cases_regexes = [ RegExpUtility.get_safe_reg_exp(ItalianDateTime.SimpleCasesRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.BetweenRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.OneWordPeriodRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthWithYear), RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthNumWithYear),", "self._ago_regex @property 
def later_regex(self) -> Pattern: return self._later_regex @property def", "RegExpUtility.get_safe_reg_exp( ItalianDateTime.TillRegex) self._followed_unit = RegExpUtility.get_safe_reg_exp( ItalianDateTime.FollowedDateUnit) self._number_combined_with_unit = RegExpUtility.get_safe_reg_exp( ItalianDateTime.NumberCombinedWithDateUnit)", "@property def duration_date_restrictions(self) -> [str]: return self._duration_date_restrictions @property def year_period_regex(self)", "Pattern: return self._day_regex @property def week_day_regex(self) -> Pattern: return self._week_day_regex", "the implementation for these properties is added, change the None", "ItalianDateTime.FollowedDateUnit) self._number_combined_with_unit = RegExpUtility.get_safe_reg_exp( ItalianDateTime.NumberCombinedWithDateUnit) self._past_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.PastSuffixRegex) self._future_regex", "-> Pattern: return self._previous_prefix_regex @property def check_both_before_after(self) -> bool: return", "-> MatchedIndex: match = self.from_regex.search(source) if match: return MatchedIndex(True, match.start())", "str) -> MatchedIndex: match = self.before_regex.search(source) if match: return MatchedIndex(True,", "def range_connector_regex(self) -> Pattern: return self._range_connector_regex @property def day_regex(self) ->", "str) -> MatchedIndex: match = self.from_regex.search(source) if match: return MatchedIndex(True,", "def now_regex(self) -> Pattern: return self._now_regex @property def future_suffix_regex(self) ->", "self._number_parser = BaseNumberParser( ItalianNumberParserConfiguration()) self._duration_extractor = BaseDurationExtractor( ItalianDurationExtractorConfiguration()) self._now_regex =", "-> Pattern: return self._future_suffix_regex @property def ago_regex(self) -> Pattern: return", "has_connector_token(self, source: str) -> bool: return not self.connector_and_regex.search(source) is None", "return self._past_prefix_regex @property def 
next_prefix_regex(self) -> Pattern: return self._next_prefix_regex @property", "ItalianDateTime.WeekWithWeekDayRangeRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearPlusNumberRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.DecadeWithCenturyRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.RelativeDecadeRegex) ] self._check_both_before_after = ItalianDateTime.CheckBothBeforeAfter self._illegal_year_regex =", "source: str) -> MatchedIndex: match = self.before_regex.search(source) if match: return", ".duration_extractor_config import ItalianDurationExtractorConfiguration from .date_extractor_config import ItalianDateExtractorConfiguration from recognizers_text.extractor import", "self._future_regex @property def week_of_regex(self) -> Pattern: return self._week_of_regex @property def", "return self._week_of_regex @property def month_of_regex(self) -> Pattern: return self._month_of_regex @property", "def date_point_extractor(self) -> DateTimeExtractor: return self._date_point_extractor @property def integer_extractor(self) ->", "self._later_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.LaterRegex ) self._less_than_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.LessThanRegex )", "self._integer_extractor = ItalianIntegerExtractor() self._number_parser = BaseNumberParser( ItalianNumberParserConfiguration()) self._duration_extractor = BaseDurationExtractor(", "RegExpUtility.get_safe_reg_exp( ItalianDateTime.PreviousPrefixRegex ) self._cardinal_extractor = ItalianCardinalExtractor() # TODO When the", "-> Pattern: return self._till_regex @property def followed_unit(self) -> Pattern: return", "RegExpUtility from recognizers_number.number import BaseNumberParser from recognizers_number.number.italian.extractors import ItalianIntegerExtractor, ItalianCardinalExtractor", "DateTimeExtractor: return self._duration_extractor @property def now_regex(self) -> Pattern: return self._now_regex", "self._year_period_regex @property def 
month_num_regex(self) -> Pattern: return self._month_num_regex @property def", "next_prefix_regex(self) -> Pattern: return self._next_prefix_regex @property def this_prefix_regex(self) -> Pattern:", "RegExpUtility.get_safe_reg_exp(ItalianDateTime.SeasonRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.LaterEarlyPeriodRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.WeekWithWeekDayRangeRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearPlusNumberRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.DecadeWithCenturyRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.RelativeDecadeRegex) ] self._check_both_before_after", "ItalianDateTime.PastSuffixRegex) self._future_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.NextSuffixRegex) self._week_of_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.WeekOfRegex) self._month_of_regex", "recognizers_number.number import BaseNumberParser from recognizers_number.number.italian.extractors import ItalianIntegerExtractor, ItalianCardinalExtractor from recognizers_number.number.italian.parsers", "MatchedIndex(True, match.start()) return MatchedIndex(False, -1) def get_between_token_index(self, source: str) ->", "ItalianDateTime.CheckBothBeforeAfter self._illegal_year_regex = RegExpUtility.get_safe_reg_exp( BaseDateTime.IllegalYearRegex) self._year_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.YearRegex) self._till_regex", "@property def date_unit_regex(self) -> Pattern: return self._date_unit_regex @property def in_connector_regex(self)", ") self._century_suffix_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.CenturySuffixRegex ) self._ordinal_extractor = ItalianOrdinalExtractor() self._cardinal_extractor", "Pattern: return self._next_prefix_regex @property def this_prefix_regex(self) -> Pattern: return self._this_prefix_regex", "return self._less_than_regex @property def more_than_regex(self) -> Pattern: return self._more_than_regex @property", 
"ItalianDateTime.CheckBothBeforeAfter self._simple_cases_regexes = [ RegExpUtility.get_safe_reg_exp(ItalianDateTime.SimpleCasesRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.BetweenRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.OneWordPeriodRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthWithYear), RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthNumWithYear), RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearRegex),", "@property def check_both_before_after(self) -> bool: return self._check_both_before_after @property def simple_cases_regexes(self)", "= RegExpUtility.get_safe_reg_exp(ItalianDateTime.PastSuffixRegex) self._month_suffix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthSuffixRegex) self._relative_month_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.RelativeMonthRegex) self._week_day_regex =", "ItalianOrdinalExtractor() self._cardinal_extractor = ItalianCardinalExtractor() self._previous_prefix_regex = RegExpUtility.get_safe_reg_exp( ItalianDateTime.PreviousPrefixRegex ) self._cardinal_extractor", "ItalianDateTime.BeforeRegex2) self._date_point_extractor = BaseDateExtractor( ItalianDateExtractorConfiguration()) self._integer_extractor = ItalianIntegerExtractor() self._number_parser =", "def followed_unit(self) -> Pattern: return self._followed_unit @property def number_combined_with_unit(self) ->", "Pattern: return self._future_regex @property def week_of_regex(self) -> Pattern: return self._week_of_regex", "self._future_suffix_regex @property def ago_regex(self) -> Pattern: return self._ago_regex @property def", "RegExpUtility.get_safe_reg_exp(ItalianDateTime.WhichWeekRegex) self._this_prefix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.ThisPrefixRegex) self._next_prefix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.NextSuffixRegex) self._past_prefix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.PastSuffixRegex)", "Extractor from recognizers_number import 
ItalianOrdinalExtractor, BaseNumberExtractor, ItalianCardinalExtractor class ItalianDatePeriodExtractorConfiguration(DatePeriodExtractorConfiguration): @property", "reserved. # Licensed under the MIT License. from typing import", "week_of_regex(self) -> Pattern: return self._week_of_regex @property def month_of_regex(self) -> Pattern:", "ItalianDateTime.MonthFrontSimpleCasesRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.QuarterRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.QuarterRegexYearFront), RegExpUtility.get_safe_reg_exp(ItalianDateTime.SeasonRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.LaterEarlyPeriodRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.WeekWithWeekDayRangeRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearPlusNumberRegex),", "= RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthSuffixRegex) self._relative_month_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.RelativeMonthRegex) self._week_day_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayRegex) self._day_regex =", "RegExpUtility.get_safe_reg_exp(ItalianDateTime.OneWordPeriodRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthWithYear), RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthNumWithYear), RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearPeriodRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekOfYearRegex), RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayOfMonthRegex), RegExpUtility.get_safe_reg_exp( ItalianDateTime.MonthFrontBetweenRegex), RegExpUtility.get_safe_reg_exp(" ]
[ "= None, ) -> NoReturn: \"\"\"Start real driver connection from", "AnyStr -> SQL query statement args: Optional[Iterable[Any]] -> Object with", "\"\"\" if not args: return cursor.execute(sql) return cursor.execute(sql, tuple(args)) def", "be replaced in query \"\"\" if not args: return cursor.execute(sql)", "= url self.__conn = sqlite3.connect(url) self.__commit = autocommit @staticmethod def", ":param url: Database connection url :param autocommit: Auto commit transactions", "-> NoReturn: \"\"\"Reset place holder status (do nothing)\"\"\" def __repr__(self):", "== \"true\" self.__url = url self.__conn = sqlite3.connect(url) self.__commit =", "of tuple records found by query \"\"\" self._validate_params({'sql'}, set(kwargs.keys())) cursor", "record \"\"\" self._validate_params({'sql'}, set(kwargs.keys())) cursor = self.__conn.cursor() _ = self.__execute(cursor,", "return cursor.execute(sql, tuple(args)) def query(self, **kwargs) -> List[Tuple]: \"\"\"Execute a", "replacement values :return List[Tuple]: List of tuple records found by", "-> AnyStr: \"\"\"Return query place holder.\"\"\" return '?' def reset_placeholder(self)", "None: url = ':memory:' if autocommit is None: autocommit =", "nothing)\"\"\" def __repr__(self): \"\"\"Mysql driver representation.\"\"\" return f\"SQLite({self.__url})\" def __commit_transaction(self):", ":return List[Tuple]: List of tuple records found by query \"\"\"", "os.getenv('DATABASE_COMMIT', None) is not None: autocommit = os.getenv('DATABASE_COMMIT').lower() == \"true\"", "*kwargs.get('args', [])) self.__commit_transaction() res = cursor.fetchall() cursor.close() return res def", "List of tuple records found by query \"\"\" self._validate_params({'sql'}, set(kwargs.keys()))", "Object with query replacement values \"\"\" self._validate_params({'sql'}, set(kwargs.keys())) cursor =", ":param kwargs: Parameters to execute query statement. sql: AnyStr ->", "NoReturn: \"\"\"Start real driver connection from parameters. 
:param url: Database", "not None: url = os.getenv('DATABASE_URL') if os.getenv('DATABASE_COMMIT', None) is not", "Object with query replacement values :return List[Tuple]: List of tuple", "Database file ulr on the system. If it's an in", "query to be executed :param args: List of arguments passed", "connection class. Environment variables: DATABASE_URL: Database file ulr on the", "query \"\"\" if not args: return cursor.execute(sql) return cursor.execute(sql, tuple(args))", "do not return any result value. :param kwargs: Parameters to", "autocommit: Auto commit transactions \"\"\" if url is None: url", "DATABASE_COMMIT: default('false') Auto commit transaction flag :type url: :param url:", "): super().__init__() self.__build_connection(url, autocommit) def __build_connection( self, url: Optional[AnyStr] =", "def query_none(self, **kwargs) -> NoReturn: \"\"\"Execute a query and do", "ulr on the system. If it's an in memory database", "reset_placeholder(self) -> NoReturn: \"\"\"Reset place holder status (do nothing)\"\"\" def", "List, NoReturn, Optional, Tuple from pydbrepo.drivers.driver import Driver class SQLite(Driver):", "should be None or `:memory:` string DATABASE_COMMIT: default('false') Auto commit", "-> NoReturn: \"\"\"Execute a query and do not return any", "import sqlite3 from typing import Any, AnyStr, List, NoReturn, Optional,", "cursor statement :param sql: Raw query to be executed :param", "If it's an in memory database the url should be", "None: autocommit = False if os.getenv('DATABASE_URL', None) is not None:", "url = os.getenv('DATABASE_URL') if os.getenv('DATABASE_COMMIT', None) is not None: autocommit", "and return all values. 
:param kwargs: Parameters to execute query", "return res def query_none(self, **kwargs) -> NoReturn: \"\"\"Execute a query", "None: url = os.getenv('DATABASE_URL') if os.getenv('DATABASE_COMMIT', None) is not None:", "# pylint: disable=R0201 import os import sqlite3 from typing import", "Any, AnyStr, List, NoReturn, Optional, Tuple from pydbrepo.drivers.driver import Driver", "kwargs['sql'], *kwargs.get('args', [])) self.__commit_transaction() cursor.close() def commit(self) -> NoReturn: \"\"\"Commit", "False if os.getenv('DATABASE_URL', None) is not None: url = os.getenv('DATABASE_URL')", "get_real_driver(self) -> Any: \"\"\"Return real mysql driver connection.\"\"\" return self.__conn", "def query(self, **kwargs) -> List[Tuple]: \"\"\"Execute a query and return", "disable=R0201 import os import sqlite3 from typing import Any, AnyStr,", "\"true\" self.__url = url self.__conn = sqlite3.connect(url) self.__commit = autocommit", "cursor.close() return res def query_one(self, **kwargs) -> Tuple[Any, ...]: \"\"\"Execute", "self, url: Optional[AnyStr] = None, autocommit: Optional[bool] = None, )", "query \"\"\" self._validate_params({'sql'}, set(kwargs.keys())) cursor = self.__conn.cursor() _ = self.__execute(cursor,", "url :param autocommit: Auto commit transactions \"\"\" def __init__( self,", "sql: AnyStr, *args) -> Any: \"\"\"Execute query and attempt to", "Any: \"\"\"Execute query and attempt to replace with arguments. :param", "args: Optional[Iterable[Any]] -> Object with query replacement values \"\"\" self._validate_params({'sql'},", "close(self) -> NoReturn: \"\"\"Close current connection.\"\"\" self.__conn.close() def get_real_driver(self) ->", "from parameters. :param url: Database connection url :param autocommit: Auto", "to execute query statement. sql: AnyStr -> SQL query statement", "self.__build_connection(url, autocommit) def __build_connection( self, url: Optional[AnyStr] = None, autocommit:", "variables: DATABASE_URL: Database file ulr on the system. 
If it's", "set(kwargs.keys())) cursor = self.__conn.cursor() _ = self.__execute(cursor, kwargs['sql'], *kwargs.get('args', []))", "args: Optional[Iterable[Any]] -> Object with query replacement values :return Tuple:", "_ = self.__execute(cursor, kwargs['sql'], *kwargs.get('args', [])) self.__commit_transaction() cursor.close() def commit(self)", "Driver implementation.\"\"\" # pylint: disable=R0201 import os import sqlite3 from", "DATABASE_URL: Database file ulr on the system. If it's an", "[])) self.__commit_transaction() cursor.close() def commit(self) -> NoReturn: \"\"\"Commit transaction.\"\"\" self.__conn.commit()", "**kwargs) -> List[Tuple]: \"\"\"Execute a query and return all values.", "Found record \"\"\" self._validate_params({'sql'}, set(kwargs.keys())) cursor = self.__conn.cursor() _ =", "the url should be None or `:memory:` string DATABASE_COMMIT: default('false')", "Tuple: Found record \"\"\" self._validate_params({'sql'}, set(kwargs.keys())) cursor = self.__conn.cursor() _", "class SQLite(Driver): \"\"\"SQLite Driver connection class. Environment variables: DATABASE_URL: Database", "`:memory:` string DATABASE_COMMIT: default('false') Auto commit transaction flag :type url:", "any result value. :param kwargs: Parameters to execute query statement.", "-> Tuple[Any, ...]: \"\"\"Execute a query and do not return", "= self.__conn.cursor() _ = self.__execute(cursor, kwargs['sql'], *kwargs.get('args', [])) self.__commit_transaction() cursor.close()", "List[Tuple]: \"\"\"Execute a query and return all values. :param kwargs:", "a query and do not return any result value. 
:param", "representation.\"\"\" return f\"SQLite({self.__url})\" def __commit_transaction(self): \"\"\"Execute commit operation if the", "self.__conn def placeholder(self, **kwargs) -> AnyStr: \"\"\"Return query place holder.\"\"\"", "values \"\"\" self._validate_params({'sql'}, set(kwargs.keys())) cursor = self.__conn.cursor() _ = self.__execute(cursor,", "commit transactions \"\"\" if url is None: url = ':memory:'", "values. :param kwargs: Parameters to execute query statement. sql: AnyStr", "query replacement values :return Tuple: Found record \"\"\" self._validate_params({'sql'}, set(kwargs.keys()))", "cursor.fetchall() cursor.close() return res def query_one(self, **kwargs) -> Tuple[Any, ...]:", "kwargs['sql'], *kwargs.get('args', [])) self.__commit_transaction() res = cursor.fetchone() cursor.close() return res", "__repr__(self): \"\"\"Mysql driver representation.\"\"\" return f\"SQLite({self.__url})\" def __commit_transaction(self): \"\"\"Execute commit", "commit operation if the __commit flag is True.\"\"\" if self.__commit:", "not args: return cursor.execute(sql) return cursor.execute(sql, tuple(args)) def query(self, **kwargs)", "replaced in query \"\"\" if not args: return cursor.execute(sql) return", "implementation.\"\"\" # pylint: disable=R0201 import os import sqlite3 from typing", "-> Any: \"\"\"Return real mysql driver connection.\"\"\" return self.__conn def", "url is None: url = ':memory:' if autocommit is None:", "Optional, Tuple from pydbrepo.drivers.driver import Driver class SQLite(Driver): \"\"\"SQLite Driver", "Auto commit transaction flag :type url: :param url: Database connection", "*kwargs.get('args', [])) self.__commit_transaction() res = cursor.fetchone() cursor.close() return res def", "commit transaction flag :type url: :param url: Database connection url", "import Driver class SQLite(Driver): \"\"\"SQLite Driver connection class. 
Environment variables:", "sqlite3.connect(url) self.__commit = autocommit @staticmethod def __execute(cursor, sql: AnyStr, *args)", "query and do not return any result value. :param kwargs:", "url should be None or `:memory:` string DATABASE_COMMIT: default('false') Auto", "*args) -> Any: \"\"\"Execute query and attempt to replace with", "if url is None: url = ':memory:' if autocommit is", "transaction.\"\"\" self.__conn.commit() def rollback(self) -> NoReturn: self.__conn.rollback() def close(self) ->", "-> NoReturn: \"\"\"Close current connection.\"\"\" self.__conn.close() def get_real_driver(self) -> Any:", "statement args: Optional[Iterable[Any]] -> Object with query replacement values :return", "cursor.close() return res def query_none(self, **kwargs) -> NoReturn: \"\"\"Execute a", "\"\"\" def __init__( self, url: Optional[AnyStr] = None, autocommit: Optional[bool]", "= None, ): super().__init__() self.__build_connection(url, autocommit) def __build_connection( self, url:", "be None or `:memory:` string DATABASE_COMMIT: default('false') Auto commit transaction", "url: Optional[AnyStr] = None, autocommit: Optional[bool] = None, ): super().__init__()", "\"\"\"SQLite Driver connection class. Environment variables: DATABASE_URL: Database file ulr", "with query replacement values \"\"\" self._validate_params({'sql'}, set(kwargs.keys())) cursor = self.__conn.cursor()", "def close(self) -> NoReturn: \"\"\"Close current connection.\"\"\" self.__conn.close() def get_real_driver(self)", "holder.\"\"\" return '?' 
def reset_placeholder(self) -> NoReturn: \"\"\"Reset place holder", "it's an in memory database the url should be None", "if not args: return cursor.execute(sql) return cursor.execute(sql, tuple(args)) def query(self,", "by query \"\"\" self._validate_params({'sql'}, set(kwargs.keys())) cursor = self.__conn.cursor() _ =", "\"\"\"Mysql driver representation.\"\"\" return f\"SQLite({self.__url})\" def __commit_transaction(self): \"\"\"Execute commit operation", "flag :type url: :param url: Database connection url :param autocommit:", "autocommit = os.getenv('DATABASE_COMMIT').lower() == \"true\" self.__url = url self.__conn =", "replacement values \"\"\" self._validate_params({'sql'}, set(kwargs.keys())) cursor = self.__conn.cursor() _ =", "= cursor.fetchall() cursor.close() return res def query_one(self, **kwargs) -> Tuple[Any,", "AnyStr: \"\"\"Return query place holder.\"\"\" return '?' def reset_placeholder(self) ->", "\"\"\"Reset place holder status (do nothing)\"\"\" def __repr__(self): \"\"\"Mysql driver", "autocommit @staticmethod def __execute(cursor, sql: AnyStr, *args) -> Any: \"\"\"Execute", "\"\"\"Start real driver connection from parameters. :param url: Database connection", "execute query statement. sql: AnyStr -> SQL query statement args:", "= None, autocommit: Optional[bool] = None, ) -> NoReturn: \"\"\"Start", "autocommit: Optional[bool] = None, ) -> NoReturn: \"\"\"Start real driver", "cursor = self.__conn.cursor() _ = self.__execute(cursor, kwargs['sql'], *kwargs.get('args', [])) self.__commit_transaction()", "Optional[AnyStr] = None, autocommit: Optional[bool] = None, ) -> NoReturn:", "values :return Tuple: Found record \"\"\" self._validate_params({'sql'}, set(kwargs.keys())) cursor =", "driver representation.\"\"\" return f\"SQLite({self.__url})\" def __commit_transaction(self): \"\"\"Execute commit operation if", "parameters. 
:param url: Database connection url :param autocommit: Auto commit", "is None: autocommit = False if os.getenv('DATABASE_URL', None) is not", "NoReturn: \"\"\"Close current connection.\"\"\" self.__conn.close() def get_real_driver(self) -> Any: \"\"\"Return", "connection.\"\"\" self.__conn.close() def get_real_driver(self) -> Any: \"\"\"Return real mysql driver", "None) is not None: url = os.getenv('DATABASE_URL') if os.getenv('DATABASE_COMMIT', None)", "typing import Any, AnyStr, List, NoReturn, Optional, Tuple from pydbrepo.drivers.driver", "List[Tuple]: List of tuple records found by query \"\"\" self._validate_params({'sql'},", "all values. :param kwargs: Parameters to execute query statement. sql:", "replace with arguments. :param cursor: Connection cursor statement :param sql:", "_ = self.__execute(cursor, kwargs['sql'], *kwargs.get('args', [])) self.__commit_transaction() res = cursor.fetchone()", "-> NoReturn: \"\"\"Commit transaction.\"\"\" self.__conn.commit() def rollback(self) -> NoReturn: self.__conn.rollback()", "def __repr__(self): \"\"\"Mysql driver representation.\"\"\" return f\"SQLite({self.__url})\" def __commit_transaction(self): \"\"\"Execute", "= False if os.getenv('DATABASE_URL', None) is not None: url =", "sql: Raw query to be executed :param args: List of", "NoReturn, Optional, Tuple from pydbrepo.drivers.driver import Driver class SQLite(Driver): \"\"\"SQLite", "cursor: Connection cursor statement :param sql: Raw query to be", "connection url :param autocommit: Auto commit transactions \"\"\" if url", "os.getenv('DATABASE_URL', None) is not None: url = os.getenv('DATABASE_URL') if os.getenv('DATABASE_COMMIT',", "Tuple[Any, ...]: \"\"\"Execute a query and do not return any", "place holder.\"\"\" return '?' 
def reset_placeholder(self) -> NoReturn: \"\"\"Reset place", "in memory database the url should be None or `:memory:`", "def rollback(self) -> NoReturn: self.__conn.rollback() def close(self) -> NoReturn: \"\"\"Close", "None) is not None: autocommit = os.getenv('DATABASE_COMMIT').lower() == \"true\" self.__url", "*kwargs.get('args', [])) self.__commit_transaction() cursor.close() def commit(self) -> NoReturn: \"\"\"Commit transaction.\"\"\"", "Object with query replacement values :return Tuple: Found record \"\"\"", "place holder status (do nothing)\"\"\" def __repr__(self): \"\"\"Mysql driver representation.\"\"\"", "def __build_connection( self, url: Optional[AnyStr] = None, autocommit: Optional[bool] =", "-> Object with query replacement values :return Tuple: Found record", "transactions \"\"\" if url is None: url = ':memory:' if", ":param args: List of arguments passed to be replaced in", "super().__init__() self.__build_connection(url, autocommit) def __build_connection( self, url: Optional[AnyStr] = None,", "real mysql driver connection.\"\"\" return self.__conn def placeholder(self, **kwargs) ->", "-> Object with query replacement values \"\"\" self._validate_params({'sql'}, set(kwargs.keys())) cursor", "found by query \"\"\" self._validate_params({'sql'}, set(kwargs.keys())) cursor = self.__conn.cursor() _", "Auto commit transactions \"\"\" def __init__( self, url: Optional[AnyStr] =", "class. Environment variables: DATABASE_URL: Database file ulr on the system.", "'?' 
def reset_placeholder(self) -> NoReturn: \"\"\"Reset place holder status (do", "= os.getenv('DATABASE_URL') if os.getenv('DATABASE_COMMIT', None) is not None: autocommit =", "NoReturn: \"\"\"Execute a query and do not return any result", "Optional[bool] = None, ): super().__init__() self.__build_connection(url, autocommit) def __build_connection( self,", "= self.__execute(cursor, kwargs['sql'], *kwargs.get('args', [])) self.__commit_transaction() res = cursor.fetchall() cursor.close()", "arguments. :param cursor: Connection cursor statement :param sql: Raw query", "to be executed :param args: List of arguments passed to", "SQLite(Driver): \"\"\"SQLite Driver connection class. Environment variables: DATABASE_URL: Database file", "is None: url = ':memory:' if autocommit is None: autocommit", "-> List[Tuple]: \"\"\"Execute a query and return all values. :param", "url = ':memory:' if autocommit is None: autocommit = False", ":param autocommit: Auto commit transactions \"\"\" def __init__( self, url:", "Database connection url :param autocommit: Auto commit transactions \"\"\" def", "AnyStr, *args) -> Any: \"\"\"Execute query and attempt to replace", "sqlite3 from typing import Any, AnyStr, List, NoReturn, Optional, Tuple", "query and return all values. 
:param kwargs: Parameters to execute", "Connection cursor statement :param sql: Raw query to be executed", "__commit_transaction(self): \"\"\"Execute commit operation if the __commit flag is True.\"\"\"", "transactions \"\"\" def __init__( self, url: Optional[AnyStr] = None, autocommit:", "self.__url = url self.__conn = sqlite3.connect(url) self.__commit = autocommit @staticmethod", "self.__conn.cursor() _ = self.__execute(cursor, kwargs['sql'], *kwargs.get('args', [])) self.__commit_transaction() cursor.close() def", "query replacement values \"\"\" self._validate_params({'sql'}, set(kwargs.keys())) cursor = self.__conn.cursor() _", "replacement values :return Tuple: Found record \"\"\" self._validate_params({'sql'}, set(kwargs.keys())) cursor", "kwargs['sql'], *kwargs.get('args', [])) self.__commit_transaction() res = cursor.fetchall() cursor.close() return res", "system. If it's an in memory database the url should", "def __init__( self, url: Optional[AnyStr] = None, autocommit: Optional[bool] =", "__init__( self, url: Optional[AnyStr] = None, autocommit: Optional[bool] = None,", "default('false') Auto commit transaction flag :type url: :param url: Database", "if autocommit is None: autocommit = False if os.getenv('DATABASE_URL', None)", "__build_connection( self, url: Optional[AnyStr] = None, autocommit: Optional[bool] = None,", "-> Object with query replacement values :return List[Tuple]: List of", "Optional[Iterable[Any]] -> Object with query replacement values \"\"\" self._validate_params({'sql'}, set(kwargs.keys()))", "driver connection from parameters. :param url: Database connection url :param", "self.__commit_transaction() res = cursor.fetchall() cursor.close() return res def query_one(self, **kwargs)", "-> NoReturn: self.__conn.rollback() def close(self) -> NoReturn: \"\"\"Close current connection.\"\"\"", "Driver class SQLite(Driver): \"\"\"SQLite Driver connection class. 
Environment variables: DATABASE_URL:", "self.__conn.close() def get_real_driver(self) -> Any: \"\"\"Return real mysql driver connection.\"\"\"", "rollback(self) -> NoReturn: self.__conn.rollback() def close(self) -> NoReturn: \"\"\"Close current", "and do not return any result value. :param kwargs: Parameters", "self.__conn.cursor() _ = self.__execute(cursor, kwargs['sql'], *kwargs.get('args', [])) self.__commit_transaction() res =", "connection from parameters. :param url: Database connection url :param autocommit:", "holder status (do nothing)\"\"\" def __repr__(self): \"\"\"Mysql driver representation.\"\"\" return", "self.__commit_transaction() res = cursor.fetchone() cursor.close() return res def query_none(self, **kwargs)", "Raw query to be executed :param args: List of arguments", "of arguments passed to be replaced in query \"\"\" if", "cursor.execute(sql, tuple(args)) def query(self, **kwargs) -> List[Tuple]: \"\"\"Execute a query", "string DATABASE_COMMIT: default('false') Auto commit transaction flag :type url: :param", "res def query_one(self, **kwargs) -> Tuple[Any, ...]: \"\"\"Execute a query", "to replace with arguments. :param cursor: Connection cursor statement :param", "\"\"\"Return real mysql driver connection.\"\"\" return self.__conn def placeholder(self, **kwargs)", "\"\"\"Return query place holder.\"\"\" return '?' def reset_placeholder(self) -> NoReturn:", "in query \"\"\" if not args: return cursor.execute(sql) return cursor.execute(sql,", "= self.__execute(cursor, kwargs['sql'], *kwargs.get('args', [])) self.__commit_transaction() cursor.close() def commit(self) ->", "an in memory database the url should be None or", "def placeholder(self, **kwargs) -> AnyStr: \"\"\"Return query place holder.\"\"\" return", "and attempt to replace with arguments. :param cursor: Connection cursor", "pydbrepo.drivers.driver import Driver class SQLite(Driver): \"\"\"SQLite Driver connection class. 
Environment", "None, autocommit: Optional[bool] = None, ): super().__init__() self.__build_connection(url, autocommit) def", "status (do nothing)\"\"\" def __repr__(self): \"\"\"Mysql driver representation.\"\"\" return f\"SQLite({self.__url})\"", "url: :param url: Database connection url :param autocommit: Auto commit", "autocommit is None: autocommit = False if os.getenv('DATABASE_URL', None) is", "cursor.fetchone() cursor.close() return res def query_none(self, **kwargs) -> NoReturn: \"\"\"Execute", "return f\"SQLite({self.__url})\" def __commit_transaction(self): \"\"\"Execute commit operation if the __commit", "tuple records found by query \"\"\" self._validate_params({'sql'}, set(kwargs.keys())) cursor =", "be executed :param args: List of arguments passed to be", ":param sql: Raw query to be executed :param args: List", "self, url: Optional[AnyStr] = None, autocommit: Optional[bool] = None, ):", "None: autocommit = os.getenv('DATABASE_COMMIT').lower() == \"true\" self.__url = url self.__conn", "transaction flag :type url: :param url: Database connection url :param", "os.getenv('DATABASE_URL') if os.getenv('DATABASE_COMMIT', None) is not None: autocommit = os.getenv('DATABASE_COMMIT').lower()", "[])) self.__commit_transaction() res = cursor.fetchone() cursor.close() return res def query_none(self,", "is not None: autocommit = os.getenv('DATABASE_COMMIT').lower() == \"true\" self.__url =", "def __commit_transaction(self): \"\"\"Execute commit operation if the __commit flag is", "= sqlite3.connect(url) self.__commit = autocommit @staticmethod def __execute(cursor, sql: AnyStr,", "-> SQL query statement args: Optional[Iterable[Any]] -> Object with query", "with query replacement values :return Tuple: Found record \"\"\" self._validate_params({'sql'},", "on the system. If it's an in memory database the", "query and attempt to replace with arguments. 
:param cursor: Connection", "None or `:memory:` string DATABASE_COMMIT: default('false') Auto commit transaction flag", "Any: \"\"\"Return real mysql driver connection.\"\"\" return self.__conn def placeholder(self,", "query statement. sql: AnyStr -> SQL query statement args: Optional[Iterable[Any]]", "(do nothing)\"\"\" def __repr__(self): \"\"\"Mysql driver representation.\"\"\" return f\"SQLite({self.__url})\" def", "_ = self.__execute(cursor, kwargs['sql'], *kwargs.get('args', [])) self.__commit_transaction() res = cursor.fetchall()", "= os.getenv('DATABASE_COMMIT').lower() == \"true\" self.__url = url self.__conn = sqlite3.connect(url)", "\"\"\" self._validate_params({'sql'}, set(kwargs.keys())) cursor = self.__conn.cursor() _ = self.__execute(cursor, kwargs['sql'],", "memory database the url should be None or `:memory:` string", "res = cursor.fetchall() cursor.close() return res def query_one(self, **kwargs) ->", "None, autocommit: Optional[bool] = None, ) -> NoReturn: \"\"\"Start real", "autocommit: Optional[bool] = None, ): super().__init__() self.__build_connection(url, autocommit) def __build_connection(", "None, ): super().__init__() self.__build_connection(url, autocommit) def __build_connection( self, url: Optional[AnyStr]", "[])) self.__commit_transaction() res = cursor.fetchall() cursor.close() return res def query_one(self,", "operation if the __commit flag is True.\"\"\" if self.__commit: self.commit()", "res def query_none(self, **kwargs) -> NoReturn: \"\"\"Execute a query and", "result value. :param kwargs: Parameters to execute query statement. sql:", "Parameters to execute query statement. 
sql: AnyStr -> SQL query", "or `:memory:` string DATABASE_COMMIT: default('false') Auto commit transaction flag :type", ":type url: :param url: Database connection url :param autocommit: Auto", "self.__execute(cursor, kwargs['sql'], *kwargs.get('args', [])) self.__commit_transaction() res = cursor.fetchall() cursor.close() return", "NoReturn: \"\"\"Commit transaction.\"\"\" self.__conn.commit() def rollback(self) -> NoReturn: self.__conn.rollback() def", "not None: autocommit = os.getenv('DATABASE_COMMIT').lower() == \"true\" self.__url = url", "driver connection.\"\"\" return self.__conn def placeholder(self, **kwargs) -> AnyStr: \"\"\"Return", "with arguments. :param cursor: Connection cursor statement :param sql: Raw", "records found by query \"\"\" self._validate_params({'sql'}, set(kwargs.keys())) cursor = self.__conn.cursor()", "the system. If it's an in memory database the url", "not return any result value. :param kwargs: Parameters to execute", "commit(self) -> NoReturn: \"\"\"Commit transaction.\"\"\" self.__conn.commit() def rollback(self) -> NoReturn:", "args: return cursor.execute(sql) return cursor.execute(sql, tuple(args)) def query(self, **kwargs) ->", "':memory:' if autocommit is None: autocommit = False if os.getenv('DATABASE_URL',", "self.__conn.commit() def rollback(self) -> NoReturn: self.__conn.rollback() def close(self) -> NoReturn:", "real driver connection from parameters. 
:param url: Database connection url", "query replacement values :return List[Tuple]: List of tuple records found", "__execute(cursor, sql: AnyStr, *args) -> Any: \"\"\"Execute query and attempt", "self.__conn.rollback() def close(self) -> NoReturn: \"\"\"Close current connection.\"\"\" self.__conn.close() def", "-> Any: \"\"\"Execute query and attempt to replace with arguments.", "NoReturn: \"\"\"Reset place holder status (do nothing)\"\"\" def __repr__(self): \"\"\"Mysql", "if os.getenv('DATABASE_COMMIT', None) is not None: autocommit = os.getenv('DATABASE_COMMIT').lower() ==", "autocommit) def __build_connection( self, url: Optional[AnyStr] = None, autocommit: Optional[bool]", "if os.getenv('DATABASE_URL', None) is not None: url = os.getenv('DATABASE_URL') if", "to be replaced in query \"\"\" if not args: return", "= None, autocommit: Optional[bool] = None, ): super().__init__() self.__build_connection(url, autocommit)", ":return Tuple: Found record \"\"\" self._validate_params({'sql'}, set(kwargs.keys())) cursor = self.__conn.cursor()", "Auto commit transactions \"\"\" if url is None: url =", "Optional[AnyStr] = None, autocommit: Optional[bool] = None, ): super().__init__() self.__build_connection(url,", "None, ) -> NoReturn: \"\"\"Start real driver connection from parameters.", "args: List of arguments passed to be replaced in query", "self.__conn = sqlite3.connect(url) self.__commit = autocommit @staticmethod def __execute(cursor, sql:", "\"\"\"Execute commit operation if the __commit flag is True.\"\"\" if", "\"\"\" if url is None: url = ':memory:' if autocommit", "\"\"\"Execute a query and return all values. :param kwargs: Parameters", "tuple(args)) def query(self, **kwargs) -> List[Tuple]: \"\"\"Execute a query and", "attempt to replace with arguments. :param cursor: Connection cursor statement", "file ulr on the system. If it's an in memory", "return any result value. 
:param kwargs: Parameters to execute query", "url: Database connection url :param autocommit: Auto commit transactions \"\"\"", "return self.__conn def placeholder(self, **kwargs) -> AnyStr: \"\"\"Return query place", "query statement args: Optional[Iterable[Any]] -> Object with query replacement values", "SQL query statement args: Optional[Iterable[Any]] -> Object with query replacement", "statement args: Optional[Iterable[Any]] -> Object with query replacement values \"\"\"", "statement :param sql: Raw query to be executed :param args:", "statement. sql: AnyStr -> SQL query statement args: Optional[Iterable[Any]] ->", "autocommit: Auto commit transactions \"\"\" def __init__( self, url: Optional[AnyStr]", "database the url should be None or `:memory:` string DATABASE_COMMIT:", ":param autocommit: Auto commit transactions \"\"\" if url is None:", "query(self, **kwargs) -> List[Tuple]: \"\"\"Execute a query and return all", ") -> NoReturn: \"\"\"Start real driver connection from parameters. :param", "def query_one(self, **kwargs) -> Tuple[Any, ...]: \"\"\"Execute a query and", "Optional[Iterable[Any]] -> Object with query replacement values :return Tuple: Found", "values :return List[Tuple]: List of tuple records found by query", "value. :param kwargs: Parameters to execute query statement. 
sql: AnyStr", "os import sqlite3 from typing import Any, AnyStr, List, NoReturn,", "= ':memory:' if autocommit is None: autocommit = False if", "= self.__conn.cursor() _ = self.__execute(cursor, kwargs['sql'], *kwargs.get('args', [])) self.__commit_transaction() res", "from pydbrepo.drivers.driver import Driver class SQLite(Driver): \"\"\"SQLite Driver connection class.", "arguments passed to be replaced in query \"\"\" if not", "List of arguments passed to be replaced in query \"\"\"", "autocommit = False if os.getenv('DATABASE_URL', None) is not None: url", "mysql driver connection.\"\"\" return self.__conn def placeholder(self, **kwargs) -> AnyStr:", "connection url :param autocommit: Auto commit transactions \"\"\" def __init__(", "\"\"\"SQLite Driver implementation.\"\"\" # pylint: disable=R0201 import os import sqlite3", "Optional[bool] = None, ) -> NoReturn: \"\"\"Start real driver connection", "-> NoReturn: \"\"\"Start real driver connection from parameters. :param url:", "pylint: disable=R0201 import os import sqlite3 from typing import Any,", "sql: AnyStr -> SQL query statement args: Optional[Iterable[Any]] -> Object", "import Any, AnyStr, List, NoReturn, Optional, Tuple from pydbrepo.drivers.driver import", "= cursor.fetchone() cursor.close() return res def query_none(self, **kwargs) -> NoReturn:", "return cursor.execute(sql) return cursor.execute(sql, tuple(args)) def query(self, **kwargs) -> List[Tuple]:", "@staticmethod def __execute(cursor, sql: AnyStr, *args) -> Any: \"\"\"Execute query", "self.__commit_transaction() cursor.close() def commit(self) -> NoReturn: \"\"\"Commit transaction.\"\"\" self.__conn.commit() def", "url self.__conn = sqlite3.connect(url) self.__commit = autocommit @staticmethod def __execute(cursor,", "self._validate_params({'sql'}, set(kwargs.keys())) cursor = self.__conn.cursor() _ = self.__execute(cursor, kwargs['sql'], *kwargs.get('args',", "self.__execute(cursor, kwargs['sql'], *kwargs.get('args', [])) 
self.__commit_transaction() res = cursor.fetchone() cursor.close() return", "executed :param args: List of arguments passed to be replaced", "kwargs: Parameters to execute query statement. sql: AnyStr -> SQL", "query_one(self, **kwargs) -> Tuple[Any, ...]: \"\"\"Execute a query and do", "AnyStr, List, NoReturn, Optional, Tuple from pydbrepo.drivers.driver import Driver class", "**kwargs) -> Tuple[Any, ...]: \"\"\"Execute a query and do not", "cursor.execute(sql) return cursor.execute(sql, tuple(args)) def query(self, **kwargs) -> List[Tuple]: \"\"\"Execute", "...]: \"\"\"Execute a query and do not return any result", "a query and return all values. :param kwargs: Parameters to", "current connection.\"\"\" self.__conn.close() def get_real_driver(self) -> Any: \"\"\"Return real mysql", "NoReturn: self.__conn.rollback() def close(self) -> NoReturn: \"\"\"Close current connection.\"\"\" self.__conn.close()", "\"\"\"Close current connection.\"\"\" self.__conn.close() def get_real_driver(self) -> Any: \"\"\"Return real", "Optional[Iterable[Any]] -> Object with query replacement values :return List[Tuple]: List", "def reset_placeholder(self) -> NoReturn: \"\"\"Reset place holder status (do nothing)\"\"\"", "query place holder.\"\"\" return '?' def reset_placeholder(self) -> NoReturn: \"\"\"Reset", "\"\"\"Commit transaction.\"\"\" self.__conn.commit() def rollback(self) -> NoReturn: self.__conn.rollback() def close(self)", "def __execute(cursor, sql: AnyStr, *args) -> Any: \"\"\"Execute query and", "return all values. 
:param kwargs: Parameters to execute query statement.", "Database connection url :param autocommit: Auto commit transactions \"\"\" if", "= autocommit @staticmethod def __execute(cursor, sql: AnyStr, *args) -> Any:", "res = cursor.fetchone() cursor.close() return res def query_none(self, **kwargs) ->", "passed to be replaced in query \"\"\" if not args:", "f\"SQLite({self.__url})\" def __commit_transaction(self): \"\"\"Execute commit operation if the __commit flag", "cursor.close() def commit(self) -> NoReturn: \"\"\"Commit transaction.\"\"\" self.__conn.commit() def rollback(self)", "self.__execute(cursor, kwargs['sql'], *kwargs.get('args', [])) self.__commit_transaction() cursor.close() def commit(self) -> NoReturn:", "def get_real_driver(self) -> Any: \"\"\"Return real mysql driver connection.\"\"\" return", "connection.\"\"\" return self.__conn def placeholder(self, **kwargs) -> AnyStr: \"\"\"Return query", "def commit(self) -> NoReturn: \"\"\"Commit transaction.\"\"\" self.__conn.commit() def rollback(self) ->", "Environment variables: DATABASE_URL: Database file ulr on the system. If", "commit transactions \"\"\" def __init__( self, url: Optional[AnyStr] = None,", "query_none(self, **kwargs) -> NoReturn: \"\"\"Execute a query and do not", "placeholder(self, **kwargs) -> AnyStr: \"\"\"Return query place holder.\"\"\" return '?'", "self.__commit = autocommit @staticmethod def __execute(cursor, sql: AnyStr, *args) ->", "from typing import Any, AnyStr, List, NoReturn, Optional, Tuple from", "os.getenv('DATABASE_COMMIT').lower() == \"true\" self.__url = url self.__conn = sqlite3.connect(url) self.__commit", ":param cursor: Connection cursor statement :param sql: Raw query to", "= self.__execute(cursor, kwargs['sql'], *kwargs.get('args', [])) self.__commit_transaction() res = cursor.fetchone() cursor.close()", "args: Optional[Iterable[Any]] -> Object with query replacement values :return List[Tuple]:", "return '?' 
def reset_placeholder(self) -> NoReturn: \"\"\"Reset place holder status", "with query replacement values :return List[Tuple]: List of tuple records", "\"\"\"Execute a query and do not return any result value.", "**kwargs) -> AnyStr: \"\"\"Return query place holder.\"\"\" return '?' def", "Tuple from pydbrepo.drivers.driver import Driver class SQLite(Driver): \"\"\"SQLite Driver connection", "Driver connection class. Environment variables: DATABASE_URL: Database file ulr on", "import os import sqlite3 from typing import Any, AnyStr, List,", "return res def query_one(self, **kwargs) -> Tuple[Any, ...]: \"\"\"Execute a", "<filename>pydbrepo/drivers/sqlite.py \"\"\"SQLite Driver implementation.\"\"\" # pylint: disable=R0201 import os import", "url: Optional[AnyStr] = None, autocommit: Optional[bool] = None, ) ->", "is not None: url = os.getenv('DATABASE_URL') if os.getenv('DATABASE_COMMIT', None) is", "\"\"\"Execute query and attempt to replace with arguments. :param cursor:", "url :param autocommit: Auto commit transactions \"\"\" if url is", "**kwargs) -> NoReturn: \"\"\"Execute a query and do not return" ]
[ "Module class BatchNormND(Module): def __init__(self, nd, maps, epsilon=1e-5, initFactor=1.0, minFactor=0.1,", "self.saveinvvar, self.epsilon) if self.affine: self.grad, self.scalegrad, self.biasgrad = tup else:", "initFactor self.minFactor = minFactor self.numOfProps = 0 self.affine = affine", "super().__init__(name) self.inplace = inplace if inplace and Config.showWarnings: Config.getLogger().info(\"Warning: %s", "import batchNormNd, batchNormNdBackward from PuzzleLib.Variable import Variable from PuzzleLib.Modules.Module import", "self.epsilon) if self.affine: self.grad, self.scalegrad, self.biasgrad = tup else: self.grad,", "import ModuleError, Module class BatchNormND(Module): def __init__(self, nd, maps, epsilon=1e-5,", "T) elif T != np.float32: raise ModuleError(\"Unsupported dtype %s\" %", "self.scalegrad, self.biasgrad = None, None, None, None if empty: return", "in train mode is prohibited\" % self) self.numOfProps += 1", "import numpy as np from PuzzleLib import Config from PuzzleLib.Backend", "T): if Config.backend == Config.Backend.cuda: if T not in {np.float16,", "+ self.repeat(1, nd) scale = np.random.normal(1.0, sscale if affine else", "Variable from PuzzleLib.Modules.Module import ModuleError, Module class BatchNormND(Module): def __init__(self,", "affine else 0.0, shape).astype(self.calctype) var = np.ones(shape, dtype=self.calctype) self.setVar(\"scale\", Variable(gpuarray.to_gpu(scale)))", "updateData(self, data): if self.train: if self.inplace: raise ModuleError(\"%s: using inplace", "self.mean, self.var, self.epsilon, factor, False ) else: self.data = batchNormNd(", "PuzzleLib.Modules.Module import ModuleError, Module class BatchNormND(Module): def __init__(self, nd, maps,", "self.saveinvvar, self.scalegrad, self.biasgrad = None, None, None, None if empty:", "def gradShapeFrom(self, shape): return shape def reset(self): super().reset() self.savemean, self.saveinvvar", "<reponame>EmilPi/PuzzleLib import numpy as np from PuzzleLib import 
Config from", "as np from PuzzleLib import Config from PuzzleLib.Backend import gpuarray,", "self.affine: Blas.addVectorToVector( self.scalegrad.ravel(), self.vars[\"scale\"].grad.ravel(), out=self.vars[\"scale\"].grad.ravel(), alpha=scale, beta=momentum ) Blas.addVectorToVector( self.biasgrad.ravel(),", "= inplace if inplace and Config.showWarnings: Config.getLogger().info(\"Warning: %s is using", "alpha=scale, beta=momentum ) def dataShapeFrom(self, shape): return shape def gradShapeFrom(self,", "self.affine = affine self.scale, self.bias, self.mean, self.var = None, None,", "Config.getLogger().info(\"Warning: %s is using inplace flag\", self) self.maps = maps", "super().reset() self.savemean, self.saveinvvar = None, None if self.affine: self.scalegrad, self.biasgrad", "tup else: self.grad, _, _ = tup def accGradParams(self, grad,", "= max(self.initFactor / self.numOfProps, self.minFactor) self.data, self.savemean, self.saveinvvar = batchNormNd(", "from PuzzleLib.Backend import gpuarray, Blas from PuzzleLib.Backend.Dnn import batchNormNd, batchNormNdBackward", "is using inplace flag\", self) self.maps = maps self.epsilon =", "None ) def updateGrad(self, grad): tup = batchNormNdBackward(self.inData, grad, self.scale,", "updateGrad(self, grad): tup = batchNormNdBackward(self.inData, grad, self.scale, self.savemean, self.saveinvvar, self.epsilon)", "minFactor=0.1, sscale=0.01, affine=True, name=None, empty=False, inplace=False): super().__init__(name) self.inplace = inplace", "self.epsilon, factor, False ) else: self.data = batchNormNd( data, self.scale,", "self.inplace = inplace if inplace and Config.showWarnings: Config.getLogger().info(\"Warning: %s is", "self.vars[\"bias\"].grad.ravel(), out=self.vars[\"bias\"].grad.ravel(), alpha=scale, beta=momentum ) def dataShapeFrom(self, shape): return shape", "None if self.affine: self.scalegrad, self.biasgrad = None, None def calcMode(self,", "batchNormNd, batchNormNdBackward from PuzzleLib.Variable import Variable from 
PuzzleLib.Modules.Module import ModuleError,", "self.affine: self.grad, self.scalegrad, self.biasgrad = tup else: self.grad, _, _", "self.minFactor = minFactor self.numOfProps = 0 self.affine = affine self.scale,", "initFactor=1.0, minFactor=0.1, sscale=0.01, affine=True, name=None, empty=False, inplace=False): super().__init__(name) self.inplace =", "+= 1 factor = max(self.initFactor / self.numOfProps, self.minFactor) self.data, self.savemean,", "self.numOfProps = 0 self.affine = affine self.scale, self.bias, self.mean, self.var", "self.scale, self.savemean, self.saveinvvar, self.epsilon) if self.affine: self.grad, self.scalegrad, self.biasgrad =", "var = np.ones(shape, dtype=self.calctype) self.setVar(\"scale\", Variable(gpuarray.to_gpu(scale))) self.setVar(\"bias\", Variable(gpuarray.zeros(shape, dtype=self.calctype))) self.setAttr(\"mean\",", "beta=momentum ) def dataShapeFrom(self, shape): return shape def gradShapeFrom(self, shape):", "self.inplace: raise ModuleError(\"%s: using inplace flag in train mode is", "from PuzzleLib.Backend.Dnn import batchNormNd, batchNormNdBackward from PuzzleLib.Variable import Variable from", "import gpuarray, Blas from PuzzleLib.Backend.Dnn import batchNormNd, batchNormNdBackward from PuzzleLib.Variable", "self.bias, self.mean, self.var, self.epsilon, 0, True, out=data if self.inplace else", "gpuarray, Blas from PuzzleLib.Backend.Dnn import batchNormNd, batchNormNdBackward from PuzzleLib.Variable import", "epsilon self.initFactor = initFactor self.minFactor = minFactor self.numOfProps = 0", "accGradParams(self, grad, scale=1.0, momentum=0.0): if self.affine: Blas.addVectorToVector( self.scalegrad.ravel(), self.vars[\"scale\"].grad.ravel(), out=self.vars[\"scale\"].grad.ravel(),", "PuzzleLib import Config from PuzzleLib.Backend import gpuarray, Blas from PuzzleLib.Backend.Dnn", "self) self.maps = maps self.epsilon = epsilon self.initFactor = initFactor", "inplace flag\", self) self.maps = maps self.epsilon = epsilon 
self.initFactor", "Blas.addVectorToVector( self.biasgrad.ravel(), self.vars[\"bias\"].grad.ravel(), out=self.vars[\"bias\"].grad.ravel(), alpha=scale, beta=momentum ) def dataShapeFrom(self, shape):", "self.numOfProps, self.minFactor) self.data, self.savemean, self.saveinvvar = batchNormNd( data, self.scale, self.bias,", "self.setVar(\"bias\", Variable(gpuarray.zeros(shape, dtype=self.calctype))) self.setAttr(\"mean\", gpuarray.zeros(shape, dtype=self.calctype)) self.setAttr(\"var\", gpuarray.to_gpu(var)) def updateData(self,", "self.data = batchNormNd( data, self.scale, self.bias, self.mean, self.var, self.epsilon, 0,", "% self) self.numOfProps += 1 factor = max(self.initFactor / self.numOfProps,", "self.savemean, self.saveinvvar = None, None if self.affine: self.scalegrad, self.biasgrad =", "0.0, shape).astype(self.calctype) var = np.ones(shape, dtype=self.calctype) self.setVar(\"scale\", Variable(gpuarray.to_gpu(scale))) self.setVar(\"bias\", Variable(gpuarray.zeros(shape,", "epsilon=1e-5, initFactor=1.0, minFactor=0.1, sscale=0.01, affine=True, name=None, empty=False, inplace=False): super().__init__(name) self.inplace", "scale = np.random.normal(1.0, sscale if affine else 0.0, shape).astype(self.calctype) var", "self) self.numOfProps += 1 factor = max(self.initFactor / self.numOfProps, self.minFactor)", "if Config.backend == Config.Backend.cuda: if T not in {np.float16, np.float32}:", "self.scale, self.bias, self.mean, self.var = None, None, None, None self.savemean,", "self.setAttr(\"mean\", gpuarray.zeros(shape, dtype=self.calctype)) self.setAttr(\"var\", gpuarray.to_gpu(var)) def updateData(self, data): if self.train:", "= maps self.epsilon = epsilon self.initFactor = initFactor self.minFactor =", "def accGradParams(self, grad, scale=1.0, momentum=0.0): if self.affine: Blas.addVectorToVector( self.scalegrad.ravel(), self.vars[\"scale\"].grad.ravel(),", "gpuarray.to_gpu(var)) def updateData(self, data): if self.train: if self.inplace: raise 
ModuleError(\"%s:", "def dataShapeFrom(self, shape): return shape def gradShapeFrom(self, shape): return shape", "ModuleError(\"Unsupported dtype %s\" % T) elif T != np.float32: raise", "class BatchNormND(Module): def __init__(self, nd, maps, epsilon=1e-5, initFactor=1.0, minFactor=0.1, sscale=0.01,", "shape = (1, maps) + self.repeat(1, nd) scale = np.random.normal(1.0,", "from PuzzleLib import Config from PuzzleLib.Backend import gpuarray, Blas from", "dtype %s\" % T) elif T != np.float32: raise ModuleError(\"Unsupported", "batchNormNd( data, self.scale, self.bias, self.mean, self.var, self.epsilon, factor, False )", "momentum=0.0): if self.affine: Blas.addVectorToVector( self.scalegrad.ravel(), self.vars[\"scale\"].grad.ravel(), out=self.vars[\"scale\"].grad.ravel(), alpha=scale, beta=momentum )", "self.setAttr(\"var\", gpuarray.to_gpu(var)) def updateData(self, data): if self.train: if self.inplace: raise", "% T) elif T != np.float32: raise ModuleError(\"Unsupported dtype %s\"", "= None, None def calcMode(self, T): if Config.backend == Config.Backend.cuda:", "if self.affine: self.grad, self.scalegrad, self.biasgrad = tup else: self.grad, _,", "None, None, None, None if empty: return shape = (1,", "if self.inplace else None ) def updateGrad(self, grad): tup =", "dtype=self.calctype))) self.setAttr(\"mean\", gpuarray.zeros(shape, dtype=self.calctype)) self.setAttr(\"var\", gpuarray.to_gpu(var)) def updateData(self, data): if", "shape).astype(self.calctype) var = np.ones(shape, dtype=self.calctype) self.setVar(\"scale\", Variable(gpuarray.to_gpu(scale))) self.setVar(\"bias\", Variable(gpuarray.zeros(shape, dtype=self.calctype)))", "from PuzzleLib.Variable import Variable from PuzzleLib.Modules.Module import ModuleError, Module class", "empty=False, inplace=False): super().__init__(name) self.inplace = inplace if inplace and Config.showWarnings:", "return shape def gradShapeFrom(self, shape): return shape def reset(self): super().reset()", "calcMode(self, T): if 
Config.backend == Config.Backend.cuda: if T not in", "Config from PuzzleLib.Backend import gpuarray, Blas from PuzzleLib.Backend.Dnn import batchNormNd,", "elif T != np.float32: raise ModuleError(\"Unsupported dtype %s\" % T)", "= np.ones(shape, dtype=self.calctype) self.setVar(\"scale\", Variable(gpuarray.to_gpu(scale))) self.setVar(\"bias\", Variable(gpuarray.zeros(shape, dtype=self.calctype))) self.setAttr(\"mean\", gpuarray.zeros(shape,", "Blas.addVectorToVector( self.scalegrad.ravel(), self.vars[\"scale\"].grad.ravel(), out=self.vars[\"scale\"].grad.ravel(), alpha=scale, beta=momentum ) Blas.addVectorToVector( self.biasgrad.ravel(), self.vars[\"bias\"].grad.ravel(),", "import Variable from PuzzleLib.Modules.Module import ModuleError, Module class BatchNormND(Module): def", "if T not in {np.float16, np.float32}: raise ModuleError(\"Unsupported dtype %s\"", "= 0 self.affine = affine self.scale, self.bias, self.mean, self.var =", "= batchNormNd( data, self.scale, self.bias, self.mean, self.var, self.epsilon, factor, False", "self.scale, self.bias, self.mean, self.var, self.epsilon, 0, True, out=data if self.inplace", ") def updateGrad(self, grad): tup = batchNormNdBackward(self.inData, grad, self.scale, self.savemean,", "not in {np.float16, np.float32}: raise ModuleError(\"Unsupported dtype %s\" % T)", "gpuarray.zeros(shape, dtype=self.calctype)) self.setAttr(\"var\", gpuarray.to_gpu(var)) def updateData(self, data): if self.train: if", "= None, None if self.affine: self.scalegrad, self.biasgrad = None, None", "def calcMode(self, T): if Config.backend == Config.Backend.cuda: if T not", "0 self.affine = affine self.scale, self.bias, self.mean, self.var = None,", "sscale=0.01, affine=True, name=None, empty=False, inplace=False): super().__init__(name) self.inplace = inplace if", "self.initFactor = initFactor self.minFactor = minFactor self.numOfProps = 0 self.affine", "inplace if inplace and Config.showWarnings: Config.getLogger().info(\"Warning: %s is using 
inplace", "%s is using inplace flag\", self) self.maps = maps self.epsilon", "None, None self.savemean, self.saveinvvar, self.scalegrad, self.biasgrad = None, None, None,", "empty: return shape = (1, maps) + self.repeat(1, nd) scale", "None def calcMode(self, T): if Config.backend == Config.Backend.cuda: if T", "self.inplace else None ) def updateGrad(self, grad): tup = batchNormNdBackward(self.inData,", "= epsilon self.initFactor = initFactor self.minFactor = minFactor self.numOfProps =", "= (1, maps) + self.repeat(1, nd) scale = np.random.normal(1.0, sscale", "ModuleError(\"%s: using inplace flag in train mode is prohibited\" %", "maps self.epsilon = epsilon self.initFactor = initFactor self.minFactor = minFactor", "dtype=self.calctype)) self.setAttr(\"var\", gpuarray.to_gpu(var)) def updateData(self, data): if self.train: if self.inplace:", "if affine else 0.0, shape).astype(self.calctype) var = np.ones(shape, dtype=self.calctype) self.setVar(\"scale\",", "self.biasgrad = None, None def calcMode(self, T): if Config.backend ==", "using inplace flag\", self) self.maps = maps self.epsilon = epsilon", "self.data, self.savemean, self.saveinvvar = batchNormNd( data, self.scale, self.bias, self.mean, self.var,", "self.vars[\"scale\"].grad.ravel(), out=self.vars[\"scale\"].grad.ravel(), alpha=scale, beta=momentum ) Blas.addVectorToVector( self.biasgrad.ravel(), self.vars[\"bias\"].grad.ravel(), out=self.vars[\"bias\"].grad.ravel(), alpha=scale,", "if self.train: if self.inplace: raise ModuleError(\"%s: using inplace flag in", "batchNormNd( data, self.scale, self.bias, self.mean, self.var, self.epsilon, 0, True, out=data", "batchNormNdBackward from PuzzleLib.Variable import Variable from PuzzleLib.Modules.Module import ModuleError, Module", "maps, epsilon=1e-5, initFactor=1.0, minFactor=0.1, sscale=0.01, affine=True, name=None, empty=False, inplace=False): super().__init__(name)", "self.affine: self.scalegrad, self.biasgrad = None, None def calcMode(self, T): if", "= 
batchNormNdBackward(self.inData, grad, self.scale, self.savemean, self.saveinvvar, self.epsilon) if self.affine: self.grad,", "= batchNormNd( data, self.scale, self.bias, self.mean, self.var, self.epsilon, 0, True,", "self.var, self.epsilon, 0, True, out=data if self.inplace else None )", "mode is prohibited\" % self) self.numOfProps += 1 factor =", "self.scalegrad.ravel(), self.vars[\"scale\"].grad.ravel(), out=self.vars[\"scale\"].grad.ravel(), alpha=scale, beta=momentum ) Blas.addVectorToVector( self.biasgrad.ravel(), self.vars[\"bias\"].grad.ravel(), out=self.vars[\"bias\"].grad.ravel(),", "out=self.vars[\"scale\"].grad.ravel(), alpha=scale, beta=momentum ) Blas.addVectorToVector( self.biasgrad.ravel(), self.vars[\"bias\"].grad.ravel(), out=self.vars[\"bias\"].grad.ravel(), alpha=scale, beta=momentum", "= affine self.scale, self.bias, self.mean, self.var = None, None, None,", "inplace and Config.showWarnings: Config.getLogger().info(\"Warning: %s is using inplace flag\", self)", "def reset(self): super().reset() self.savemean, self.saveinvvar = None, None if self.affine:", "= np.random.normal(1.0, sscale if affine else 0.0, shape).astype(self.calctype) var =", "Config.showWarnings: Config.getLogger().info(\"Warning: %s is using inplace flag\", self) self.maps =", "self.saveinvvar = batchNormNd( data, self.scale, self.bias, self.mean, self.var, self.epsilon, factor,", ") def dataShapeFrom(self, shape): return shape def gradShapeFrom(self, shape): return", "PuzzleLib.Backend import gpuarray, Blas from PuzzleLib.Backend.Dnn import batchNormNd, batchNormNdBackward from", "np.ones(shape, dtype=self.calctype) self.setVar(\"scale\", Variable(gpuarray.to_gpu(scale))) self.setVar(\"bias\", Variable(gpuarray.zeros(shape, dtype=self.calctype))) self.setAttr(\"mean\", gpuarray.zeros(shape, dtype=self.calctype))", "if self.inplace: raise ModuleError(\"%s: using inplace flag in train mode", "def __init__(self, nd, maps, epsilon=1e-5, initFactor=1.0, minFactor=0.1, 
sscale=0.01, affine=True, name=None,", "ModuleError, Module class BatchNormND(Module): def __init__(self, nd, maps, epsilon=1e-5, initFactor=1.0,", "None if empty: return shape = (1, maps) + self.repeat(1,", "factor = max(self.initFactor / self.numOfProps, self.minFactor) self.data, self.savemean, self.saveinvvar =", "flag\", self) self.maps = maps self.epsilon = epsilon self.initFactor =", "if empty: return shape = (1, maps) + self.repeat(1, nd)", "Variable(gpuarray.zeros(shape, dtype=self.calctype))) self.setAttr(\"mean\", gpuarray.zeros(shape, dtype=self.calctype)) self.setAttr(\"var\", gpuarray.to_gpu(var)) def updateData(self, data):", "prohibited\" % self) self.numOfProps += 1 factor = max(self.initFactor /", "False ) else: self.data = batchNormNd( data, self.scale, self.bias, self.mean,", "= None, None, None, None if empty: return shape =", "0, True, out=data if self.inplace else None ) def updateGrad(self,", "beta=momentum ) Blas.addVectorToVector( self.biasgrad.ravel(), self.vars[\"bias\"].grad.ravel(), out=self.vars[\"bias\"].grad.ravel(), alpha=scale, beta=momentum ) def", "True, out=data if self.inplace else None ) def updateGrad(self, grad):", "self.numOfProps += 1 factor = max(self.initFactor / self.numOfProps, self.minFactor) self.data,", "in {np.float16, np.float32}: raise ModuleError(\"Unsupported dtype %s\" % T) elif", "(1, maps) + self.repeat(1, nd) scale = np.random.normal(1.0, sscale if", "def updateGrad(self, grad): tup = batchNormNdBackward(self.inData, grad, self.scale, self.savemean, self.saveinvvar,", "shape): return shape def gradShapeFrom(self, shape): return shape def reset(self):", "if self.affine: self.scalegrad, self.biasgrad = None, None def calcMode(self, T):", "np.float32}: raise ModuleError(\"Unsupported dtype %s\" % T) elif T !=", "grad, self.scale, self.savemean, self.saveinvvar, self.epsilon) if self.affine: self.grad, self.scalegrad, self.biasgrad", "self.maps = maps self.epsilon = epsilon self.initFactor = initFactor 
self.minFactor", "grad): tup = batchNormNdBackward(self.inData, grad, self.scale, self.savemean, self.saveinvvar, self.epsilon) if", "scale=1.0, momentum=0.0): if self.affine: Blas.addVectorToVector( self.scalegrad.ravel(), self.vars[\"scale\"].grad.ravel(), out=self.vars[\"scale\"].grad.ravel(), alpha=scale, beta=momentum", "dtype=self.calctype) self.setVar(\"scale\", Variable(gpuarray.to_gpu(scale))) self.setVar(\"bias\", Variable(gpuarray.zeros(shape, dtype=self.calctype))) self.setAttr(\"mean\", gpuarray.zeros(shape, dtype=self.calctype)) self.setAttr(\"var\",", "self.var, self.epsilon, factor, False ) else: self.data = batchNormNd( data,", "Variable(gpuarray.to_gpu(scale))) self.setVar(\"bias\", Variable(gpuarray.zeros(shape, dtype=self.calctype))) self.setAttr(\"mean\", gpuarray.zeros(shape, dtype=self.calctype)) self.setAttr(\"var\", gpuarray.to_gpu(var)) def", "None, None def calcMode(self, T): if Config.backend == Config.Backend.cuda: if", "batchNormNdBackward(self.inData, grad, self.scale, self.savemean, self.saveinvvar, self.epsilon) if self.affine: self.grad, self.scalegrad,", "None, None, None, None self.savemean, self.saveinvvar, self.scalegrad, self.biasgrad = None,", "shape def reset(self): super().reset() self.savemean, self.saveinvvar = None, None if", "self.grad, _, _ = tup def accGradParams(self, grad, scale=1.0, momentum=0.0):", "PuzzleLib.Variable import Variable from PuzzleLib.Modules.Module import ModuleError, Module class BatchNormND(Module):", "def updateData(self, data): if self.train: if self.inplace: raise ModuleError(\"%s: using", "else None ) def updateGrad(self, grad): tup = batchNormNdBackward(self.inData, grad,", "nd, maps, epsilon=1e-5, initFactor=1.0, minFactor=0.1, sscale=0.01, affine=True, name=None, empty=False, inplace=False):", "raise ModuleError(\"Unsupported dtype %s\" % T) elif T != np.float32:", "shape def gradShapeFrom(self, shape): return shape def reset(self): super().reset() self.savemean,", "BatchNormND(Module): def 
__init__(self, nd, maps, epsilon=1e-5, initFactor=1.0, minFactor=0.1, sscale=0.01, affine=True,", "shape): return shape def reset(self): super().reset() self.savemean, self.saveinvvar = None,", "self.epsilon = epsilon self.initFactor = initFactor self.minFactor = minFactor self.numOfProps", "inplace flag in train mode is prohibited\" % self) self.numOfProps", "self.biasgrad = tup else: self.grad, _, _ = tup def", "grad, scale=1.0, momentum=0.0): if self.affine: Blas.addVectorToVector( self.scalegrad.ravel(), self.vars[\"scale\"].grad.ravel(), out=self.vars[\"scale\"].grad.ravel(), alpha=scale,", "return shape def reset(self): super().reset() self.savemean, self.saveinvvar = None, None", "self.train: if self.inplace: raise ModuleError(\"%s: using inplace flag in train", "train mode is prohibited\" % self) self.numOfProps += 1 factor", "self.setVar(\"scale\", Variable(gpuarray.to_gpu(scale))) self.setVar(\"bias\", Variable(gpuarray.zeros(shape, dtype=self.calctype))) self.setAttr(\"mean\", gpuarray.zeros(shape, dtype=self.calctype)) self.setAttr(\"var\", gpuarray.to_gpu(var))", "sscale if affine else 0.0, shape).astype(self.calctype) var = np.ones(shape, dtype=self.calctype)", "numpy as np from PuzzleLib import Config from PuzzleLib.Backend import", "self.scalegrad, self.biasgrad = None, None def calcMode(self, T): if Config.backend", "if inplace and Config.showWarnings: Config.getLogger().info(\"Warning: %s is using inplace flag\",", "tup = batchNormNdBackward(self.inData, grad, self.scale, self.savemean, self.saveinvvar, self.epsilon) if self.affine:", "self.grad, self.scalegrad, self.biasgrad = tup else: self.grad, _, _ =", "T != np.float32: raise ModuleError(\"Unsupported dtype %s\" % T) self.calctype", "= tup def accGradParams(self, grad, scale=1.0, momentum=0.0): if self.affine: Blas.addVectorToVector(", "inplace=False): super().__init__(name) self.inplace = inplace if inplace and Config.showWarnings: Config.getLogger().info(\"Warning:", "np from PuzzleLib 
import Config from PuzzleLib.Backend import gpuarray, Blas", "is prohibited\" % self) self.numOfProps += 1 factor = max(self.initFactor", "T not in {np.float16, np.float32}: raise ModuleError(\"Unsupported dtype %s\" %", "None, None, None self.savemean, self.saveinvvar, self.scalegrad, self.biasgrad = None, None,", "None, None if self.affine: self.scalegrad, self.biasgrad = None, None def", "from PuzzleLib.Modules.Module import ModuleError, Module class BatchNormND(Module): def __init__(self, nd,", "1 factor = max(self.initFactor / self.numOfProps, self.minFactor) self.data, self.savemean, self.saveinvvar", "self.bias, self.mean, self.var = None, None, None, None self.savemean, self.saveinvvar,", "self.mean, self.var, self.epsilon, 0, True, out=data if self.inplace else None", "self.epsilon, 0, True, out=data if self.inplace else None ) def", "dataShapeFrom(self, shape): return shape def gradShapeFrom(self, shape): return shape def", "flag in train mode is prohibited\" % self) self.numOfProps +=", "self.savemean, self.saveinvvar, self.epsilon) if self.affine: self.grad, self.scalegrad, self.biasgrad = tup", "maps) + self.repeat(1, nd) scale = np.random.normal(1.0, sscale if affine", "name=None, empty=False, inplace=False): super().__init__(name) self.inplace = inplace if inplace and", "None self.savemean, self.saveinvvar, self.scalegrad, self.biasgrad = None, None, None, None", "data): if self.train: if self.inplace: raise ModuleError(\"%s: using inplace flag", "else: self.grad, _, _ = tup def accGradParams(self, grad, scale=1.0,", "tup def accGradParams(self, grad, scale=1.0, momentum=0.0): if self.affine: Blas.addVectorToVector( self.scalegrad.ravel(),", "data, self.scale, self.bias, self.mean, self.var, self.epsilon, 0, True, out=data if", "None, None if empty: return shape = (1, maps) +", "np.random.normal(1.0, sscale if affine else 0.0, shape).astype(self.calctype) var = np.ones(shape,", "= None, None, None, None self.savemean, self.saveinvvar, self.scalegrad, 
self.biasgrad =", "out=data if self.inplace else None ) def updateGrad(self, grad): tup", "Config.Backend.cuda: if T not in {np.float16, np.float32}: raise ModuleError(\"Unsupported dtype", "Blas from PuzzleLib.Backend.Dnn import batchNormNd, batchNormNdBackward from PuzzleLib.Variable import Variable", "max(self.initFactor / self.numOfProps, self.minFactor) self.data, self.savemean, self.saveinvvar = batchNormNd( data,", "Config.backend == Config.Backend.cuda: if T not in {np.float16, np.float32}: raise", "None, None, None if empty: return shape = (1, maps)", "self.savemean, self.saveinvvar = batchNormNd( data, self.scale, self.bias, self.mean, self.var, self.epsilon,", "%s\" % T) elif T != np.float32: raise ModuleError(\"Unsupported dtype", "PuzzleLib.Backend.Dnn import batchNormNd, batchNormNdBackward from PuzzleLib.Variable import Variable from PuzzleLib.Modules.Module", "affine self.scale, self.bias, self.mean, self.var = None, None, None, None", "_ = tup def accGradParams(self, grad, scale=1.0, momentum=0.0): if self.affine:", "if self.affine: Blas.addVectorToVector( self.scalegrad.ravel(), self.vars[\"scale\"].grad.ravel(), out=self.vars[\"scale\"].grad.ravel(), alpha=scale, beta=momentum ) Blas.addVectorToVector(", "self.scale, self.bias, self.mean, self.var, self.epsilon, factor, False ) else: self.data", "self.bias, self.mean, self.var, self.epsilon, factor, False ) else: self.data =", "self.scalegrad, self.biasgrad = tup else: self.grad, _, _ = tup", "minFactor self.numOfProps = 0 self.affine = affine self.scale, self.bias, self.mean,", "np.float32: raise ModuleError(\"Unsupported dtype %s\" % T) self.calctype = T", "else: self.data = batchNormNd( data, self.scale, self.bias, self.mean, self.var, self.epsilon,", "= tup else: self.grad, _, _ = tup def accGradParams(self,", "self.biasgrad = None, None, None, None if empty: return shape", "== Config.Backend.cuda: if T not in {np.float16, np.float32}: raise ModuleError(\"Unsupported", "self.var = None, None, 
None, None self.savemean, self.saveinvvar, self.scalegrad, self.biasgrad", "self.mean, self.var = None, None, None, None self.savemean, self.saveinvvar, self.scalegrad,", ") else: self.data = batchNormNd( data, self.scale, self.bias, self.mean, self.var,", "gradShapeFrom(self, shape): return shape def reset(self): super().reset() self.savemean, self.saveinvvar =", "else 0.0, shape).astype(self.calctype) var = np.ones(shape, dtype=self.calctype) self.setVar(\"scale\", Variable(gpuarray.to_gpu(scale))) self.setVar(\"bias\",", "= initFactor self.minFactor = minFactor self.numOfProps = 0 self.affine =", "self.biasgrad.ravel(), self.vars[\"bias\"].grad.ravel(), out=self.vars[\"bias\"].grad.ravel(), alpha=scale, beta=momentum ) def dataShapeFrom(self, shape): return", "reset(self): super().reset() self.savemean, self.saveinvvar = None, None if self.affine: self.scalegrad,", "{np.float16, np.float32}: raise ModuleError(\"Unsupported dtype %s\" % T) elif T", "nd) scale = np.random.normal(1.0, sscale if affine else 0.0, shape).astype(self.calctype)", "self.minFactor) self.data, self.savemean, self.saveinvvar = batchNormNd( data, self.scale, self.bias, self.mean,", "using inplace flag in train mode is prohibited\" % self)", "!= np.float32: raise ModuleError(\"Unsupported dtype %s\" % T) self.calctype =", "_, _ = tup def accGradParams(self, grad, scale=1.0, momentum=0.0): if", "return shape = (1, maps) + self.repeat(1, nd) scale =", "self.savemean, self.saveinvvar, self.scalegrad, self.biasgrad = None, None, None, None if", ") Blas.addVectorToVector( self.biasgrad.ravel(), self.vars[\"bias\"].grad.ravel(), out=self.vars[\"bias\"].grad.ravel(), alpha=scale, beta=momentum ) def dataShapeFrom(self,", "out=self.vars[\"bias\"].grad.ravel(), alpha=scale, beta=momentum ) def dataShapeFrom(self, shape): return shape def", "import Config from PuzzleLib.Backend import gpuarray, Blas from PuzzleLib.Backend.Dnn import", "/ self.numOfProps, self.minFactor) self.data, self.savemean, 
self.saveinvvar = batchNormNd( data, self.scale,", "and Config.showWarnings: Config.getLogger().info(\"Warning: %s is using inplace flag\", self) self.maps", "affine=True, name=None, empty=False, inplace=False): super().__init__(name) self.inplace = inplace if inplace", "self.repeat(1, nd) scale = np.random.normal(1.0, sscale if affine else 0.0,", "alpha=scale, beta=momentum ) Blas.addVectorToVector( self.biasgrad.ravel(), self.vars[\"bias\"].grad.ravel(), out=self.vars[\"bias\"].grad.ravel(), alpha=scale, beta=momentum )", "self.saveinvvar = None, None if self.affine: self.scalegrad, self.biasgrad = None,", "= minFactor self.numOfProps = 0 self.affine = affine self.scale, self.bias,", "data, self.scale, self.bias, self.mean, self.var, self.epsilon, factor, False ) else:", "raise ModuleError(\"%s: using inplace flag in train mode is prohibited\"", "factor, False ) else: self.data = batchNormNd( data, self.scale, self.bias,", "__init__(self, nd, maps, epsilon=1e-5, initFactor=1.0, minFactor=0.1, sscale=0.01, affine=True, name=None, empty=False," ]
[]
[ "import contextlib import grpc import logging import model.api.forecast_pb2_grpc as grpc_lib", "signal_name): def fn(signal_received, frame): self.logger.info('signal received', extra={'signal': signal_name}) self.event.set() return", "elapsed = time.time() - t if elapsed >= grace_period and", "self.handler('SIGINT')) signal.signal(signal.SIGTERM, self.handler('SIGTERM')) signal.signal(signal.SIGHUP, self.handler('SIGHUP')) def handler(self, signal_name): def fn(signal_received,", "self.logger.info(\"sending SIGTERM to subprocess\", extra={'proc': proc}) elif elapsed >= kill_period:", ">= kill_period: for proc in alive_procs: self.logger.warning(\"sending SIGKILL to subprocess\",", "log_handler = logging.StreamHandler(sys.stdout) formatter = jsonlogger.JsonFormatter(fmt='%(asctime)s %(name)s %(levelname)s %(message)s') log_handler.setFormatter(formatter)", "and kill it as last resort # .is_alive() also implicitly", "last resort # .is_alive() also implicitly joins the process (good", "Process(target=self._run_server, args=(shutdown.event,)) procs.append(proc) proc.start() while not shutdown.event.is_set(): time.sleep(1) t =", "implicitly joins the process (good practice in linux) alive_procs =", ">= grace_period and elapsed < kill_period: for proc in alive_procs:", "= self.config.grpc_server_address.split(':') sock.bind(('', int(port))) try: yield sock.getsockname()[1] finally: sock.close() def", "[(self.config.grpc_server_key, self.config.grpc_server_cert)], root_certificates=self.config.grpc_root_ca, require_client_auth=True ) server = grpc.server( futures.ThreadPoolExecutor(max_workers=self.config.grpc_server_thread_num), options=[", "hours, minutes, seconds) elif hours > 0: return '{:d}h{:d}m{:d}s'.format(hours, minutes,", "and elapsed < kill_period: for proc in alive_procs: proc.terminate() self.logger.info(\"sending", "other inter-process communication primitives can break when # process is", "extra={'proc': proc}) elif elapsed >= kill_period: for proc in 
alive_procs:", "grpc_lib.add_ForecastServicer_to_server(ForecastServicer(self.logger), server) server.add_secure_port(self.config.grpc_server_address, server_credentials) self.logger.info('starting python gRPC server...') server.start() while", ") grpc_lib.add_ForecastServicer_to_server(ForecastServicer(self.logger), server) server.add_secure_port(self.config.grpc_server_address, server_credentials) self.logger.info('starting python gRPC server...') server.start()", "return fn class Config(object): def __init__(self): self.grpc_server_address = os.getenv('GRPC_SERVER_ADDRESS', '')", "'') self.grpc_server_key = str.encode(os.getenv('GRPC_SERVER_KEY', '')) self.grpc_server_cert = str.encode(os.getenv('GRPC_SERVER_CERT', '')) self.grpc_root_ca", "for proc in procs: self.logger.info(\"subprocess terminated\", extra={'proc': proc}) def json_logger():", "Forecaster as ProphetForecaster from multiprocessing import Event, Process, cpu_count from", "hours, seconds = divmod(seconds, 3600) minutes, seconds = divmod(seconds, 60)", "self.config = config self.logger = logger @contextlib.contextmanager def _reserve_port(self): \"\"\"Find", "< kill_period: for proc in alive_procs: proc.terminate() self.logger.info(\"sending SIGTERM to", "= logger def pretty_timedelta(self, seconds): seconds = int(seconds) days, seconds", "def __init__(self, config, logger): self.config = config self.logger = logger", "seconds): seconds = int(seconds) days, seconds = divmod(seconds, 86400) hours,", "# .is_alive() also implicitly joins the process (good practice in", "if proc.is_alive()] if len(alive_procs) == 0: break elapsed = time.time()", "received', extra={'signal': signal_name}) self.event.set() return fn class Config(object): def __init__(self):", "import Forecaster as ProphetForecaster from multiprocessing import Event, Process, cpu_count", "procs = [] shutdown = GracefulShutdown(self.logger) for _ in range(self.config.gprc_server_process_num):", "import socket import sys import time class 
ForecastServicer(ProphetForecaster): def __init__(self,", "= logger @contextlib.contextmanager def _reserve_port(self): \"\"\"Find and reserve a port", "from forecaster.prophet import Forecaster as ProphetForecaster from multiprocessing import Event,", "in alive_procs: self.logger.warning(\"sending SIGKILL to subprocess\", extra={'proc': proc}) # Queues", "inter-process communication primitives can break when # process is killed,", "procs if proc.is_alive()] if len(alive_procs) == 0: break elapsed =", "proc}) def json_logger(): logger = logging.getLogger() log_handler = logging.StreamHandler(sys.stdout) formatter", "self.logger = logger @contextlib.contextmanager def _reserve_port(self): \"\"\"Find and reserve a", "self.grpc_root_ca = str.encode(os.getenv('GRPC_ROOT_CA', '')) self.gprc_server_process_num = int(os.getenv('GPRC_SERVER_PROCESS_NUM', cpu_count())) self.grpc_server_thread_num =", "seconds = int(seconds) days, seconds = divmod(seconds, 86400) hours, seconds", "killed, but we don't care here proc.kill() time.sleep(1) time.sleep(1) for", "self._reserve_port(): procs = [] shutdown = GracefulShutdown(self.logger) for _ in", "int(port))) try: yield sock.getsockname()[1] finally: sock.close() def _run_server(self, shutdown_event): server_credentials", "serve(self): with self._reserve_port(): procs = [] shutdown = GracefulShutdown(self.logger) for", "'')) self.gprc_server_process_num = int(os.getenv('GPRC_SERVER_PROCESS_NUM', cpu_count())) self.grpc_server_thread_num = int(os.getenv('GRPC_SERVER_THREAD_NUM', 1)) self.grpc_server_grace_period_in_secs", "86400) hours, seconds = divmod(seconds, 3600) minutes, seconds = divmod(seconds,", "time.sleep(1) time.sleep(1) for proc in procs: self.logger.info(\"subprocess terminated\", extra={'proc': proc})", "set SO_REUSEPORT.') _, port = self.config.grpc_server_address.split(':') sock.bind(('', int(port))) try: yield", "config, logger): self.config = config self.logger = logger @contextlib.contextmanager def", "> 0: 
return '{:d}h{:d}m{:d}s'.format(hours, minutes, seconds) elif minutes > 0:", "jsonlogger import contextlib import grpc import logging import model.api.forecast_pb2_grpc as", "= Event() signal.signal(signal.SIGINT, self.handler('SIGINT')) signal.signal(signal.SIGTERM, self.handler('SIGTERM')) signal.signal(signal.SIGHUP, self.handler('SIGHUP')) def handler(self,", "for all subprocesses to use\"\"\" sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) sock.setsockopt(socket.SOL_SOCKET,", "python gRPC server...') server.start() while not shutdown_event.is_set(): time.sleep(1) server.stop(5).wait() self.logger.info('python", "model.api.forecast_pb2_grpc as grpc_lib import os import signal import socket import", "ProphetForecaster from multiprocessing import Event, Process, cpu_count from pythonjsonlogger import", "signal import socket import sys import time class ForecastServicer(ProphetForecaster): def", "- t if elapsed >= grace_period and elapsed < kill_period:", "self.event.set() return fn class Config(object): def __init__(self): self.grpc_server_address = os.getenv('GRPC_SERVER_ADDRESS',", "= time.time() grace_period = self.config.grpc_server_grace_period_in_secs kill_period = self.config.grpc_server_kill_period_in_secs while True:", "= int(os.getenv('GRPC_SERVER_GRACE_PERIOD_IN_SECS', 2)) self.grpc_server_kill_period_in_secs = int(os.getenv('GRPC_SERVER_KILL_PERIOD_IN_SECS', 5)) class Server(object): def", "(\"grpc.so_reuseport\", 1), (\"grpc.use_local_subchannel_pool\", 1), ], ) grpc_lib.add_ForecastServicer_to_server(ForecastServicer(self.logger), server) server.add_secure_port(self.config.grpc_server_address, server_credentials)", "ForecastServicer(ProphetForecaster): def __init__(self, logger): self.logger = logger def pretty_timedelta(self, seconds):", "seconds) elif hours > 0: return '{:d}h{:d}m{:d}s'.format(hours, minutes, seconds) elif", "logger @contextlib.contextmanager def _reserve_port(self): \"\"\"Find and reserve a port for", "and other 
inter-process communication primitives can break when # process", "= str.encode(os.getenv('GRPC_SERVER_KEY', '')) self.grpc_server_cert = str.encode(os.getenv('GRPC_SERVER_CERT', '')) self.grpc_root_ca = str.encode(os.getenv('GRPC_ROOT_CA',", "60) if days > 0: return '{:d}d{:d}h{:d}m{:d}s'.format(days, hours, minutes, seconds)", "2)) self.grpc_server_kill_period_in_secs = int(os.getenv('GRPC_SERVER_KILL_PERIOD_IN_SECS', 5)) class Server(object): def __init__(self, config,", "SO_REUSEPORT.') _, port = self.config.grpc_server_address.split(':') sock.bind(('', int(port))) try: yield sock.getsockname()[1]", "self.logger.info('python gRPC server stopped') def serve(self): with self._reserve_port(): procs =", "linux) alive_procs = [proc for proc in procs if proc.is_alive()]", "procs: self.logger.info(\"subprocess terminated\", extra={'proc': proc}) def json_logger(): logger = logging.getLogger()", "_ in range(self.config.gprc_server_process_num): proc = Process(target=self._run_server, args=(shutdown.event,)) procs.append(proc) proc.start() while", "# Send SIGINT if process doesn't exit quickly enough, and", "forecaster.prophet import Forecaster as ProphetForecaster from multiprocessing import Event, Process,", "frame): self.logger.info('signal received', extra={'signal': signal_name}) self.event.set() return fn class Config(object):", "divmod(seconds, 3600) minutes, seconds = divmod(seconds, 60) if days >", "seconds) else: return '{:d}s'.format(seconds) class GracefulShutdown: def __init__(self, logger): self.logger", "sock.bind(('', int(port))) try: yield sock.getsockname()[1] finally: sock.close() def _run_server(self, shutdown_event):", "int(seconds) days, seconds = divmod(seconds, 86400) hours, seconds = divmod(seconds,", "proc.terminate() self.logger.info(\"sending SIGTERM to subprocess\", extra={'proc': proc}) elif elapsed >=", "# process is killed, but we don't care here proc.kill()", "class Server(object): def __init__(self, config, logger): self.config = config 
self.logger", "(\"grpc.use_local_subchannel_pool\", 1), ], ) grpc_lib.add_ForecastServicer_to_server(ForecastServicer(self.logger), server) server.add_secure_port(self.config.grpc_server_address, server_credentials) self.logger.info('starting python", "int(os.getenv('GRPC_SERVER_KILL_PERIOD_IN_SECS', 5)) class Server(object): def __init__(self, config, logger): self.config =", "1) if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) == 0: raise RuntimeError('failed to set", "to subprocess\", extra={'proc': proc}) elif elapsed >= kill_period: for proc", "elapsed < kill_period: for proc in alive_procs: proc.terminate() self.logger.info(\"sending SIGTERM", "import logging import model.api.forecast_pb2_grpc as grpc_lib import os import signal", "int(os.getenv('GRPC_SERVER_THREAD_NUM', 1)) self.grpc_server_grace_period_in_secs = int(os.getenv('GRPC_SERVER_GRACE_PERIOD_IN_SECS', 2)) self.grpc_server_kill_period_in_secs = int(os.getenv('GRPC_SERVER_KILL_PERIOD_IN_SECS', 5))", "'{:d}m{:d}s'.format(minutes, seconds) else: return '{:d}s'.format(seconds) class GracefulShutdown: def __init__(self, logger):", "socket import sys import time class ForecastServicer(ProphetForecaster): def __init__(self, logger):", "> 0: return '{:d}d{:d}h{:d}m{:d}s'.format(days, hours, minutes, seconds) elif hours >", "1), ], ) grpc_lib.add_ForecastServicer_to_server(ForecastServicer(self.logger), server) server.add_secure_port(self.config.grpc_server_address, server_credentials) self.logger.info('starting python gRPC", "proc in alive_procs: proc.terminate() self.logger.info(\"sending SIGTERM to subprocess\", extra={'proc': proc})", "self.event = Event() signal.signal(signal.SIGINT, self.handler('SIGINT')) signal.signal(signal.SIGTERM, self.handler('SIGTERM')) signal.signal(signal.SIGHUP, self.handler('SIGHUP')) def", "port for all subprocesses to use\"\"\" sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)", "a port for all subprocesses to use\"\"\" sock = 
socket.socket(socket.AF_INET6,", "os.getenv('GRPC_SERVER_ADDRESS', '') self.grpc_server_key = str.encode(os.getenv('GRPC_SERVER_KEY', '')) self.grpc_server_cert = str.encode(os.getenv('GRPC_SERVER_CERT', ''))", "import jsonlogger import contextlib import grpc import logging import model.api.forecast_pb2_grpc", "seconds = divmod(seconds, 60) if days > 0: return '{:d}d{:d}h{:d}m{:d}s'.format(days,", "_reserve_port(self): \"\"\"Find and reserve a port for all subprocesses to", "int(os.getenv('GPRC_SERVER_PROCESS_NUM', cpu_count())) self.grpc_server_thread_num = int(os.getenv('GRPC_SERVER_THREAD_NUM', 1)) self.grpc_server_grace_period_in_secs = int(os.getenv('GRPC_SERVER_GRACE_PERIOD_IN_SECS', 2))", "self.config.grpc_server_grace_period_in_secs kill_period = self.config.grpc_server_kill_period_in_secs while True: # Send SIGINT if", "from concurrent import futures from forecaster.prophet import Forecaster as ProphetForecaster", "as last resort # .is_alive() also implicitly joins the process", "Event() signal.signal(signal.SIGINT, self.handler('SIGINT')) signal.signal(signal.SIGTERM, self.handler('SIGTERM')) signal.signal(signal.SIGHUP, self.handler('SIGHUP')) def handler(self, signal_name):", "days, seconds = divmod(seconds, 86400) hours, seconds = divmod(seconds, 3600)", "care here proc.kill() time.sleep(1) time.sleep(1) for proc in procs: self.logger.info(\"subprocess", "if days > 0: return '{:d}d{:d}h{:d}m{:d}s'.format(days, hours, minutes, seconds) elif", "yield sock.getsockname()[1] finally: sock.close() def _run_server(self, shutdown_event): server_credentials = grpc.ssl_server_credentials(", "options=[ (\"grpc.so_reuseport\", 1), (\"grpc.use_local_subchannel_pool\", 1), ], ) grpc_lib.add_ForecastServicer_to_server(ForecastServicer(self.logger), server) server.add_secure_port(self.config.grpc_server_address,", "0: break elapsed = time.time() - t if elapsed >=", "= int(seconds) days, seconds = divmod(seconds, 86400) hours, seconds =", "can break when # process is 
killed, but we don't", "str.encode(os.getenv('GRPC_SERVER_CERT', '')) self.grpc_root_ca = str.encode(os.getenv('GRPC_ROOT_CA', '')) self.gprc_server_process_num = int(os.getenv('GPRC_SERVER_PROCESS_NUM', cpu_count()))", "kill it as last resort # .is_alive() also implicitly joins", "range(self.config.gprc_server_process_num): proc = Process(target=self._run_server, args=(shutdown.event,)) procs.append(proc) proc.start() while not shutdown.event.is_set():", "cpu_count())) self.grpc_server_thread_num = int(os.getenv('GRPC_SERVER_THREAD_NUM', 1)) self.grpc_server_grace_period_in_secs = int(os.getenv('GRPC_SERVER_GRACE_PERIOD_IN_SECS', 2)) self.grpc_server_kill_period_in_secs", "self.handler('SIGTERM')) signal.signal(signal.SIGHUP, self.handler('SIGHUP')) def handler(self, signal_name): def fn(signal_received, frame): self.logger.info('signal", "os import signal import socket import sys import time class", "procs.append(proc) proc.start() while not shutdown.event.is_set(): time.sleep(1) t = time.time() grace_period", "self.config.grpc_server_kill_period_in_secs while True: # Send SIGINT if process doesn't exit", "sys import time class ForecastServicer(ProphetForecaster): def __init__(self, logger): self.logger =", "hours > 0: return '{:d}h{:d}m{:d}s'.format(hours, minutes, seconds) elif minutes >", "int(os.getenv('GRPC_SERVER_GRACE_PERIOD_IN_SECS', 2)) self.grpc_server_kill_period_in_secs = int(os.getenv('GRPC_SERVER_KILL_PERIOD_IN_SECS', 5)) class Server(object): def __init__(self,", "def _reserve_port(self): \"\"\"Find and reserve a port for all subprocesses", "def __init__(self): self.grpc_server_address = os.getenv('GRPC_SERVER_ADDRESS', '') self.grpc_server_key = str.encode(os.getenv('GRPC_SERVER_KEY', ''))", "proc}) elif elapsed >= kill_period: for proc in alive_procs: self.logger.warning(\"sending", "[proc for proc in procs if proc.is_alive()] if len(alive_procs) ==", "days > 0: return '{:d}d{:d}h{:d}m{:d}s'.format(days, hours, minutes, seconds) elif hours", 
"logger): self.logger = logger self.event = Event() signal.signal(signal.SIGINT, self.handler('SIGINT')) signal.signal(signal.SIGTERM,", "require_client_auth=True ) server = grpc.server( futures.ThreadPoolExecutor(max_workers=self.config.grpc_server_thread_num), options=[ (\"grpc.so_reuseport\", 1), (\"grpc.use_local_subchannel_pool\",", "= logger self.event = Event() signal.signal(signal.SIGINT, self.handler('SIGINT')) signal.signal(signal.SIGTERM, self.handler('SIGTERM')) signal.signal(signal.SIGHUP,", "logger): self.config = config self.logger = logger @contextlib.contextmanager def _reserve_port(self):", "== 0: raise RuntimeError('failed to set SO_REUSEPORT.') _, port =", "= config self.logger = logger @contextlib.contextmanager def _reserve_port(self): \"\"\"Find and", "server.add_secure_port(self.config.grpc_server_address, server_credentials) self.logger.info('starting python gRPC server...') server.start() while not shutdown_event.is_set():", "def __init__(self, logger): self.logger = logger self.event = Event() signal.signal(signal.SIGINT,", "'{:d}h{:d}m{:d}s'.format(hours, minutes, seconds) elif minutes > 0: return '{:d}m{:d}s'.format(minutes, seconds)", "futures.ThreadPoolExecutor(max_workers=self.config.grpc_server_thread_num), options=[ (\"grpc.so_reuseport\", 1), (\"grpc.use_local_subchannel_pool\", 1), ], ) grpc_lib.add_ForecastServicer_to_server(ForecastServicer(self.logger), server)", "practice in linux) alive_procs = [proc for proc in procs", "grace_period and elapsed < kill_period: for proc in alive_procs: proc.terminate()", "try: yield sock.getsockname()[1] finally: sock.close() def _run_server(self, shutdown_event): server_credentials =", "joins the process (good practice in linux) alive_procs = [proc", "from multiprocessing import Event, Process, cpu_count from pythonjsonlogger import jsonlogger", "= divmod(seconds, 60) if days > 0: return '{:d}d{:d}h{:d}m{:d}s'.format(days, hours,", "sock.getsockname()[1] finally: sock.close() def 
_run_server(self, shutdown_event): server_credentials = grpc.ssl_server_credentials( [(self.config.grpc_server_key,", "<reponame>waltzofpearls/reckon<filename>model/server/server.py from concurrent import futures from forecaster.prophet import Forecaster as", "the process (good practice in linux) alive_procs = [proc for", "formatter = jsonlogger.JsonFormatter(fmt='%(asctime)s %(name)s %(levelname)s %(message)s') log_handler.setFormatter(formatter) log_handler.flush = sys.stdout.flush", "self.gprc_server_process_num = int(os.getenv('GPRC_SERVER_PROCESS_NUM', cpu_count())) self.grpc_server_thread_num = int(os.getenv('GRPC_SERVER_THREAD_NUM', 1)) self.grpc_server_grace_period_in_secs =", "self.logger = logger def pretty_timedelta(self, seconds): seconds = int(seconds) days,", "grpc import logging import model.api.forecast_pb2_grpc as grpc_lib import os import", "process (good practice in linux) alive_procs = [proc for proc", "minutes, seconds) elif hours > 0: return '{:d}h{:d}m{:d}s'.format(hours, minutes, seconds)", "import model.api.forecast_pb2_grpc as grpc_lib import os import signal import socket", "use\"\"\" sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) if sock.getsockopt(socket.SOL_SOCKET,", "in procs: self.logger.info(\"subprocess terminated\", extra={'proc': proc}) def json_logger(): logger =", "in linux) alive_procs = [proc for proc in procs if", "signal.signal(signal.SIGINT, self.handler('SIGINT')) signal.signal(signal.SIGTERM, self.handler('SIGTERM')) signal.signal(signal.SIGHUP, self.handler('SIGHUP')) def handler(self, signal_name): def", "_run_server(self, shutdown_event): server_credentials = grpc.ssl_server_credentials( [(self.config.grpc_server_key, self.config.grpc_server_cert)], root_certificates=self.config.grpc_root_ca, require_client_auth=True )", "contextlib import grpc import logging import model.api.forecast_pb2_grpc as grpc_lib import", "self.grpc_server_address = 
os.getenv('GRPC_SERVER_ADDRESS', '') self.grpc_server_key = str.encode(os.getenv('GRPC_SERVER_KEY', '')) self.grpc_server_cert =", "socket.SO_REUSEPORT) == 0: raise RuntimeError('failed to set SO_REUSEPORT.') _, port", "while True: # Send SIGINT if process doesn't exit quickly", "from pythonjsonlogger import jsonlogger import contextlib import grpc import logging", "= socket.socket(socket.AF_INET6, socket.SOCK_STREAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) ==", "enough, and kill it as last resort # .is_alive() also", "server) server.add_secure_port(self.config.grpc_server_address, server_credentials) self.logger.info('starting python gRPC server...') server.start() while not", "= logging.getLogger() log_handler = logging.StreamHandler(sys.stdout) formatter = jsonlogger.JsonFormatter(fmt='%(asctime)s %(name)s %(levelname)s", "= time.time() - t if elapsed >= grace_period and elapsed", "time.sleep(1) for proc in procs: self.logger.info(\"subprocess terminated\", extra={'proc': proc}) def", "'')) self.grpc_root_ca = str.encode(os.getenv('GRPC_ROOT_CA', '')) self.gprc_server_process_num = int(os.getenv('GPRC_SERVER_PROCESS_NUM', cpu_count())) self.grpc_server_thread_num", "5)) class Server(object): def __init__(self, config, logger): self.config = config", "time.time() - t if elapsed >= grace_period and elapsed <", "%(name)s %(levelname)s %(message)s') log_handler.setFormatter(formatter) log_handler.flush = sys.stdout.flush logger.setLevel(logging.INFO) logger.addHandler(log_handler) return", "len(alive_procs) == 0: break elapsed = time.time() - t if", "elif minutes > 0: return '{:d}m{:d}s'.format(minutes, seconds) else: return '{:d}s'.format(seconds)", "0: return '{:d}h{:d}m{:d}s'.format(hours, minutes, seconds) elif minutes > 0: return", "= self.config.grpc_server_kill_period_in_secs while True: # Send SIGINT if process doesn't", "= Process(target=self._run_server, args=(shutdown.event,)) 
procs.append(proc) proc.start() while not shutdown.event.is_set(): time.sleep(1) t", "SIGKILL to subprocess\", extra={'proc': proc}) # Queues and other inter-process", "== 0: break elapsed = time.time() - t if elapsed", "when # process is killed, but we don't care here", "import Event, Process, cpu_count from pythonjsonlogger import jsonlogger import contextlib", "= os.getenv('GRPC_SERVER_ADDRESS', '') self.grpc_server_key = str.encode(os.getenv('GRPC_SERVER_KEY', '')) self.grpc_server_cert = str.encode(os.getenv('GRPC_SERVER_CERT',", "server stopped') def serve(self): with self._reserve_port(): procs = [] shutdown", "def __init__(self, logger): self.logger = logger def pretty_timedelta(self, seconds): seconds", "to set SO_REUSEPORT.') _, port = self.config.grpc_server_address.split(':') sock.bind(('', int(port))) try:", "also implicitly joins the process (good practice in linux) alive_procs", "grace_period = self.config.grpc_server_grace_period_in_secs kill_period = self.config.grpc_server_kill_period_in_secs while True: # Send", "doesn't exit quickly enough, and kill it as last resort", "= int(os.getenv('GRPC_SERVER_THREAD_NUM', 1)) self.grpc_server_grace_period_in_secs = int(os.getenv('GRPC_SERVER_GRACE_PERIOD_IN_SECS', 2)) self.grpc_server_kill_period_in_secs = int(os.getenv('GRPC_SERVER_KILL_PERIOD_IN_SECS',", "'')) self.grpc_server_cert = str.encode(os.getenv('GRPC_SERVER_CERT', '')) self.grpc_root_ca = str.encode(os.getenv('GRPC_ROOT_CA', '')) self.gprc_server_process_num", "raise RuntimeError('failed to set SO_REUSEPORT.') _, port = self.config.grpc_server_address.split(':') sock.bind(('',", "self.logger.info('signal received', extra={'signal': signal_name}) self.event.set() return fn class Config(object): def", "kill_period = self.config.grpc_server_kill_period_in_secs while True: # Send SIGINT if process", "%(levelname)s %(message)s') log_handler.setFormatter(formatter) log_handler.flush = sys.stdout.flush logger.setLevel(logging.INFO) 
logger.addHandler(log_handler) return logger", "> 0: return '{:d}m{:d}s'.format(minutes, seconds) else: return '{:d}s'.format(seconds) class GracefulShutdown:", "signal.signal(signal.SIGTERM, self.handler('SIGTERM')) signal.signal(signal.SIGHUP, self.handler('SIGHUP')) def handler(self, signal_name): def fn(signal_received, frame):", "server_credentials = grpc.ssl_server_credentials( [(self.config.grpc_server_key, self.config.grpc_server_cert)], root_certificates=self.config.grpc_root_ca, require_client_auth=True ) server =", "Server(object): def __init__(self, config, logger): self.config = config self.logger =", "= jsonlogger.JsonFormatter(fmt='%(asctime)s %(name)s %(levelname)s %(message)s') log_handler.setFormatter(formatter) log_handler.flush = sys.stdout.flush logger.setLevel(logging.INFO)", "def _run_server(self, shutdown_event): server_credentials = grpc.ssl_server_credentials( [(self.config.grpc_server_key, self.config.grpc_server_cert)], root_certificates=self.config.grpc_root_ca, require_client_auth=True", "don't care here proc.kill() time.sleep(1) time.sleep(1) for proc in procs:", "jsonlogger.JsonFormatter(fmt='%(asctime)s %(name)s %(levelname)s %(message)s') log_handler.setFormatter(formatter) log_handler.flush = sys.stdout.flush logger.setLevel(logging.INFO) logger.addHandler(log_handler)", "__init__(self, logger): self.logger = logger def pretty_timedelta(self, seconds): seconds =", "elif hours > 0: return '{:d}h{:d}m{:d}s'.format(hours, minutes, seconds) elif minutes", "logger def pretty_timedelta(self, seconds): seconds = int(seconds) days, seconds =", "kill_period: for proc in alive_procs: self.logger.warning(\"sending SIGKILL to subprocess\", extra={'proc':", "signal_name}) self.event.set() return fn class Config(object): def __init__(self): self.grpc_server_address =", "True: # Send SIGINT if process doesn't exit quickly enough,", "is killed, but we don't care here proc.kill() time.sleep(1) time.sleep(1)", "import grpc import logging import 
model.api.forecast_pb2_grpc as grpc_lib import os", "'{:d}s'.format(seconds) class GracefulShutdown: def __init__(self, logger): self.logger = logger self.event", "sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) == 0: raise RuntimeError('failed to set SO_REUSEPORT.') _,", "self.grpc_server_key = str.encode(os.getenv('GRPC_SERVER_KEY', '')) self.grpc_server_cert = str.encode(os.getenv('GRPC_SERVER_CERT', '')) self.grpc_root_ca =", "logger self.event = Event() signal.signal(signal.SIGINT, self.handler('SIGINT')) signal.signal(signal.SIGTERM, self.handler('SIGTERM')) signal.signal(signal.SIGHUP, self.handler('SIGHUP'))", "pythonjsonlogger import jsonlogger import contextlib import grpc import logging import", "minutes, seconds) elif minutes > 0: return '{:d}m{:d}s'.format(minutes, seconds) else:", "= str.encode(os.getenv('GRPC_SERVER_CERT', '')) self.grpc_root_ca = str.encode(os.getenv('GRPC_ROOT_CA', '')) self.gprc_server_process_num = int(os.getenv('GPRC_SERVER_PROCESS_NUM',", "return '{:d}s'.format(seconds) class GracefulShutdown: def __init__(self, logger): self.logger = logger", "@contextlib.contextmanager def _reserve_port(self): \"\"\"Find and reserve a port for all", "and reserve a port for all subprocesses to use\"\"\" sock", "handler(self, signal_name): def fn(signal_received, frame): self.logger.info('signal received', extra={'signal': signal_name}) self.event.set()", "reserve a port for all subprocesses to use\"\"\" sock =", "self.grpc_server_cert = str.encode(os.getenv('GRPC_SERVER_CERT', '')) self.grpc_root_ca = str.encode(os.getenv('GRPC_ROOT_CA', '')) self.gprc_server_process_num =", "server.start() while not shutdown_event.is_set(): time.sleep(1) server.stop(5).wait() self.logger.info('python gRPC server stopped')", "gRPC server stopped') def serve(self): with self._reserve_port(): procs = []", "proc}) # Queues and other inter-process communication primitives can break", "= divmod(seconds, 86400) hours, seconds = divmod(seconds, 3600) minutes, 
seconds", "seconds) elif minutes > 0: return '{:d}m{:d}s'.format(minutes, seconds) else: return", "grpc_lib import os import signal import socket import sys import", "alive_procs: proc.terminate() self.logger.info(\"sending SIGTERM to subprocess\", extra={'proc': proc}) elif elapsed", "while not shutdown.event.is_set(): time.sleep(1) t = time.time() grace_period = self.config.grpc_server_grace_period_in_secs", "Send SIGINT if process doesn't exit quickly enough, and kill", "= grpc.ssl_server_credentials( [(self.config.grpc_server_key, self.config.grpc_server_cert)], root_certificates=self.config.grpc_root_ca, require_client_auth=True ) server = grpc.server(", "while not shutdown_event.is_set(): time.sleep(1) server.stop(5).wait() self.logger.info('python gRPC server stopped') def", "process doesn't exit quickly enough, and kill it as last", "__init__(self): self.grpc_server_address = os.getenv('GRPC_SERVER_ADDRESS', '') self.grpc_server_key = str.encode(os.getenv('GRPC_SERVER_KEY', '')) self.grpc_server_cert", "= logging.StreamHandler(sys.stdout) formatter = jsonlogger.JsonFormatter(fmt='%(asctime)s %(name)s %(levelname)s %(message)s') log_handler.setFormatter(formatter) log_handler.flush", "def json_logger(): logger = logging.getLogger() log_handler = logging.StreamHandler(sys.stdout) formatter =", "GracefulShutdown(self.logger) for _ in range(self.config.gprc_server_process_num): proc = Process(target=self._run_server, args=(shutdown.event,)) procs.append(proc)", "minutes, seconds = divmod(seconds, 60) if days > 0: return", "0: return '{:d}d{:d}h{:d}m{:d}s'.format(days, hours, minutes, seconds) elif hours > 0:", "t = time.time() grace_period = self.config.grpc_server_grace_period_in_secs kill_period = self.config.grpc_server_kill_period_in_secs while", "minutes > 0: return '{:d}m{:d}s'.format(minutes, seconds) else: return '{:d}s'.format(seconds) class", "as ProphetForecaster from multiprocessing import Event, Process, cpu_count from pythonjsonlogger", "[] 
shutdown = GracefulShutdown(self.logger) for _ in range(self.config.gprc_server_process_num): proc =", "socket.SOCK_STREAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) == 0: raise", "if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) == 0: raise RuntimeError('failed to set SO_REUSEPORT.')", "logger): self.logger = logger def pretty_timedelta(self, seconds): seconds = int(seconds)", "gRPC server...') server.start() while not shutdown_event.is_set(): time.sleep(1) server.stop(5).wait() self.logger.info('python gRPC", "subprocesses to use\"\"\" sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)", "t if elapsed >= grace_period and elapsed < kill_period: for", "finally: sock.close() def _run_server(self, shutdown_event): server_credentials = grpc.ssl_server_credentials( [(self.config.grpc_server_key, self.config.grpc_server_cert)],", "break when # process is killed, but we don't care", "proc in procs: self.logger.info(\"subprocess terminated\", extra={'proc': proc}) def json_logger(): logger", "= GracefulShutdown(self.logger) for _ in range(self.config.gprc_server_process_num): proc = Process(target=self._run_server, args=(shutdown.event,))", "proc = Process(target=self._run_server, args=(shutdown.event,)) procs.append(proc) proc.start() while not shutdown.event.is_set(): time.sleep(1)", "def serve(self): with self._reserve_port(): procs = [] shutdown = GracefulShutdown(self.logger)", "logging.getLogger() log_handler = logging.StreamHandler(sys.stdout) formatter = jsonlogger.JsonFormatter(fmt='%(asctime)s %(name)s %(levelname)s %(message)s')", "self.grpc_server_kill_period_in_secs = int(os.getenv('GRPC_SERVER_KILL_PERIOD_IN_SECS', 5)) class Server(object): def __init__(self, config, logger):", "logging import model.api.forecast_pb2_grpc as grpc_lib import os import signal import", "class ForecastServicer(ProphetForecaster): 
def __init__(self, logger): self.logger = logger def pretty_timedelta(self,", "not shutdown_event.is_set(): time.sleep(1) server.stop(5).wait() self.logger.info('python gRPC server stopped') def serve(self):", "it as last resort # .is_alive() also implicitly joins the", "in alive_procs: proc.terminate() self.logger.info(\"sending SIGTERM to subprocess\", extra={'proc': proc}) elif", "str.encode(os.getenv('GRPC_ROOT_CA', '')) self.gprc_server_process_num = int(os.getenv('GPRC_SERVER_PROCESS_NUM', cpu_count())) self.grpc_server_thread_num = int(os.getenv('GRPC_SERVER_THREAD_NUM', 1))", "], ) grpc_lib.add_ForecastServicer_to_server(ForecastServicer(self.logger), server) server.add_secure_port(self.config.grpc_server_address, server_credentials) self.logger.info('starting python gRPC server...')", ") server = grpc.server( futures.ThreadPoolExecutor(max_workers=self.config.grpc_server_thread_num), options=[ (\"grpc.so_reuseport\", 1), (\"grpc.use_local_subchannel_pool\", 1),", "import futures from forecaster.prophet import Forecaster as ProphetForecaster from multiprocessing", "Event, Process, cpu_count from pythonjsonlogger import jsonlogger import contextlib import", "import sys import time class ForecastServicer(ProphetForecaster): def __init__(self, logger): self.logger", "self.logger.info(\"subprocess terminated\", extra={'proc': proc}) def json_logger(): logger = logging.getLogger() log_handler", "socket.SO_REUSEPORT, 1) if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) == 0: raise RuntimeError('failed to", "exit quickly enough, and kill it as last resort #", "resort # .is_alive() also implicitly joins the process (good practice", "kill_period: for proc in alive_procs: proc.terminate() self.logger.info(\"sending SIGTERM to subprocess\",", "server_credentials) self.logger.info('starting python gRPC server...') server.start() while not shutdown_event.is_set(): time.sleep(1)", "extra={'signal': signal_name}) self.event.set() return fn class Config(object): 
def __init__(self): self.grpc_server_address", "def pretty_timedelta(self, seconds): seconds = int(seconds) days, seconds = divmod(seconds,", "divmod(seconds, 60) if days > 0: return '{:d}d{:d}h{:d}m{:d}s'.format(days, hours, minutes,", "sock.close() def _run_server(self, shutdown_event): server_credentials = grpc.ssl_server_credentials( [(self.config.grpc_server_key, self.config.grpc_server_cert)], root_certificates=self.config.grpc_root_ca,", "= [] shutdown = GracefulShutdown(self.logger) for _ in range(self.config.gprc_server_process_num): proc", "= self.config.grpc_server_grace_period_in_secs kill_period = self.config.grpc_server_kill_period_in_secs while True: # Send SIGINT", "json_logger(): logger = logging.getLogger() log_handler = logging.StreamHandler(sys.stdout) formatter = jsonlogger.JsonFormatter(fmt='%(asctime)s", "= grpc.server( futures.ThreadPoolExecutor(max_workers=self.config.grpc_server_thread_num), options=[ (\"grpc.so_reuseport\", 1), (\"grpc.use_local_subchannel_pool\", 1), ], )", "proc in procs if proc.is_alive()] if len(alive_procs) == 0: break", "signal.signal(signal.SIGHUP, self.handler('SIGHUP')) def handler(self, signal_name): def fn(signal_received, frame): self.logger.info('signal received',", "communication primitives can break when # process is killed, but", "Config(object): def __init__(self): self.grpc_server_address = os.getenv('GRPC_SERVER_ADDRESS', '') self.grpc_server_key = str.encode(os.getenv('GRPC_SERVER_KEY',", "def fn(signal_received, frame): self.logger.info('signal received', extra={'signal': signal_name}) self.event.set() return fn", "fn(signal_received, frame): self.logger.info('signal received', extra={'signal': signal_name}) self.event.set() return fn class", "self.grpc_server_thread_num = int(os.getenv('GRPC_SERVER_THREAD_NUM', 1)) self.grpc_server_grace_period_in_secs = int(os.getenv('GRPC_SERVER_GRACE_PERIOD_IN_SECS', 2)) self.grpc_server_kill_period_in_secs =", "GracefulShutdown: def __init__(self, logger): 
self.logger = logger self.event = Event()", "args=(shutdown.event,)) procs.append(proc) proc.start() while not shutdown.event.is_set(): time.sleep(1) t = time.time()", "logger = logging.getLogger() log_handler = logging.StreamHandler(sys.stdout) formatter = jsonlogger.JsonFormatter(fmt='%(asctime)s %(name)s", "with self._reserve_port(): procs = [] shutdown = GracefulShutdown(self.logger) for _", "= int(os.getenv('GPRC_SERVER_PROCESS_NUM', cpu_count())) self.grpc_server_thread_num = int(os.getenv('GRPC_SERVER_THREAD_NUM', 1)) self.grpc_server_grace_period_in_secs = int(os.getenv('GRPC_SERVER_GRACE_PERIOD_IN_SECS',", "for proc in alive_procs: self.logger.warning(\"sending SIGKILL to subprocess\", extra={'proc': proc})", "primitives can break when # process is killed, but we", "server...') server.start() while not shutdown_event.is_set(): time.sleep(1) server.stop(5).wait() self.logger.info('python gRPC server", "import os import signal import socket import sys import time", "self.grpc_server_grace_period_in_secs = int(os.getenv('GRPC_SERVER_GRACE_PERIOD_IN_SECS', 2)) self.grpc_server_kill_period_in_secs = int(os.getenv('GRPC_SERVER_KILL_PERIOD_IN_SECS', 5)) class Server(object):", "= int(os.getenv('GRPC_SERVER_KILL_PERIOD_IN_SECS', 5)) class Server(object): def __init__(self, config, logger): self.config", "0: return '{:d}m{:d}s'.format(minutes, seconds) else: return '{:d}s'.format(seconds) class GracefulShutdown: def", "Process, cpu_count from pythonjsonlogger import jsonlogger import contextlib import grpc", "root_certificates=self.config.grpc_root_ca, require_client_auth=True ) server = grpc.server( futures.ThreadPoolExecutor(max_workers=self.config.grpc_server_thread_num), options=[ (\"grpc.so_reuseport\", 1),", "= str.encode(os.getenv('GRPC_ROOT_CA', '')) self.gprc_server_process_num = int(os.getenv('GPRC_SERVER_PROCESS_NUM', cpu_count())) self.grpc_server_thread_num = int(os.getenv('GRPC_SERVER_THREAD_NUM',", "server = grpc.server( 
futures.ThreadPoolExecutor(max_workers=self.config.grpc_server_thread_num), options=[ (\"grpc.so_reuseport\", 1), (\"grpc.use_local_subchannel_pool\", 1), ],", "\"\"\"Find and reserve a port for all subprocesses to use\"\"\"", "elif elapsed >= kill_period: for proc in alive_procs: self.logger.warning(\"sending SIGKILL", ".is_alive() also implicitly joins the process (good practice in linux)", "to use\"\"\" sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) if", "Queues and other inter-process communication primitives can break when #", "shutdown.event.is_set(): time.sleep(1) t = time.time() grace_period = self.config.grpc_server_grace_period_in_secs kill_period =", "server.stop(5).wait() self.logger.info('python gRPC server stopped') def serve(self): with self._reserve_port(): procs", "if elapsed >= grace_period and elapsed < kill_period: for proc", "in range(self.config.gprc_server_process_num): proc = Process(target=self._run_server, args=(shutdown.event,)) procs.append(proc) proc.start() while not", "return '{:d}d{:d}h{:d}m{:d}s'.format(days, hours, minutes, seconds) elif hours > 0: return", "_, port = self.config.grpc_server_address.split(':') sock.bind(('', int(port))) try: yield sock.getsockname()[1] finally:", "self.config.grpc_server_cert)], root_certificates=self.config.grpc_root_ca, require_client_auth=True ) server = grpc.server( futures.ThreadPoolExecutor(max_workers=self.config.grpc_server_thread_num), options=[ (\"grpc.so_reuseport\",", "self.logger = logger self.event = Event() signal.signal(signal.SIGINT, self.handler('SIGINT')) signal.signal(signal.SIGTERM, self.handler('SIGTERM'))", "3600) minutes, seconds = divmod(seconds, 60) if days > 0:", "config self.logger = logger @contextlib.contextmanager def _reserve_port(self): \"\"\"Find and reserve", "time class ForecastServicer(ProphetForecaster): def __init__(self, logger): self.logger = logger def", "proc in alive_procs: 
self.logger.warning(\"sending SIGKILL to subprocess\", extra={'proc': proc}) #", "else: return '{:d}s'.format(seconds) class GracefulShutdown: def __init__(self, logger): self.logger =", "if process doesn't exit quickly enough, and kill it as", "# Queues and other inter-process communication primitives can break when", "= [proc for proc in procs if proc.is_alive()] if len(alive_procs)", "1)) self.grpc_server_grace_period_in_secs = int(os.getenv('GRPC_SERVER_GRACE_PERIOD_IN_SECS', 2)) self.grpc_server_kill_period_in_secs = int(os.getenv('GRPC_SERVER_KILL_PERIOD_IN_SECS', 5)) class", "1), (\"grpc.use_local_subchannel_pool\", 1), ], ) grpc_lib.add_ForecastServicer_to_server(ForecastServicer(self.logger), server) server.add_secure_port(self.config.grpc_server_address, server_credentials) self.logger.info('starting", "port = self.config.grpc_server_address.split(':') sock.bind(('', int(port))) try: yield sock.getsockname()[1] finally: sock.close()", "grpc.ssl_server_credentials( [(self.config.grpc_server_key, self.config.grpc_server_cert)], root_certificates=self.config.grpc_root_ca, require_client_auth=True ) server = grpc.server( futures.ThreadPoolExecutor(max_workers=self.config.grpc_server_thread_num),", "__init__(self, config, logger): self.config = config self.logger = logger @contextlib.contextmanager", "proc.start() while not shutdown.event.is_set(): time.sleep(1) t = time.time() grace_period =", "extra={'proc': proc}) # Queues and other inter-process communication primitives can", "self.logger.info('starting python gRPC server...') server.start() while not shutdown_event.is_set(): time.sleep(1) server.stop(5).wait()", "pretty_timedelta(self, seconds): seconds = int(seconds) days, seconds = divmod(seconds, 86400)", "seconds = divmod(seconds, 3600) minutes, seconds = divmod(seconds, 60) if", "but we don't care here proc.kill() time.sleep(1) time.sleep(1) for proc", "for proc in alive_procs: proc.terminate() self.logger.info(\"sending SIGTERM to subprocess\", 
extra={'proc':", "sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT)", "time.time() grace_period = self.config.grpc_server_grace_period_in_secs kill_period = self.config.grpc_server_kill_period_in_secs while True: #", "proc.kill() time.sleep(1) time.sleep(1) for proc in procs: self.logger.info(\"subprocess terminated\", extra={'proc':", "here proc.kill() time.sleep(1) time.sleep(1) for proc in procs: self.logger.info(\"subprocess terminated\",", "= divmod(seconds, 3600) minutes, seconds = divmod(seconds, 60) if days", "terminated\", extra={'proc': proc}) def json_logger(): logger = logging.getLogger() log_handler =", "subprocess\", extra={'proc': proc}) elif elapsed >= kill_period: for proc in", "self.config.grpc_server_address.split(':') sock.bind(('', int(port))) try: yield sock.getsockname()[1] finally: sock.close() def _run_server(self,", "shutdown_event.is_set(): time.sleep(1) server.stop(5).wait() self.logger.info('python gRPC server stopped') def serve(self): with", "class GracefulShutdown: def __init__(self, logger): self.logger = logger self.event =", "concurrent import futures from forecaster.prophet import Forecaster as ProphetForecaster from", "socket.socket(socket.AF_INET6, socket.SOCK_STREAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) == 0:", "return '{:d}m{:d}s'.format(minutes, seconds) else: return '{:d}s'.format(seconds) class GracefulShutdown: def __init__(self,", "divmod(seconds, 86400) hours, seconds = divmod(seconds, 3600) minutes, seconds =", "shutdown_event): server_credentials = grpc.ssl_server_credentials( [(self.config.grpc_server_key, self.config.grpc_server_cert)], root_certificates=self.config.grpc_root_ca, require_client_auth=True ) server", "for _ in range(self.config.gprc_server_process_num): proc = Process(target=self._run_server, 
args=(shutdown.event,)) procs.append(proc) proc.start()", "for proc in procs if proc.is_alive()] if len(alive_procs) == 0:", "fn class Config(object): def __init__(self): self.grpc_server_address = os.getenv('GRPC_SERVER_ADDRESS', '') self.grpc_server_key", "multiprocessing import Event, Process, cpu_count from pythonjsonlogger import jsonlogger import", "self.handler('SIGHUP')) def handler(self, signal_name): def fn(signal_received, frame): self.logger.info('signal received', extra={'signal':", "0: raise RuntimeError('failed to set SO_REUSEPORT.') _, port = self.config.grpc_server_address.split(':')", "logging.StreamHandler(sys.stdout) formatter = jsonlogger.JsonFormatter(fmt='%(asctime)s %(name)s %(levelname)s %(message)s') log_handler.setFormatter(formatter) log_handler.flush =", "elapsed >= grace_period and elapsed < kill_period: for proc in", "cpu_count from pythonjsonlogger import jsonlogger import contextlib import grpc import", "all subprocesses to use\"\"\" sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT,", "__init__(self, logger): self.logger = logger self.event = Event() signal.signal(signal.SIGINT, self.handler('SIGINT'))", "quickly enough, and kill it as last resort # .is_alive()", "proc.is_alive()] if len(alive_procs) == 0: break elapsed = time.time() -", "str.encode(os.getenv('GRPC_SERVER_KEY', '')) self.grpc_server_cert = str.encode(os.getenv('GRPC_SERVER_CERT', '')) self.grpc_root_ca = str.encode(os.getenv('GRPC_ROOT_CA', ''))", "RuntimeError('failed to set SO_REUSEPORT.') _, port = self.config.grpc_server_address.split(':') sock.bind(('', int(port)))", "SIGINT if process doesn't exit quickly enough, and kill it", "process is killed, but we don't care here proc.kill() time.sleep(1)", "class Config(object): def __init__(self): self.grpc_server_address = os.getenv('GRPC_SERVER_ADDRESS', '') self.grpc_server_key =", "subprocess\", extra={'proc': proc}) # Queues and other inter-process 
communication primitives", "return '{:d}h{:d}m{:d}s'.format(hours, minutes, seconds) elif minutes > 0: return '{:d}m{:d}s'.format(minutes,", "self.logger.warning(\"sending SIGKILL to subprocess\", extra={'proc': proc}) # Queues and other", "shutdown = GracefulShutdown(self.logger) for _ in range(self.config.gprc_server_process_num): proc = Process(target=self._run_server,", "we don't care here proc.kill() time.sleep(1) time.sleep(1) for proc in", "in procs if proc.is_alive()] if len(alive_procs) == 0: break elapsed", "futures from forecaster.prophet import Forecaster as ProphetForecaster from multiprocessing import", "alive_procs = [proc for proc in procs if proc.is_alive()] if", "not shutdown.event.is_set(): time.sleep(1) t = time.time() grace_period = self.config.grpc_server_grace_period_in_secs kill_period", "if len(alive_procs) == 0: break elapsed = time.time() - t", "'{:d}d{:d}h{:d}m{:d}s'.format(days, hours, minutes, seconds) elif hours > 0: return '{:d}h{:d}m{:d}s'.format(hours,", "sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) == 0: raise RuntimeError('failed", "time.sleep(1) t = time.time() grace_period = self.config.grpc_server_grace_period_in_secs kill_period = self.config.grpc_server_kill_period_in_secs", "elapsed >= kill_period: for proc in alive_procs: self.logger.warning(\"sending SIGKILL to", "import signal import socket import sys import time class ForecastServicer(ProphetForecaster):", "import time class ForecastServicer(ProphetForecaster): def __init__(self, logger): self.logger = logger", "def handler(self, signal_name): def fn(signal_received, frame): self.logger.info('signal received', extra={'signal': signal_name})", "alive_procs: self.logger.warning(\"sending SIGKILL to subprocess\", extra={'proc': proc}) # Queues and", "seconds = divmod(seconds, 86400) hours, seconds = divmod(seconds, 3600) minutes,", "to subprocess\", extra={'proc': proc}) # Queues and other 
inter-process communication", "time.sleep(1) server.stop(5).wait() self.logger.info('python gRPC server stopped') def serve(self): with self._reserve_port():", "(good practice in linux) alive_procs = [proc for proc in", "extra={'proc': proc}) def json_logger(): logger = logging.getLogger() log_handler = logging.StreamHandler(sys.stdout)", "grpc.server( futures.ThreadPoolExecutor(max_workers=self.config.grpc_server_thread_num), options=[ (\"grpc.so_reuseport\", 1), (\"grpc.use_local_subchannel_pool\", 1), ], ) grpc_lib.add_ForecastServicer_to_server(ForecastServicer(self.logger),", "stopped') def serve(self): with self._reserve_port(): procs = [] shutdown =", "as grpc_lib import os import signal import socket import sys", "SIGTERM to subprocess\", extra={'proc': proc}) elif elapsed >= kill_period: for", "break elapsed = time.time() - t if elapsed >= grace_period" ]
[ "of the setup function \"\"\" import jupyter_libertem_proxy as jx print(\"\\nRunning", "setup function \"\"\" import jupyter_libertem_proxy as jx print(\"\\nRunning test_setupcall...\") print(jx.setup_libertem())", "test_setupcall(): \"\"\" Test the call of the setup function \"\"\"", "Test the call of the setup function \"\"\" import jupyter_libertem_proxy", "the setup function \"\"\" import jupyter_libertem_proxy as jx print(\"\\nRunning test_setupcall...\")", "def test_setupcall(): \"\"\" Test the call of the setup function", "call of the setup function \"\"\" import jupyter_libertem_proxy as jx", "the call of the setup function \"\"\" import jupyter_libertem_proxy as", "\"\"\" Test the call of the setup function \"\"\" import" ]
[ "execute in a worker. \"\"\" with self._mutex: future = futures.Future()", "env=None, **kwargs): \"\"\"Adds process worker to the runtime. Args: name:", "futures.Future() def run_inner(f=function, future=future, manager=self): _WORKER_MANAGERS.manager = manager try: future.set_result(f())", "2.0 (the \"License\"); # you may not use this file", "if not self._first_failure and not self._stop_counter: self._first_failure = e active", "e: future.set_exception(e) builder = lambda t, n: threading.Thread(target=t, name=n) thread", "subprocess import threading import time from typing import Optional, Sequence,", "except BaseException as e: if not self._first_failure and not self._stop_counter:", "import atexit import collections from concurrent import futures import ctypes", "= True self._sigalrm_handler = register_signal_handler( signal.SIGALRM, lambda sig=None, frame=None: self._stop_or_kill())", "for these groups' workers to finish. Wait for all workers", "be notified about termination. kill_main_thread: When set to false try", "import Optional, Sequence, Text from absl import flags from absl", "= self._termination_notice_secs - self._stop_counter if pending_secs == 0: if self._termination_notice_secs", "res = worker.wait(0) active = False if res and not", "self._alarm_enabled = False signal.alarm(0) remove_signal_handler(signal.SIGALRM, self._sigalrm_handler) def stop_and_wait(self): \"\"\"Requests stopping", "while True: try: active_workers = True while active_workers: with self._mutex:", "License. \"\"\"WorkerManager handles thread and process-based runtimes.\"\"\" import atexit import", "flags from absl import logging from absl.testing import absltest from", "worker in workers: if isinstance(worker, ThreadWorker): if self._stop_counter == 1:", "active = False elif isinstance(worker, subprocess.Popen): try: res = worker.wait(0)", "Notify all workers running under a proxy process. children =", "# limitations under the License. 
\"\"\"WorkerManager handles thread and process-based", "= ctypes.pythonapi.PyThreadState_SetAsyncExc( ctypes.c_long(threading.main_thread().ident), ctypes.py_object(SystemExit)) assert res < 2, 'Exception raise", "self._stop() def _kill_process_tree(self, pid): \"\"\"Kills all child processes of the", "if self._sigterm_handler is not None: remove_signal_handler(signal.SIGTERM, self._sigterm_handler) self._sigterm_handler = None", "License for the specific language governing permissions and # limitations", "with self._mutex: process = subprocess.Popen(command, env=env or {}, **kwargs) self._workers_count[name]", "and not self._stop_counter: self._stop() elif not has_workers: self._disable_alarm() def __del__(self):", "SIGTERM to redirect to the main thread. psutil.Process(os.getpid()).send_signal(signal.SIGTERM) return self._stop_or_kill()", "thread worker. Args: name: Name of the worker group. function:", "def wait_for_stop(): \"\"\"Blocks until termination of the node's program is", "worker failure. return_on_first_completed: Whether to return upon the first completed", "the worker. **kwargs: Other parameters to be passed to `subprocess.Popen`.", "_sigquit(self, sig=None, frame=None): if callable(self._sigquit_handler): self._sigquit_handler(sig, frame) self._kill() def wait_for_stop(self):", "sig=None, frame=None): if callable(self._sigquit_handler): self._sigquit_handler(sig, frame) self._kill() def wait_for_stop(self): \"\"\"Blocks", "atexit import collections from concurrent import futures import ctypes import", "still_active.append(worker) self._active_workers[label] = still_active if has_workers and self._first_failure and not", "wait for these groups' workers to finish. 
Wait for all", "or {}, **kwargs) self._workers_count[name] += 1 self._active_workers[name].append(process) def register_existing_process(self, name:", "end='\\r') self._stop_counter += 1 for workers in self._active_workers.values(): for worker", "res and not self._first_failure and not self._stop_counter: self._first_failure = RuntimeError('One", "process. Args: name: Name of the workers' group. pid: Pid", "flags.FLAGS ThreadWorker = collections.namedtuple('ThreadWorker', ['thread', 'future']) _WORKER_MANAGERS = threading.local() _HAS_MAIN_MANAGER", "child processes of the current process.\"\"\" parent = psutil.Process(pid) for", "Not possible to kill a thread without killing the process.", "main thread. Send a SIGTERM to redirect to the main", "Program.\"\"\" def __init__( self, stop_main_thread=False, kill_main_thread=True, register_in_thread=False, register_signals=True): \"\"\"Initializes a", "= None self._stop_counter = 0 self._alarm_enabled = False self._kill_main_thread =", "Environment variables to set for the worker. **kwargs: Other parameters", "until managed runtime is being terminated.\"\"\" self._stop_event.wait() def thread_worker(self, name,", "until termination of the node's program is requested. Can be", "def remove_signal_handler(sig, handler): return signal.signal(sig, handler) def wait_for_stop(): \"\"\"Blocks until", "1: self._send_exception(worker) elif isinstance(worker, subprocess.Popen): worker.send_signal(signal.SIGTERM) else: # Notify all", "OF ANY KIND, either express or implied. # See the", "See the License for the specific language governing permissions and", "frame) self._kill() def wait_for_stop(self): \"\"\"Blocks until managed runtime is being", "True _HAS_MAIN_MANAGER = True self._active_workers = collections.defaultdict(list) self._workers_count = collections.defaultdict(lambda:", "process worker to the runtime. 
Args: name: Name of the", "self._stop_event.is_set(): self._stop_event.set() try: if self._termination_notice_secs > 0: self._alarm_enabled = True", "to in writing, software # distributed under the License is", "stop for {pending_secs}s.', 'blue'), end='\\r') self._stop_counter += 1 for workers", "by stopping the workers.\"\"\" if callable(self._sigterm_handler): self._sigterm_handler(sig, frame) self._stop() def", "+= 1 self._active_workers[name].append(process) def register_existing_process(self, name: str, pid: int): \"\"\"Registers", "or agreed to in writing, software # distributed under the", "start a new thread worker. Args: name: Name of the", "in self._active_workers.values(): for worker in workers: if isinstance(worker, ThreadWorker): #", "to register a signal handler but not in the #", "redirect to the main thread. psutil.Process(os.getpid()).send_signal(signal.SIGTERM) return self._stop_or_kill() def _disable_alarm(self):", "compliance with the License. # You may obtain a copy", "any worker failure. return_on_first_completed: Whether to return upon the first", "future=future, manager=self): _WORKER_MANAGERS.manager = manager try: future.set_result(f()) except BaseException as", "if isinstance(worker, ThreadWorker): if self._stop_counter == 1: self._send_exception(worker) elif isinstance(worker,", "from absl.testing import absltest from launchpad import flags as lp_flags", "str, pid: int): \"\"\"Registers already started worker process. Args: name:", "= None, raise_error=True, return_on_first_completed=False): \"\"\"Waits for workers to finish. Args:", "not use this file except in compliance with the License.", "Args: name: Name of the worker group. 
function: Entrypoint function", "in process.name(): try: worker_found = True process.send_signal(signal.SIGTERM) except psutil.NoSuchProcess: pass", "absl import flags from absl import logging from absl.testing import", "= self def _disable_signals(self): self._disable_alarm() if self._sigterm_handler is not None:", "you may not use this file except in compliance with", "parameters to be passed to `subprocess.Popen`. \"\"\" with self._mutex: process", "we attempt to register a signal handler but not in", "except subprocess.TimeoutExpired: pass else: try: # We can't obtain return", "self._sigterm_handler(sig, frame) self._stop() def _sigquit(self, sig=None, frame=None): if callable(self._sigquit_handler): self._sigquit_handler(sig,", "This happens when we attempt to register a signal handler", "in self._active_workers[label]: active = True if isinstance(worker, ThreadWorker): if not", "manager, 'Worker manager is not available in the current thread'", "lambda t, n: threading.Thread(target=t, name=n) thread = builder(run_inner, name) thread.setDaemon(True)", "CTRL+C to terminate immediately.', 'blue')) signal.signal(signal.SIGINT, lambda sig, frame: self._kill())", "frame=None): if callable(self._sigquit_handler): self._sigquit_handler(sig, frame) self._kill() def wait_for_stop(self): \"\"\"Blocks until", "absltest from launchpad import flags as lp_flags import psutil import", "\"\"\"Handles stopping of the runtime by a user.\"\"\" if self._termination_notice_secs", "register a signal handler but not in the # main", "manager try: future.set_result(f()) except BaseException as e: future.set_exception(e) builder =", "workers (and main thread/process if needed).\"\"\" print(termcolor.colored('\\nKilling entire runtime.', 'blue'))", "# Not possible to kill a thread without killing the", "None self._sigquit_handler = None self._sigalrm_handler = None if register_signals: self._sigterm_handler", "thread without killing the process. 
kill_self = True else: self._kill_process_tree(worker.pid)", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "# This happens when we attempt to register a signal", "return code of external process, so clean # termination is", "at the end of the run, for example: start_server() lp.wait_for_stop()", "= True self._active_workers = collections.defaultdict(list) self._workers_count = collections.defaultdict(lambda: 0) self._first_failure", "\"\"\"Cleanups runtime after a test.\"\"\" with self._mutex: self._check_workers() self._stop() self._disable_signals()", "psutil.NoSuchProcess: pass parent.send_signal(signal.SIGKILL) def _kill(self): \"\"\"Kills all workers (and main", "for termination.\"\"\" with self._mutex: self._stop() self.wait(raise_error=False) def join(self): self.wait() def", "for process in children: if process.name() != 'bash' and 'envelope_'", "(and main thread/process if needed).\"\"\" print(termcolor.colored('\\nKilling entire runtime.', 'blue')) kill_self", "Raise an exception upon any worker failure. 
return_on_first_completed: Whether to", "[] for worker in self._active_workers[label]: active = True if isinstance(worker,", "self._sigquit) if handle_user_stop: register_signal_handler( signal.SIGINT, lambda sig=None, frame=None: self._stop_by_user()) self._stop_main_thread", "used to perform cleanup at the end of the run,", "getattr(_WORKER_MANAGERS, 'manager', None) assert manager, 'Worker manager is not available", "cleanup at the end of the run, for example: start_server()", "manager def register_signal_handler(sig, handler): \"\"\"Registers a signal handler.\"\"\" return signal.signal(sig,", "terminate runtime in case of errors.\"\"\" has_workers = False for", "\"\"\" with self._mutex: self._workers_count[name] += 1 self._active_workers[name].append(psutil.Process(pid)) def _stop_by_user(self): \"\"\"Handles", "self._termination_notice_secs = FLAGS.lp_termination_notice_secs handle_user_stop = True _HAS_MAIN_MANAGER = True self._active_workers", "to the main thread. psutil.Process(os.getpid()).send_signal(signal.SIGTERM) return self._stop_or_kill() def _disable_alarm(self): if", "workers to finish. Wait for all workers otherwise. raise_error: Raise", "self._sigquit_handler = None def _sigterm(self, sig=None, frame=None): \"\"\"Handles SIGTERM by", "a proxy process. children = worker.children(recursive=True) worker_found = False for", "-1 handle_user_stop = False global _HAS_MAIN_MANAGER # Make the first", "thread. 
Send a SIGTERM to redirect to the main thread.", "+= 1 self._active_workers[name].append(psutil.Process(pid)) def _stop_by_user(self): \"\"\"Handles stopping of the runtime", "for worker in self._active_workers[label]: active = True if isinstance(worker, ThreadWorker):", "run_inner(f=function, future=future, manager=self): _WORKER_MANAGERS.manager = manager try: future.set_result(f()) except BaseException", "import ctypes import os import signal import subprocess import threading", "for process in parent.children(recursive=True): try: process.send_signal(signal.SIGKILL) except psutil.NoSuchProcess: pass parent.send_signal(signal.SIGKILL)", "_stop_by_user(self): \"\"\"Handles stopping of the runtime by a user.\"\"\" if", "of the workers failed.') except subprocess.TimeoutExpired: pass else: try: #", "return signal.signal(sig, handler) def remove_signal_handler(sig, handler): return signal.signal(sig, handler) def", "\"\"\" with self._mutex: process = subprocess.Popen(command, env=env or {}, **kwargs)", "if any worker raises an exception. \"\"\" while True: try:", "register_signals: Whether or not to register signal handlers. \"\"\" self._mutex", "termcolor.colored( 'User-requested termination. Asking workers to stop.', 'blue')) print(termcolor.colored('Press CTRL+C", "self._kill_process_tree(os.getpid()) def _send_exception(self, worker): res = ctypes.pythonapi.PyThreadState_SetAsyncExc( ctypes.c_long(worker.thread.ident), ctypes.py_object(SystemExit)) assert", "_HAS_MAIN_MANAGER: self._termination_notice_secs = FLAGS.lp_termination_notice_secs handle_user_stop = True _HAS_MAIN_MANAGER = True", "if self._stop_main_thread: res = ctypes.pythonapi.PyThreadState_SetAsyncExc( ctypes.c_long(threading.main_thread().ident), ctypes.py_object(SystemExit)) assert res <", "of running workers, terminate runtime in case of errors.\"\"\" has_workers", "already started worker process. 
Args: name: Name of the workers'", "_check_workers(self): \"\"\"Checks status of running workers, terminate runtime in case", "self._sigterm_handler = None self._sigquit_handler = None self._sigalrm_handler = None if", "= self._kill_main_thread for workers in self._active_workers.values(): for worker in workers:", "of the current process.\"\"\" parent = psutil.Process(pid) for process in", "self._sigterm_handler = None if self._sigquit_handler is not None: remove_signal_handler(signal.SIGQUIT, self._sigquit_handler)", "`subprocess.Popen`. \"\"\" with self._mutex: process = subprocess.Popen(command, env=env or {},", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "self._first_failure = None self._stop_counter = 0 self._alarm_enabled = False self._kill_main_thread", "for {pending_secs}s.', 'blue'), end='\\r') self._stop_counter += 1 for workers in", "= True process.send_signal(signal.SIGTERM) except psutil.NoSuchProcess: pass if not worker_found: #", "Make the first created worker manager the main manager, which", "raise failure' if pending_secs >= 0: signal.alarm(1) def _stop(self): \"\"\"Requests", "except ValueError: # This happens when we attempt to register", "in the # main thread. Send a SIGTERM to redirect", "worker. self._send_exception(worker) def process_worker(self, name, command, env=None, **kwargs): \"\"\"Adds process", "process in parent.children(recursive=True): try: process.send_signal(signal.SIGKILL) except psutil.NoSuchProcess: pass parent.send_signal(signal.SIGKILL) def", "file except in compliance with the License. # You may", "self._stop() self._disable_signals() self.wait(raise_error=False) with self._mutex: if self._first_failure: raise self._first_failure def", "# Runtime is terminating, so notify the worker. self._send_exception(worker) def", "did not terminate in time: {still_running}', 'red')) self._kill() return if", "main thread be notified about termination. 
kill_main_thread: When set to", "don't stop on time.\"\"\" pending_secs = self._termination_notice_secs - self._stop_counter if", "with self._mutex: self._workers_count[name] += 1 self._active_workers[name].append(psutil.Process(pid)) def _stop_by_user(self): \"\"\"Handles stopping", "thread. psutil.Process(os.getpid()).send_signal(signal.SIGTERM) return self._stop_or_kill() def _disable_alarm(self): if self._alarm_enabled: self._alarm_enabled =", "Name of the workers' group. pid: Pid of the process", "in self._active_workers if self._active_workers[label] ] print( termcolor.colored( f'Worker groups that", "lp.wait_for_stop() stop_server() checkpoint() \"\"\" get_worker_manager().wait_for_stop() class WorkerManager: \"\"\"Encapsulates running threads", "the workers failed.') except subprocess.TimeoutExpired: pass else: try: # We", "= lambda t, n: threading.Thread(target=t, name=n) thread = builder(run_inner, name)", "self._stop_main_thread = stop_main_thread if register_in_thread: _WORKER_MANAGERS.manager = self def _disable_signals(self):", "more workers running, so we can kill the proxy itself.", "self._mutex: self._stop() self.wait(raise_error=False) def join(self): self.wait() def wait(self, labels_to_wait_for: Optional[Sequence[Text]]", "= True if (return_on_first_completed and len(self._active_workers[label]) < self._workers_count[label]): return time.sleep(0.1)", "pending_secs >= 0: print( termcolor.colored(f'Waiting for workers to stop for", "parent.children(recursive=True): try: process.send_signal(signal.SIGKILL) except psutil.NoSuchProcess: pass parent.send_signal(signal.SIGKILL) def _kill(self): \"\"\"Kills", "self._first_failure = None raise failure for label in labels_to_wait_for or", "for label in self._active_workers: still_active = [] for worker in", "True still_active.append(worker) self._active_workers[label] = still_active if has_workers and self._first_failure and", "KIND, either express or implied. 
# See the License for", "handles thread and process-based runtimes.\"\"\" import atexit import collections from", "Copyright 2020 DeepMind Technologies Limited. All rights reserved. # #", "workers running under a proxy process. children = worker.children(recursive=True) worker_found", "for label in labels_to_wait_for or self._active_workers.keys(): if self._active_workers[label]: active_workers =", "\"\"\"Kills all workers (and main thread/process if needed).\"\"\" print(termcolor.colored('\\nKilling entire", "import flags from absl import logging from absl.testing import absltest", "None self._sigalrm_handler = None if register_signals: self._sigterm_handler = register_signal_handler(signal.SIGTERM, self._sigterm)", "exception. \"\"\" while True: try: active_workers = True while active_workers:", "self._stop_counter == 1: self._send_exception(worker) elif isinstance(worker, subprocess.Popen): worker.send_signal(signal.SIGTERM) else: #", "(the \"License\"); # you may not use this file except", "running under a proxy process. children = worker.children(recursive=True) worker_found =", "self._mutex: self._workers_count[name] += 1 self._active_workers[name].append(psutil.Process(pid)) def _stop_by_user(self): \"\"\"Handles stopping of", "raise_error: Raise an exception upon any worker failure. return_on_first_completed: Whether", "set for the worker. **kwargs: Other parameters to be passed", "# # Unless required by applicable law or agreed to", "so we can kill the proxy itself. try: worker.send_signal(signal.SIGKILL) except", "workers: if isinstance(worker, ThreadWorker): # Not possible to kill a", "running, so we can kill the proxy itself. try: worker.send_signal(signal.SIGKILL)", "handle_user_stop: register_signal_handler( signal.SIGINT, lambda sig=None, frame=None: self._stop_by_user()) self._stop_main_thread = stop_main_thread", "the same process. register_in_thread: TODO register_signals: Whether or not to", "for all workers otherwise. 
raise_error: Raise an exception upon any", "1 self._active_workers[name].append(process) def register_existing_process(self, name: str, pid: int): \"\"\"Registers already", "implied. # See the License for the specific language governing", "typing import Optional, Sequence, Text from absl import flags from", "active_workers: with self._mutex: self._check_workers() active_workers = False if self._first_failure and", "self._first_failure def _check_workers(self): \"\"\"Checks status of running workers, terminate runtime", "worker.send_signal(signal.SIGTERM) else: # Notify all workers running under a proxy", "_HAS_MAIN_MANAGER = False def get_worker_manager(): manager = getattr(_WORKER_MANAGERS, 'manager', None)", "# termination is assumed. res = worker.wait(0) active = False", "\"\"\"Stops all workers; kills them if they don't stop on", "future=future) self._active_workers[name].append(worker) if self._stop_event.is_set(): # Runtime is terminating, so notify", "return manager def register_signal_handler(sig, handler): \"\"\"Registers a signal handler.\"\"\" return", "in time: {still_running}', 'red')) self._kill() return if pending_secs >= 0:", "manager is not available in the current thread' return manager", "self._main_thread = threading.current_thread().ident self._sigterm_handler = None self._sigquit_handler = None self._sigalrm_handler", "when thread workers run in the same process. register_in_thread: TODO", "Can be used to perform cleanup at the end of", "happens when we attempt to register a signal handler but", "is requested. Can be used to perform cleanup at the", "worker. 
\"\"\" with self._mutex: future = futures.Future() def run_inner(f=function, future=future,", "active_workers = True while active_workers: with self._mutex: self._check_workers() active_workers =", "if isinstance(worker, ThreadWorker): # Not possible to kill a thread", "= register_signal_handler(signal.SIGTERM, self._sigterm) self._sigquit_handler = register_signal_handler(signal.SIGQUIT, self._sigquit) if handle_user_stop: register_signal_handler(", "Unless required by applicable law or agreed to in writing,", "== 0: if self._termination_notice_secs > 0: still_running = [ label", "proxy process. children = worker.children(recursive=True) worker_found = False for process", "the specific language governing permissions and # limitations under the", "self._mutex = threading.Lock() self._termination_notice_secs = -1 handle_user_stop = False global", "worker.send_signal(signal.SIGKILL) except psutil.NoSuchProcess: pass if self._stop_main_thread: res = ctypes.pythonapi.PyThreadState_SetAsyncExc( ctypes.c_long(threading.main_thread().ident),", "groups' workers to finish. Wait for all workers otherwise. raise_error:", "if not self._stop_counter: try: worker.future.result() except BaseException as e: if", "self._termination_notice_secs - self._stop_counter if pending_secs == 0: if self._termination_notice_secs >", "= None if self._sigquit_handler is not None: remove_signal_handler(signal.SIGQUIT, self._sigquit_handler) self._sigquit_handler", "name: Name of the workers' group. pid: Pid of the", "isinstance(worker, ThreadWorker): if self._stop_counter == 1: self._send_exception(worker) elif isinstance(worker, subprocess.Popen):", "= False signal.alarm(0) remove_signal_handler(signal.SIGALRM, self._sigalrm_handler) def stop_and_wait(self): \"\"\"Requests stopping all", "upon the first completed (or failed) worker. Raises: RuntimeError: if", "completed (or failed) worker. 
Raises: RuntimeError: if any worker raises", "\"\"\" with self._mutex: future = futures.Future() def run_inner(f=function, future=future, manager=self):", "self._disable_alarm() if self._sigterm_handler is not None: remove_signal_handler(signal.SIGTERM, self._sigterm_handler) self._sigterm_handler =", "= FLAGS.lp_termination_notice_secs handle_user_stop = True _HAS_MAIN_MANAGER = True self._active_workers =", "with self._mutex: self._stop() self.wait(raise_error=False) def join(self): self.wait() def wait(self, labels_to_wait_for:", "the License. \"\"\"WorkerManager handles thread and process-based runtimes.\"\"\" import atexit", "program is requested. Can be used to perform cleanup at", "worker. env: Environment variables to set for the worker. **kwargs:", "< 2, 'Exception raise failure' if pending_secs >= 0: signal.alarm(1)", "<gh_stars>0 # Copyright 2020 DeepMind Technologies Limited. All rights reserved.", "len(self._active_workers[label]) < self._workers_count[label]): return time.sleep(0.1) return except SystemExit: self._stop() def", "signal.alarm(0) remove_signal_handler(signal.SIGALRM, self._sigalrm_handler) def stop_and_wait(self): \"\"\"Requests stopping all workers and", "not self._stop_counter: self._first_failure = RuntimeError('One of the workers failed.') except", "None: remove_signal_handler(signal.SIGQUIT, self._sigquit_handler) self._sigquit_handler = None def _sigterm(self, sig=None, frame=None):", "current thread' return manager def register_signal_handler(sig, handler): \"\"\"Registers a signal", "_WORKER_MANAGERS = threading.local() _HAS_MAIN_MANAGER = False def get_worker_manager(): manager =", "worker_found = True process.send_signal(signal.SIGTERM) except psutil.NoSuchProcess: pass if not worker_found:", "not self._stop_counter: self._stop() elif not has_workers: self._disable_alarm() def __del__(self): self._disable_signals()", "try: if self._termination_notice_secs > 0: self._alarm_enabled = True self._sigalrm_handler =", "e: if not 
self._first_failure and not self._stop_counter: self._first_failure = e", "handler) def wait_for_stop(): \"\"\"Blocks until termination of the node's program", "try: process.send_signal(signal.SIGKILL) except psutil.NoSuchProcess: pass parent.send_signal(signal.SIGKILL) def _kill(self): \"\"\"Kills all", "self._first_failure and not self._stop_counter: self._stop() elif not has_workers: self._disable_alarm() def", "ThreadWorker): if not worker.thread.is_alive(): worker.thread.join() if not self._stop_counter: try: worker.future.result()", "in parent.children(recursive=True): try: process.send_signal(signal.SIGKILL) except psutil.NoSuchProcess: pass parent.send_signal(signal.SIGKILL) def _kill(self):", "Whether or not to register signal handlers. \"\"\" self._mutex =", "to execute in the worker. env: Environment variables to set", "handle_user_stop = True _HAS_MAIN_MANAGER = True self._active_workers = collections.defaultdict(list) self._workers_count", "wait for termination.\"\"\" with self._mutex: self._stop() self.wait(raise_error=False) def join(self): self.wait()", "name=n) thread = builder(run_inner, name) thread.setDaemon(True) thread.start() self._workers_count[name] += 1", "'Exception raise failure' if pending_secs >= 0: signal.alarm(1) def _stop(self):", "process to monitor. 
\"\"\" with self._mutex: self._workers_count[name] += 1 self._active_workers[name].append(psutil.Process(pid))", "errors.\"\"\" has_workers = False for label in self._active_workers: still_active =", "return time.sleep(0.1) return except SystemExit: self._stop() def cleanup_after_test(self, test_case: absltest.TestCase):", "for workers in self._active_workers.values(): for worker in workers: if isinstance(worker,", "None if register_signals: self._sigterm_handler = register_signal_handler(signal.SIGTERM, self._sigterm) self._sigquit_handler = register_signal_handler(signal.SIGQUIT,", "False global _HAS_MAIN_MANAGER # Make the first created worker manager", "frame) self._stop() def _sigquit(self, sig=None, frame=None): if callable(self._sigquit_handler): self._sigquit_handler(sig, frame)", "remove_signal_handler(signal.SIGTERM, self._sigterm_handler) self._sigterm_handler = None if self._sigquit_handler is not None:", "self._active_workers.values(): for worker in workers: if isinstance(worker, ThreadWorker): # Not", "immediately.', 'blue')) signal.signal(signal.SIGINT, lambda sig, frame: self._kill()) self._stop() def _kill_process_tree(self,", "self._stop_counter = 0 self._alarm_enabled = False self._kill_main_thread = kill_main_thread self._stop_event", "pass else: try: # We can't obtain return code of", "be passed to `subprocess.Popen`. 
\"\"\" with self._mutex: process = subprocess.Popen(command,", "{}, **kwargs) self._workers_count[name] += 1 self._active_workers[name].append(process) def register_existing_process(self, name: str,", "'envelope_' not in process.name(): try: worker_found = True process.send_signal(signal.SIGTERM) except", "register_signal_handler(signal.SIGQUIT, self._sigquit) if handle_user_stop: register_signal_handler( signal.SIGINT, lambda sig=None, frame=None: self._stop_by_user())", "subprocess.TimeoutExpired: pass else: try: # We can't obtain return code", "_HAS_MAIN_MANAGER # Make the first created worker manager the main", "self._termination_notice_secs > 0: self._alarm_enabled = True self._sigalrm_handler = register_signal_handler( signal.SIGALRM,", "threading.current_thread().ident self._sigterm_handler = None self._sigquit_handler = None self._sigalrm_handler = None", "the worker. env: Environment variables to set for the worker.", "the current thread' return manager def register_signal_handler(sig, handler): \"\"\"Registers a", "the main manager, which handles # signals. if not _HAS_MAIN_MANAGER:", "ValueError: # This happens when we attempt to register a", "You may obtain a copy of the License at #", "def register_existing_process(self, name: str, pid: int): \"\"\"Registers already started worker", "collections.defaultdict(list) self._workers_count = collections.defaultdict(lambda: 0) self._first_failure = None self._stop_counter =", "worker.children(recursive=True) worker_found = False for process in children: if process.name()", "= subprocess.Popen(command, env=env or {}, **kwargs) self._workers_count[name] += 1 self._active_workers[name].append(process)", "user.\"\"\" if self._termination_notice_secs != 0: print( termcolor.colored( 'User-requested termination. Asking", "not None: remove_signal_handler(signal.SIGTERM, self._sigterm_handler) self._sigterm_handler = None if self._sigquit_handler is", "\"\"\"Registers already started worker process. 
Args: name: Name of the", "self._send_exception(worker) def process_worker(self, name, command, env=None, **kwargs): \"\"\"Adds process worker", "the worker group. function: Entrypoint function to execute in a", "register_in_thread=False, register_signals=True): \"\"\"Initializes a WorkerManager. Args: stop_main_thread: Should main thread", "# signals. if not _HAS_MAIN_MANAGER: self._termination_notice_secs = FLAGS.lp_termination_notice_secs handle_user_stop =", "def _kill_process_tree(self, pid): \"\"\"Kills all child processes of the current", "2, 'Exception raise failure' def _stop_or_kill(self): \"\"\"Stops all workers; kills", "if res and not self._first_failure and not self._stop_counter: self._first_failure =", "and wait for termination.\"\"\" with self._mutex: self._stop() self.wait(raise_error=False) def join(self):", "callable(self._sigterm_handler): self._sigterm_handler(sig, frame) self._stop() def _sigquit(self, sig=None, frame=None): if callable(self._sigquit_handler):", "can't obtain return code of external process, so clean #", "main thread/process if needed).\"\"\" print(termcolor.colored('\\nKilling entire runtime.', 'blue')) kill_self =", "kills them if they don't stop on time.\"\"\" pending_secs =", "= ctypes.pythonapi.PyThreadState_SetAsyncExc( ctypes.c_long(worker.thread.ident), ctypes.py_object(SystemExit)) assert res < 2, 'Exception raise", "time.sleep(0.1) return except SystemExit: self._stop() def cleanup_after_test(self, test_case: absltest.TestCase): \"\"\"Cleanups", "self._mutex: self._check_workers() active_workers = False if self._first_failure and raise_error: failure", "= collections.namedtuple('ThreadWorker', ['thread', 'future']) _WORKER_MANAGERS = threading.local() _HAS_MAIN_MANAGER = False", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "+= 1 for workers in self._active_workers.values(): for worker in workers:", "self._check_workers() self._stop() self._disable_signals() 
self.wait(raise_error=False) with self._mutex: if self._first_failure: raise self._first_failure", "worker. Args: name: Name of the worker group. function: Entrypoint", "ThreadWorker = collections.namedtuple('ThreadWorker', ['thread', 'future']) _WORKER_MANAGERS = threading.local() _HAS_MAIN_MANAGER =", "on time.\"\"\" pending_secs = self._termination_notice_secs - self._stop_counter if pending_secs ==", "perform cleanup at the end of the run, for example:", "and not self._stop_counter: self._first_failure = e active = False elif", "subprocess.Popen): worker.send_signal(signal.SIGTERM) else: # Notify all workers running under a", "Optional[Sequence[Text]] = None, raise_error=True, return_on_first_completed=False): \"\"\"Waits for workers to finish.", "the process. kill_self = True else: self._kill_process_tree(worker.pid) if kill_self: self._kill_process_tree(os.getpid())", "= True while active_workers: with self._mutex: self._check_workers() active_workers = False", "] print( termcolor.colored( f'Worker groups that did not terminate in", "- self._stop_counter if pending_secs == 0: if self._termination_notice_secs > 0:", "still_active if has_workers and self._first_failure and not self._stop_counter: self._stop() elif", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "a worker. \"\"\" with self._mutex: future = futures.Future() def run_inner(f=function,", "to stop.', 'blue')) print(termcolor.colored('Press CTRL+C to terminate immediately.', 'blue')) signal.signal(signal.SIGINT,", "0: print( termcolor.colored( 'User-requested termination. Asking workers to stop.', 'blue'))", "License. 
# You may obtain a copy of the License", "them if they don't stop on time.\"\"\" pending_secs = self._termination_notice_secs", "def _disable_signals(self): self._disable_alarm() if self._sigterm_handler is not None: remove_signal_handler(signal.SIGTERM, self._sigterm_handler)", "pass if active: has_workers = True still_active.append(worker) self._active_workers[label] = still_active", "= False global _HAS_MAIN_MANAGER # Make the first created worker", "sig=None, frame=None: self._stop_by_user()) self._stop_main_thread = stop_main_thread if register_in_thread: _WORKER_MANAGERS.manager =", "stop.', 'blue')) print(termcolor.colored('Press CTRL+C to terminate immediately.', 'blue')) signal.signal(signal.SIGINT, lambda", "signals. if not _HAS_MAIN_MANAGER: self._termination_notice_secs = FLAGS.lp_termination_notice_secs handle_user_stop = True", "psutil.Process(pid) for process in parent.children(recursive=True): try: process.send_signal(signal.SIGKILL) except psutil.NoSuchProcess: pass", "exception upon any worker failure. 
return_on_first_completed: Whether to return upon", "self._termination_notice_secs = -1 handle_user_stop = False global _HAS_MAIN_MANAGER # Make", "assert manager, 'Worker manager is not available in the current", "False signal.alarm(0) remove_signal_handler(signal.SIGALRM, self._sigalrm_handler) def stop_and_wait(self): \"\"\"Requests stopping all workers", "label in self._active_workers if self._active_workers[label] ] print( termcolor.colored( f'Worker groups", "def get_worker_manager(): manager = getattr(_WORKER_MANAGERS, 'manager', None) assert manager, 'Worker", "self._stop_counter: self._first_failure = e active = False elif isinstance(worker, subprocess.Popen):", "True else: self._kill_process_tree(worker.pid) if kill_self: self._kill_process_tree(os.getpid()) def _send_exception(self, worker): res", "if (return_on_first_completed and len(self._active_workers[label]) < self._workers_count[label]): return time.sleep(0.1) return except", "any worker raises an exception. \"\"\" while True: try: active_workers", "return if pending_secs >= 0: print( termcolor.colored(f'Waiting for workers to", "node's program is requested. Can be used to perform cleanup", "register_signal_handler( signal.SIGINT, lambda sig=None, frame=None: self._stop_by_user()) self._stop_main_thread = stop_main_thread if", "threads and processes of a Launchpad Program.\"\"\" def __init__( self,", "None self._stop_counter = 0 self._alarm_enabled = False self._kill_main_thread = kill_main_thread", "future = futures.Future() def run_inner(f=function, future=future, manager=self): _WORKER_MANAGERS.manager = manager", "False def get_worker_manager(): manager = getattr(_WORKER_MANAGERS, 'manager', None) assert manager,", "created worker manager the main manager, which handles # signals.", "to stop for {pending_secs}s.', 'blue'), end='\\r') self._stop_counter += 1 for", "'User-requested termination. 
Asking workers to stop.', 'blue')) print(termcolor.colored('Press CTRL+C to", "signal.SIGALRM, lambda sig=None, frame=None: self._stop_or_kill()) except ValueError: # This happens", "def _send_exception(self, worker): res = ctypes.pythonapi.PyThreadState_SetAsyncExc( ctypes.c_long(worker.thread.ident), ctypes.py_object(SystemExit)) assert res", "False for process in children: if process.name() != 'bash' and", "import threading import time from typing import Optional, Sequence, Text", "None raise failure for label in labels_to_wait_for or self._active_workers.keys(): if", "kill_self = True else: self._kill_process_tree(worker.pid) if kill_self: self._kill_process_tree(os.getpid()) def _send_exception(self,", "if not _HAS_MAIN_MANAGER: self._termination_notice_secs = FLAGS.lp_termination_notice_secs handle_user_stop = True _HAS_MAIN_MANAGER", "self._sigterm_handler) self._sigterm_handler = None if self._sigquit_handler is not None: remove_signal_handler(signal.SIGQUIT,", "\"\"\"Kills all child processes of the current process.\"\"\" parent =", "thread_worker(self, name, function): \"\"\"Registers and start a new thread worker.", "self._kill_main_thread for workers in self._active_workers.values(): for worker in workers: if", "thread' return manager def register_signal_handler(sig, handler): \"\"\"Registers a signal handler.\"\"\"", "Send a SIGTERM to redirect to the main thread. psutil.Process(os.getpid()).send_signal(signal.SIGTERM)", "WorkerManager. Args: stop_main_thread: Should main thread be notified about termination.", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "self, stop_main_thread=False, kill_main_thread=True, register_in_thread=False, register_signals=True): \"\"\"Initializes a WorkerManager. Args: stop_main_thread:", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "workers' group. pid: Pid of the process to monitor. 
\"\"\"", "manager the main manager, which handles # signals. if not", "if handle_user_stop: register_signal_handler( signal.SIGINT, lambda sig=None, frame=None: self._stop_by_user()) self._stop_main_thread =", "handler): return signal.signal(sig, handler) def wait_for_stop(): \"\"\"Blocks until termination of", "required by applicable law or agreed to in writing, software", "\"\"\"Requests stopping all workers and wait for termination.\"\"\" with self._mutex:", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "we can kill the proxy itself. try: worker.send_signal(signal.SIGKILL) except psutil.NoSuchProcess:", "is not available in the current thread' return manager def", "self._stop() def _sigquit(self, sig=None, frame=None): if callable(self._sigquit_handler): self._sigquit_handler(sig, frame) self._kill()", "Name of the worker's group. command: Command to execute in", "launcher while killing workers. This is not possible when thread", "termination.\"\"\" if not self._stop_event.is_set(): self._stop_event.set() try: if self._termination_notice_secs > 0:", "\"\"\"Blocks until termination of the node's program is requested. Can", "self.wait(raise_error=False) with self._mutex: if self._first_failure: raise self._first_failure def _check_workers(self): \"\"\"Checks", "agreed to in writing, software # distributed under the License", "finish. Args: labels_to_wait_for: If supplied, only wait for these groups'", "the process to monitor. \"\"\" with self._mutex: self._workers_count[name] += 1", "distributed under the License is distributed on an \"AS IS\"", "def join(self): self.wait() def wait(self, labels_to_wait_for: Optional[Sequence[Text]] = None, raise_error=True,", "= collections.defaultdict(list) self._workers_count = collections.defaultdict(lambda: 0) self._first_failure = None self._stop_counter", "but not in the # main thread. Send a SIGTERM", "stop_main_thread: Should main thread be notified about termination. 
kill_main_thread: When", "terminating, so notify the worker. self._send_exception(worker) def process_worker(self, name, command,", "from absl import logging from absl.testing import absltest from launchpad", "for workers to finish. Args: labels_to_wait_for: If supplied, only wait", "_sigterm(self, sig=None, frame=None): \"\"\"Handles SIGTERM by stopping the workers.\"\"\" if", "if self._first_failure: raise self._first_failure def _check_workers(self): \"\"\"Checks status of running", "= False self._kill_main_thread = kill_main_thread self._stop_event = threading.Event() self._main_thread =", "self._mutex: self._check_workers() self._stop() self._disable_signals() self.wait(raise_error=False) with self._mutex: if self._first_failure: raise", "if self._termination_notice_secs > 0: self._alarm_enabled = True self._sigalrm_handler = register_signal_handler(", "self._stop_event.is_set(): # Runtime is terminating, so notify the worker. self._send_exception(worker)", "raise failure for label in labels_to_wait_for or self._active_workers.keys(): if self._active_workers[label]:", "assumed. res = worker.wait(0) active = False except psutil.TimeoutExpired: pass", "failure' def _stop_or_kill(self): \"\"\"Stops all workers; kills them if they", "try: worker_found = True process.send_signal(signal.SIGTERM) except psutil.NoSuchProcess: pass if not", "set to false try not to kill the launcher while", "can kill the proxy itself. try: worker.send_signal(signal.SIGKILL) except psutil.NoSuchProcess: pass", "and raise_error: failure = self._first_failure self._first_failure = None raise failure", "failure' if pending_secs >= 0: signal.alarm(1) def _stop(self): \"\"\"Requests all", "a test.\"\"\" with self._mutex: self._check_workers() self._stop() self._disable_signals() self.wait(raise_error=False) with self._mutex:", "__init__( self, stop_main_thread=False, kill_main_thread=True, register_in_thread=False, register_signals=True): \"\"\"Initializes a WorkerManager. 
Args:", "try: res = worker.wait(0) active = False if res and", "= RuntimeError('One of the workers failed.') except subprocess.TimeoutExpired: pass else:", "kill a thread without killing the process. kill_self = True", "the proxy itself. try: worker.send_signal(signal.SIGKILL) except psutil.NoSuchProcess: pass if self._stop_main_thread:", "not worker_found: # No more workers running, so we can", "not in the # main thread. Send a SIGTERM to", "OR CONDITIONS OF ANY KIND, either express or implied. #", "ctypes.py_object(SystemExit)) assert res < 2, 'Exception raise failure' if pending_secs", "if kill_self: self._kill_process_tree(os.getpid()) def _send_exception(self, worker): res = ctypes.pythonapi.PyThreadState_SetAsyncExc( ctypes.c_long(worker.thread.ident),", "the License is distributed on an \"AS IS\" BASIS, #", "from concurrent import futures import ctypes import os import signal", "active = False except psutil.TimeoutExpired: pass if active: has_workers =", "the # main thread. Send a SIGTERM to redirect to", "= False for label in self._active_workers: still_active = [] for", "RuntimeError: if any worker raises an exception. \"\"\" while True:", "'future']) _WORKER_MANAGERS = threading.local() _HAS_MAIN_MANAGER = False def get_worker_manager(): manager", "time: {still_running}', 'red')) self._kill() return if pending_secs >= 0: print(", "self._alarm_enabled: self._alarm_enabled = False signal.alarm(0) remove_signal_handler(signal.SIGALRM, self._sigalrm_handler) def stop_and_wait(self): \"\"\"Requests", "the node's program is requested. Can be used to perform", "law or agreed to in writing, software # distributed under", "a SIGTERM to redirect to the main thread. 
psutil.Process(os.getpid()).send_signal(signal.SIGTERM) return", "= -1 handle_user_stop = False global _HAS_MAIN_MANAGER # Make the", "the current process.\"\"\" parent = psutil.Process(pid) for process in parent.children(recursive=True):", "worker in workers: if isinstance(worker, ThreadWorker): # Not possible to", "handler but not in the # main thread. Send a", "terminate immediately.', 'blue')) signal.signal(signal.SIGINT, lambda sig, frame: self._kill()) self._stop() def", "FLAGS.lp_termination_notice_secs handle_user_stop = True _HAS_MAIN_MANAGER = True self._active_workers = collections.defaultdict(list)", "'Exception raise failure' def _stop_or_kill(self): \"\"\"Stops all workers; kills them", "may obtain a copy of the License at # #", "under the License. \"\"\"WorkerManager handles thread and process-based runtimes.\"\"\" import", "ThreadWorker): # Not possible to kill a thread without killing", "worker group. function: Entrypoint function to execute in a worker.", "res = worker.wait(0) active = False except psutil.TimeoutExpired: pass if", "may not use this file except in compliance with the", "psutil.TimeoutExpired: pass if active: has_workers = True still_active.append(worker) self._active_workers[label] =", "this file except in compliance with the License. # You", "if process.name() != 'bash' and 'envelope_' not in process.name(): try:", "finish. Wait for all workers otherwise. raise_error: Raise an exception", "print(termcolor.colored('Press CTRL+C to terminate immediately.', 'blue')) signal.signal(signal.SIGINT, lambda sig, frame:", "# # Licensed under the Apache License, Version 2.0 (the", "attempt to register a signal handler but not in the", "2, 'Exception raise failure' if pending_secs >= 0: signal.alarm(1) def", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "Args: stop_main_thread: Should main thread be notified about termination. 
kill_main_thread:", "def _disable_alarm(self): if self._alarm_enabled: self._alarm_enabled = False signal.alarm(0) remove_signal_handler(signal.SIGALRM, self._sigalrm_handler)", "when we attempt to register a signal handler but not", "Args: labels_to_wait_for: If supplied, only wait for these groups' workers", "while killing workers. This is not possible when thread workers", "active = True if isinstance(worker, ThreadWorker): if not worker.thread.is_alive(): worker.thread.join()", "res = ctypes.pythonapi.PyThreadState_SetAsyncExc( ctypes.c_long(threading.main_thread().ident), ctypes.py_object(SystemExit)) assert res < 2, 'Exception", "0) self._first_failure = None self._stop_counter = 0 self._alarm_enabled = False", "{still_running}', 'red')) self._kill() return if pending_secs >= 0: print( termcolor.colored(f'Waiting", "= False if self._first_failure and raise_error: failure = self._first_failure self._first_failure", "os import signal import subprocess import threading import time from", "< 2, 'Exception raise failure' def _stop_or_kill(self): \"\"\"Stops all workers;", "Limited. All rights reserved. # # Licensed under the Apache", "psutil import termcolor FLAGS = flags.FLAGS ThreadWorker = collections.namedtuple('ThreadWorker', ['thread',", "worker): res = ctypes.pythonapi.PyThreadState_SetAsyncExc( ctypes.c_long(worker.thread.ident), ctypes.py_object(SystemExit)) assert res < 2,", "or implied. # See the License for the specific language", "elif isinstance(worker, subprocess.Popen): try: res = worker.wait(0) active = False", "active_workers = False if self._first_failure and raise_error: failure = self._first_failure", "name: Name of the worker's group. command: Command to execute", "(return_on_first_completed and len(self._active_workers[label]) < self._workers_count[label]): return time.sleep(0.1) return except SystemExit:", "permissions and # limitations under the License. 
\"\"\"WorkerManager handles thread", "res = ctypes.pythonapi.PyThreadState_SetAsyncExc( ctypes.c_long(worker.thread.ident), ctypes.py_object(SystemExit)) assert res < 2, 'Exception", "def thread_worker(self, name, function): \"\"\"Registers and start a new thread", "self._first_failure = RuntimeError('One of the workers failed.') except subprocess.TimeoutExpired: pass", "signal import subprocess import threading import time from typing import", "= futures.Future() def run_inner(f=function, future=future, manager=self): _WORKER_MANAGERS.manager = manager try:", "self._mutex: process = subprocess.Popen(command, env=env or {}, **kwargs) self._workers_count[name] +=", "isinstance(worker, ThreadWorker): # Not possible to kill a thread without", "import flags as lp_flags import psutil import termcolor FLAGS =", "as lp_flags import psutil import termcolor FLAGS = flags.FLAGS ThreadWorker", "class WorkerManager: \"\"\"Encapsulates running threads and processes of a Launchpad", "parent.send_signal(signal.SIGKILL) def _kill(self): \"\"\"Kills all workers (and main thread/process if", "from launchpad import flags as lp_flags import psutil import termcolor", "import signal import subprocess import threading import time from typing", "signal handlers. 
\"\"\" self._mutex = threading.Lock() self._termination_notice_secs = -1 handle_user_stop", "not self._first_failure and not self._stop_counter: self._first_failure = e active =", "self._first_failure and not self._stop_counter: self._first_failure = e active = False", "handler): \"\"\"Registers a signal handler.\"\"\" return signal.signal(sig, handler) def remove_signal_handler(sig,", "processes of a Launchpad Program.\"\"\" def __init__( self, stop_main_thread=False, kill_main_thread=True,", "\"\"\"Encapsulates running threads and processes of a Launchpad Program.\"\"\" def", "stopping the workers.\"\"\" if callable(self._sigterm_handler): self._sigterm_handler(sig, frame) self._stop() def _sigquit(self,", "if self._termination_notice_secs != 0: print( termcolor.colored( 'User-requested termination. Asking workers", "only wait for these groups' workers to finish. Wait for", "= threading.current_thread().ident self._sigterm_handler = None self._sigquit_handler = None self._sigalrm_handler =", "All rights reserved. # # Licensed under the Apache License,", "self._active_workers[label] = still_active if has_workers and self._first_failure and not self._stop_counter:", "and len(self._active_workers[label]) < self._workers_count[label]): return time.sleep(0.1) return except SystemExit: self._stop()", "self._active_workers[name].append(process) def register_existing_process(self, name: str, pid: int): \"\"\"Registers already started", "import futures import ctypes import os import signal import subprocess", "# Notify all workers running under a proxy process. 
children", "has_workers = True still_active.append(worker) self._active_workers[label] = still_active if has_workers and", "ctypes.pythonapi.PyThreadState_SetAsyncExc( ctypes.c_long(threading.main_thread().ident), ctypes.py_object(SystemExit)) assert res < 2, 'Exception raise failure'", "1 for workers in self._active_workers.values(): for worker in workers: if", "name) thread.setDaemon(True) thread.start() self._workers_count[name] += 1 worker = ThreadWorker(thread=thread, future=future)", "1 worker = ThreadWorker(thread=thread, future=future) self._active_workers[name].append(worker) if self._stop_event.is_set(): # Runtime", "and processes of a Launchpad Program.\"\"\" def __init__( self, stop_main_thread=False,", "_kill(self): \"\"\"Kills all workers (and main thread/process if needed).\"\"\" print(termcolor.colored('\\nKilling", "def process_worker(self, name, command, env=None, **kwargs): \"\"\"Adds process worker to", "= register_signal_handler(signal.SIGQUIT, self._sigquit) if handle_user_stop: register_signal_handler( signal.SIGINT, lambda sig=None, frame=None:", "= stop_main_thread if register_in_thread: _WORKER_MANAGERS.manager = self def _disable_signals(self): self._disable_alarm()", "self._termination_notice_secs != 0: print( termcolor.colored( 'User-requested termination. 
Asking workers to", "'red')) self._kill() return if pending_secs >= 0: print( termcolor.colored(f'Waiting for", "callable(self._sigquit_handler): self._sigquit_handler(sig, frame) self._kill() def wait_for_stop(self): \"\"\"Blocks until managed runtime", "def _stop(self): \"\"\"Requests all workers to stop and schedule delayed", "in the current thread' return manager def register_signal_handler(sig, handler): \"\"\"Registers", "return signal.signal(sig, handler) def wait_for_stop(): \"\"\"Blocks until termination of the", "assert res < 2, 'Exception raise failure' def _stop_or_kill(self): \"\"\"Stops", "case of errors.\"\"\" has_workers = False for label in self._active_workers:", "the main thread. psutil.Process(os.getpid()).send_signal(signal.SIGTERM) return self._stop_or_kill() def _disable_alarm(self): if self._alarm_enabled:", "ctypes.c_long(threading.main_thread().ident), ctypes.py_object(SystemExit)) assert res < 2, 'Exception raise failure' if", "in the worker. env: Environment variables to set for the", "remove_signal_handler(sig, handler): return signal.signal(sig, handler) def wait_for_stop(): \"\"\"Blocks until termination", "try: future.set_result(f()) except BaseException as e: future.set_exception(e) builder = lambda", "self._workers_count[name] += 1 self._active_workers[name].append(process) def register_existing_process(self, name: str, pid: int):", "in writing, software # distributed under the License is distributed", "stop_main_thread if register_in_thread: _WORKER_MANAGERS.manager = self def _disable_signals(self): self._disable_alarm() if", "and schedule delayed termination.\"\"\" if not self._stop_event.is_set(): self._stop_event.set() try: if", "handle_user_stop = False global _HAS_MAIN_MANAGER # Make the first created", "self._termination_notice_secs > 0: still_running = [ label for label in", "which handles # signals. 
if not _HAS_MAIN_MANAGER: self._termination_notice_secs = FLAGS.lp_termination_notice_secs", "labels_to_wait_for or self._active_workers.keys(): if self._active_workers[label]: active_workers = True if (return_on_first_completed", "that did not terminate in time: {still_running}', 'red')) self._kill() return", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "Optional, Sequence, Text from absl import flags from absl import", "License, Version 2.0 (the \"License\"); # you may not use", "thread.setDaemon(True) thread.start() self._workers_count[name] += 1 worker = ThreadWorker(thread=thread, future=future) self._active_workers[name].append(worker)", "the end of the run, for example: start_server() lp.wait_for_stop() stop_server()", "and start a new thread worker. Args: name: Name of", "name: str, pid: int): \"\"\"Registers already started worker process. Args:", "label for label in self._active_workers if self._active_workers[label] ] print( termcolor.colored(", "worker.thread.is_alive(): worker.thread.join() if not self._stop_counter: try: worker.future.result() except BaseException as", "worker. **kwargs: Other parameters to be passed to `subprocess.Popen`. \"\"\"", "self._stop_event = threading.Event() self._main_thread = threading.current_thread().ident self._sigterm_handler = None self._sigquit_handler", "name, command, env=None, **kwargs): \"\"\"Adds process worker to the runtime.", "the License for the specific language governing permissions and #", "False except psutil.TimeoutExpired: pass if active: has_workers = True still_active.append(worker)", "threading.Event() self._main_thread = threading.current_thread().ident self._sigterm_handler = None self._sigquit_handler = None", "self._sigalrm_handler = register_signal_handler( signal.SIGALRM, lambda sig=None, frame=None: self._stop_or_kill()) except ValueError:", "supplied, only wait for these groups' workers to finish. 
Wait", "if self._termination_notice_secs > 0: still_running = [ label for label", "not worker.thread.is_alive(): worker.thread.join() if not self._stop_counter: try: worker.future.result() except BaseException", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "= False elif isinstance(worker, subprocess.Popen): try: res = worker.wait(0) active", "self._kill_process_tree(worker.pid) if kill_self: self._kill_process_tree(os.getpid()) def _send_exception(self, worker): res = ctypes.pythonapi.PyThreadState_SetAsyncExc(", "to finish. Wait for all workers otherwise. raise_error: Raise an", "subprocess.Popen): try: res = worker.wait(0) active = False if res", "test.\"\"\" with self._mutex: self._check_workers() self._stop() self._disable_signals() self.wait(raise_error=False) with self._mutex: if", "stopping of the runtime by a user.\"\"\" if self._termination_notice_secs !=", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "\"\"\"Handles SIGTERM by stopping the workers.\"\"\" if callable(self._sigterm_handler): self._sigterm_handler(sig, frame)", "self._first_failure: raise self._first_failure def _check_workers(self): \"\"\"Checks status of running workers,", "language governing permissions and # limitations under the License. \"\"\"WorkerManager", "started worker process. Args: name: Name of the workers' group.", "self._active_workers: still_active = [] for worker in self._active_workers[label]: active =", "workers to stop and schedule delayed termination.\"\"\" if not self._stop_event.is_set():", "wait_for_stop(): \"\"\"Blocks until termination of the node's program is requested.", "# distributed under the License is distributed on an \"AS", "workers otherwise. raise_error: Raise an exception upon any worker failure.", "# main thread. 
Send a SIGTERM to redirect to the", "# Unless required by applicable law or agreed to in", "self._stop_or_kill()) except ValueError: # This happens when we attempt to", "None if self._sigquit_handler is not None: remove_signal_handler(signal.SIGQUIT, self._sigquit_handler) self._sigquit_handler =", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "not None: remove_signal_handler(signal.SIGQUIT, self._sigquit_handler) self._sigquit_handler = None def _sigterm(self, sig=None,", "for workers to stop for {pending_secs}s.', 'blue'), end='\\r') self._stop_counter +=", "= True if isinstance(worker, ThreadWorker): if not worker.thread.is_alive(): worker.thread.join() if", "all workers and wait for termination.\"\"\" with self._mutex: self._stop() self.wait(raise_error=False)", "worker raises an exception. \"\"\" while True: try: active_workers =", "> 0: self._alarm_enabled = True self._sigalrm_handler = register_signal_handler( signal.SIGALRM, lambda", "thread = builder(run_inner, name) thread.setDaemon(True) thread.start() self._workers_count[name] += 1 worker", "worker.wait(0) active = False except psutil.TimeoutExpired: pass if active: has_workers", "the Apache License, Version 2.0 (the \"License\"); # you may", "self._stop() self.wait(raise_error=False) def join(self): self.wait() def wait(self, labels_to_wait_for: Optional[Sequence[Text]] =", "manager = getattr(_WORKER_MANAGERS, 'manager', None) assert manager, 'Worker manager is", "else: self._kill_process_tree(worker.pid) if kill_self: self._kill_process_tree(os.getpid()) def _send_exception(self, worker): res =", "collections.defaultdict(lambda: 0) self._first_failure = None self._stop_counter = 0 self._alarm_enabled =", "process. register_in_thread: TODO register_signals: Whether or not to register signal", "Runtime is terminating, so notify the worker. 
self._send_exception(worker) def process_worker(self,", "sig=None, frame=None): \"\"\"Handles SIGTERM by stopping the workers.\"\"\" if callable(self._sigterm_handler):", "stop_main_thread=False, kill_main_thread=True, register_in_thread=False, register_signals=True): \"\"\"Initializes a WorkerManager. Args: stop_main_thread: Should", "= None self._sigquit_handler = None self._sigalrm_handler = None if register_signals:", "running threads and processes of a Launchpad Program.\"\"\" def __init__(", "return_on_first_completed: Whether to return upon the first completed (or failed)", "all workers; kills them if they don't stop on time.\"\"\"", "function: Entrypoint function to execute in a worker. \"\"\" with", "to finish. Args: labels_to_wait_for: If supplied, only wait for these", "process-based runtimes.\"\"\" import atexit import collections from concurrent import futures", "an exception. \"\"\" while True: try: active_workers = True while", "Asking workers to stop.', 'blue')) print(termcolor.colored('Press CTRL+C to terminate immediately.',", "self.wait() def wait(self, labels_to_wait_for: Optional[Sequence[Text]] = None, raise_error=True, return_on_first_completed=False): \"\"\"Waits", "runtime. Args: name: Name of the worker's group. command: Command", "Pid of the process to monitor. \"\"\" with self._mutex: self._workers_count[name]", "thread be notified about termination. kill_main_thread: When set to false", "# Make the first created worker manager the main manager,", "of the run, for example: start_server() lp.wait_for_stop() stop_server() checkpoint() \"\"\"", "\"\"\"Adds process worker to the runtime. 
Args: name: Name of", "['thread', 'future']) _WORKER_MANAGERS = threading.local() _HAS_MAIN_MANAGER = False def get_worker_manager():", "if isinstance(worker, ThreadWorker): if not worker.thread.is_alive(): worker.thread.join() if not self._stop_counter:", "process.send_signal(signal.SIGKILL) except psutil.NoSuchProcess: pass parent.send_signal(signal.SIGKILL) def _kill(self): \"\"\"Kills all workers", "'bash' and 'envelope_' not in process.name(): try: worker_found = True", "def stop_and_wait(self): \"\"\"Requests stopping all workers and wait for termination.\"\"\"", "in children: if process.name() != 'bash' and 'envelope_' not in", "if register_in_thread: _WORKER_MANAGERS.manager = self def _disable_signals(self): self._disable_alarm() if self._sigterm_handler", "under the License is distributed on an \"AS IS\" BASIS,", "so notify the worker. self._send_exception(worker) def process_worker(self, name, command, env=None,", "ctypes.pythonapi.PyThreadState_SetAsyncExc( ctypes.c_long(worker.thread.ident), ctypes.py_object(SystemExit)) assert res < 2, 'Exception raise failure'", "def _stop_by_user(self): \"\"\"Handles stopping of the runtime by a user.\"\"\"", "res < 2, 'Exception raise failure' def _stop_or_kill(self): \"\"\"Stops all", "if pending_secs >= 0: print( termcolor.colored(f'Waiting for workers to stop", "workers to stop for {pending_secs}s.', 'blue'), end='\\r') self._stop_counter += 1", "_disable_alarm(self): if self._alarm_enabled: self._alarm_enabled = False signal.alarm(0) remove_signal_handler(signal.SIGALRM, self._sigalrm_handler) def", "= collections.defaultdict(lambda: 0) self._first_failure = None self._stop_counter = 0 self._alarm_enabled", "== 1: self._send_exception(worker) elif isinstance(worker, subprocess.Popen): worker.send_signal(signal.SIGTERM) else: # Notify", "def cleanup_after_test(self, test_case: absltest.TestCase): \"\"\"Cleanups runtime after a test.\"\"\" with", "else: try: # We can't obtain return code of external", 
"future.set_result(f()) except BaseException as e: future.set_exception(e) builder = lambda t,", "in workers: if isinstance(worker, ThreadWorker): if self._stop_counter == 1: self._send_exception(worker)", "workers running, so we can kill the proxy itself. try:", "= [] for worker in self._active_workers[label]: active = True if", "frame: self._kill()) self._stop() def _kill_process_tree(self, pid): \"\"\"Kills all child processes", "about termination. kill_main_thread: When set to false try not to", "_disable_signals(self): self._disable_alarm() if self._sigterm_handler is not None: remove_signal_handler(signal.SIGTERM, self._sigterm_handler) self._sigterm_handler", "runtime is being terminated.\"\"\" self._stop_event.wait() def thread_worker(self, name, function): \"\"\"Registers", "True: try: active_workers = True while active_workers: with self._mutex: self._check_workers()", "= None if register_signals: self._sigterm_handler = register_signal_handler(signal.SIGTERM, self._sigterm) self._sigquit_handler =", "logging from absl.testing import absltest from launchpad import flags as", "proxy itself. 
try: worker.send_signal(signal.SIGKILL) except psutil.NoSuchProcess: pass if self._stop_main_thread: res", "True process.send_signal(signal.SIGTERM) except psutil.NoSuchProcess: pass if not worker_found: # No", "frame=None: self._stop_or_kill()) except ValueError: # This happens when we attempt", "True if (return_on_first_completed and len(self._active_workers[label]) < self._workers_count[label]): return time.sleep(0.1) return", "runtime.', 'blue')) kill_self = self._kill_main_thread for workers in self._active_workers.values(): for", "self._sigalrm_handler = None if register_signals: self._sigterm_handler = register_signal_handler(signal.SIGTERM, self._sigterm) self._sigquit_handler", "try: worker.send_signal(signal.SIGKILL) except psutil.NoSuchProcess: pass if self._stop_main_thread: res = ctypes.pythonapi.PyThreadState_SetAsyncExc(", "SIGTERM by stopping the workers.\"\"\" if callable(self._sigterm_handler): self._sigterm_handler(sig, frame) self._stop()", "None, raise_error=True, return_on_first_completed=False): \"\"\"Waits for workers to finish. Args: labels_to_wait_for:", "a WorkerManager. Args: stop_main_thread: Should main thread be notified about", "except psutil.NoSuchProcess: pass if self._stop_main_thread: res = ctypes.pythonapi.PyThreadState_SetAsyncExc( ctypes.c_long(threading.main_thread().ident), ctypes.py_object(SystemExit))", "ANY KIND, either express or implied. # See the License", "+= 1 worker = ThreadWorker(thread=thread, future=future) self._active_workers[name].append(worker) if self._stop_event.is_set(): #", "import time from typing import Optional, Sequence, Text from absl", "the License. 
# You may obtain a copy of the", "to stop and schedule delayed termination.\"\"\" if not self._stop_event.is_set(): self._stop_event.set()", "and process-based runtimes.\"\"\" import atexit import collections from concurrent import", "\"\"\" get_worker_manager().wait_for_stop() class WorkerManager: \"\"\"Encapsulates running threads and processes of", "# See the License for the specific language governing permissions", "available in the current thread' return manager def register_signal_handler(sig, handler):", "[ label for label in self._active_workers if self._active_workers[label] ] print(", "by a user.\"\"\" if self._termination_notice_secs != 0: print( termcolor.colored( 'User-requested", "self._disable_signals() self.wait(raise_error=False) with self._mutex: if self._first_failure: raise self._first_failure def _check_workers(self):", "group. function: Entrypoint function to execute in a worker. \"\"\"", "execute in the worker. env: Environment variables to set for", "process, so clean # termination is assumed. res = worker.wait(0)", "register_signal_handler( signal.SIGALRM, lambda sig=None, frame=None: self._stop_or_kill()) except ValueError: # This", "of the node's program is requested. Can be used to", "first created worker manager the main manager, which handles #", "lambda sig, frame: self._kill()) self._stop() def _kill_process_tree(self, pid): \"\"\"Kills all", "worker manager the main manager, which handles # signals. if", "workers to finish. Args: labels_to_wait_for: If supplied, only wait for", "Other parameters to be passed to `subprocess.Popen`. \"\"\" with self._mutex:", "or not to register signal handlers. \"\"\" self._mutex = threading.Lock()", "**kwargs: Other parameters to be passed to `subprocess.Popen`. 
\"\"\" with", "< self._workers_count[label]): return time.sleep(0.1) return except SystemExit: self._stop() def cleanup_after_test(self,", "workers to stop.', 'blue')) print(termcolor.colored('Press CTRL+C to terminate immediately.', 'blue'))", "if callable(self._sigquit_handler): self._sigquit_handler(sig, frame) self._kill() def wait_for_stop(self): \"\"\"Blocks until managed", "as e: if not self._first_failure and not self._stop_counter: self._first_failure =", "function to execute in a worker. \"\"\" with self._mutex: future", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "builder(run_inner, name) thread.setDaemon(True) thread.start() self._workers_count[name] += 1 worker = ThreadWorker(thread=thread,", "if pending_secs >= 0: signal.alarm(1) def _stop(self): \"\"\"Requests all workers", "self._active_workers[label]: active_workers = True if (return_on_first_completed and len(self._active_workers[label]) < self._workers_count[label]):", "run in the same process. register_in_thread: TODO register_signals: Whether or", "with self._mutex: self._check_workers() self._stop() self._disable_signals() self.wait(raise_error=False) with self._mutex: if self._first_failure:", "new thread worker. Args: name: Name of the worker group.", "writing, software # distributed under the License is distributed on", "worker.thread.join() if not self._stop_counter: try: worker.future.result() except BaseException as e:", "and 'envelope_' not in process.name(): try: worker_found = True process.send_signal(signal.SIGTERM)", "label in labels_to_wait_for or self._active_workers.keys(): if self._active_workers[label]: active_workers = True", "active_workers = True if (return_on_first_completed and len(self._active_workers[label]) < self._workers_count[label]): return", "0: print( termcolor.colored(f'Waiting for workers to stop for {pending_secs}s.', 'blue'),", "itself. 
try: worker.send_signal(signal.SIGKILL) except psutil.NoSuchProcess: pass if self._stop_main_thread: res =", "kill_main_thread=True, register_in_thread=False, register_signals=True): \"\"\"Initializes a WorkerManager. Args: stop_main_thread: Should main", "# We can't obtain return code of external process, so", "self._active_workers[label] ] print( termcolor.colored( f'Worker groups that did not terminate", "workers: if isinstance(worker, ThreadWorker): if self._stop_counter == 1: self._send_exception(worker) elif", "not self._first_failure and not self._stop_counter: self._first_failure = RuntimeError('One of the", "and not self._stop_counter: self._first_failure = RuntimeError('One of the workers failed.')", "# No more workers running, so we can kill the", "isinstance(worker, subprocess.Popen): worker.send_signal(signal.SIGTERM) else: # Notify all workers running under", "these groups' workers to finish. Wait for all workers otherwise.", "still_running = [ label for label in self._active_workers if self._active_workers[label]", "False if res and not self._first_failure and not self._stop_counter: self._first_failure", "Launchpad Program.\"\"\" def __init__( self, stop_main_thread=False, kill_main_thread=True, register_in_thread=False, register_signals=True): \"\"\"Initializes", "pid: Pid of the process to monitor. \"\"\" with self._mutex:", "self._stop_main_thread: res = ctypes.pythonapi.PyThreadState_SetAsyncExc( ctypes.c_long(threading.main_thread().ident), ctypes.py_object(SystemExit)) assert res < 2,", "stop_server() checkpoint() \"\"\" get_worker_manager().wait_for_stop() class WorkerManager: \"\"\"Encapsulates running threads and", "= manager try: future.set_result(f()) except BaseException as e: future.set_exception(e) builder", "end of the run, for example: start_server() lp.wait_for_stop() stop_server() checkpoint()", "group. command: Command to execute in the worker. 
env: Environment", "if callable(self._sigterm_handler): self._sigterm_handler(sig, frame) self._stop() def _sigquit(self, sig=None, frame=None): if", "if pending_secs == 0: if self._termination_notice_secs > 0: still_running =", "function): \"\"\"Registers and start a new thread worker. Args: name:", "not _HAS_MAIN_MANAGER: self._termination_notice_secs = FLAGS.lp_termination_notice_secs handle_user_stop = True _HAS_MAIN_MANAGER =", "to terminate immediately.', 'blue')) signal.signal(signal.SIGINT, lambda sig, frame: self._kill()) self._stop()", "kill_self: self._kill_process_tree(os.getpid()) def _send_exception(self, worker): res = ctypes.pythonapi.PyThreadState_SetAsyncExc( ctypes.c_long(worker.thread.ident), ctypes.py_object(SystemExit))", "process.name(): try: worker_found = True process.send_signal(signal.SIGTERM) except psutil.NoSuchProcess: pass if", "isinstance(worker, ThreadWorker): if not worker.thread.is_alive(): worker.thread.join() if not self._stop_counter: try:", "worker process. Args: name: Name of the workers' group. pid:", "sig=None, frame=None: self._stop_or_kill()) except ValueError: # This happens when we", "0: signal.alarm(1) def _stop(self): \"\"\"Requests all workers to stop and", "self._sigquit_handler(sig, frame) self._kill() def wait_for_stop(self): \"\"\"Blocks until managed runtime is", "worker = ThreadWorker(thread=thread, future=future) self._active_workers[name].append(worker) if self._stop_event.is_set(): # Runtime is", "passed to `subprocess.Popen`. 
\"\"\" with self._mutex: process = subprocess.Popen(command, env=env", "except psutil.NoSuchProcess: pass parent.send_signal(signal.SIGKILL) def _kill(self): \"\"\"Kills all workers (and", "remove_signal_handler(signal.SIGQUIT, self._sigquit_handler) self._sigquit_handler = None def _sigterm(self, sig=None, frame=None): \"\"\"Handles", "if has_workers and self._first_failure and not self._stop_counter: self._stop() elif not", "for label in self._active_workers if self._active_workers[label] ] print( termcolor.colored( f'Worker", "Whether to return upon the first completed (or failed) worker.", "all workers running under a proxy process. children = worker.children(recursive=True)", "'blue')) kill_self = self._kill_main_thread for workers in self._active_workers.values(): for worker", "self._stop_or_kill() def _disable_alarm(self): if self._alarm_enabled: self._alarm_enabled = False signal.alarm(0) remove_signal_handler(signal.SIGALRM,", "DeepMind Technologies Limited. All rights reserved. # # Licensed under", "0 self._alarm_enabled = False self._kill_main_thread = kill_main_thread self._stop_event = threading.Event()", "otherwise. raise_error: Raise an exception upon any worker failure. 
return_on_first_completed:", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "= False def get_worker_manager(): manager = getattr(_WORKER_MANAGERS, 'manager', None) assert", "= ThreadWorker(thread=thread, future=future) self._active_workers[name].append(worker) if self._stop_event.is_set(): # Runtime is terminating,", "threading.local() _HAS_MAIN_MANAGER = False def get_worker_manager(): manager = getattr(_WORKER_MANAGERS, 'manager',", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "collections from concurrent import futures import ctypes import os import", "signal.SIGINT, lambda sig=None, frame=None: self._stop_by_user()) self._stop_main_thread = stop_main_thread if register_in_thread:", "lambda sig=None, frame=None: self._stop_or_kill()) except ValueError: # This happens when", "= e active = False elif isinstance(worker, subprocess.Popen): try: res", "self._sigquit_handler = register_signal_handler(signal.SIGQUIT, self._sigquit) if handle_user_stop: register_signal_handler( signal.SIGINT, lambda sig=None,", "for worker in workers: if isinstance(worker, ThreadWorker): # Not possible", "= threading.Lock() self._termination_notice_secs = -1 handle_user_stop = False global _HAS_MAIN_MANAGER", "of the runtime by a user.\"\"\" if self._termination_notice_secs != 0:", "false try not to kill the launcher while killing workers.", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "not available in the current thread' return manager def register_signal_handler(sig,", "to set for the worker. **kwargs: Other parameters to be", "True self._active_workers = collections.defaultdict(list) self._workers_count = collections.defaultdict(lambda: 0) self._first_failure =", "Command to execute in the worker. env: Environment variables to", "rights reserved. # # Licensed under the Apache License, Version", "to register signal handlers. 
\"\"\" self._mutex = threading.Lock() self._termination_notice_secs =", "Should main thread be notified about termination. kill_main_thread: When set", "worker. Raises: RuntimeError: if any worker raises an exception. \"\"\"", "get_worker_manager().wait_for_stop() class WorkerManager: \"\"\"Encapsulates running threads and processes of a", "groups that did not terminate in time: {still_running}', 'red')) self._kill()", "the worker's group. command: Command to execute in the worker.", "_WORKER_MANAGERS.manager = self def _disable_signals(self): self._disable_alarm() if self._sigterm_handler is not", "process = subprocess.Popen(command, env=env or {}, **kwargs) self._workers_count[name] += 1", "except psutil.NoSuchProcess: pass if not worker_found: # No more workers", "first completed (or failed) worker. Raises: RuntimeError: if any worker", "False for label in self._active_workers: still_active = [] for worker", "run, for example: start_server() lp.wait_for_stop() stop_server() checkpoint() \"\"\" get_worker_manager().wait_for_stop() class", "False if self._first_failure and raise_error: failure = self._first_failure self._first_failure =", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "signal.signal(sig, handler) def remove_signal_handler(sig, handler): return signal.signal(sig, handler) def wait_for_stop():", "thread.start() self._workers_count[name] += 1 worker = ThreadWorker(thread=thread, future=future) self._active_workers[name].append(worker) if", "raise_error=True, return_on_first_completed=False): \"\"\"Waits for workers to finish. 
Args: labels_to_wait_for: If", "_stop_or_kill(self): \"\"\"Stops all workers; kills them if they don't stop", "if self._sigquit_handler is not None: remove_signal_handler(signal.SIGQUIT, self._sigquit_handler) self._sigquit_handler = None", "SystemExit: self._stop() def cleanup_after_test(self, test_case: absltest.TestCase): \"\"\"Cleanups runtime after a", "1 self._active_workers[name].append(psutil.Process(pid)) def _stop_by_user(self): \"\"\"Handles stopping of the runtime by", "in case of errors.\"\"\" has_workers = False for label in", "signal.signal(signal.SIGINT, lambda sig, frame: self._kill()) self._stop() def _kill_process_tree(self, pid): \"\"\"Kills", "specific language governing permissions and # limitations under the License.", "workers; kills them if they don't stop on time.\"\"\" pending_secs", "Wait for all workers otherwise. raise_error: Raise an exception upon", "ctypes.py_object(SystemExit)) assert res < 2, 'Exception raise failure' def _stop_or_kill(self):", "\"\"\" while True: try: active_workers = True while active_workers: with", "= None raise failure for label in labels_to_wait_for or self._active_workers.keys():", "for example: start_server() lp.wait_for_stop() stop_server() checkpoint() \"\"\" get_worker_manager().wait_for_stop() class WorkerManager:", "register_signals=True): \"\"\"Initializes a WorkerManager. Args: stop_main_thread: Should main thread be", "Technologies Limited. All rights reserved. # # Licensed under the", "limitations under the License. \"\"\"WorkerManager handles thread and process-based runtimes.\"\"\"", "to redirect to the main thread. 
psutil.Process(os.getpid()).send_signal(signal.SIGTERM) return self._stop_or_kill() def", "except SystemExit: self._stop() def cleanup_after_test(self, test_case: absltest.TestCase): \"\"\"Cleanups runtime after", "active: has_workers = True still_active.append(worker) self._active_workers[label] = still_active if has_workers", "No more workers running, so we can kill the proxy", "process.\"\"\" parent = psutil.Process(pid) for process in parent.children(recursive=True): try: process.send_signal(signal.SIGKILL)", "# you may not use this file except in compliance", "print(termcolor.colored('\\nKilling entire runtime.', 'blue')) kill_self = self._kill_main_thread for workers in", "in self._active_workers: still_active = [] for worker in self._active_workers[label]: active", "return upon the first completed (or failed) worker. Raises: RuntimeError:", "labels_to_wait_for: If supplied, only wait for these groups' workers to", "not terminate in time: {still_running}', 'red')) self._kill() return if pending_secs", "res < 2, 'Exception raise failure' if pending_secs >= 0:", "False self._kill_main_thread = kill_main_thread self._stop_event = threading.Event() self._main_thread = threading.current_thread().ident", "to execute in a worker. \"\"\" with self._mutex: future =", "signal.signal(sig, handler) def wait_for_stop(): \"\"\"Blocks until termination of the node's", "**kwargs): \"\"\"Adds process worker to the runtime. Args: name: Name", "= True else: self._kill_process_tree(worker.pid) if kill_self: self._kill_process_tree(os.getpid()) def _send_exception(self, worker):", "import termcolor FLAGS = flags.FLAGS ThreadWorker = collections.namedtuple('ThreadWorker', ['thread', 'future'])", "= None self._sigalrm_handler = None if register_signals: self._sigterm_handler = register_signal_handler(signal.SIGTERM,", "future.set_exception(e) builder = lambda t, n: threading.Thread(target=t, name=n) thread =", "the runtime. Args: name: Name of the worker's group. 
command:", "children = worker.children(recursive=True) worker_found = False for process in children:", "True if isinstance(worker, ThreadWorker): if not worker.thread.is_alive(): worker.thread.join() if not", "lp_flags import psutil import termcolor FLAGS = flags.FLAGS ThreadWorker =", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "so clean # termination is assumed. res = worker.wait(0) active", "pid): \"\"\"Kills all child processes of the current process.\"\"\" parent", "to return upon the first completed (or failed) worker. Raises:", "test_case: absltest.TestCase): \"\"\"Cleanups runtime after a test.\"\"\" with self._mutex: self._check_workers()", "a Launchpad Program.\"\"\" def __init__( self, stop_main_thread=False, kill_main_thread=True, register_in_thread=False, register_signals=True):", "possible to kill a thread without killing the process. kill_self", "if active: has_workers = True still_active.append(worker) self._active_workers[label] = still_active if", "register_signal_handler(sig, handler): \"\"\"Registers a signal handler.\"\"\" return signal.signal(sig, handler) def", "get_worker_manager(): manager = getattr(_WORKER_MANAGERS, 'manager', None) assert manager, 'Worker manager", "under the Apache License, Version 2.0 (the \"License\"); # you", "(or failed) worker. Raises: RuntimeError: if any worker raises an", "print( termcolor.colored( f'Worker groups that did not terminate in time:", "2020 DeepMind Technologies Limited. All rights reserved. # # Licensed", "workers run in the same process. register_in_thread: TODO register_signals: Whether", "not in process.name(): try: worker_found = True process.send_signal(signal.SIGTERM) except psutil.NoSuchProcess:", "pass if not worker_found: # No more workers running, so", "clean # termination is assumed. 
res = worker.wait(0) active =", "None: remove_signal_handler(signal.SIGTERM, self._sigterm_handler) self._sigterm_handler = None if self._sigquit_handler is not", "cleanup_after_test(self, test_case: absltest.TestCase): \"\"\"Cleanups runtime after a test.\"\"\" with self._mutex:", "stop on time.\"\"\" pending_secs = self._termination_notice_secs - self._stop_counter if pending_secs", "\"\"\"Registers and start a new thread worker. Args: name: Name", "isinstance(worker, subprocess.Popen): try: res = worker.wait(0) active = False if", "not to kill the launcher while killing workers. This is", "upon any worker failure. return_on_first_completed: Whether to return upon the", "of the worker's group. command: Command to execute in the", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "self._sigalrm_handler) def stop_and_wait(self): \"\"\"Requests stopping all workers and wait for", "the runtime by a user.\"\"\" if self._termination_notice_secs != 0: print(", "ctypes import os import signal import subprocess import threading import", "in workers: if isinstance(worker, ThreadWorker): # Not possible to kill", "**kwargs) self._workers_count[name] += 1 self._active_workers[name].append(process) def register_existing_process(self, name: str, pid:", "_WORKER_MANAGERS.manager = manager try: future.set_result(f()) except BaseException as e: future.set_exception(e)", "to perform cleanup at the end of the run, for", "to kill the launcher while killing workers. This is not", "requested. Can be used to perform cleanup at the end", "handles # signals. if not _HAS_MAIN_MANAGER: self._termination_notice_secs = FLAGS.lp_termination_notice_secs handle_user_stop", "workers, terminate runtime in case of errors.\"\"\" has_workers = False", "launchpad import flags as lp_flags import psutil import termcolor FLAGS", "handlers. 
\"\"\" self._mutex = threading.Lock() self._termination_notice_secs = -1 handle_user_stop =", "worker_found = False for process in children: if process.name() !=", "all workers otherwise. raise_error: Raise an exception upon any worker", "If supplied, only wait for these groups' workers to finish.", "as e: future.set_exception(e) builder = lambda t, n: threading.Thread(target=t, name=n)", "if self._first_failure and raise_error: failure = self._first_failure self._first_failure = None", "the first completed (or failed) worker. Raises: RuntimeError: if any", "if self._stop_counter == 1: self._send_exception(worker) elif isinstance(worker, subprocess.Popen): worker.send_signal(signal.SIGTERM) else:", "Args: name: Name of the workers' group. pid: Pid of", "if register_signals: self._sigterm_handler = register_signal_handler(signal.SIGTERM, self._sigterm) self._sigquit_handler = register_signal_handler(signal.SIGQUIT, self._sigquit)", "join(self): self.wait() def wait(self, labels_to_wait_for: Optional[Sequence[Text]] = None, raise_error=True, return_on_first_completed=False):", "n: threading.Thread(target=t, name=n) thread = builder(run_inner, name) thread.setDaemon(True) thread.start() self._workers_count[name]", "self._workers_count[name] += 1 self._active_workers[name].append(psutil.Process(pid)) def _stop_by_user(self): \"\"\"Handles stopping of the", "# Copyright 2020 DeepMind Technologies Limited. All rights reserved. 
#", "signal handler.\"\"\" return signal.signal(sig, handler) def remove_signal_handler(sig, handler): return signal.signal(sig,", "import absltest from launchpad import flags as lp_flags import psutil", "all child processes of the current process.\"\"\" parent = psutil.Process(pid)", "def _sigterm(self, sig=None, frame=None): \"\"\"Handles SIGTERM by stopping the workers.\"\"\"", "threading.Thread(target=t, name=n) thread = builder(run_inner, name) thread.setDaemon(True) thread.start() self._workers_count[name] +=", "managed runtime is being terminated.\"\"\" self._stop_event.wait() def thread_worker(self, name, function):", "to kill a thread without killing the process. kill_self =", "all workers to stop and schedule delayed termination.\"\"\" if not", "to `subprocess.Popen`. \"\"\" with self._mutex: process = subprocess.Popen(command, env=env or", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "worker to the runtime. Args: name: Name of the worker's", "signal.alarm(1) def _stop(self): \"\"\"Requests all workers to stop and schedule", "self._first_failure and raise_error: failure = self._first_failure self._first_failure = None raise", "of a Launchpad Program.\"\"\" def __init__( self, stop_main_thread=False, kill_main_thread=True, register_in_thread=False,", "When set to false try not to kill the launcher", "Entrypoint function to execute in a worker. \"\"\" with self._mutex:", "futures import ctypes import os import signal import subprocess import", "pid: int): \"\"\"Registers already started worker process. Args: name: Name", "self._sigterm) self._sigquit_handler = register_signal_handler(signal.SIGQUIT, self._sigquit) if handle_user_stop: register_signal_handler( signal.SIGINT, lambda", "Apache License, Version 2.0 (the \"License\"); # you may not", "either express or implied. # See the License for the", "= [ label for label in self._active_workers if self._active_workers[label] ]", "failed) worker. 
Raises: RuntimeError: if any worker raises an exception.", "worker_found: # No more workers running, so we can kill", "is terminating, so notify the worker. self._send_exception(worker) def process_worker(self, name,", "_stop(self): \"\"\"Requests all workers to stop and schedule delayed termination.\"\"\"", "workers failed.') except subprocess.TimeoutExpired: pass else: try: # We can't", "of external process, so clean # termination is assumed. res", "self._sigterm_handler is not None: remove_signal_handler(signal.SIGTERM, self._sigterm_handler) self._sigterm_handler = None if", "from typing import Optional, Sequence, Text from absl import flags", "a user.\"\"\" if self._termination_notice_secs != 0: print( termcolor.colored( 'User-requested termination.", "pending_secs = self._termination_notice_secs - self._stop_counter if pending_secs == 0: if", "import logging from absl.testing import absltest from launchpad import flags", "self._stop_event.set() try: if self._termination_notice_secs > 0: self._alarm_enabled = True self._sigalrm_handler", "if needed).\"\"\" print(termcolor.colored('\\nKilling entire runtime.', 'blue')) kill_self = self._kill_main_thread for", "needed).\"\"\" print(termcolor.colored('\\nKilling entire runtime.', 'blue')) kill_self = self._kill_main_thread for workers", "frame=None: self._stop_by_user()) self._stop_main_thread = stop_main_thread if register_in_thread: _WORKER_MANAGERS.manager = self", "a signal handler.\"\"\" return signal.signal(sig, handler) def remove_signal_handler(sig, handler): return", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "int): \"\"\"Registers already started worker process. 
Args: name: Name of", "current process.\"\"\" parent = psutil.Process(pid) for process in parent.children(recursive=True): try:", "= worker.wait(0) active = False if res and not self._first_failure", "self.wait(raise_error=False) def join(self): self.wait() def wait(self, labels_to_wait_for: Optional[Sequence[Text]] = None,", "worker's group. command: Command to execute in the worker. env:", "has_workers = False for label in self._active_workers: still_active = []", "failure. return_on_first_completed: Whether to return upon the first completed (or", "with self._mutex: if self._first_failure: raise self._first_failure def _check_workers(self): \"\"\"Checks status", "remove_signal_handler(signal.SIGALRM, self._sigalrm_handler) def stop_and_wait(self): \"\"\"Requests stopping all workers and wait", "termcolor FLAGS = flags.FLAGS ThreadWorker = collections.namedtuple('ThreadWorker', ['thread', 'future']) _WORKER_MANAGERS", "of the process to monitor. \"\"\" with self._mutex: self._workers_count[name] +=", "still_active = [] for worker in self._active_workers[label]: active = True", "self._send_exception(worker) elif isinstance(worker, subprocess.Popen): worker.send_signal(signal.SIGTERM) else: # Notify all workers", "def _stop_or_kill(self): \"\"\"Stops all workers; kills them if they don't", "print( termcolor.colored(f'Waiting for workers to stop for {pending_secs}s.', 'blue'), end='\\r')", "= 0 self._alarm_enabled = False self._kill_main_thread = kill_main_thread self._stop_event =", "0: still_running = [ label for label in self._active_workers if", "of the workers' group. pid: Pid of the process to", "external process, so clean # termination is assumed. res =", "frame=None): \"\"\"Handles SIGTERM by stopping the workers.\"\"\" if callable(self._sigterm_handler): self._sigterm_handler(sig,", "possible when thread workers run in the same process. 
register_in_thread:", "= flags.FLAGS ThreadWorker = collections.namedtuple('ThreadWorker', ['thread', 'future']) _WORKER_MANAGERS = threading.local()", "governing permissions and # limitations under the License. \"\"\"WorkerManager handles", "without killing the process. kill_self = True else: self._kill_process_tree(worker.pid) if", "processes of the current process.\"\"\" parent = psutil.Process(pid) for process", "stop and schedule delayed termination.\"\"\" if not self._stop_event.is_set(): self._stop_event.set() try:", "is not None: remove_signal_handler(signal.SIGTERM, self._sigterm_handler) self._sigterm_handler = None if self._sigquit_handler", "for worker in workers: if isinstance(worker, ThreadWorker): if self._stop_counter ==", "from absl import flags from absl import logging from absl.testing", "register_signal_handler(signal.SIGTERM, self._sigterm) self._sigquit_handler = register_signal_handler(signal.SIGQUIT, self._sigquit) if handle_user_stop: register_signal_handler( signal.SIGINT,", "self._kill() def wait_for_stop(self): \"\"\"Blocks until managed runtime is being terminated.\"\"\"", "use this file except in compliance with the License. #", "env: Environment variables to set for the worker. **kwargs: Other", "= False except psutil.TimeoutExpired: pass if active: has_workers = True", "process.name() != 'bash' and 'envelope_' not in process.name(): try: worker_found", "reserved. # # Licensed under the Apache License, Version 2.0", "a new thread worker. 
Args: name: Name of the worker", "t, n: threading.Thread(target=t, name=n) thread = builder(run_inner, name) thread.setDaemon(True) thread.start()", "= False if res and not self._first_failure and not self._stop_counter:", "is being terminated.\"\"\" self._stop_event.wait() def thread_worker(self, name, function): \"\"\"Registers and", "thread/process if needed).\"\"\" print(termcolor.colored('\\nKilling entire runtime.', 'blue')) kill_self = self._kill_main_thread", "0: if self._termination_notice_secs > 0: still_running = [ label for", "global _HAS_MAIN_MANAGER # Make the first created worker manager the", "threading.Lock() self._termination_notice_secs = -1 handle_user_stop = False global _HAS_MAIN_MANAGER #", "parent = psutil.Process(pid) for process in parent.children(recursive=True): try: process.send_signal(signal.SIGKILL) except", "def _check_workers(self): \"\"\"Checks status of running workers, terminate runtime in", "label in self._active_workers: still_active = [] for worker in self._active_workers[label]:", "kill_self = self._kill_main_thread for workers in self._active_workers.values(): for worker in", "try: worker.future.result() except BaseException as e: if not self._first_failure and", "is not None: remove_signal_handler(signal.SIGQUIT, self._sigquit_handler) self._sigquit_handler = None def _sigterm(self,", "after a test.\"\"\" with self._mutex: self._check_workers() self._stop() self._disable_signals() self.wait(raise_error=False) with", "example: start_server() lp.wait_for_stop() stop_server() checkpoint() \"\"\" get_worker_manager().wait_for_stop() class WorkerManager: \"\"\"Encapsulates", "self._alarm_enabled = False self._kill_main_thread = kill_main_thread self._stop_event = threading.Event() self._main_thread", "import collections from concurrent import futures import ctypes import os", "not possible when thread workers run in the same process.", "self._mutex: if self._first_failure: raise self._first_failure def _check_workers(self): 
\"\"\"Checks status of", "pass if self._stop_main_thread: res = ctypes.pythonapi.PyThreadState_SetAsyncExc( ctypes.c_long(threading.main_thread().ident), ctypes.py_object(SystemExit)) assert res", "try: # We can't obtain return code of external process,", "Args: name: Name of the worker's group. command: Command to", "self._sigquit_handler) self._sigquit_handler = None def _sigterm(self, sig=None, frame=None): \"\"\"Handles SIGTERM", "in compliance with the License. # You may obtain a", "register_existing_process(self, name: str, pid: int): \"\"\"Registers already started worker process.", "software # distributed under the License is distributed on an", "BaseException as e: future.set_exception(e) builder = lambda t, n: threading.Thread(target=t,", "command: Command to execute in the worker. env: Environment variables", "try: active_workers = True while active_workers: with self._mutex: self._check_workers() active_workers", "True self._sigalrm_handler = register_signal_handler( signal.SIGALRM, lambda sig=None, frame=None: self._stop_or_kill()) except", "raise failure' def _stop_or_kill(self): \"\"\"Stops all workers; kills them if", "wait(self, labels_to_wait_for: Optional[Sequence[Text]] = None, raise_error=True, return_on_first_completed=False): \"\"\"Waits for workers", "_HAS_MAIN_MANAGER = True self._active_workers = collections.defaultdict(list) self._workers_count = collections.defaultdict(lambda: 0)", "an exception upon any worker failure. 
return_on_first_completed: Whether to return", "is not possible when thread workers run in the same", "stopping all workers and wait for termination.\"\"\" with self._mutex: self._stop()", "and self._first_failure and not self._stop_counter: self._stop() elif not has_workers: self._disable_alarm()", "if not worker.thread.is_alive(): worker.thread.join() if not self._stop_counter: try: worker.future.result() except", "the first created worker manager the main manager, which handles", "= True still_active.append(worker) self._active_workers[label] = still_active if has_workers and self._first_failure", "self._first_failure self._first_failure = None raise failure for label in labels_to_wait_for", "to be passed to `subprocess.Popen`. \"\"\" with self._mutex: process =", "process.send_signal(signal.SIGTERM) except psutil.NoSuchProcess: pass if not worker_found: # No more", "raise_error: failure = self._first_failure self._first_failure = None raise failure for", "None) assert manager, 'Worker manager is not available in the", "killing the process. kill_self = True else: self._kill_process_tree(worker.pid) if kill_self:", "if self._active_workers[label]: active_workers = True if (return_on_first_completed and len(self._active_workers[label]) <", "with self._mutex: future = futures.Future() def run_inner(f=function, future=future, manager=self): _WORKER_MANAGERS.manager", "self._stop_event.wait() def thread_worker(self, name, function): \"\"\"Registers and start a new", "process_worker(self, name, command, env=None, **kwargs): \"\"\"Adds process worker to the", "self._kill_main_thread = kill_main_thread self._stop_event = threading.Event() self._main_thread = threading.current_thread().ident self._sigterm_handler", "{pending_secs}s.', 'blue'), end='\\r') self._stop_counter += 1 for workers in self._active_workers.values():", "worker in self._active_workers[label]: active = True if isinstance(worker, ThreadWorker): if", "with the License. 
# You may obtain a copy of", "def wait(self, labels_to_wait_for: Optional[Sequence[Text]] = None, raise_error=True, return_on_first_completed=False): \"\"\"Waits for", "being terminated.\"\"\" self._stop_event.wait() def thread_worker(self, name, function): \"\"\"Registers and start", "thread and process-based runtimes.\"\"\" import atexit import collections from concurrent", "pass parent.send_signal(signal.SIGKILL) def _kill(self): \"\"\"Kills all workers (and main thread/process", "express or implied. # See the License for the specific", "handler.\"\"\" return signal.signal(sig, handler) def remove_signal_handler(sig, handler): return signal.signal(sig, handler)", "they don't stop on time.\"\"\" pending_secs = self._termination_notice_secs - self._stop_counter", "except in compliance with the License. # You may obtain", "or self._active_workers.keys(): if self._active_workers[label]: active_workers = True if (return_on_first_completed and", "to false try not to kill the launcher while killing", "notify the worker. self._send_exception(worker) def process_worker(self, name, command, env=None, **kwargs):", "We can't obtain return code of external process, so clean", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "return_on_first_completed=False): \"\"\"Waits for workers to finish. Args: labels_to_wait_for: If supplied,", "the launcher while killing workers. This is not possible when", "obtain return code of external process, so clean # termination", "command, env=None, **kwargs): \"\"\"Adds process worker to the runtime. Args:", "if self._alarm_enabled: self._alarm_enabled = False signal.alarm(0) remove_signal_handler(signal.SIGALRM, self._sigalrm_handler) def stop_and_wait(self):", "runtime in case of errors.\"\"\" has_workers = False for label", "assert res < 2, 'Exception raise failure' if pending_secs >=", "CONDITIONS OF ANY KIND, either express or implied. 
# See", "import os import signal import subprocess import threading import time", "self._sigquit_handler is not None: remove_signal_handler(signal.SIGQUIT, self._sigquit_handler) self._sigquit_handler = None def", "self._active_workers[name].append(psutil.Process(pid)) def _stop_by_user(self): \"\"\"Handles stopping of the runtime by a", "workers in self._active_workers.values(): for worker in workers: if isinstance(worker, ThreadWorker):", "self._active_workers[name].append(worker) if self._stop_event.is_set(): # Runtime is terminating, so notify the", "ThreadWorker): if self._stop_counter == 1: self._send_exception(worker) elif isinstance(worker, subprocess.Popen): worker.send_signal(signal.SIGTERM)", "pending_secs >= 0: signal.alarm(1) def _stop(self): \"\"\"Requests all workers to", "register_signals: self._sigterm_handler = register_signal_handler(signal.SIGTERM, self._sigterm) self._sigquit_handler = register_signal_handler(signal.SIGQUIT, self._sigquit) if", "code of external process, so clean # termination is assumed.", "kill_main_thread self._stop_event = threading.Event() self._main_thread = threading.current_thread().ident self._sigterm_handler = None", "termination. Asking workers to stop.', 'blue')) print(termcolor.colored('Press CTRL+C to terminate", "f'Worker groups that did not terminate in time: {still_running}', 'red'))", "absl import logging from absl.testing import absltest from launchpad import", "time from typing import Optional, Sequence, Text from absl import", "def __init__( self, stop_main_thread=False, kill_main_thread=True, register_in_thread=False, register_signals=True): \"\"\"Initializes a WorkerManager.", "\"\"\"Initializes a WorkerManager. 
Args: stop_main_thread: Should main thread be notified", "= None def _sigterm(self, sig=None, frame=None): \"\"\"Handles SIGTERM by stopping", "self._stop() def cleanup_after_test(self, test_case: absltest.TestCase): \"\"\"Cleanups runtime after a test.\"\"\"", "register_in_thread: TODO register_signals: Whether or not to register signal handlers.", "self._mutex: future = futures.Future() def run_inner(f=function, future=future, manager=self): _WORKER_MANAGERS.manager =", "group. pid: Pid of the process to monitor. \"\"\" with", "= worker.children(recursive=True) worker_found = False for process in children: if", "self._kill()) self._stop() def _kill_process_tree(self, pid): \"\"\"Kills all child processes of", "_kill_process_tree(self, pid): \"\"\"Kills all child processes of the current process.\"\"\"", "if self._active_workers[label] ] print( termcolor.colored( f'Worker groups that did not", "schedule delayed termination.\"\"\" if not self._stop_event.is_set(): self._stop_event.set() try: if self._termination_notice_secs", "import psutil import termcolor FLAGS = flags.FLAGS ThreadWorker = collections.namedtuple('ThreadWorker',", "= threading.Event() self._main_thread = threading.current_thread().ident self._sigterm_handler = None self._sigquit_handler =", "env=env or {}, **kwargs) self._workers_count[name] += 1 self._active_workers[name].append(process) def register_existing_process(self,", "in self._active_workers.values(): for worker in workers: if isinstance(worker, ThreadWorker): if", "ctypes.c_long(worker.thread.ident), ctypes.py_object(SystemExit)) assert res < 2, 'Exception raise failure' def", "elif isinstance(worker, subprocess.Popen): worker.send_signal(signal.SIGTERM) else: # Notify all workers running", "self._active_workers if self._active_workers[label] ] print( termcolor.colored( f'Worker groups that did", "= kill_main_thread self._stop_event = threading.Event() self._main_thread = threading.current_thread().ident self._sigterm_handler =", 
"process in children: if process.name() != 'bash' and 'envelope_' not", "self._check_workers() active_workers = False if self._first_failure and raise_error: failure =", "\"\"\"Registers a signal handler.\"\"\" return signal.signal(sig, handler) def remove_signal_handler(sig, handler):", "the workers.\"\"\" if callable(self._sigterm_handler): self._sigterm_handler(sig, frame) self._stop() def _sigquit(self, sig=None,", "self._stop_counter: self._first_failure = RuntimeError('One of the workers failed.') except subprocess.TimeoutExpired:", "None def _sigterm(self, sig=None, frame=None): \"\"\"Handles SIGTERM by stopping the", "termination. kill_main_thread: When set to false try not to kill", "raise self._first_failure def _check_workers(self): \"\"\"Checks status of running workers, terminate", "kill_main_thread: When set to false try not to kill the", "runtime by a user.\"\"\" if self._termination_notice_secs != 0: print( termcolor.colored(", "Raises: RuntimeError: if any worker raises an exception. \"\"\" while", "FLAGS = flags.FLAGS ThreadWorker = collections.namedtuple('ThreadWorker', ['thread', 'future']) _WORKER_MANAGERS =", "variables to set for the worker. 
**kwargs: Other parameters to", "RuntimeError('One of the workers failed.') except subprocess.TimeoutExpired: pass else: try:", "This is not possible when thread workers run in the", "terminated.\"\"\" self._stop_event.wait() def thread_worker(self, name, function): \"\"\"Registers and start a", "def run_inner(f=function, future=future, manager=self): _WORKER_MANAGERS.manager = manager try: future.set_result(f()) except", "termcolor.colored( f'Worker groups that did not terminate in time: {still_running}',", "time.\"\"\" pending_secs = self._termination_notice_secs - self._stop_counter if pending_secs == 0:", "0: self._alarm_enabled = True self._sigalrm_handler = register_signal_handler( signal.SIGALRM, lambda sig=None,", "not self._stop_counter: self._first_failure = e active = False elif isinstance(worker,", "_send_exception(self, worker): res = ctypes.pythonapi.PyThreadState_SetAsyncExc( ctypes.c_long(worker.thread.ident), ctypes.py_object(SystemExit)) assert res <", "= register_signal_handler( signal.SIGALRM, lambda sig=None, frame=None: self._stop_or_kill()) except ValueError: #", "return self._stop_or_kill() def _disable_alarm(self): if self._alarm_enabled: self._alarm_enabled = False signal.alarm(0)", "= still_active if has_workers and self._first_failure and not self._stop_counter: self._stop()", "concurrent import futures import ctypes import os import signal import", "same process. register_in_thread: TODO register_signals: Whether or not to register", "the run, for example: start_server() lp.wait_for_stop() stop_server() checkpoint() \"\"\" get_worker_manager().wait_for_stop()", "if not self._stop_event.is_set(): self._stop_event.set() try: if self._termination_notice_secs > 0: self._alarm_enabled", "self._active_workers.values(): for worker in workers: if isinstance(worker, ThreadWorker): if self._stop_counter", "the workers' group. pid: Pid of the process to monitor.", "process. 
kill_self = True else: self._kill_process_tree(worker.pid) if kill_self: self._kill_process_tree(os.getpid()) def", "\"\"\" self._mutex = threading.Lock() self._termination_notice_secs = -1 handle_user_stop = False", "not self._stop_counter: try: worker.future.result() except BaseException as e: if not", ">= 0: print( termcolor.colored(f'Waiting for workers to stop for {pending_secs}s.',", "else: # Notify all workers running under a proxy process.", "pending_secs == 0: if self._termination_notice_secs > 0: still_running = [", "def wait_for_stop(self): \"\"\"Blocks until managed runtime is being terminated.\"\"\" self._stop_event.wait()", "name: Name of the worker group. function: Entrypoint function to", "self._stop_counter if pending_secs == 0: if self._termination_notice_secs > 0: still_running", "\"\"\"Requests all workers to stop and schedule delayed termination.\"\"\" if", "builder = lambda t, n: threading.Thread(target=t, name=n) thread = builder(run_inner,", "be used to perform cleanup at the end of the", "= psutil.Process(pid) for process in parent.children(recursive=True): try: process.send_signal(signal.SIGKILL) except psutil.NoSuchProcess:", "the worker. self._send_exception(worker) def process_worker(self, name, command, env=None, **kwargs): \"\"\"Adds", "\"\"\"Checks status of running workers, terminate runtime in case of", "has_workers and self._first_failure and not self._stop_counter: self._stop() elif not has_workers:", "to monitor. 
\"\"\" with self._mutex: self._workers_count[name] += 1 self._active_workers[name].append(psutil.Process(pid)) def", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "\"\"\"Blocks until managed runtime is being terminated.\"\"\" self._stop_event.wait() def thread_worker(self,", "sig, frame: self._kill()) self._stop() def _kill_process_tree(self, pid): \"\"\"Kills all child", "if self._stop_event.is_set(): # Runtime is terminating, so notify the worker.", "self._workers_count[name] += 1 worker = ThreadWorker(thread=thread, future=future) self._active_workers[name].append(worker) if self._stop_event.is_set():", "with self._mutex: self._check_workers() active_workers = False if self._first_failure and raise_error:", "psutil.Process(os.getpid()).send_signal(signal.SIGTERM) return self._stop_or_kill() def _disable_alarm(self): if self._alarm_enabled: self._alarm_enabled = False", "except psutil.TimeoutExpired: pass if active: has_workers = True still_active.append(worker) self._active_workers[label]", "all workers (and main thread/process if needed).\"\"\" print(termcolor.colored('\\nKilling entire runtime.',", "raises an exception. \"\"\" while True: try: active_workers = True", "Name of the worker group. function: Entrypoint function to execute", "return except SystemExit: self._stop() def cleanup_after_test(self, test_case: absltest.TestCase): \"\"\"Cleanups runtime", "start_server() lp.wait_for_stop() stop_server() checkpoint() \"\"\" get_worker_manager().wait_for_stop() class WorkerManager: \"\"\"Encapsulates running", "of the worker group. 
function: Entrypoint function to execute in", "False elif isinstance(worker, subprocess.Popen): try: res = worker.wait(0) active =", "not self._stop_event.is_set(): self._stop_event.set() try: if self._termination_notice_secs > 0: self._alarm_enabled =", "worker.future.result() except BaseException as e: if not self._first_failure and not", "Version 2.0 (the \"License\"); # you may not use this", "worker.wait(0) active = False if res and not self._first_failure and", "self def _disable_signals(self): self._disable_alarm() if self._sigterm_handler is not None: remove_signal_handler(signal.SIGTERM,", "a signal handler but not in the # main thread.", "self._active_workers[label]: active = True if isinstance(worker, ThreadWorker): if not worker.thread.is_alive():", "handler) def remove_signal_handler(sig, handler): return signal.signal(sig, handler) def wait_for_stop(): \"\"\"Blocks", "= False for process in children: if process.name() != 'bash'", "active = False if res and not self._first_failure and not", "self._kill() return if pending_secs >= 0: print( termcolor.colored(f'Waiting for workers", "psutil.NoSuchProcess: pass if not worker_found: # No more workers running,", "> 0: still_running = [ label for label in self._active_workers", "under a proxy process. children = worker.children(recursive=True) worker_found = False", "self._stop_by_user()) self._stop_main_thread = stop_main_thread if register_in_thread: _WORKER_MANAGERS.manager = self def", "e active = False elif isinstance(worker, subprocess.Popen): try: res =", "labels_to_wait_for: Optional[Sequence[Text]] = None, raise_error=True, return_on_first_completed=False): \"\"\"Waits for workers to", "self._active_workers.keys(): if self._active_workers[label]: active_workers = True if (return_on_first_completed and len(self._active_workers[label])", "by applicable law or agreed to in writing, software #", "main manager, which handles # signals. 
if not _HAS_MAIN_MANAGER: self._termination_notice_secs", "'Worker manager is not available in the current thread' return", "if not worker_found: # No more workers running, so we", "flags as lp_flags import psutil import termcolor FLAGS = flags.FLAGS", "monitor. \"\"\" with self._mutex: self._workers_count[name] += 1 self._active_workers[name].append(psutil.Process(pid)) def _stop_by_user(self):", "failure for label in labels_to_wait_for or self._active_workers.keys(): if self._active_workers[label]: active_workers", "not to register signal handlers. \"\"\" self._mutex = threading.Lock() self._termination_notice_secs", "while active_workers: with self._mutex: self._check_workers() active_workers = False if self._first_failure", "'blue')) signal.signal(signal.SIGINT, lambda sig, frame: self._kill()) self._stop() def _kill_process_tree(self, pid):", "of errors.\"\"\" has_workers = False for label in self._active_workers: still_active", "to the runtime. Args: name: Name of the worker's group.", "self._active_workers = collections.defaultdict(list) self._workers_count = collections.defaultdict(lambda: 0) self._first_failure = None", "thread workers run in the same process. register_in_thread: TODO register_signals:", "signal handler but not in the # main thread. Send", "termination.\"\"\" with self._mutex: self._stop() self.wait(raise_error=False) def join(self): self.wait() def wait(self,", "absl.testing import absltest from launchpad import flags as lp_flags import", "= builder(run_inner, name) thread.setDaemon(True) thread.start() self._workers_count[name] += 1 worker =", "kill the proxy itself. 
try: worker.send_signal(signal.SIGKILL) except psutil.NoSuchProcess: pass if", "applicable law or agreed to in writing, software # distributed", "= getattr(_WORKER_MANAGERS, 'manager', None) assert manager, 'Worker manager is not", "workers and wait for termination.\"\"\" with self._mutex: self._stop() self.wait(raise_error=False) def", "name, function): \"\"\"Registers and start a new thread worker. Args:", "killing workers. This is not possible when thread workers run", "entire runtime.', 'blue')) kill_self = self._kill_main_thread for workers in self._active_workers.values():", "stop_and_wait(self): \"\"\"Requests stopping all workers and wait for termination.\"\"\" with", "termination is assumed. res = worker.wait(0) active = False except", "'manager', None) assert manager, 'Worker manager is not available in", "in a worker. \"\"\" with self._mutex: future = futures.Future() def", "a thread without killing the process. kill_self = True else:", "absltest.TestCase): \"\"\"Cleanups runtime after a test.\"\"\" with self._mutex: self._check_workers() self._stop()", "workers.\"\"\" if callable(self._sigterm_handler): self._sigterm_handler(sig, frame) self._stop() def _sigquit(self, sig=None, frame=None):", "self._sigquit_handler = None self._sigalrm_handler = None if register_signals: self._sigterm_handler =", "def _kill(self): \"\"\"Kills all workers (and main thread/process if needed).\"\"\"", "def register_signal_handler(sig, handler): \"\"\"Registers a signal handler.\"\"\" return signal.signal(sig, handler)", "TODO register_signals: Whether or not to register signal handlers. \"\"\"", "and not self._first_failure and not self._stop_counter: self._first_failure = RuntimeError('One of", "= self._first_failure self._first_failure = None raise failure for label in", "try not to kill the launcher while killing workers. This", "for the worker. **kwargs: Other parameters to be passed to", "in the same process. 
register_in_thread: TODO register_signals: Whether or not", "# You may obtain a copy of the License at", "import subprocess import threading import time from typing import Optional,", "'blue'), end='\\r') self._stop_counter += 1 for workers in self._active_workers.values(): for", "def _sigquit(self, sig=None, frame=None): if callable(self._sigquit_handler): self._sigquit_handler(sig, frame) self._kill() def", "self._stop_counter: try: worker.future.result() except BaseException as e: if not self._first_failure", "is assumed. res = worker.wait(0) active = False except psutil.TimeoutExpired:", "\"\"\"WorkerManager handles thread and process-based runtimes.\"\"\" import atexit import collections", "in labels_to_wait_for or self._active_workers.keys(): if self._active_workers[label]: active_workers = True if", "= threading.local() _HAS_MAIN_MANAGER = False def get_worker_manager(): manager = getattr(_WORKER_MANAGERS,", ">= 0: signal.alarm(1) def _stop(self): \"\"\"Requests all workers to stop", "= True _HAS_MAIN_MANAGER = True self._active_workers = collections.defaultdict(list) self._workers_count =", "'blue')) print(termcolor.colored('Press CTRL+C to terminate immediately.', 'blue')) signal.signal(signal.SIGINT, lambda sig,", "\"\"\"Waits for workers to finish. 
Args: labels_to_wait_for: If supplied, only", "status of running workers, terminate runtime in case of errors.\"\"\"", "self._first_failure and not self._stop_counter: self._first_failure = RuntimeError('One of the workers", "ThreadWorker(thread=thread, future=future) self._active_workers[name].append(worker) if self._stop_event.is_set(): # Runtime is terminating, so", "!= 'bash' and 'envelope_' not in process.name(): try: worker_found =", "self._alarm_enabled = True self._sigalrm_handler = register_signal_handler( signal.SIGALRM, lambda sig=None, frame=None:", "True while active_workers: with self._mutex: self._check_workers() active_workers = False if", "!= 0: print( termcolor.colored( 'User-requested termination. Asking workers to stop.',", "print( termcolor.colored( 'User-requested termination. Asking workers to stop.', 'blue')) print(termcolor.colored('Press", "and # limitations under the License. \"\"\"WorkerManager handles thread and", "checkpoint() \"\"\" get_worker_manager().wait_for_stop() class WorkerManager: \"\"\"Encapsulates running threads and processes", "kill the launcher while killing workers. This is not possible", "register_in_thread: _WORKER_MANAGERS.manager = self def _disable_signals(self): self._disable_alarm() if self._sigterm_handler is", "process. children = worker.children(recursive=True) worker_found = False for process in", "running workers, terminate runtime in case of errors.\"\"\" has_workers =", "workers. This is not possible when thread workers run in", "self._sigterm_handler = register_signal_handler(signal.SIGTERM, self._sigterm) self._sigquit_handler = register_signal_handler(signal.SIGQUIT, self._sigquit) if handle_user_stop:", "self._workers_count = collections.defaultdict(lambda: 0) self._first_failure = None self._stop_counter = 0", "self._workers_count[label]): return time.sleep(0.1) return except SystemExit: self._stop() def cleanup_after_test(self, test_case:", "register signal handlers. 
\"\"\" self._mutex = threading.Lock() self._termination_notice_secs = -1", "runtime after a test.\"\"\" with self._mutex: self._check_workers() self._stop() self._disable_signals() self.wait(raise_error=False)", "subprocess.Popen(command, env=env or {}, **kwargs) self._workers_count[name] += 1 self._active_workers[name].append(process) def", "terminate in time: {still_running}', 'red')) self._kill() return if pending_secs >=", "notified about termination. kill_main_thread: When set to false try not", "manager=self): _WORKER_MANAGERS.manager = manager try: future.set_result(f()) except BaseException as e:", "Text from absl import flags from absl import logging from", "\"License\"); # you may not use this file except in", "BaseException as e: if not self._first_failure and not self._stop_counter: self._first_failure", "manager, which handles # signals. if not _HAS_MAIN_MANAGER: self._termination_notice_secs =", "failure = self._first_failure self._first_failure = None raise failure for label", "WorkerManager: \"\"\"Encapsulates running threads and processes of a Launchpad Program.\"\"\"", "collections.namedtuple('ThreadWorker', ['thread', 'future']) _WORKER_MANAGERS = threading.local() _HAS_MAIN_MANAGER = False def", "self._first_failure = e active = False elif isinstance(worker, subprocess.Popen): try:", "failed.') except subprocess.TimeoutExpired: pass else: try: # We can't obtain", "termination of the node's program is requested. 
Can be used", "except BaseException as e: future.set_exception(e) builder = lambda t, n:", "psutil.NoSuchProcess: pass if self._stop_main_thread: res = ctypes.pythonapi.PyThreadState_SetAsyncExc( ctypes.c_long(threading.main_thread().ident), ctypes.py_object(SystemExit)) assert", "lambda sig=None, frame=None: self._stop_by_user()) self._stop_main_thread = stop_main_thread if register_in_thread: _WORKER_MANAGERS.manager", "Sequence, Text from absl import flags from absl import logging", "termcolor.colored(f'Waiting for workers to stop for {pending_secs}s.', 'blue'), end='\\r') self._stop_counter", "threading import time from typing import Optional, Sequence, Text from", "wait_for_stop(self): \"\"\"Blocks until managed runtime is being terminated.\"\"\" self._stop_event.wait() def", "if they don't stop on time.\"\"\" pending_secs = self._termination_notice_secs -", "self._stop_counter += 1 for workers in self._active_workers.values(): for worker in", "main thread. psutil.Process(os.getpid()).send_signal(signal.SIGTERM) return self._stop_or_kill() def _disable_alarm(self): if self._alarm_enabled: self._alarm_enabled", "delayed termination.\"\"\" if not self._stop_event.is_set(): self._stop_event.set() try: if self._termination_notice_secs >", "= worker.wait(0) active = False except psutil.TimeoutExpired: pass if active:", "children: if process.name() != 'bash' and 'envelope_' not in process.name():", "runtimes.\"\"\" import atexit import collections from concurrent import futures import" ]
[ "plugins from {lib_path}') success = True else: logging.warning(f'Could not load", "bool: \"\"\"Load TensorRT plugins library. Returns: bool: True if TensorRT", "ctypes import glob import logging import os def get_ops_path() ->", "0 else '' return lib_path def load_tensorrt_plugin() -> bool: \"\"\"Load", "plugin library. Returns: str: A path of the TensorRT plugin", "str: \"\"\"Get path of the TensorRT plugin library. Returns: str:", "wildcard = os.path.abspath( os.path.join( os.path.dirname(__file__), '../../../build/lib/libmmdeploy_tensorrt_ops.so')) paths = glob.glob(wildcard) lib_path", "def load_tensorrt_plugin() -> bool: \"\"\"Load TensorRT plugins library. Returns: bool:", "-> bool: \"\"\"Load TensorRT plugins library. Returns: bool: True if", "plugins. \\ Because the file does not exist: {lib_path}') return", "\"\"\"Load TensorRT plugins library. Returns: bool: True if TensorRT plugin", "of the TensorRT plugin library. Returns: str: A path of", "glob import logging import os def get_ops_path() -> str: \"\"\"Get", "logging import os def get_ops_path() -> str: \"\"\"Get path of", "os def get_ops_path() -> str: \"\"\"Get path of the TensorRT", "os.path.abspath( os.path.join( os.path.dirname(__file__), '../../../build/lib/libmmdeploy_tensorrt_ops.so')) paths = glob.glob(wildcard) lib_path = paths[0]", "glob.glob(wildcard) lib_path = paths[0] if len(paths) > 0 else ''", "len(paths) > 0 else '' return lib_path def load_tensorrt_plugin() ->", "\"\"\"Get path of the TensorRT plugin library. Returns: str: A", "os.path.exists(lib_path): ctypes.CDLL(lib_path) logging.info(f'Successfully loaded tensorrt plugins from {lib_path}') success =", "not load the library of tensorrt plugins. \\ Because the", "plugin library. \"\"\" wildcard = os.path.abspath( os.path.join( os.path.dirname(__file__), '../../../build/lib/libmmdeploy_tensorrt_ops.so')) paths", "load_tensorrt_plugin() -> bool: \"\"\"Load TensorRT plugins library. 
Returns: bool: True", "logging.warning(f'Could not load the library of tensorrt plugins. \\ Because", "OpenMMLab. All rights reserved. import ctypes import glob import logging", "lib_path def load_tensorrt_plugin() -> bool: \"\"\"Load TensorRT plugins library. Returns:", "from {lib_path}') success = True else: logging.warning(f'Could not load the", "import ctypes import glob import logging import os def get_ops_path()", "plugin library is successfully loaded. \"\"\" lib_path = get_ops_path() success", "= paths[0] if len(paths) > 0 else '' return lib_path", "True if TensorRT plugin library is successfully loaded. \"\"\" lib_path", "success = False if os.path.exists(lib_path): ctypes.CDLL(lib_path) logging.info(f'Successfully loaded tensorrt plugins", "(c) OpenMMLab. All rights reserved. import ctypes import glob import", "lib_path = get_ops_path() success = False if os.path.exists(lib_path): ctypes.CDLL(lib_path) logging.info(f'Successfully", "library is successfully loaded. \"\"\" lib_path = get_ops_path() success =", "path of the TensorRT plugin library. \"\"\" wildcard = os.path.abspath(", "All rights reserved. import ctypes import glob import logging import", "library. \"\"\" wildcard = os.path.abspath( os.path.join( os.path.dirname(__file__), '../../../build/lib/libmmdeploy_tensorrt_ops.so')) paths =", "paths = glob.glob(wildcard) lib_path = paths[0] if len(paths) > 0", "def get_ops_path() -> str: \"\"\"Get path of the TensorRT plugin", "# Copyright (c) OpenMMLab. All rights reserved. import ctypes import", "Returns: bool: True if TensorRT plugin library is successfully loaded.", "tensorrt plugins. \\ Because the file does not exist: {lib_path}')", "reserved. import ctypes import glob import logging import os def", "the library of tensorrt plugins. \\ Because the file does", "of tensorrt plugins. 
\\ Because the file does not exist:", "= False if os.path.exists(lib_path): ctypes.CDLL(lib_path) logging.info(f'Successfully loaded tensorrt plugins from", "TensorRT plugins library. Returns: bool: True if TensorRT plugin library", "Returns: str: A path of the TensorRT plugin library. \"\"\"", "successfully loaded. \"\"\" lib_path = get_ops_path() success = False if", "plugins library. Returns: bool: True if TensorRT plugin library is", "lib_path = paths[0] if len(paths) > 0 else '' return", "get_ops_path() success = False if os.path.exists(lib_path): ctypes.CDLL(lib_path) logging.info(f'Successfully loaded tensorrt", "import logging import os def get_ops_path() -> str: \"\"\"Get path", "'../../../build/lib/libmmdeploy_tensorrt_ops.so')) paths = glob.glob(wildcard) lib_path = paths[0] if len(paths) >", "is successfully loaded. \"\"\" lib_path = get_ops_path() success = False", "loaded. \"\"\" lib_path = get_ops_path() success = False if os.path.exists(lib_path):", "<gh_stars>1-10 # Copyright (c) OpenMMLab. All rights reserved. import ctypes", "the TensorRT plugin library. \"\"\" wildcard = os.path.abspath( os.path.join( os.path.dirname(__file__),", "TensorRT plugin library is successfully loaded. \"\"\" lib_path = get_ops_path()", "tensorrt plugins from {lib_path}') success = True else: logging.warning(f'Could not", "success = True else: logging.warning(f'Could not load the library of", "logging.info(f'Successfully loaded tensorrt plugins from {lib_path}') success = True else:", "path of the TensorRT plugin library. 
Returns: str: A path", "True else: logging.warning(f'Could not load the library of tensorrt plugins.", "os.path.dirname(__file__), '../../../build/lib/libmmdeploy_tensorrt_ops.so')) paths = glob.glob(wildcard) lib_path = paths[0] if len(paths)", "False if os.path.exists(lib_path): ctypes.CDLL(lib_path) logging.info(f'Successfully loaded tensorrt plugins from {lib_path}')", "if os.path.exists(lib_path): ctypes.CDLL(lib_path) logging.info(f'Successfully loaded tensorrt plugins from {lib_path}') success", "paths[0] if len(paths) > 0 else '' return lib_path def", "of the TensorRT plugin library. \"\"\" wildcard = os.path.abspath( os.path.join(", "else '' return lib_path def load_tensorrt_plugin() -> bool: \"\"\"Load TensorRT", "bool: True if TensorRT plugin library is successfully loaded. \"\"\"", "\\ Because the file does not exist: {lib_path}') return success", "= glob.glob(wildcard) lib_path = paths[0] if len(paths) > 0 else", "= get_ops_path() success = False if os.path.exists(lib_path): ctypes.CDLL(lib_path) logging.info(f'Successfully loaded", "{lib_path}') success = True else: logging.warning(f'Could not load the library", "if TensorRT plugin library is successfully loaded. \"\"\" lib_path =", "\"\"\" wildcard = os.path.abspath( os.path.join( os.path.dirname(__file__), '../../../build/lib/libmmdeploy_tensorrt_ops.so')) paths = glob.glob(wildcard)", "rights reserved. import ctypes import glob import logging import os", "> 0 else '' return lib_path def load_tensorrt_plugin() -> bool:", "library. Returns: str: A path of the TensorRT plugin library.", "load the library of tensorrt plugins. \\ Because the file", "A path of the TensorRT plugin library. \"\"\" wildcard =", "loaded tensorrt plugins from {lib_path}') success = True else: logging.warning(f'Could", "import glob import logging import os def get_ops_path() -> str:", "the TensorRT plugin library. Returns: str: A path of the", "TensorRT plugin library. 
Returns: str: A path of the TensorRT", "ctypes.CDLL(lib_path) logging.info(f'Successfully loaded tensorrt plugins from {lib_path}') success = True", "library of tensorrt plugins. \\ Because the file does not", "TensorRT plugin library. \"\"\" wildcard = os.path.abspath( os.path.join( os.path.dirname(__file__), '../../../build/lib/libmmdeploy_tensorrt_ops.so'))", "-> str: \"\"\"Get path of the TensorRT plugin library. Returns:", "get_ops_path() -> str: \"\"\"Get path of the TensorRT plugin library.", "library. Returns: bool: True if TensorRT plugin library is successfully", "os.path.join( os.path.dirname(__file__), '../../../build/lib/libmmdeploy_tensorrt_ops.so')) paths = glob.glob(wildcard) lib_path = paths[0] if", "'' return lib_path def load_tensorrt_plugin() -> bool: \"\"\"Load TensorRT plugins", "Copyright (c) OpenMMLab. All rights reserved. import ctypes import glob", "import os def get_ops_path() -> str: \"\"\"Get path of the", "else: logging.warning(f'Could not load the library of tensorrt plugins. \\", "= os.path.abspath( os.path.join( os.path.dirname(__file__), '../../../build/lib/libmmdeploy_tensorrt_ops.so')) paths = glob.glob(wildcard) lib_path =", "= True else: logging.warning(f'Could not load the library of tensorrt", "return lib_path def load_tensorrt_plugin() -> bool: \"\"\"Load TensorRT plugins library.", "str: A path of the TensorRT plugin library. \"\"\" wildcard", "\"\"\" lib_path = get_ops_path() success = False if os.path.exists(lib_path): ctypes.CDLL(lib_path)", "if len(paths) > 0 else '' return lib_path def load_tensorrt_plugin()" ]
[ "When planning for 1, 2, and 3 steps ahead, #", "torch.all(expected_q_values == q_values) def test_get_Q(self): NUM_ACTION = 2 MULTI_STEPS =", "1 NUM_ACTION = 6 expected_outcome = torch.tensor([[0], [1], [2], [3],", "acc_reward = tensor( [[ 0.], [ 1.], [ 10.], [", "# pyre-fixme[9]: action has type `FeatureData`; used as `Tensor`. action", "super().__init__() self.look_ahead_steps = look_ahead_steps def forward(self, state: torch.Tensor): \"\"\" Given", "10) < 1.0 assert abs(initial_state_q_values[1].item() - 5) < 1.0 if", "NUM_ACTION = next( iter(training_data) ).action.float_features.shape assert SEQ_LEN == 6 and", "= df[df[\"mdp_id\"] == mdp_id].sort_values(\"sequence_number\", ascending=True) if len(mdp) != SEQ_LEN: continue", "state_preprocessor, seq_len, action_dim, ) else: model_with_preprocessor = Seq2RewardWithPreprocessor( model, state_preprocessor,", "pl_trainer.fit(trainer, training_data) return trainer def eval_seq2reward_compress_model(eval_data, compress_model_trainer): SEQ_LEN, batch_size, NUM_ACTION", "= get_Q(seq2reward_network, state, all_permut) expected_q_values = torch.tensor([[11.0, 111.0], [11.0, 111.0]])", "= pl.Trainer(max_epochs=num_epochs, deterministic=True) pl_trainer.fit(trainer, training_data) return trainer def eval_seq2reward_compress_model(eval_data, compress_model_trainer):", "get_Q(seq2reward_network, state, all_permut) expected_q_values = torch.tensor([[11.0, 111.0], [11.0, 111.0]]) logger.info(f\"q_values:", "have different total rewards due to the missing # states", "1 \"\"\" # pyre-fixme[9]: action has type `FeatureData`; used as", "action = action.float_features.transpose(0, 1) action_indices = torch.argmax(action, dim=2).tolist() acc_reward =", "0, 1, ) num_batches = int(dataset_size / SEQ_LEN / NUM_MDP_PER_BATCH)", "batch in enumerate(eval_data): ( mse_loss, q_values, action_distribution, _, ) =", "N_eval eval_q_values = total_q_values / N_eval eval_action_distribution = total_action_distribution /", "model = 
FakeSeq2RewardNetwork() state_normalization_parameters = { i: NormalizationParameters( feature_type=DO_NOT_PREPROCESS, mean=0.0,", "NUM_ACTION, expected_outcome): # expected shape: SEQ_LEN, PERM_NUM, ACTION_DIM result =", "predict the probability of experiencing next n steps (1 <=n", "torch.utils.data import DataLoader logger = logging.getLogger(__name__) SEED = 0 STRING_GAME_TESTS", "acc_reward shape: batch_size, 1 \"\"\" # pyre-fixme[9]: action has type", "if batch_seq_count == batch_size: batches[batch_count] = rlt.MemoryNetworkInput( reward=batch_reward, action=rlt.FeatureData(float_features=batch_action), state=rlt.FeatureData(float_features=batch_state),", ") = eval_seq2reward_model(eval_data, seq2reward_trainer) assert abs(initial_state_q_values[0].item() - 10) < 1.0", "(1 <=n <= look_ahead_steps) For the test purpose, it outputs", "NUM_MDP_PER_BATCH, 1 ), 0, 1, ) num_batches = int(dataset_size /", "Gym from reagent.gym.utils import create_df_from_replay_buffer from reagent.models.seq2reward_model import Seq2RewardNetwork from", "all_step_state = torch.Tensor([list(s.values()) for s in mdp[\"state_features\"]]) all_step_action = torch.zeros_like(all_step_state)", "= torch.zeros_like(batch_reward) batch_action = torch.zeros_like(batch_action) batch_state = torch.zeros_like(batch_state) assert batch_count", "reagent.preprocessing.preprocessor import Preprocessor from reagent.training.utils import gen_permutations from reagent.training.world_model.compress_model_trainer import", "reagent.core.parameters import ( NormalizationData, NormalizationParameters, ProblemDomain, Seq2RewardTrainerParameters, ) from reagent.gym.envs", "= total_q_values / N_eval eval_action_distribution = total_action_distribution / N_eval return", "5 env = Gym(env_name=\"StringGame-v0\", set_max_steps=SEQ_LEN) df = create_df_from_replay_buffer( env=env, problem_domain=ProblemDomain.DISCRETE_ACTION,", ") else: model_with_preprocessor = Seq2RewardWithPreprocessor( model, state_preprocessor, 
seq_len, action_dim, )", "[[ 0.], [ 1.], [ 10.], [ 11.], [100.], [101.],", ") input_prototype = rlt.ServingFeatureData( float_features_with_presence=state_preprocessor.input_prototype(), id_list_features=FAKE_STATE_ID_LIST_FEATURES, id_score_list_features=FAKE_STATE_ID_SCORE_LIST_FEATURES, ) q_values =", "compress_eval_action_distribution, ) = eval_seq2reward_compress_model(eval_data, compress_model_trainer) assert compress_eval_mse_loss < 1e-5 assert", "[2], [3], [4], [5]]) self._test_gen_permutations(SEQ_LEN, NUM_ACTION, expected_outcome) def test_gen_permutations_seq_len_3_num_action_2(self): SEQ_LEN", "its affiliates. All rights reserved. import logging import os import", "pytorch_lightning as pl import torch import torch.nn as nn from", "batches = [None for _ in range(num_batches)] batch_count, batch_seq_count =", "plan_short_sequence): state_dim = 4 action_dim = 2 seq_len = 3", "i in range(1, state_dim) } state_preprocessor = Preprocessor(state_normalization_parameters, False) if", "SEQ_LEN = 6 NUM_ACTION = 2 NUM_MDP_PER_BATCH = 5 env", "1), dim=-1) assert torch.all(outcome == expected_outcome) @parameterized.expand(STRING_GAME_TESTS) @unittest.skipIf(\"SANDCASTLE\" in os.environ,", "= 2, acc_reward = tensor( [[ 0.], [ 1.], [", "__init__(self, look_ahead_steps): super().__init__() self.look_ahead_steps = look_ahead_steps def forward(self, state: torch.Tensor):", "= NUM_MDP_PER_BATCH time_diff = torch.ones(SEQ_LEN, batch_size) valid_step = SEQ_LEN *", "idx, batch in enumerate(eval_data): ( mse_loss, _, q_values, action_distribution, )", "compress_model_trainer) assert compress_eval_mse_loss < 1e-5 assert torch.all(eval_q_values - compress_eval_q_values <", "action_dim, ) else: model_with_preprocessor = Seq2RewardWithPreprocessor( model, state_preprocessor, seq_len, action_dim,", "= torch.ones(SEQ_LEN, batch_size) valid_step = torch.arange(SEQ_LEN, 0, -1).tile(NUM_MDP_PER_BATCH)[:, None] not_terminal", "action_indices = torch.argmax(action, dim=2).tolist() 
acc_reward = torch.tensor( list(map(lambda x: float(\"\".join(map(str,", "== 6 and NUM_ACTION == 2 compress_net_builder = FullyConnected(sizes=[8, 8])", "= total_mse_loss / N_eval eval_q_values = total_q_values / N_eval eval_action_distribution", "torch.manual_seed(SEED) training_data, eval_data = create_string_game_data( filter_short_sequence=filter_short_sequence ) seq2reward_trainer = train_seq2reward_model(training_data)", "n steps (1 <=n <= look_ahead_steps) For the test purpose,", "[ 10.], [ 11.], [100.], [101.], [110.], [111.]] ) Input", "expected shape: SEQ_LEN, PERM_NUM, ACTION_DIM result = gen_permutations(SEQ_LEN, NUM_ACTION) assert", "result = gen_permutations(SEQ_LEN, NUM_ACTION) assert result.shape == (SEQ_LEN, NUM_ACTION **", "are respectively: # [0, 1], [1, 11], [11, 111] #", "1), 0, 1) else: batch_size = NUM_MDP_PER_BATCH * SEQ_LEN time_diff", "0 total_q_values = torch.zeros(NUM_ACTION) total_action_distribution = torch.zeros(NUM_ACTION) for idx, batch", "total rewards due to the missing # states and actions", "None, ): \"\"\" Mimic I/O of Seq2RewardNetwork but return fake", "batch_reward = torch.zeros_like(batch_reward) batch_action = torch.zeros_like(batch_action) batch_state = torch.zeros_like(batch_state) assert", "batch_action = torch.zeros_like(batch_action) batch_state = torch.zeros_like(batch_state) assert batch_count == num_batches", "= gen_permutations(SEQ_LEN, NUM_ACTION) assert result.shape == (SEQ_LEN, NUM_ACTION ** SEQ_LEN,", "state_normalization_parameters = { i: NormalizationParameters( feature_type=DO_NOT_PREPROCESS, mean=0.0, stddev=1.0 ) for", "initial_state = torch.Tensor([[0, 0]]) initial_state_q_values = torch.squeeze( get_Q( seq2reward_trainer.seq2reward_network, initial_state,", "= 2 seq_len = 3 model = FakeSeq2RewardNetwork() state_normalization_parameters =", "mdp = df[df[\"mdp_id\"] == mdp_id].sort_values(\"sequence_number\", ascending=True) if len(mdp) != SEQ_LEN:", "batch in enumerate(eval_data): ( mse_loss, _, 
q_values, action_distribution, ) =", "np import pytorch_lightning as pl import torch import torch.nn as", "1], [1, 0, 0], [1, 0, 1], [1, 1, 0],", "), # fake, not used anyway not_terminal=not_terminal, time_diff=time_diff, valid_step=valid_step, step=None,", "NUM_ACTION = next(iter(eval_data)).action.float_features.shape initial_state = torch.Tensor([[0, 0]]) initial_state_q_values = torch.squeeze(", "probability of experiencing next n steps (1 <=n <= look_ahead_steps)", "Output acc_reward shape: batch_size, 1 \"\"\" # pyre-fixme[9]: action has", "= torch.zeros_like(all_step_action) action[: SEQ_LEN - j] = all_step_action[-(SEQ_LEN - j)", "] ) self._test_gen_permutations(SEQ_LEN, NUM_ACTION, expected_outcome) def _test_gen_permutations(self, SEQ_LEN, NUM_ACTION, expected_outcome):", "= compress_model_trainer.validation_step(batch, idx) total_mse_loss += mse_loss total_q_values += torch.tensor(q_values) total_action_distribution", "[0.33, 0.33, 0.33], we have [4, 41] expected_q_values = torch.tensor([[4.0,", "long test on sandcastle.\") def test_seq2reward_on_string_game_v0(self, filter_short_sequence): np.random.seed(SEED) random.seed(SEED) torch.manual_seed(SEED)", "trained network is not able # to reduce the mse", "action_names=[\"0\", \"1\"], gamma=1.0, view_q_value=True, ) trainer = Seq2RewardTrainer( seq2reward_network=seq2reward_network, params=trainer_param", "pyre-fixme[9]: action has type `FeatureData`; used as `Tensor`. action =", "SEQ_LEN / NUM_MDP_PER_BATCH) batches = [None for _ in range(num_batches)]", "= FakeStepPredictionNetwork(seq_len) model_with_preprocessor = Seq2RewardPlanShortSeqWithPreprocessor( model, step_prediction_model, state_preprocessor, seq_len, action_dim,", "of action indices, independent of state. For example, when seq_len", "get_Q( seq2reward_trainer.seq2reward_network, initial_state, seq2reward_trainer.all_permut, ) ) total_mse_loss = 0 total_q_values", "action indices, independent of state. 
For example, when seq_len =", "steps ahead, # the expected q values are respectively: #", "< 1.0 if filter_short_sequence: assert eval_mse_loss < 0.1 else: #", "(c) Facebook, Inc. and its affiliates. All rights reserved. import", "# expected shape: SEQ_LEN, PERM_NUM, ACTION_DIM result = gen_permutations(SEQ_LEN, NUM_ACTION)", "batch_size = NUM_MDP_PER_BATCH * SEQ_LEN time_diff = torch.ones(SEQ_LEN, batch_size) valid_step", "if i == SEQ_LEN - 1 else 1 for i", "next( iter(training_data) ).action.float_features.shape assert SEQ_LEN == 6 and NUM_ACTION ==", ") if filter_short_sequence: batch_size = NUM_MDP_PER_BATCH time_diff = torch.ones(SEQ_LEN, batch_size)", "# [0, 1], [1, 11], [11, 111] # Weighting the", "to the missing # states and actions in previous steps,", "Seq2RewardTrainer from torch.utils.data import DataLoader logger = logging.getLogger(__name__) SEED =", "range(num_batches)] batch_count, batch_seq_count = 0, 0 batch_reward = torch.zeros(SEQ_LEN, batch_size)", "= tensor( [[ 0.], [ 1.], [ 10.], [ 11.],", "stddev=1.0 ) for i in range(1, state_dim) } state_preprocessor =", "import ( NormalizationData, NormalizationParameters, ProblemDomain, Seq2RewardTrainerParameters, ) from reagent.gym.envs import", "* torch.ones(batch_size, dtype=torch.int64)[:, None] not_terminal = torch.Tensor( [0 if i", "close to zero. 
assert eval_mse_loss < 10 compress_model_trainer = train_seq2reward_compress_model(", "Optional import numpy as np import pytorch_lightning as pl import", "( compress_eval_mse_loss, compress_eval_q_values, compress_eval_action_distribution, ) = eval_seq2reward_compress_model(eval_data, compress_model_trainer) assert compress_eval_mse_loss", "float_features_with_presence=state_preprocessor.input_prototype(), id_list_features=FAKE_STATE_ID_LIST_FEATURES, id_score_list_features=FAKE_STATE_ID_SCORE_LIST_FEATURES, ) q_values = model_with_preprocessor(input_prototype) if plan_short_sequence: #", "import random import unittest from typing import Optional import numpy", "= DataLoader( batches[:num_training_batches], collate_fn=lambda x: x[0] ) eval_data = DataLoader(batches[num_training_batches:],", "values are respectively: # [0, 1], [1, 11], [11, 111]", "= eval_seq2reward_model(eval_data, seq2reward_trainer) assert abs(initial_state_q_values[0].item() - 10) < 1.0 assert", "training_data = DataLoader( batches[:num_training_batches], collate_fn=lambda x: x[0] ) eval_data =", "import DO_NOT_PREPROCESS from reagent.preprocessing.preprocessor import Preprocessor from reagent.training.utils import gen_permutations", "all_step_action[torch.arange(SEQ_LEN), [int(a) for a in mdp[\"action\"]]] = 1.0 for j", "(True,)] class FakeStepPredictionNetwork(nn.Module): def __init__(self, look_ahead_steps): super().__init__() self.look_ahead_steps = look_ahead_steps", "from reagent.training.world_model.compress_model_trainer import CompressModelTrainer from reagent.training.world_model.seq2reward_trainer import get_Q, Seq2RewardTrainer from", "batch_reward[:, batch_seq_count] = reward state = torch.zeros_like(all_step_state) state[: SEQ_LEN -", "step # probabilities [0.33, 0.33, 0.33], we have [4, 41]", "total_action_distribution = torch.zeros(NUM_ACTION) for idx, batch in enumerate(eval_data): ( mse_loss,", "forward(self, state: torch.Tensor): \"\"\" Given the current state, predict the", 
"= torch.zeros(SEQ_LEN, batch_size, NUM_ACTION) batch_state = torch.zeros(SEQ_LEN, batch_size, NUM_ACTION) for", "total_action_distribution / N_eval return ( initial_state_q_values, eval_mse_loss, eval_q_values, eval_action_distribution, )", "compress_model_trainer): SEQ_LEN, batch_size, NUM_ACTION = next(iter(eval_data)).action.float_features.shape total_mse_loss = 0 total_q_values", "1, 2, and 3 steps ahead, # the expected q", "[1, 0, 1], [1, 1, 0], [1, 1, 1], ]", "3 BATCH_SIZE = 2 STATE_DIM = 4 all_permut = gen_permutations(MULTI_STEPS,", "test_gen_permutations_seq_len_1_action_6(self): SEQ_LEN = 1 NUM_ACTION = 6 expected_outcome = torch.tensor([[0],", "SEQ_LEN - j] = all_step_reward[-(SEQ_LEN - j) :] batch_reward[:, batch_seq_count]", "action: rlt.FeatureData, valid_reward_len: Optional[torch.Tensor] = None, ): \"\"\" Mimic I/O", "test_seq2reward_on_string_game_v0(self, filter_short_sequence): np.random.seed(SEED) random.seed(SEED) torch.manual_seed(SEED) training_data, eval_data = create_string_game_data( filter_short_sequence=filter_short_sequence", "num_training_batches = int(training_data_ratio * num_batches) training_data = DataLoader( batches[:num_training_batches], collate_fn=lambda", "reagent.preprocessing.identify_types import DO_NOT_PREPROCESS from reagent.preprocessing.preprocessor import Preprocessor from reagent.training.utils import", "for mdp_id in sorted(set(df.mdp_id)): mdp = df[df[\"mdp_id\"] == mdp_id].sort_values(\"sequence_number\", ascending=True)", "eval_action_distribution, ) def train_seq2reward_compress_model( training_data, seq2reward_network, learning_rate=0.1, num_epochs=5 ): SEQ_LEN,", "mean=0.0, stddev=1.0 ) for i in range(1, state_dim) } state_preprocessor", "eval_data = create_string_game_data( filter_short_sequence=filter_short_sequence ) seq2reward_trainer = train_seq2reward_model(training_data) ( initial_state_q_values,", "def eval_seq2reward_model(eval_data, seq2reward_trainer): SEQ_LEN, batch_size, NUM_ACTION = 
next(iter(eval_data)).action.float_features.shape initial_state =", "N_eval eval_action_distribution = total_action_distribution / N_eval return ( initial_state_q_values, eval_mse_loss,", "1 ), 0, 1, ) num_batches = int(dataset_size / SEQ_LEN", "and 3 steps ahead, # the expected q values are", "import pytorch_lightning as pl import torch import torch.nn as nn", "import DataLoader logger = logging.getLogger(__name__) SEED = 0 STRING_GAME_TESTS =", ") not_terminal = torch.transpose(not_terminal.tile(NUM_MDP_PER_BATCH, 1), 0, 1) else: batch_size =", "= torch.zeros_like(all_step_state) all_step_action[torch.arange(SEQ_LEN), [int(a) for a in mdp[\"action\"]]] = 1.0", "3, batch_size = 1, action_num = 2, acc_reward = tensor(", "+= torch.tensor(action_distribution) N_eval = len(eval_data) eval_mse_loss = total_mse_loss / N_eval", "batch_seq_count] = state action = torch.zeros_like(all_step_action) action[: SEQ_LEN - j]", "import Preprocessor from reagent.training.utils import gen_permutations from reagent.training.world_model.compress_model_trainer import CompressModelTrainer", "q_values = get_Q(seq2reward_network, state, all_permut) expected_q_values = torch.tensor([[11.0, 111.0], [11.0,", "num_action Output acc_reward shape: batch_size, 1 \"\"\" # pyre-fixme[9]: action", "batch_seq_count] = action batch_seq_count += 1 if batch_seq_count == batch_size:", "= pl.Trainer(max_epochs=num_epochs, deterministic=True) pl_trainer.fit(trainer, training_data) return trainer def eval_seq2reward_model(eval_data, seq2reward_trainer):", "= torch.Tensor([[0, 0]]) initial_state_q_values = torch.squeeze( get_Q( seq2reward_trainer.seq2reward_network, initial_state, seq2reward_trainer.all_permut,", "and NUM_ACTION == 2 seq2reward_network = Seq2RewardNetwork( state_dim=NUM_ACTION, action_dim=NUM_ACTION, num_hiddens=64,", "Seq2RewardTrainer( seq2reward_network=seq2reward_network, params=trainer_param ) pl.seed_everything(SEED) pl_trainer = pl.Trainer(max_epochs=num_epochs, deterministic=True) 
pl_trainer.fit(trainer,", "N_eval return ( initial_state_q_values, eval_mse_loss, eval_q_values, eval_action_distribution, ) def train_seq2reward_compress_model(", "train_seq2reward_model(training_data, learning_rate=0.01, num_epochs=5): SEQ_LEN, batch_size, NUM_ACTION = next( iter(training_data) ).action.float_features.shape", "torch.ones(SEQ_LEN, batch_size) valid_step = torch.arange(SEQ_LEN, 0, -1).tile(NUM_MDP_PER_BATCH)[:, None] not_terminal =", "= total_action_distribution / N_eval return ( initial_state_q_values, eval_mse_loss, eval_q_values, eval_action_distribution,", "/ N_eval eval_q_values = total_q_values / N_eval eval_action_distribution = total_action_distribution", "( initial_state_q_values, eval_mse_loss, eval_q_values, eval_action_distribution, ) def train_seq2reward_compress_model( training_data, seq2reward_network,", "1: NormalizationParameters(feature_type=DO_NOT_PREPROCESS), } ) compress_model_network = compress_net_builder.build_value_network( state_normalization_data, output_dim=NUM_ACTION, )", "1, action_num = 2, acc_reward = tensor( [[ 0.], [", "train_seq2reward_compress_model( training_data, seq2reward_network, learning_rate=0.1, num_epochs=5 ): SEQ_LEN, batch_size, NUM_ACTION =", "trainer = CompressModelTrainer( compress_model_network=compress_model_network, seq2reward_network=seq2reward_network, params=trainer_param, ) pl.seed_everything(SEED) pl_trainer =", ") compress_model_network = compress_net_builder.build_value_network( state_normalization_data, output_dim=NUM_ACTION, ) trainer_param = Seq2RewardTrainerParameters(", "torch.all(expected_q_values == q_values) def test_gen_permutations_seq_len_1_action_6(self): SEQ_LEN = 1 NUM_ACTION =", "used as `Tensor`. 
action = action.float_features.transpose(0, 1) action_indices = torch.argmax(action,", "experiencing next n steps (1 <=n <= look_ahead_steps) For the", "TestSeq2Reward(unittest.TestCase): def test_seq2reward_with_preprocessor_plan_short_sequence(self): self._test_seq2reward_with_preprocessor(plan_short_sequence=True) def test_seq2reward_with_preprocessor_plan_full_sequence(self): self._test_seq2reward_with_preprocessor(plan_short_sequence=False) def _test_seq2reward_with_preprocessor(self, plan_short_sequence):", "filter_short_sequence: assert eval_mse_loss < 0.1 else: # Same short sequences", "test_get_Q(self): NUM_ACTION = 2 MULTI_STEPS = 3 BATCH_SIZE = 2", "q_values) def test_get_Q(self): NUM_ACTION = 2 MULTI_STEPS = 3 BATCH_SIZE", ") = eval_seq2reward_compress_model(eval_data, compress_model_trainer) assert compress_eval_mse_loss < 1e-5 assert torch.all(eval_q_values", "result.shape == (SEQ_LEN, NUM_ACTION ** SEQ_LEN, NUM_ACTION) outcome = torch.argmax(result.transpose(0,", "used anyway not_terminal=not_terminal, time_diff=time_diff, valid_step=valid_step, step=None, ) batch_count += 1", "rlt from reagent.core.parameters import ( NormalizationData, NormalizationParameters, ProblemDomain, Seq2RewardTrainerParameters, )", "batch_seq_count] = reward state = torch.zeros_like(all_step_state) state[: SEQ_LEN - j]", "collate_fn=lambda x: x[0]) return training_data, eval_data def train_seq2reward_model(training_data, learning_rate=0.01, num_epochs=5):", "0 batch_reward = torch.zeros_like(batch_reward) batch_action = torch.zeros_like(batch_action) batch_state = torch.zeros_like(batch_state)", "_ in range(num_batches)] batch_count, batch_seq_count = 0, 0 batch_reward =", "fixed fake numbers \"\"\" batch_size, _ = state.shape return torch.ones(batch_size,", "from reagent.prediction.predictor_wrapper import ( Seq2RewardWithPreprocessor, Seq2RewardPlanShortSeqWithPreprocessor, FAKE_STATE_ID_LIST_FEATURES, FAKE_STATE_ID_SCORE_LIST_FEATURES, ) from", "int(training_data_ratio 
* num_batches) training_data = DataLoader( batches[:num_training_batches], collate_fn=lambda x: x[0]", "8]) state_normalization_data = NormalizationData( dense_normalization_parameters={ 0: NormalizationParameters(feature_type=DO_NOT_PREPROCESS), 1: NormalizationParameters(feature_type=DO_NOT_PREPROCESS), }", "state_normalization_data = NormalizationData( dense_normalization_parameters={ 0: NormalizationParameters(feature_type=DO_NOT_PREPROCESS), 1: NormalizationParameters(feature_type=DO_NOT_PREPROCESS), } )", "2 expected_outcome = torch.tensor( [ [0, 0, 0], [0, 0,", "NUM_ACTION = next(iter(eval_data)).action.float_features.shape total_mse_loss = 0 total_q_values = torch.zeros(NUM_ACTION) total_action_distribution", "next(iter(eval_data)).action.float_features.shape initial_state = torch.Tensor([[0, 0]]) initial_state_q_values = torch.squeeze( get_Q( seq2reward_trainer.seq2reward_network,", "expected_outcome) @parameterized.expand(STRING_GAME_TESTS) @unittest.skipIf(\"SANDCASTLE\" in os.environ, \"Skipping long test on sandcastle.\")", "= rlt.ServingFeatureData( float_features_with_presence=state_preprocessor.input_prototype(), id_list_features=FAKE_STATE_ID_LIST_FEATURES, id_score_list_features=FAKE_STATE_ID_SCORE_LIST_FEATURES, ) q_values = model_with_preprocessor(input_prototype) if", "0], [0, 1, 1], [1, 0, 0], [1, 0, 1],", "111] # Weighting the expected q values by predicted step", "if plan_short_sequence: # When planning for 1, 2, and 3", "): SEQ_LEN = 6 NUM_ACTION = 2 NUM_MDP_PER_BATCH = 5", "learning_rate=0.01, num_epochs=5): SEQ_LEN, batch_size, NUM_ACTION = next( iter(training_data) ).action.float_features.shape assert", "trainer def eval_seq2reward_model(eval_data, seq2reward_trainer): SEQ_LEN, batch_size, NUM_ACTION = next(iter(eval_data)).action.float_features.shape initial_state", "on sandcastle.\") def test_seq2reward_on_string_game_v0(self, filter_short_sequence): np.random.seed(SEED) random.seed(SEED) torch.manual_seed(SEED) training_data, 
eval_data", "typing import Optional import numpy as np import pytorch_lightning as", "from parameterized import parameterized from reagent.core import types as rlt", "0: NormalizationParameters(feature_type=DO_NOT_PREPROCESS), 1: NormalizationParameters(feature_type=DO_NOT_PREPROCESS), } ) compress_model_network = compress_net_builder.build_value_network( state_normalization_data,", "sandcastle.\") def test_seq2reward_on_string_game_v0(self, filter_short_sequence): np.random.seed(SEED) random.seed(SEED) torch.manual_seed(SEED) training_data, eval_data =", "111.0], [11.0, 111.0]]) logger.info(f\"q_values: {q_values}\") assert torch.all(expected_q_values == q_values) def", "Seq2RewardTrainerParameters( learning_rate=learning_rate, multi_steps=SEQ_LEN, action_names=[\"0\", \"1\"], gamma=1.0, view_q_value=True, ) trainer =", "j) :] batch_reward[:, batch_seq_count] = reward state = torch.zeros_like(all_step_state) state[:", "s in mdp[\"state_features\"]]) all_step_action = torch.zeros_like(all_step_state) all_step_action[torch.arange(SEQ_LEN), [int(a) for a", "`FeatureData`; used as `Tensor`. 
action = action.float_features.transpose(0, 1) action_indices =", "sequences may have different total rewards due to the missing", "train_seq2reward_compress_model( training_data, seq2reward_trainer.seq2reward_network ) ( compress_eval_mse_loss, compress_eval_q_values, compress_eval_action_distribution, ) =", "= CompressModelTrainer( compress_model_network=compress_model_network, seq2reward_network=seq2reward_network, params=trainer_param, ) pl.seed_everything(SEED) pl_trainer = pl.Trainer(max_epochs=num_epochs,", "num_batches num_training_batches = int(training_data_ratio * num_batches) training_data = DataLoader( batches[:num_training_batches],", "= SEQ_LEN * torch.ones(batch_size, dtype=torch.int64)[:, None] not_terminal = torch.Tensor( [0", "SEQ_LEN, batch_size, NUM_ACTION = next( iter(training_data) ).action.float_features.shape assert SEQ_LEN ==", "torch.zeros(BATCH_SIZE, STATE_DIM) q_values = get_Q(seq2reward_network, state, all_permut) expected_q_values = torch.tensor([[11.0,", "training_data, seq2reward_network, learning_rate=0.1, num_epochs=5 ): SEQ_LEN, batch_size, NUM_ACTION = next(", "x: x[0]) return training_data, eval_data def train_seq2reward_model(training_data, learning_rate=0.01, num_epochs=5): SEQ_LEN,", "due to the missing # states and actions in previous", "None] not_terminal = torch.transpose( torch.tril(torch.ones(SEQ_LEN, SEQ_LEN), diagonal=-1).tile( NUM_MDP_PER_BATCH, 1 ),", "SEQ_LEN, NUM_ACTION, expected_outcome): # expected shape: SEQ_LEN, PERM_NUM, ACTION_DIM result", "affiliates. All rights reserved. 
import logging import os import random", "from reagent.preprocessing.identify_types import DO_NOT_PREPROCESS from reagent.preprocessing.preprocessor import Preprocessor from reagent.training.utils", "1, 1], [1, 0, 0], [1, 0, 1], [1, 1,", "eval_seq2reward_model(eval_data, seq2reward_trainer) assert abs(initial_state_q_values[0].item() - 10) < 1.0 assert abs(initial_state_q_values[1].item()", "seq2reward_trainer.all_permut, ) ) total_mse_loss = 0 total_q_values = torch.zeros(NUM_ACTION) total_action_distribution", "import Seq2RewardNetwork from reagent.net_builder.value.fully_connected import FullyConnected from reagent.prediction.predictor_wrapper import (", "NormalizationParameters(feature_type=DO_NOT_PREPROCESS), 1: NormalizationParameters(feature_type=DO_NOT_PREPROCESS), } ) compress_model_network = compress_net_builder.build_value_network( state_normalization_data, output_dim=NUM_ACTION,", "list(map(lambda x: float(\"\".join(map(str, x))), action_indices)) ).reshape(-1, 1) logger.info(f\"acc_reward: {acc_reward}\") return", "assert result.shape == (SEQ_LEN, NUM_ACTION ** SEQ_LEN, NUM_ACTION) outcome =", "valid_step = SEQ_LEN * torch.ones(batch_size, dtype=torch.int64)[:, None] not_terminal = torch.Tensor(", "batch_action[:, batch_seq_count] = action batch_seq_count += 1 if batch_seq_count ==", "previous steps, so the trained network is not able #", "assert torch.all(eval_q_values - compress_eval_q_values < 1e-5) assert torch.all( eval_action_distribution -", "Mimic I/O of Seq2RewardNetwork but return fake reward Reward is", "the trained network is not able # to reduce the", "/ SEQ_LEN / NUM_MDP_PER_BATCH) batches = [None for _ in", "in os.environ, \"Skipping long test on sandcastle.\") def test_seq2reward_on_string_game_v0(self, filter_short_sequence):", "j) :] batch_state[:, batch_seq_count] = state action = torch.zeros_like(all_step_action) action[:", "= 3 NUM_ACTION = 2 expected_outcome = torch.tensor( [ [0,", "has type `FeatureData`; used as `Tensor`. 
action = action.float_features.transpose(0, 1)", "= torch.argmax(action, dim=2).tolist() acc_reward = torch.tensor( list(map(lambda x: float(\"\".join(map(str, x))),", "total_q_values / N_eval eval_action_distribution = total_action_distribution / N_eval return eval_mse_loss,", "filter_short_sequence): np.random.seed(SEED) random.seed(SEED) torch.manual_seed(SEED) training_data, eval_data = create_string_game_data( filter_short_sequence=filter_short_sequence )", "from reagent.core import types as rlt from reagent.core.parameters import (", "def create_string_game_data( dataset_size=10000, training_data_ratio=0.9, filter_short_sequence=False ): SEQ_LEN = 6 NUM_ACTION", "abs(initial_state_q_values[1].item() - 5) < 1.0 if filter_short_sequence: assert eval_mse_loss <", "import FullyConnected from reagent.prediction.predictor_wrapper import ( Seq2RewardWithPreprocessor, Seq2RewardPlanShortSeqWithPreprocessor, FAKE_STATE_ID_LIST_FEATURES, FAKE_STATE_ID_SCORE_LIST_FEATURES,", "return eval_mse_loss, eval_q_values, eval_action_distribution class TestSeq2Reward(unittest.TestCase): def test_seq2reward_with_preprocessor_plan_short_sequence(self): self._test_seq2reward_with_preprocessor(plan_short_sequence=True) def", "seq2reward_trainer): SEQ_LEN, batch_size, NUM_ACTION = next(iter(eval_data)).action.float_features.shape initial_state = torch.Tensor([[0, 0]])", "1.0 assert abs(initial_state_q_values[1].item() - 5) < 1.0 if filter_short_sequence: assert", "assert SEQ_LEN == 6 and NUM_ACTION == 2 seq2reward_network =", "SEED = 0 STRING_GAME_TESTS = [(False,), (True,)] class FakeStepPredictionNetwork(nn.Module): def", "def train_seq2reward_model(training_data, learning_rate=0.01, num_epochs=5): SEQ_LEN, batch_size, NUM_ACTION = next( iter(training_data)", "look_ahead_steps): super().__init__() self.look_ahead_steps = look_ahead_steps def forward(self, state: torch.Tensor): \"\"\"", "diagonal=-1).tile( NUM_MDP_PER_BATCH, 1 ), 0, 1, ) num_batches = int(dataset_size", ") = 
compress_model_trainer.validation_step(batch, idx) total_mse_loss += mse_loss total_q_values += torch.tensor(q_values)", "the concatenation of action indices, independent of state. For example,", "11.], [100.], [101.], [110.], [111.]] ) Input action shape: seq_len,", "2 NUM_MDP_PER_BATCH = 5 env = Gym(env_name=\"StringGame-v0\", set_max_steps=SEQ_LEN) df =", "rlt.ServingFeatureData( float_features_with_presence=state_preprocessor.input_prototype(), id_list_features=FAKE_STATE_ID_LIST_FEATURES, id_score_list_features=FAKE_STATE_ID_SCORE_LIST_FEATURES, ) q_values = model_with_preprocessor(input_prototype) if plan_short_sequence:", "torch.tensor( [ [0, 0, 0], [0, 0, 1], [0, 1,", "Seq2RewardPlanShortSeqWithPreprocessor, FAKE_STATE_ID_LIST_FEATURES, FAKE_STATE_ID_SCORE_LIST_FEATURES, ) from reagent.preprocessing.identify_types import DO_NOT_PREPROCESS from reagent.preprocessing.preprocessor", "reserved. import logging import os import random import unittest from", "seq2reward_trainer) assert abs(initial_state_q_values[0].item() - 10) < 1.0 assert abs(initial_state_q_values[1].item() -", "< 1e-5) assert torch.all( eval_action_distribution - compress_eval_action_distribution < 1e-5 )", "for a in mdp[\"action\"]]] = 1.0 for j in range(SEQ_LEN):", "import torch import torch.nn as nn from parameterized import parameterized", "- j] = all_step_reward[-(SEQ_LEN - j) :] batch_reward[:, batch_seq_count] =", "zero. 
assert eval_mse_loss < 10 compress_model_trainer = train_seq2reward_compress_model( training_data, seq2reward_trainer.seq2reward_network", "torch.Tensor(list(mdp[\"reward\"])) all_step_state = torch.Tensor([list(s.values()) for s in mdp[\"state_features\"]]) all_step_action =", "i == SEQ_LEN - 1 else 1 for i in", "> 0: break reward = torch.zeros_like(all_step_reward) reward[: SEQ_LEN - j]", "anyway not_terminal=not_terminal, time_diff=time_diff, valid_step=valid_step, step=None, ) batch_count += 1 batch_seq_count", "NormalizationParameters( feature_type=DO_NOT_PREPROCESS, mean=0.0, stddev=1.0 ) for i in range(1, state_dim)", "- 1 else 1 for i in range(SEQ_LEN)] ) not_terminal", "NUM_ACTION = 2 expected_outcome = torch.tensor( [ [0, 0, 0],", "ds=\"2020-10-10\", ) if filter_short_sequence: batch_size = NUM_MDP_PER_BATCH time_diff = torch.ones(SEQ_LEN,", "different total rewards due to the missing # states and", "= torch.tensor([[11.0, 111.0], [11.0, 111.0]]) logger.info(f\"q_values: {q_values}\") assert torch.all(expected_q_values ==", "and actions in previous steps, so the trained network is", "missing # states and actions in previous steps, so the", "# When planning for 1, 2, and 3 steps ahead,", "= torch.arange(SEQ_LEN, 0, -1).tile(NUM_MDP_PER_BATCH)[:, None] not_terminal = torch.transpose( torch.tril(torch.ones(SEQ_LEN, SEQ_LEN),", "batch_size, NUM_ACTION) batch_state = torch.zeros(SEQ_LEN, batch_size, NUM_ACTION) for mdp_id in", "of experiencing next n steps (1 <=n <= look_ahead_steps) For", "mdp[\"state_features\"]]) all_step_action = torch.zeros_like(all_step_state) all_step_action[torch.arange(SEQ_LEN), [int(a) for a in mdp[\"action\"]]]", "from reagent.models.seq2reward_model import Seq2RewardNetwork from reagent.net_builder.value.fully_connected import FullyConnected from reagent.prediction.predictor_wrapper", ").action.float_features.shape assert SEQ_LEN == 6 and NUM_ACTION == 2 compress_net_builder", "torch.argmax(result.transpose(0, 1), dim=-1) assert 
torch.all(outcome == expected_outcome) @parameterized.expand(STRING_GAME_TESTS) @unittest.skipIf(\"SANDCASTLE\" in", "= 3 BATCH_SIZE = 2 STATE_DIM = 4 all_permut =", "batch_count, batch_seq_count = 0, 0 batch_reward = torch.zeros(SEQ_LEN, batch_size) batch_action", "5) < 1.0 if filter_short_sequence: assert eval_mse_loss < 0.1 else:", "= compress_net_builder.build_value_network( state_normalization_data, output_dim=NUM_ACTION, ) trainer_param = Seq2RewardTrainerParameters( learning_rate=0.0, multi_steps=SEQ_LEN,", "have [4, 41] expected_q_values = torch.tensor([[4.0, 41.0]]) else: expected_q_values =", "NUM_MDP_PER_BATCH * SEQ_LEN time_diff = torch.ones(SEQ_LEN, batch_size) valid_step = torch.arange(SEQ_LEN,", "len(mdp) != SEQ_LEN: continue all_step_reward = torch.Tensor(list(mdp[\"reward\"])) all_step_state = torch.Tensor([list(s.values())", "training_data) return trainer def eval_seq2reward_compress_model(eval_data, compress_model_trainer): SEQ_LEN, batch_size, NUM_ACTION =", "def test_seq2reward_with_preprocessor_plan_full_sequence(self): self._test_seq2reward_with_preprocessor(plan_short_sequence=False) def _test_seq2reward_with_preprocessor(self, plan_short_sequence): state_dim = 4 action_dim", "= train_seq2reward_compress_model( training_data, seq2reward_trainer.seq2reward_network ) ( compress_eval_mse_loss, compress_eval_q_values, compress_eval_action_distribution, )", "assert compress_eval_mse_loss < 1e-5 assert torch.all(eval_q_values - compress_eval_q_values < 1e-5)", "rights reserved. 
import logging import os import random import unittest", "= 2 NUM_MDP_PER_BATCH = 5 env = Gym(env_name=\"StringGame-v0\", set_max_steps=SEQ_LEN) df", "Seq2RewardTrainerParameters( learning_rate=0.0, multi_steps=SEQ_LEN, action_names=[\"0\", \"1\"], compress_model_learning_rate=learning_rate, gamma=1.0, view_q_value=True, ) trainer", "= next( iter(training_data) ).action.float_features.shape assert SEQ_LEN == 6 and NUM_ACTION", "random.seed(SEED) torch.manual_seed(SEED) training_data, eval_data = create_string_game_data( filter_short_sequence=filter_short_sequence ) seq2reward_trainer =", "dense_normalization_parameters={ 0: NormalizationParameters(feature_type=DO_NOT_PREPROCESS), 1: NormalizationParameters(feature_type=DO_NOT_PREPROCESS), } ) compress_model_network = compress_net_builder.build_value_network(", "fake reward Reward is the concatenation of action indices, independent", "action_dim, ) input_prototype = rlt.ServingFeatureData( float_features_with_presence=state_preprocessor.input_prototype(), id_list_features=FAKE_STATE_ID_LIST_FEATURES, id_score_list_features=FAKE_STATE_ID_SCORE_LIST_FEATURES, ) q_values", "_test_gen_permutations(self, SEQ_LEN, NUM_ACTION, expected_outcome): # expected shape: SEQ_LEN, PERM_NUM, ACTION_DIM", "< 0.1 else: # Same short sequences may have different", "to reduce the mse loss to values close to zero.", "1.], [ 10.], [ 11.], [100.], [101.], [110.], [111.]] )", "seq_len = 3, batch_size = 1, action_num = 2, acc_reward", "batch_state[:, batch_seq_count] = state action = torch.zeros_like(all_step_action) action[: SEQ_LEN -", "= 2 expected_outcome = torch.tensor( [ [0, 0, 0], [0,", "assert eval_mse_loss < 0.1 else: # Same short sequences may", "Optional[torch.Tensor] = None, ): \"\"\" Mimic I/O of Seq2RewardNetwork but", "from reagent.training.world_model.seq2reward_trainer import get_Q, Seq2RewardTrainer from torch.utils.data import DataLoader logger", "loss to values close to zero. 
assert eval_mse_loss < 10", "assert batch_count == num_batches num_training_batches = int(training_data_ratio * num_batches) training_data", "= Seq2RewardTrainer( seq2reward_network=seq2reward_network, params=trainer_param ) pl.seed_everything(SEED) pl_trainer = pl.Trainer(max_epochs=num_epochs, deterministic=True)", "model, step_prediction_model, state_preprocessor, seq_len, action_dim, ) else: model_with_preprocessor = Seq2RewardWithPreprocessor(", "2, and 3 steps ahead, # the expected q values", "env=env, problem_domain=ProblemDomain.DISCRETE_ACTION, desired_size=dataset_size, multi_steps=None, ds=\"2020-10-10\", ) if filter_short_sequence: batch_size =", ":] batch_reward[:, batch_seq_count] = reward state = torch.zeros_like(all_step_state) state[: SEQ_LEN", "x[0]) return training_data, eval_data def train_seq2reward_model(training_data, learning_rate=0.01, num_epochs=5): SEQ_LEN, batch_size,", "import create_df_from_replay_buffer from reagent.models.seq2reward_model import Seq2RewardNetwork from reagent.net_builder.value.fully_connected import FullyConnected", "STATE_DIM) q_values = get_Q(seq2reward_network, state, all_permut) expected_q_values = torch.tensor([[11.0, 111.0],", "[4, 41] expected_q_values = torch.tensor([[4.0, 41.0]]) else: expected_q_values = torch.tensor([[11.0,", "initial_state_q_values, eval_mse_loss, eval_q_values, eval_action_distribution, ) = eval_seq2reward_model(eval_data, seq2reward_trainer) assert abs(initial_state_q_values[0].item()", "torch.tensor( list(map(lambda x: float(\"\".join(map(str, x))), action_indices)) ).reshape(-1, 1) logger.info(f\"acc_reward: {acc_reward}\")", "+= 1 batch_seq_count = 0 batch_reward = torch.zeros_like(batch_reward) batch_action =", "_, q_values, action_distribution, ) = seq2reward_trainer.validation_step(batch, idx) total_mse_loss += mse_loss", "batch_size, NUM_ACTION = next(iter(eval_data)).action.float_features.shape total_mse_loss = 0 total_q_values = torch.zeros(NUM_ACTION)", "else: 
expected_q_values = torch.tensor([[11.0, 111.0]]) assert torch.all(expected_q_values == q_values) def", "= torch.squeeze( get_Q( seq2reward_trainer.seq2reward_network, initial_state, seq2reward_trainer.all_permut, ) ) total_mse_loss =", "initial_state_q_values, eval_mse_loss, eval_q_values, eval_action_distribution, ) def train_seq2reward_compress_model( training_data, seq2reward_network, learning_rate=0.1,", "gen_permutations(SEQ_LEN, NUM_ACTION) assert result.shape == (SEQ_LEN, NUM_ACTION ** SEQ_LEN, NUM_ACTION)", "feature_type=DO_NOT_PREPROCESS, mean=0.0, stddev=1.0 ) for i in range(1, state_dim) }", "from reagent.gym.envs import Gym from reagent.gym.utils import create_df_from_replay_buffer from reagent.models.seq2reward_model", "probabilities [0.33, 0.33, 0.33], we have [4, 41] expected_q_values =", "[1], [2], [3], [4], [5]]) self._test_gen_permutations(SEQ_LEN, NUM_ACTION, expected_outcome) def test_gen_permutations_seq_len_3_num_action_2(self):", "mdp_id].sort_values(\"sequence_number\", ascending=True) if len(mdp) != SEQ_LEN: continue all_step_reward = torch.Tensor(list(mdp[\"reward\"]))", "FakeSeq2RewardNetwork() state = torch.zeros(BATCH_SIZE, STATE_DIM) q_values = get_Q(seq2reward_network, state, all_permut)", "expected_outcome) def _test_gen_permutations(self, SEQ_LEN, NUM_ACTION, expected_outcome): # expected shape: SEQ_LEN,", "pl_trainer.fit(trainer, training_data) return trainer def eval_seq2reward_model(eval_data, seq2reward_trainer): SEQ_LEN, batch_size, NUM_ACTION", "NormalizationData, NormalizationParameters, ProblemDomain, Seq2RewardTrainerParameters, ) from reagent.gym.envs import Gym from", "* SEQ_LEN time_diff = torch.ones(SEQ_LEN, batch_size) valid_step = torch.arange(SEQ_LEN, 0,", "concatenation of action indices, independent of state. 
For example, when", "action[: SEQ_LEN - j] = all_step_action[-(SEQ_LEN - j) :] batch_action[:,", "seq_len = 3 model = FakeSeq2RewardNetwork() state_normalization_parameters = { i:", "state=rlt.FeatureData(float_features=batch_state), next_state=rlt.FeatureData( float_features=torch.zeros_like(batch_state) ), # fake, not used anyway not_terminal=not_terminal,", ") from reagent.gym.envs import Gym from reagent.gym.utils import create_df_from_replay_buffer from", "= len(eval_data) eval_mse_loss = total_mse_loss / N_eval eval_q_values = total_q_values", "6 expected_outcome = torch.tensor([[0], [1], [2], [3], [4], [5]]) self._test_gen_permutations(SEQ_LEN,", "test_seq2reward_with_preprocessor_plan_short_sequence(self): self._test_seq2reward_with_preprocessor(plan_short_sequence=True) def test_seq2reward_with_preprocessor_plan_full_sequence(self): self._test_seq2reward_with_preprocessor(plan_short_sequence=False) def _test_seq2reward_with_preprocessor(self, plan_short_sequence): state_dim =", "all_step_action[-(SEQ_LEN - j) :] batch_action[:, batch_seq_count] = action batch_seq_count +=", "batch_count += 1 batch_seq_count = 0 batch_reward = torch.zeros_like(batch_reward) batch_action", "1 else 1 for i in range(SEQ_LEN)] ) not_terminal =", "expected_q_values = torch.tensor([[11.0, 111.0], [11.0, 111.0]]) logger.info(f\"q_values: {q_values}\") assert torch.all(expected_q_values", "expected_outcome = torch.tensor([[0], [1], [2], [3], [4], [5]]) self._test_gen_permutations(SEQ_LEN, NUM_ACTION,", "1], ] ) self._test_gen_permutations(SEQ_LEN, NUM_ACTION, expected_outcome) def _test_gen_permutations(self, SEQ_LEN, NUM_ACTION,", "the mse loss to values close to zero. 
assert eval_mse_loss", "seq2reward_trainer.seq2reward_network ) ( compress_eval_mse_loss, compress_eval_q_values, compress_eval_action_distribution, ) = eval_seq2reward_compress_model(eval_data, compress_model_trainer)", "0, 0], [1, 0, 1], [1, 1, 0], [1, 1,", "trainer def eval_seq2reward_compress_model(eval_data, compress_model_trainer): SEQ_LEN, batch_size, NUM_ACTION = next(iter(eval_data)).action.float_features.shape total_mse_loss", "= Seq2RewardTrainerParameters( learning_rate=learning_rate, multi_steps=SEQ_LEN, action_names=[\"0\", \"1\"], gamma=1.0, view_q_value=True, ) trainer", "state_preprocessor, seq_len, action_dim, ) input_prototype = rlt.ServingFeatureData( float_features_with_presence=state_preprocessor.input_prototype(), id_list_features=FAKE_STATE_ID_LIST_FEATURES, id_score_list_features=FAKE_STATE_ID_SCORE_LIST_FEATURES,", "input_prototype = rlt.ServingFeatureData( float_features_with_presence=state_preprocessor.input_prototype(), id_list_features=FAKE_STATE_ID_LIST_FEATURES, id_score_list_features=FAKE_STATE_ID_SCORE_LIST_FEATURES, ) q_values = model_with_preprocessor(input_prototype)", "in sorted(set(df.mdp_id)): mdp = df[df[\"mdp_id\"] == mdp_id].sort_values(\"sequence_number\", ascending=True) if len(mdp)", "= torch.zeros(NUM_ACTION) total_action_distribution = torch.zeros(NUM_ACTION) for idx, batch in enumerate(eval_data):", "from torch.utils.data import DataLoader logger = logging.getLogger(__name__) SEED = 0", "SEQ_LEN = 1 NUM_ACTION = 6 expected_outcome = torch.tensor([[0], [1],", "plan_short_sequence: step_prediction_model = FakeStepPredictionNetwork(seq_len) model_with_preprocessor = Seq2RewardPlanShortSeqWithPreprocessor( model, step_prediction_model, state_preprocessor,", "SEQ_LEN, NUM_ACTION) outcome = torch.argmax(result.transpose(0, 1), dim=-1) assert torch.all(outcome ==", "torch.zeros_like(all_step_reward) reward[: SEQ_LEN - j] = all_step_reward[-(SEQ_LEN - j) :]", "batch_seq_count == batch_size: batches[batch_count] = 
rlt.MemoryNetworkInput( reward=batch_reward, action=rlt.FeatureData(float_features=batch_action), state=rlt.FeatureData(float_features=batch_state), next_state=rlt.FeatureData(", "= Seq2RewardNetwork( state_dim=NUM_ACTION, action_dim=NUM_ACTION, num_hiddens=64, num_hidden_layers=2, ) trainer_param = Seq2RewardTrainerParameters(", "torch.ones(batch_size, dtype=torch.int64)[:, None] not_terminal = torch.Tensor( [0 if i ==", "trainer = Seq2RewardTrainer( seq2reward_network=seq2reward_network, params=trainer_param ) pl.seed_everything(SEED) pl_trainer = pl.Trainer(max_epochs=num_epochs,", "not able # to reduce the mse loss to values", "False) if plan_short_sequence: step_prediction_model = FakeStepPredictionNetwork(seq_len) model_with_preprocessor = Seq2RewardPlanShortSeqWithPreprocessor( model,", "the expected q values by predicted step # probabilities [0.33,", "values by predicted step # probabilities [0.33, 0.33, 0.33], we", "- j] = all_step_action[-(SEQ_LEN - j) :] batch_action[:, batch_seq_count] =", "= 5 env = Gym(env_name=\"StringGame-v0\", set_max_steps=SEQ_LEN) df = create_df_from_replay_buffer( env=env,", "self._test_seq2reward_with_preprocessor(plan_short_sequence=False) def _test_seq2reward_with_preprocessor(self, plan_short_sequence): state_dim = 4 action_dim = 2", "111.0]]) logger.info(f\"q_values: {q_values}\") assert torch.all(expected_q_values == q_values) def test_gen_permutations_seq_len_1_action_6(self): SEQ_LEN", "reagent.prediction.predictor_wrapper import ( Seq2RewardWithPreprocessor, Seq2RewardPlanShortSeqWithPreprocessor, FAKE_STATE_ID_LIST_FEATURES, FAKE_STATE_ID_SCORE_LIST_FEATURES, ) from reagent.preprocessing.identify_types", "mdp[\"action\"]]] = 1.0 for j in range(SEQ_LEN): if filter_short_sequence and", "2 seq2reward_network = Seq2RewardNetwork( state_dim=NUM_ACTION, action_dim=NUM_ACTION, num_hiddens=64, num_hidden_layers=2, ) trainer_param", "= 0 total_q_values = torch.zeros(NUM_ACTION) total_action_distribution = 
torch.zeros(NUM_ACTION) for idx,", "): \"\"\" Mimic I/O of Seq2RewardNetwork but return fake reward", "[1, 0, 0], [1, 0, 1], [1, 1, 0], [1,", "dim=-1) assert torch.all(outcome == expected_outcome) @parameterized.expand(STRING_GAME_TESTS) @unittest.skipIf(\"SANDCASTLE\" in os.environ, \"Skipping", "a in mdp[\"action\"]]] = 1.0 for j in range(SEQ_LEN): if", "assert torch.all(expected_q_values == q_values) def test_gen_permutations_seq_len_1_action_6(self): SEQ_LEN = 1 NUM_ACTION", "** SEQ_LEN, NUM_ACTION) outcome = torch.argmax(result.transpose(0, 1), dim=-1) assert torch.all(outcome", "purpose, it outputs fixed fake numbers \"\"\" batch_size, _ =", "batch_state = torch.zeros(SEQ_LEN, batch_size, NUM_ACTION) for mdp_id in sorted(set(df.mdp_id)): mdp", "state_dim) } state_preprocessor = Preprocessor(state_normalization_parameters, False) if plan_short_sequence: step_prediction_model =", "reagent.training.world_model.seq2reward_trainer import get_Q, Seq2RewardTrainer from torch.utils.data import DataLoader logger =", "1], [1, 1, 0], [1, 1, 1], ] ) self._test_gen_permutations(SEQ_LEN,", "== 2 compress_net_builder = FullyConnected(sizes=[8, 8]) state_normalization_data = NormalizationData( dense_normalization_parameters={", "class FakeStepPredictionNetwork(nn.Module): def __init__(self, look_ahead_steps): super().__init__() self.look_ahead_steps = look_ahead_steps def", "seq2reward_network=seq2reward_network, params=trainer_param ) pl.seed_everything(SEED) pl_trainer = pl.Trainer(max_epochs=num_epochs, deterministic=True) pl_trainer.fit(trainer, training_data)", "values close to zero. 
assert eval_mse_loss < 10 compress_model_trainer =", "DataLoader logger = logging.getLogger(__name__) SEED = 0 STRING_GAME_TESTS = [(False,),", "torch.tensor([[11.0, 111.0], [11.0, 111.0]]) logger.info(f\"q_values: {q_values}\") assert torch.all(expected_q_values == q_values)", "total_mse_loss = 0 total_q_values = torch.zeros(NUM_ACTION) total_action_distribution = torch.zeros(NUM_ACTION) for", "current state, predict the probability of experiencing next n steps", "reagent.gym.envs import Gym from reagent.gym.utils import create_df_from_replay_buffer from reagent.models.seq2reward_model import", "= torch.zeros_like(all_step_reward) reward[: SEQ_LEN - j] = all_step_reward[-(SEQ_LEN - j)", "all_step_reward[-(SEQ_LEN - j) :] batch_reward[:, batch_seq_count] = reward state =", "For example, when seq_len = 3, batch_size = 1, action_num", "NormalizationParameters, ProblemDomain, Seq2RewardTrainerParameters, ) from reagent.gym.envs import Gym from reagent.gym.utils", ") trainer_param = Seq2RewardTrainerParameters( learning_rate=0.0, multi_steps=SEQ_LEN, action_names=[\"0\", \"1\"], compress_model_learning_rate=learning_rate, gamma=1.0,", ") from reagent.preprocessing.identify_types import DO_NOT_PREPROCESS from reagent.preprocessing.preprocessor import Preprocessor from", "learning_rate=learning_rate, multi_steps=SEQ_LEN, action_names=[\"0\", \"1\"], gamma=1.0, view_q_value=True, ) trainer = Seq2RewardTrainer(", "10.], [ 11.], [100.], [101.], [110.], [111.]] ) Input action", "self, state: rlt.FeatureData, action: rlt.FeatureData, valid_reward_len: Optional[torch.Tensor] = None, ):", "problem_domain=ProblemDomain.DISCRETE_ACTION, desired_size=dataset_size, multi_steps=None, ds=\"2020-10-10\", ) if filter_short_sequence: batch_size = NUM_MDP_PER_BATCH", "( mse_loss, q_values, action_distribution, _, ) = compress_model_trainer.validation_step(batch, idx) total_mse_loss", "action_dim = 2 seq_len = 3 model = FakeSeq2RewardNetwork() state_normalization_parameters", "- 
compress_eval_q_values < 1e-5) assert torch.all( eval_action_distribution - compress_eval_action_distribution <", "= torch.zeros(SEQ_LEN, batch_size, NUM_ACTION) for mdp_id in sorted(set(df.mdp_id)): mdp =", "import types as rlt from reagent.core.parameters import ( NormalizationData, NormalizationParameters,", "N_eval = len(eval_data) eval_mse_loss = total_mse_loss / N_eval eval_q_values =", "from reagent.core.parameters import ( NormalizationData, NormalizationParameters, ProblemDomain, Seq2RewardTrainerParameters, ) from", "eval_mse_loss, eval_q_values, eval_action_distribution class TestSeq2Reward(unittest.TestCase): def test_seq2reward_with_preprocessor_plan_short_sequence(self): self._test_seq2reward_with_preprocessor(plan_short_sequence=True) def test_seq2reward_with_preprocessor_plan_full_sequence(self):", "/ N_eval eval_action_distribution = total_action_distribution / N_eval return ( initial_state_q_values,", "model_with_preprocessor = Seq2RewardPlanShortSeqWithPreprocessor( model, step_prediction_model, state_preprocessor, seq_len, action_dim, ) else:", "deterministic=True) pl_trainer.fit(trainer, training_data) return trainer def eval_seq2reward_model(eval_data, seq2reward_trainer): SEQ_LEN, batch_size,", "reward[: SEQ_LEN - j] = all_step_reward[-(SEQ_LEN - j) :] batch_reward[:,", ") q_values = model_with_preprocessor(input_prototype) if plan_short_sequence: # When planning for", "filter_short_sequence=False ): SEQ_LEN = 6 NUM_ACTION = 2 NUM_MDP_PER_BATCH =", "ProblemDomain, Seq2RewardTrainerParameters, ) from reagent.gym.envs import Gym from reagent.gym.utils import", "SEQ_LEN - j] = all_step_action[-(SEQ_LEN - j) :] batch_action[:, batch_seq_count]", "numbers \"\"\" batch_size, _ = state.shape return torch.ones(batch_size, self.look_ahead_steps).float() class", "in range(SEQ_LEN): if filter_short_sequence and j > 0: break reward", "num_batches) training_data = DataLoader( batches[:num_training_batches], collate_fn=lambda x: x[0] ) eval_data", ") 
pl.seed_everything(SEED) pl_trainer = pl.Trainer(max_epochs=num_epochs, deterministic=True) pl_trainer.fit(trainer, training_data) return trainer", "41] expected_q_values = torch.tensor([[4.0, 41.0]]) else: expected_q_values = torch.tensor([[11.0, 111.0]])", "+= mse_loss total_q_values += torch.tensor(q_values) total_action_distribution += torch.tensor(action_distribution) N_eval =", "self.look_ahead_steps).float() class FakeSeq2RewardNetwork(nn.Module): def forward( self, state: rlt.FeatureData, action: rlt.FeatureData,", "< 1e-5 assert torch.all(eval_q_values - compress_eval_q_values < 1e-5) assert torch.all(", "= next(iter(eval_data)).action.float_features.shape total_mse_loss = 0 total_q_values = torch.zeros(NUM_ACTION) total_action_distribution =", "2, acc_reward = tensor( [[ 0.], [ 1.], [ 10.],", "set_max_steps=SEQ_LEN) df = create_df_from_replay_buffer( env=env, problem_domain=ProblemDomain.DISCRETE_ACTION, desired_size=dataset_size, multi_steps=None, ds=\"2020-10-10\", )", "action = torch.zeros_like(all_step_action) action[: SEQ_LEN - j] = all_step_action[-(SEQ_LEN -", ":] batch_action[:, batch_seq_count] = action batch_seq_count += 1 if batch_seq_count", "if len(mdp) != SEQ_LEN: continue all_step_reward = torch.Tensor(list(mdp[\"reward\"])) all_step_state =", "len(eval_data) eval_mse_loss = total_mse_loss / N_eval eval_q_values = total_q_values /", "batch_count == num_batches num_training_batches = int(training_data_ratio * num_batches) training_data =", "@parameterized.expand(STRING_GAME_TESTS) @unittest.skipIf(\"SANDCASTLE\" in os.environ, \"Skipping long test on sandcastle.\") def", ") trainer_param = Seq2RewardTrainerParameters( learning_rate=learning_rate, multi_steps=SEQ_LEN, action_names=[\"0\", \"1\"], gamma=1.0, view_q_value=True,", "SEQ_LEN, batch_size, NUM_ACTION = next(iter(eval_data)).action.float_features.shape initial_state = torch.Tensor([[0, 0]]) initial_state_q_values", "= 1, action_num = 2, acc_reward = tensor( [[ 0.],", "dim=2).tolist() 
acc_reward = torch.tensor( list(map(lambda x: float(\"\".join(map(str, x))), action_indices)) ).reshape(-1,", "x: float(\"\".join(map(str, x))), action_indices)) ).reshape(-1, 1) logger.info(f\"acc_reward: {acc_reward}\") return rlt.Seq2RewardOutput(acc_reward=acc_reward)", "rlt.Seq2RewardOutput(acc_reward=acc_reward) def create_string_game_data( dataset_size=10000, training_data_ratio=0.9, filter_short_sequence=False ): SEQ_LEN = 6", "state = torch.zeros_like(all_step_state) state[: SEQ_LEN - j] = all_step_state[-(SEQ_LEN -", "batch_reward = torch.zeros(SEQ_LEN, batch_size) batch_action = torch.zeros(SEQ_LEN, batch_size, NUM_ACTION) batch_state", "compress_net_builder.build_value_network( state_normalization_data, output_dim=NUM_ACTION, ) trainer_param = Seq2RewardTrainerParameters( learning_rate=0.0, multi_steps=SEQ_LEN, action_names=[\"0\",", "expected_q_values = torch.tensor([[4.0, 41.0]]) else: expected_q_values = torch.tensor([[11.0, 111.0]]) assert", "= 0, 0 batch_reward = torch.zeros(SEQ_LEN, batch_size) batch_action = torch.zeros(SEQ_LEN,", "q_values, action_distribution, ) = seq2reward_trainer.validation_step(batch, idx) total_mse_loss += mse_loss total_q_values", "os.environ, \"Skipping long test on sandcastle.\") def test_seq2reward_on_string_game_v0(self, filter_short_sequence): np.random.seed(SEED)", "# states and actions in previous steps, so the trained", "Seq2RewardWithPreprocessor, Seq2RewardPlanShortSeqWithPreprocessor, FAKE_STATE_ID_LIST_FEATURES, FAKE_STATE_ID_SCORE_LIST_FEATURES, ) from reagent.preprocessing.identify_types import DO_NOT_PREPROCESS from", "float(\"\".join(map(str, x))), action_indices)) ).reshape(-1, 1) logger.info(f\"acc_reward: {acc_reward}\") return rlt.Seq2RewardOutput(acc_reward=acc_reward) def", "def test_gen_permutations_seq_len_3_num_action_2(self): SEQ_LEN = 3 NUM_ACTION = 2 expected_outcome =", "parameterized from reagent.core import types as rlt from reagent.core.parameters import", "def _test_gen_permutations(self, 
SEQ_LEN, NUM_ACTION, expected_outcome): # expected shape: SEQ_LEN, PERM_NUM,", "all_step_reward = torch.Tensor(list(mdp[\"reward\"])) all_step_state = torch.Tensor([list(s.values()) for s in mdp[\"state_features\"]])", "torch.tensor([[4.0, 41.0]]) else: expected_q_values = torch.tensor([[11.0, 111.0]]) assert torch.all(expected_q_values ==", "torch.tensor(action_distribution) N_eval = len(eval_data) eval_mse_loss = total_mse_loss / N_eval eval_q_values", "= all_step_reward[-(SEQ_LEN - j) :] batch_reward[:, batch_seq_count] = reward state", "shape: SEQ_LEN, PERM_NUM, ACTION_DIM result = gen_permutations(SEQ_LEN, NUM_ACTION) assert result.shape", "All rights reserved. import logging import os import random import", "FullyConnected(sizes=[8, 8]) state_normalization_data = NormalizationData( dense_normalization_parameters={ 0: NormalizationParameters(feature_type=DO_NOT_PREPROCESS), 1: NormalizationParameters(feature_type=DO_NOT_PREPROCESS),", "mse loss to values close to zero. assert eval_mse_loss <", "== (SEQ_LEN, NUM_ACTION ** SEQ_LEN, NUM_ACTION) outcome = torch.argmax(result.transpose(0, 1),", "1e-5 assert torch.all(eval_q_values - compress_eval_q_values < 1e-5) assert torch.all( eval_action_distribution", "time_diff=time_diff, valid_step=valid_step, step=None, ) batch_count += 1 batch_seq_count = 0", ") batch_count += 1 batch_seq_count = 0 batch_reward = torch.zeros_like(batch_reward)", "111.0]]) assert torch.all(expected_q_values == q_values) def test_get_Q(self): NUM_ACTION = 2", "0], [1, 1, 1], ] ) self._test_gen_permutations(SEQ_LEN, NUM_ACTION, expected_outcome) def", "== expected_outcome) @parameterized.expand(STRING_GAME_TESTS) @unittest.skipIf(\"SANDCASTLE\" in os.environ, \"Skipping long test on", "[4], [5]]) self._test_gen_permutations(SEQ_LEN, NUM_ACTION, expected_outcome) def test_gen_permutations_seq_len_3_num_action_2(self): SEQ_LEN = 3", "\"1\"], gamma=1.0, view_q_value=True, ) trainer = Seq2RewardTrainer( seq2reward_network=seq2reward_network, 
params=trainer_param )", "assert SEQ_LEN == 6 and NUM_ACTION == 2 compress_net_builder =", "return trainer def eval_seq2reward_model(eval_data, seq2reward_trainer): SEQ_LEN, batch_size, NUM_ACTION = next(iter(eval_data)).action.float_features.shape", "numpy as np import pytorch_lightning as pl import torch import", "for 1, 2, and 3 steps ahead, # the expected", "torch.zeros(NUM_ACTION) total_action_distribution = torch.zeros(NUM_ACTION) for idx, batch in enumerate(eval_data): (", "4 all_permut = gen_permutations(MULTI_STEPS, NUM_ACTION) seq2reward_network = FakeSeq2RewardNetwork() state =", "random import unittest from typing import Optional import numpy as", "batch_size) valid_step = SEQ_LEN * torch.ones(batch_size, dtype=torch.int64)[:, None] not_terminal =", "create_string_game_data( dataset_size=10000, training_data_ratio=0.9, filter_short_sequence=False ): SEQ_LEN = 6 NUM_ACTION =", "[0, 0, 0], [0, 0, 1], [0, 1, 0], [0,", "seq_len, batch_size, num_action Output acc_reward shape: batch_size, 1 \"\"\" #", "torch.zeros_like(batch_state) assert batch_count == num_batches num_training_batches = int(training_data_ratio * num_batches)", "the probability of experiencing next n steps (1 <=n <=", "else: model_with_preprocessor = Seq2RewardWithPreprocessor( model, state_preprocessor, seq_len, action_dim, ) input_prototype", "if filter_short_sequence: assert eval_mse_loss < 0.1 else: # Same short", "NUM_ACTION) batch_state = torch.zeros(SEQ_LEN, batch_size, NUM_ACTION) for mdp_id in sorted(set(df.mdp_id)):", "eval_q_values, eval_action_distribution, ) def train_seq2reward_compress_model( training_data, seq2reward_network, learning_rate=0.1, num_epochs=5 ):", "import logging import os import random import unittest from typing", "id_list_features=FAKE_STATE_ID_LIST_FEATURES, id_score_list_features=FAKE_STATE_ID_SCORE_LIST_FEATURES, ) q_values = model_with_preprocessor(input_prototype) if plan_short_sequence: # When", "1 for i in range(SEQ_LEN)] ) not_terminal = 
torch.transpose(not_terminal.tile(NUM_MDP_PER_BATCH, 1),", "reward Reward is the concatenation of action indices, independent of", "in mdp[\"action\"]]] = 1.0 for j in range(SEQ_LEN): if filter_short_sequence", "env = Gym(env_name=\"StringGame-v0\", set_max_steps=SEQ_LEN) df = create_df_from_replay_buffer( env=env, problem_domain=ProblemDomain.DISCRETE_ACTION, desired_size=dataset_size,", "( mse_loss, _, q_values, action_distribution, ) = seq2reward_trainer.validation_step(batch, idx) total_mse_loss", "1 if batch_seq_count == batch_size: batches[batch_count] = rlt.MemoryNetworkInput( reward=batch_reward, action=rlt.FeatureData(float_features=batch_action),", "torch.Tensor([[0, 0]]) initial_state_q_values = torch.squeeze( get_Q( seq2reward_trainer.seq2reward_network, initial_state, seq2reward_trainer.all_permut, )", "= [(False,), (True,)] class FakeStepPredictionNetwork(nn.Module): def __init__(self, look_ahead_steps): super().__init__() self.look_ahead_steps", "NUM_ACTION, expected_outcome) def test_gen_permutations_seq_len_3_num_action_2(self): SEQ_LEN = 3 NUM_ACTION = 2", "1) else: batch_size = NUM_MDP_PER_BATCH * SEQ_LEN time_diff = torch.ones(SEQ_LEN,", "NUM_ACTION ** SEQ_LEN, NUM_ACTION) outcome = torch.argmax(result.transpose(0, 1), dim=-1) assert", "3 model = FakeSeq2RewardNetwork() state_normalization_parameters = { i: NormalizationParameters( feature_type=DO_NOT_PREPROCESS,", "the current state, predict the probability of experiencing next n", "): SEQ_LEN, batch_size, NUM_ACTION = next( iter(training_data) ).action.float_features.shape assert SEQ_LEN", "NUM_ACTION, expected_outcome) def _test_gen_permutations(self, SEQ_LEN, NUM_ACTION, expected_outcome): # expected shape:", "compress_eval_q_values, compress_eval_action_distribution, ) = eval_seq2reward_compress_model(eval_data, compress_model_trainer) assert compress_eval_mse_loss < 1e-5", "df[df[\"mdp_id\"] == mdp_id].sort_values(\"sequence_number\", ascending=True) if len(mdp) != SEQ_LEN: continue 
all_step_reward", "import unittest from typing import Optional import numpy as np", "batch_state = torch.zeros_like(batch_state) assert batch_count == num_batches num_training_batches = int(training_data_ratio", "@unittest.skipIf(\"SANDCASTLE\" in os.environ, \"Skipping long test on sandcastle.\") def test_seq2reward_on_string_game_v0(self,", "== 2 seq2reward_network = Seq2RewardNetwork( state_dim=NUM_ACTION, action_dim=NUM_ACTION, num_hiddens=64, num_hidden_layers=2, )", "STATE_DIM = 4 all_permut = gen_permutations(MULTI_STEPS, NUM_ACTION) seq2reward_network = FakeSeq2RewardNetwork()", "Inc. and its affiliates. All rights reserved. import logging import", "Input action shape: seq_len, batch_size, num_action Output acc_reward shape: batch_size,", "return rlt.Seq2RewardOutput(acc_reward=acc_reward) def create_string_game_data( dataset_size=10000, training_data_ratio=0.9, filter_short_sequence=False ): SEQ_LEN =", "collate_fn=lambda x: x[0] ) eval_data = DataLoader(batches[num_training_batches:], collate_fn=lambda x: x[0])", "FakeStepPredictionNetwork(seq_len) model_with_preprocessor = Seq2RewardPlanShortSeqWithPreprocessor( model, step_prediction_model, state_preprocessor, seq_len, action_dim, )", "4 action_dim = 2 seq_len = 3 model = FakeSeq2RewardNetwork()", "DataLoader(batches[num_training_batches:], collate_fn=lambda x: x[0]) return training_data, eval_data def train_seq2reward_model(training_data, learning_rate=0.01,", "float_features=torch.zeros_like(batch_state) ), # fake, not used anyway not_terminal=not_terminal, time_diff=time_diff, valid_step=valid_step,", "Seq2RewardNetwork( state_dim=NUM_ACTION, action_dim=NUM_ACTION, num_hiddens=64, num_hidden_layers=2, ) trainer_param = Seq2RewardTrainerParameters( learning_rate=learning_rate,", "all_step_action = torch.zeros_like(all_step_state) all_step_action[torch.arange(SEQ_LEN), [int(a) for a in mdp[\"action\"]]] =", "= torch.tensor( [ [0, 0, 0], [0, 0, 1], [0,", "test purpose, it outputs fixed fake numbers 
\"\"\" batch_size, _", "is not able # to reduce the mse loss to", "2 seq_len = 3 model = FakeSeq2RewardNetwork() state_normalization_parameters = {", "desired_size=dataset_size, multi_steps=None, ds=\"2020-10-10\", ) if filter_short_sequence: batch_size = NUM_MDP_PER_BATCH time_diff", "test_seq2reward_with_preprocessor_plan_full_sequence(self): self._test_seq2reward_with_preprocessor(plan_short_sequence=False) def _test_seq2reward_with_preprocessor(self, plan_short_sequence): state_dim = 4 action_dim =", "3 NUM_ACTION = 2 expected_outcome = torch.tensor( [ [0, 0,", "\"1\"], compress_model_learning_rate=learning_rate, gamma=1.0, view_q_value=True, ) trainer = CompressModelTrainer( compress_model_network=compress_model_network, seq2reward_network=seq2reward_network,", "= logging.getLogger(__name__) SEED = 0 STRING_GAME_TESTS = [(False,), (True,)] class", "torch.zeros(SEQ_LEN, batch_size, NUM_ACTION) batch_state = torch.zeros(SEQ_LEN, batch_size, NUM_ACTION) for mdp_id", "[11.0, 111.0]]) logger.info(f\"q_values: {q_values}\") assert torch.all(expected_q_values == q_values) def test_gen_permutations_seq_len_1_action_6(self):", "expected_outcome) def test_gen_permutations_seq_len_3_num_action_2(self): SEQ_LEN = 3 NUM_ACTION = 2 expected_outcome", "torch.zeros(SEQ_LEN, batch_size, NUM_ACTION) for mdp_id in sorted(set(df.mdp_id)): mdp = df[df[\"mdp_id\"]", "[None for _ in range(num_batches)] batch_count, batch_seq_count = 0, 0", "gen_permutations(MULTI_STEPS, NUM_ACTION) seq2reward_network = FakeSeq2RewardNetwork() state = torch.zeros(BATCH_SIZE, STATE_DIM) q_values", "compress_model_network=compress_model_network, seq2reward_network=seq2reward_network, params=trainer_param, ) pl.seed_everything(SEED) pl_trainer = pl.Trainer(max_epochs=num_epochs, deterministic=True) pl_trainer.fit(trainer,", "= NUM_MDP_PER_BATCH * SEQ_LEN time_diff = torch.ones(SEQ_LEN, batch_size) valid_step =", "reward=batch_reward, action=rlt.FeatureData(float_features=batch_action), 
state=rlt.FeatureData(float_features=batch_state), next_state=rlt.FeatureData( float_features=torch.zeros_like(batch_state) ), # fake, not used", "planning for 1, 2, and 3 steps ahead, # the", "state, predict the probability of experiencing next n steps (1", "rlt.FeatureData, action: rlt.FeatureData, valid_reward_len: Optional[torch.Tensor] = None, ): \"\"\" Mimic", "= create_df_from_replay_buffer( env=env, problem_domain=ProblemDomain.DISCRETE_ACTION, desired_size=dataset_size, multi_steps=None, ds=\"2020-10-10\", ) if filter_short_sequence:", "if filter_short_sequence and j > 0: break reward = torch.zeros_like(all_step_reward)", "MULTI_STEPS = 3 BATCH_SIZE = 2 STATE_DIM = 4 all_permut", "seq2reward_network = FakeSeq2RewardNetwork() state = torch.zeros(BATCH_SIZE, STATE_DIM) q_values = get_Q(seq2reward_network,", "self.look_ahead_steps = look_ahead_steps def forward(self, state: torch.Tensor): \"\"\" Given the", "= [None for _ in range(num_batches)] batch_count, batch_seq_count = 0,", "step_prediction_model = FakeStepPredictionNetwork(seq_len) model_with_preprocessor = Seq2RewardPlanShortSeqWithPreprocessor( model, step_prediction_model, state_preprocessor, seq_len,", "= look_ahead_steps def forward(self, state: torch.Tensor): \"\"\" Given the current", "[101.], [110.], [111.]] ) Input action shape: seq_len, batch_size, num_action", "enumerate(eval_data): ( mse_loss, q_values, action_distribution, _, ) = compress_model_trainer.validation_step(batch, idx)", "state_preprocessor = Preprocessor(state_normalization_parameters, False) if plan_short_sequence: step_prediction_model = FakeStepPredictionNetwork(seq_len) model_with_preprocessor", "else: # Same short sequences may have different total rewards", ") Input action shape: seq_len, batch_size, num_action Output acc_reward shape:", "model_with_preprocessor(input_prototype) if plan_short_sequence: # When planning for 1, 2, and", "reagent.net_builder.value.fully_connected import FullyConnected from 
reagent.prediction.predictor_wrapper import ( Seq2RewardWithPreprocessor, Seq2RewardPlanShortSeqWithPreprocessor, FAKE_STATE_ID_LIST_FEATURES,", "as rlt from reagent.core.parameters import ( NormalizationData, NormalizationParameters, ProblemDomain, Seq2RewardTrainerParameters,", "I/O of Seq2RewardNetwork but return fake reward Reward is the", "break reward = torch.zeros_like(all_step_reward) reward[: SEQ_LEN - j] = all_step_reward[-(SEQ_LEN", "create_df_from_replay_buffer from reagent.models.seq2reward_model import Seq2RewardNetwork from reagent.net_builder.value.fully_connected import FullyConnected from", "6 and NUM_ACTION == 2 compress_net_builder = FullyConnected(sizes=[8, 8]) state_normalization_data", "q_values) def test_gen_permutations_seq_len_1_action_6(self): SEQ_LEN = 1 NUM_ACTION = 6 expected_outcome", "mse_loss, q_values, action_distribution, _, ) = compress_model_trainer.validation_step(batch, idx) total_mse_loss +=", "return fake reward Reward is the concatenation of action indices,", "of Seq2RewardNetwork but return fake reward Reward is the concatenation", "SEQ_LEN: continue all_step_reward = torch.Tensor(list(mdp[\"reward\"])) all_step_state = torch.Tensor([list(s.values()) for s", "torch.tril(torch.ones(SEQ_LEN, SEQ_LEN), diagonal=-1).tile( NUM_MDP_PER_BATCH, 1 ), 0, 1, ) num_batches", "= model_with_preprocessor(input_prototype) if plan_short_sequence: # When planning for 1, 2,", "== 6 and NUM_ACTION == 2 seq2reward_network = Seq2RewardNetwork( state_dim=NUM_ACTION,", "next_state=rlt.FeatureData( float_features=torch.zeros_like(batch_state) ), # fake, not used anyway not_terminal=not_terminal, time_diff=time_diff,", "= state.shape return torch.ones(batch_size, self.look_ahead_steps).float() class FakeSeq2RewardNetwork(nn.Module): def forward( self,", "independent of state. 
For example, when seq_len = 3, batch_size", "0 batch_reward = torch.zeros(SEQ_LEN, batch_size) batch_action = torch.zeros(SEQ_LEN, batch_size, NUM_ACTION)", "Reward is the concatenation of action indices, independent of state.", "and its affiliates. All rights reserved. import logging import os", "valid_reward_len: Optional[torch.Tensor] = None, ): \"\"\" Mimic I/O of Seq2RewardNetwork", "[1, 11], [11, 111] # Weighting the expected q values", "0, 1], [0, 1, 0], [0, 1, 1], [1, 0,", "< 10 compress_model_trainer = train_seq2reward_compress_model( training_data, seq2reward_trainer.seq2reward_network ) ( compress_eval_mse_loss,", "state = torch.zeros(BATCH_SIZE, STATE_DIM) q_values = get_Q(seq2reward_network, state, all_permut) expected_q_values", "action.float_features.transpose(0, 1) action_indices = torch.argmax(action, dim=2).tolist() acc_reward = torch.tensor( list(map(lambda", "range(SEQ_LEN): if filter_short_sequence and j > 0: break reward =", "shape: batch_size, 1 \"\"\" # pyre-fixme[9]: action has type `FeatureData`;", "enumerate(eval_data): ( mse_loss, _, q_values, action_distribution, ) = seq2reward_trainer.validation_step(batch, idx)", "#!/usr/bin/env python3 # Copyright (c) Facebook, Inc. 
and its affiliates.", "= torch.tensor([[0], [1], [2], [3], [4], [5]]) self._test_gen_permutations(SEQ_LEN, NUM_ACTION, expected_outcome)", "+= 1 if batch_seq_count == batch_size: batches[batch_count] = rlt.MemoryNetworkInput( reward=batch_reward,", "SEQ_LEN, batch_size, NUM_ACTION = next(iter(eval_data)).action.float_features.shape total_mse_loss = 0 total_q_values =", "= torch.Tensor(list(mdp[\"reward\"])) all_step_state = torch.Tensor([list(s.values()) for s in mdp[\"state_features\"]]) all_step_action", "gen_permutations from reagent.training.world_model.compress_model_trainer import CompressModelTrainer from reagent.training.world_model.seq2reward_trainer import get_Q, Seq2RewardTrainer", "assert eval_mse_loss < 10 compress_model_trainer = train_seq2reward_compress_model( training_data, seq2reward_trainer.seq2reward_network )", "reagent.training.world_model.compress_model_trainer import CompressModelTrainer from reagent.training.world_model.seq2reward_trainer import get_Q, Seq2RewardTrainer from torch.utils.data", "= 3, batch_size = 1, action_num = 2, acc_reward =", "return training_data, eval_data def train_seq2reward_model(training_data, learning_rate=0.01, num_epochs=5): SEQ_LEN, batch_size, NUM_ACTION", "0.33, 0.33], we have [4, 41] expected_q_values = torch.tensor([[4.0, 41.0]])", "batch_size, 1 \"\"\" # pyre-fixme[9]: action has type `FeatureData`; used", "not_terminal = torch.Tensor( [0 if i == SEQ_LEN - 1", "= int(training_data_ratio * num_batches) training_data = DataLoader( batches[:num_training_batches], collate_fn=lambda x:", "NUM_MDP_PER_BATCH) batches = [None for _ in range(num_batches)] batch_count, batch_seq_count", "/ N_eval return ( initial_state_q_values, eval_mse_loss, eval_q_values, eval_action_distribution, ) def", "0], [0, 0, 1], [0, 1, 0], [0, 1, 1],", "<=n <= look_ahead_steps) For the test purpose, it outputs fixed", "trainer_param = Seq2RewardTrainerParameters( learning_rate=learning_rate, multi_steps=SEQ_LEN, action_names=[\"0\", 
\"1\"], gamma=1.0, view_q_value=True, )", "10 compress_model_trainer = train_seq2reward_compress_model( training_data, seq2reward_trainer.seq2reward_network ) ( compress_eval_mse_loss, compress_eval_q_values,", "to values close to zero. assert eval_mse_loss < 10 compress_model_trainer", "torch.zeros_like(all_step_action) action[: SEQ_LEN - j] = all_step_action[-(SEQ_LEN - j) :]", "SEQ_LEN == 6 and NUM_ACTION == 2 seq2reward_network = Seq2RewardNetwork(", "} ) compress_model_network = compress_net_builder.build_value_network( state_normalization_data, output_dim=NUM_ACTION, ) trainer_param =", "[111.]] ) Input action shape: seq_len, batch_size, num_action Output acc_reward", "not used anyway not_terminal=not_terminal, time_diff=time_diff, valid_step=valid_step, step=None, ) batch_count +=", "def test_seq2reward_with_preprocessor_plan_short_sequence(self): self._test_seq2reward_with_preprocessor(plan_short_sequence=True) def test_seq2reward_with_preprocessor_plan_full_sequence(self): self._test_seq2reward_with_preprocessor(plan_short_sequence=False) def _test_seq2reward_with_preprocessor(self, plan_short_sequence): state_dim", "torch.arange(SEQ_LEN, 0, -1).tile(NUM_MDP_PER_BATCH)[:, None] not_terminal = torch.transpose( torch.tril(torch.ones(SEQ_LEN, SEQ_LEN), diagonal=-1).tile(", "= torch.zeros(SEQ_LEN, batch_size) batch_action = torch.zeros(SEQ_LEN, batch_size, NUM_ACTION) batch_state =", "mdp_id in sorted(set(df.mdp_id)): mdp = df[df[\"mdp_id\"] == mdp_id].sort_values(\"sequence_number\", ascending=True) if", "= 1 NUM_ACTION = 6 expected_outcome = torch.tensor([[0], [1], [2],", "= { i: NormalizationParameters( feature_type=DO_NOT_PREPROCESS, mean=0.0, stddev=1.0 ) for i", "\"Skipping long test on sandcastle.\") def test_seq2reward_on_string_game_v0(self, filter_short_sequence): np.random.seed(SEED) random.seed(SEED)", "batch_size, NUM_ACTION = next(iter(eval_data)).action.float_features.shape initial_state = torch.Tensor([[0, 0]]) initial_state_q_values =", ") 
self._test_gen_permutations(SEQ_LEN, NUM_ACTION, expected_outcome) def _test_gen_permutations(self, SEQ_LEN, NUM_ACTION, expected_outcome): #", "torch.zeros_like(all_step_state) all_step_action[torch.arange(SEQ_LEN), [int(a) for a in mdp[\"action\"]]] = 1.0 for", "pl_trainer = pl.Trainer(max_epochs=num_epochs, deterministic=True) pl_trainer.fit(trainer, training_data) return trainer def eval_seq2reward_compress_model(eval_data,", "[(False,), (True,)] class FakeStepPredictionNetwork(nn.Module): def __init__(self, look_ahead_steps): super().__init__() self.look_ahead_steps =", "_ = state.shape return torch.ones(batch_size, self.look_ahead_steps).float() class FakeSeq2RewardNetwork(nn.Module): def forward(", "[1, 1, 1], ] ) self._test_gen_permutations(SEQ_LEN, NUM_ACTION, expected_outcome) def _test_gen_permutations(self,", "[110.], [111.]] ) Input action shape: seq_len, batch_size, num_action Output", "short sequences may have different total rewards due to the", "= int(dataset_size / SEQ_LEN / NUM_MDP_PER_BATCH) batches = [None for", "of state. For example, when seq_len = 3, batch_size =", "None] not_terminal = torch.Tensor( [0 if i == SEQ_LEN -", "= torch.transpose(not_terminal.tile(NUM_MDP_PER_BATCH, 1), 0, 1) else: batch_size = NUM_MDP_PER_BATCH *", "else: batch_size = NUM_MDP_PER_BATCH * SEQ_LEN time_diff = torch.ones(SEQ_LEN, batch_size)", "type `FeatureData`; used as `Tensor`. action = action.float_features.transpose(0, 1) action_indices", "valid_step=valid_step, step=None, ) batch_count += 1 batch_seq_count = 0 batch_reward", "learning_rate=0.0, multi_steps=SEQ_LEN, action_names=[\"0\", \"1\"], compress_model_learning_rate=learning_rate, gamma=1.0, view_q_value=True, ) trainer =", "state. 
For example, when seq_len = 3, batch_size = 1,", "outcome = torch.argmax(result.transpose(0, 1), dim=-1) assert torch.all(outcome == expected_outcome) @parameterized.expand(STRING_GAME_TESTS)", "1.0 for j in range(SEQ_LEN): if filter_short_sequence and j >", "for idx, batch in enumerate(eval_data): ( mse_loss, _, q_values, action_distribution,", "rewards due to the missing # states and actions in", "state: rlt.FeatureData, action: rlt.FeatureData, valid_reward_len: Optional[torch.Tensor] = None, ): \"\"\"", "NormalizationParameters(feature_type=DO_NOT_PREPROCESS), } ) compress_model_network = compress_net_builder.build_value_network( state_normalization_data, output_dim=NUM_ACTION, ) trainer_param", "= Seq2RewardTrainerParameters( learning_rate=0.0, multi_steps=SEQ_LEN, action_names=[\"0\", \"1\"], compress_model_learning_rate=learning_rate, gamma=1.0, view_q_value=True, )", ") for i in range(1, state_dim) } state_preprocessor = Preprocessor(state_normalization_parameters,", "Weighting the expected q values by predicted step # probabilities", "1.0 if filter_short_sequence: assert eval_mse_loss < 0.1 else: # Same", "seq2reward_trainer.validation_step(batch, idx) total_mse_loss += mse_loss total_q_values += torch.tensor(q_values) total_action_distribution +=", "NUM_ACTION) outcome = torch.argmax(result.transpose(0, 1), dim=-1) assert torch.all(outcome == expected_outcome)", "batch_size, num_action Output acc_reward shape: batch_size, 1 \"\"\" # pyre-fixme[9]:", "in range(1, state_dim) } state_preprocessor = Preprocessor(state_normalization_parameters, False) if plan_short_sequence:", "not_terminal = torch.transpose( torch.tril(torch.ones(SEQ_LEN, SEQ_LEN), diagonal=-1).tile( NUM_MDP_PER_BATCH, 1 ), 0,", "= torch.Tensor( [0 if i == SEQ_LEN - 1 else", "/ N_eval return eval_mse_loss, eval_q_values, eval_action_distribution class TestSeq2Reward(unittest.TestCase): def test_seq2reward_with_preprocessor_plan_short_sequence(self):", "abs(initial_state_q_values[0].item() - 
10) < 1.0 assert abs(initial_state_q_values[1].item() - 5) <", "torch.transpose(not_terminal.tile(NUM_MDP_PER_BATCH, 1), 0, 1) else: batch_size = NUM_MDP_PER_BATCH * SEQ_LEN", "2 compress_net_builder = FullyConnected(sizes=[8, 8]) state_normalization_data = NormalizationData( dense_normalization_parameters={ 0:", "test_gen_permutations_seq_len_3_num_action_2(self): SEQ_LEN = 3 NUM_ACTION = 2 expected_outcome = torch.tensor(", "N_eval return eval_mse_loss, eval_q_values, eval_action_distribution class TestSeq2Reward(unittest.TestCase): def test_seq2reward_with_preprocessor_plan_short_sequence(self): self._test_seq2reward_with_preprocessor(plan_short_sequence=True)", ") num_batches = int(dataset_size / SEQ_LEN / NUM_MDP_PER_BATCH) batches =", "seq2reward_network=seq2reward_network, params=trainer_param, ) pl.seed_everything(SEED) pl_trainer = pl.Trainer(max_epochs=num_epochs, deterministic=True) pl_trainer.fit(trainer, training_data)", "== batch_size: batches[batch_count] = rlt.MemoryNetworkInput( reward=batch_reward, action=rlt.FeatureData(float_features=batch_action), state=rlt.FeatureData(float_features=batch_state), next_state=rlt.FeatureData( float_features=torch.zeros_like(batch_state)", "= torch.zeros_like(all_step_state) state[: SEQ_LEN - j] = all_step_state[-(SEQ_LEN - j)", "<= look_ahead_steps) For the test purpose, it outputs fixed fake", "batches[batch_count] = rlt.MemoryNetworkInput( reward=batch_reward, action=rlt.FeatureData(float_features=batch_action), state=rlt.FeatureData(float_features=batch_state), next_state=rlt.FeatureData( float_features=torch.zeros_like(batch_state) ), #", "gamma=1.0, view_q_value=True, ) trainer = CompressModelTrainer( compress_model_network=compress_model_network, seq2reward_network=seq2reward_network, params=trainer_param, )", "q_values, action_distribution, _, ) = compress_model_trainer.validation_step(batch, idx) total_mse_loss += mse_loss", "reagent.models.seq2reward_model import Seq2RewardNetwork from 
reagent.net_builder.value.fully_connected import FullyConnected from reagent.prediction.predictor_wrapper import", "trainer_param = Seq2RewardTrainerParameters( learning_rate=0.0, multi_steps=SEQ_LEN, action_names=[\"0\", \"1\"], compress_model_learning_rate=learning_rate, gamma=1.0, view_q_value=True,", "\"\"\" Given the current state, predict the probability of experiencing", "q values by predicted step # probabilities [0.33, 0.33, 0.33],", "NUM_ACTION == 2 compress_net_builder = FullyConnected(sizes=[8, 8]) state_normalization_data = NormalizationData(", "batch_size, NUM_ACTION) for mdp_id in sorted(set(df.mdp_id)): mdp = df[df[\"mdp_id\"] ==", "= state action = torch.zeros_like(all_step_action) action[: SEQ_LEN - j] =", "by predicted step # probabilities [0.33, 0.33, 0.33], we have", "j > 0: break reward = torch.zeros_like(all_step_reward) reward[: SEQ_LEN -", "from reagent.gym.utils import create_df_from_replay_buffer from reagent.models.seq2reward_model import Seq2RewardNetwork from reagent.net_builder.value.fully_connected", "NUM_ACTION) seq2reward_network = FakeSeq2RewardNetwork() state = torch.zeros(BATCH_SIZE, STATE_DIM) q_values =", "num_batches = int(dataset_size / SEQ_LEN / NUM_MDP_PER_BATCH) batches = [None", "num_hiddens=64, num_hidden_layers=2, ) trainer_param = Seq2RewardTrainerParameters( learning_rate=learning_rate, multi_steps=SEQ_LEN, action_names=[\"0\", \"1\"],", ") trainer = CompressModelTrainer( compress_model_network=compress_model_network, seq2reward_network=seq2reward_network, params=trainer_param, ) pl.seed_everything(SEED) pl_trainer", ") seq2reward_trainer = train_seq2reward_model(training_data) ( initial_state_q_values, eval_mse_loss, eval_q_values, eval_action_distribution, )", "def test_get_Q(self): NUM_ACTION = 2 MULTI_STEPS = 3 BATCH_SIZE =", "as nn from parameterized import parameterized from reagent.core import types", "compress_model_trainer = train_seq2reward_compress_model( training_data, 
seq2reward_trainer.seq2reward_network ) ( compress_eval_mse_loss, compress_eval_q_values, compress_eval_action_distribution,", "outputs fixed fake numbers \"\"\" batch_size, _ = state.shape return", "for i in range(SEQ_LEN)] ) not_terminal = torch.transpose(not_terminal.tile(NUM_MDP_PER_BATCH, 1), 0,", "range(SEQ_LEN)] ) not_terminal = torch.transpose(not_terminal.tile(NUM_MDP_PER_BATCH, 1), 0, 1) else: batch_size", "state_dim = 4 action_dim = 2 seq_len = 3 model", "0.1 else: # Same short sequences may have different total", "dtype=torch.int64)[:, None] not_terminal = torch.Tensor( [0 if i == SEQ_LEN", "logger.info(f\"q_values: {q_values}\") assert torch.all(expected_q_values == q_values) def test_gen_permutations_seq_len_1_action_6(self): SEQ_LEN =", "x: x[0] ) eval_data = DataLoader(batches[num_training_batches:], collate_fn=lambda x: x[0]) return", "eval_mse_loss < 0.1 else: # Same short sequences may have", "total_mse_loss += mse_loss total_q_values += torch.tensor(q_values) total_action_distribution += torch.tensor(action_distribution) N_eval", "# the expected q values are respectively: # [0, 1],", "seq2reward_trainer = train_seq2reward_model(training_data) ( initial_state_q_values, eval_mse_loss, eval_q_values, eval_action_distribution, ) =", "} state_preprocessor = Preprocessor(state_normalization_parameters, False) if plan_short_sequence: step_prediction_model = FakeStepPredictionNetwork(seq_len)", "= Seq2RewardWithPreprocessor( model, state_preprocessor, seq_len, action_dim, ) input_prototype = rlt.ServingFeatureData(", "torch.Tensor): \"\"\" Given the current state, predict the probability of", "== q_values) def test_get_Q(self): NUM_ACTION = 2 MULTI_STEPS = 3", "training_data_ratio=0.9, filter_short_sequence=False ): SEQ_LEN = 6 NUM_ACTION = 2 NUM_MDP_PER_BATCH", "pl.Trainer(max_epochs=num_epochs, deterministic=True) pl_trainer.fit(trainer, training_data) return trainer def eval_seq2reward_compress_model(eval_data, compress_model_trainer): SEQ_LEN,", 
"total_action_distribution += torch.tensor(action_distribution) N_eval = len(eval_data) eval_mse_loss = total_mse_loss /", "1, 1], ] ) self._test_gen_permutations(SEQ_LEN, NUM_ACTION, expected_outcome) def _test_gen_permutations(self, SEQ_LEN,", "action shape: seq_len, batch_size, num_action Output acc_reward shape: batch_size, 1", "def _test_seq2reward_with_preprocessor(self, plan_short_sequence): state_dim = 4 action_dim = 2 seq_len", "= 0 STRING_GAME_TESTS = [(False,), (True,)] class FakeStepPredictionNetwork(nn.Module): def __init__(self,", "batch_size) valid_step = torch.arange(SEQ_LEN, 0, -1).tile(NUM_MDP_PER_BATCH)[:, None] not_terminal = torch.transpose(", "state action = torch.zeros_like(all_step_action) action[: SEQ_LEN - j] = all_step_action[-(SEQ_LEN", "torch.argmax(action, dim=2).tolist() acc_reward = torch.tensor( list(map(lambda x: float(\"\".join(map(str, x))), action_indices))", "unittest from typing import Optional import numpy as np import", "= NormalizationData( dense_normalization_parameters={ 0: NormalizationParameters(feature_type=DO_NOT_PREPROCESS), 1: NormalizationParameters(feature_type=DO_NOT_PREPROCESS), } ) compress_model_network", "(SEQ_LEN, NUM_ACTION ** SEQ_LEN, NUM_ACTION) outcome = torch.argmax(result.transpose(0, 1), dim=-1)", "state.shape return torch.ones(batch_size, self.look_ahead_steps).float() class FakeSeq2RewardNetwork(nn.Module): def forward( self, state:", "[0, 1, 1], [1, 0, 0], [1, 0, 1], [1,", "but return fake reward Reward is the concatenation of action", "0]]) initial_state_q_values = torch.squeeze( get_Q( seq2reward_trainer.seq2reward_network, initial_state, seq2reward_trainer.all_permut, ) )", ") def train_seq2reward_compress_model( training_data, seq2reward_network, learning_rate=0.1, num_epochs=5 ): SEQ_LEN, batch_size,", "def test_seq2reward_on_string_game_v0(self, filter_short_sequence): np.random.seed(SEED) random.seed(SEED) torch.manual_seed(SEED) training_data, eval_data = create_string_game_data(", 
"expected_outcome = torch.tensor( [ [0, 0, 0], [0, 0, 1],", "may have different total rewards due to the missing #", "# fake, not used anyway not_terminal=not_terminal, time_diff=time_diff, valid_step=valid_step, step=None, )", ") ( compress_eval_mse_loss, compress_eval_q_values, compress_eval_action_distribution, ) = eval_seq2reward_compress_model(eval_data, compress_model_trainer) assert", "1) logger.info(f\"acc_reward: {acc_reward}\") return rlt.Seq2RewardOutput(acc_reward=acc_reward) def create_string_game_data( dataset_size=10000, training_data_ratio=0.9, filter_short_sequence=False", "torch.zeros_like(batch_reward) batch_action = torch.zeros_like(batch_action) batch_state = torch.zeros_like(batch_state) assert batch_count ==", ") = seq2reward_trainer.validation_step(batch, idx) total_mse_loss += mse_loss total_q_values += torch.tensor(q_values)", "predicted step # probabilities [0.33, 0.33, 0.33], we have [4,", "FakeSeq2RewardNetwork(nn.Module): def forward( self, state: rlt.FeatureData, action: rlt.FeatureData, valid_reward_len: Optional[torch.Tensor]", "[ [0, 0, 0], [0, 0, 1], [0, 1, 0],", "STRING_GAME_TESTS = [(False,), (True,)] class FakeStepPredictionNetwork(nn.Module): def __init__(self, look_ahead_steps): super().__init__()", "num_hidden_layers=2, ) trainer_param = Seq2RewardTrainerParameters( learning_rate=learning_rate, multi_steps=SEQ_LEN, action_names=[\"0\", \"1\"], gamma=1.0,", "from typing import Optional import numpy as np import pytorch_lightning", "= FakeSeq2RewardNetwork() state_normalization_parameters = { i: NormalizationParameters( feature_type=DO_NOT_PREPROCESS, mean=0.0, stddev=1.0", "41.0]]) else: expected_q_values = torch.tensor([[11.0, 111.0]]) assert torch.all(expected_q_values == q_values)", "logging import os import random import unittest from typing import", "import parameterized from reagent.core import types as rlt from reagent.core.parameters", "state_dim=NUM_ACTION, action_dim=NUM_ACTION, num_hiddens=64, num_hidden_layers=2, 
) trainer_param = Seq2RewardTrainerParameters( learning_rate=learning_rate, multi_steps=SEQ_LEN,", "torch.squeeze( get_Q( seq2reward_trainer.seq2reward_network, initial_state, seq2reward_trainer.all_permut, ) ) total_mse_loss = 0", "seq_len, action_dim, ) else: model_with_preprocessor = Seq2RewardWithPreprocessor( model, state_preprocessor, seq_len,", "= torch.tensor([[4.0, 41.0]]) else: expected_q_values = torch.tensor([[11.0, 111.0]]) assert torch.all(expected_q_values", "= Gym(env_name=\"StringGame-v0\", set_max_steps=SEQ_LEN) df = create_df_from_replay_buffer( env=env, problem_domain=ProblemDomain.DISCRETE_ACTION, desired_size=dataset_size, multi_steps=None,", "reduce the mse loss to values close to zero. assert", "reward = torch.zeros_like(all_step_reward) reward[: SEQ_LEN - j] = all_step_reward[-(SEQ_LEN -", "( initial_state_q_values, eval_mse_loss, eval_q_values, eval_action_distribution, ) = eval_seq2reward_model(eval_data, seq2reward_trainer) assert", "action has type `FeatureData`; used as `Tensor`. 
action = action.float_features.transpose(0,", "[1, 1, 0], [1, 1, 1], ] ) self._test_gen_permutations(SEQ_LEN, NUM_ACTION,", "( Seq2RewardWithPreprocessor, Seq2RewardPlanShortSeqWithPreprocessor, FAKE_STATE_ID_LIST_FEATURES, FAKE_STATE_ID_SCORE_LIST_FEATURES, ) from reagent.preprocessing.identify_types import DO_NOT_PREPROCESS", "states and actions in previous steps, so the trained network", ") eval_data = DataLoader(batches[num_training_batches:], collate_fn=lambda x: x[0]) return training_data, eval_data", "class FakeSeq2RewardNetwork(nn.Module): def forward( self, state: rlt.FeatureData, action: rlt.FeatureData, valid_reward_len:", "ascending=True) if len(mdp) != SEQ_LEN: continue all_step_reward = torch.Tensor(list(mdp[\"reward\"])) all_step_state", "state[: SEQ_LEN - j] = all_step_state[-(SEQ_LEN - j) :] batch_state[:,", "SEQ_LEN time_diff = torch.ones(SEQ_LEN, batch_size) valid_step = torch.arange(SEQ_LEN, 0, -1).tile(NUM_MDP_PER_BATCH)[:,", "== q_values) def test_gen_permutations_seq_len_1_action_6(self): SEQ_LEN = 1 NUM_ACTION = 6", "1 batch_seq_count = 0 batch_reward = torch.zeros_like(batch_reward) batch_action = torch.zeros_like(batch_action)", "class TestSeq2Reward(unittest.TestCase): def test_seq2reward_with_preprocessor_plan_short_sequence(self): self._test_seq2reward_with_preprocessor(plan_short_sequence=True) def test_seq2reward_with_preprocessor_plan_full_sequence(self): self._test_seq2reward_with_preprocessor(plan_short_sequence=False) def _test_seq2reward_with_preprocessor(self,", "j) :] batch_action[:, batch_seq_count] = action batch_seq_count += 1 if", "in enumerate(eval_data): ( mse_loss, q_values, action_distribution, _, ) = compress_model_trainer.validation_step(batch,", "mse_loss, _, q_values, action_distribution, ) = seq2reward_trainer.validation_step(batch, idx) total_mse_loss +=", "the expected q values are respectively: # [0, 1], [1,", "= DataLoader(batches[num_training_batches:], collate_fn=lambda x: x[0]) return training_data, 
eval_data def train_seq2reward_model(training_data,", "pl_trainer = pl.Trainer(max_epochs=num_epochs, deterministic=True) pl_trainer.fit(trainer, training_data) return trainer def eval_seq2reward_model(eval_data,", "eval_mse_loss, eval_q_values, eval_action_distribution, ) def train_seq2reward_compress_model( training_data, seq2reward_network, learning_rate=0.1, num_epochs=5", "torch.nn as nn from parameterized import parameterized from reagent.core import", "[0, 1], [1, 11], [11, 111] # Weighting the expected", "all_permut = gen_permutations(MULTI_STEPS, NUM_ACTION) seq2reward_network = FakeSeq2RewardNetwork() state = torch.zeros(BATCH_SIZE,", "== num_batches num_training_batches = int(training_data_ratio * num_batches) training_data = DataLoader(", "if plan_short_sequence: step_prediction_model = FakeStepPredictionNetwork(seq_len) model_with_preprocessor = Seq2RewardPlanShortSeqWithPreprocessor( model, step_prediction_model,", "= torch.transpose( torch.tril(torch.ones(SEQ_LEN, SEQ_LEN), diagonal=-1).tile( NUM_MDP_PER_BATCH, 1 ), 0, 1,", "action_dim=NUM_ACTION, num_hiddens=64, num_hidden_layers=2, ) trainer_param = Seq2RewardTrainerParameters( learning_rate=learning_rate, multi_steps=SEQ_LEN, action_names=[\"0\",", "SEQ_LEN == 6 and NUM_ACTION == 2 compress_net_builder = FullyConnected(sizes=[8,", "ACTION_DIM result = gen_permutations(SEQ_LEN, NUM_ACTION) assert result.shape == (SEQ_LEN, NUM_ACTION", "as np import pytorch_lightning as pl import torch import torch.nn", "[11, 111] # Weighting the expected q values by predicted", "Given the current state, predict the probability of experiencing next", "gamma=1.0, view_q_value=True, ) trainer = Seq2RewardTrainer( seq2reward_network=seq2reward_network, params=trainer_param ) pl.seed_everything(SEED)", "{q_values}\") assert torch.all(expected_q_values == q_values) def test_gen_permutations_seq_len_1_action_6(self): SEQ_LEN = 1", "compress_eval_mse_loss < 1e-5 assert torch.all(eval_q_values - compress_eval_q_values < 
1e-5) assert", "action_distribution, ) = seq2reward_trainer.validation_step(batch, idx) total_mse_loss += mse_loss total_q_values +=", "eval_q_values, eval_action_distribution class TestSeq2Reward(unittest.TestCase): def test_seq2reward_with_preprocessor_plan_short_sequence(self): self._test_seq2reward_with_preprocessor(plan_short_sequence=True) def test_seq2reward_with_preprocessor_plan_full_sequence(self): self._test_seq2reward_with_preprocessor(plan_short_sequence=False)", "eval_action_distribution class TestSeq2Reward(unittest.TestCase): def test_seq2reward_with_preprocessor_plan_short_sequence(self): self._test_seq2reward_with_preprocessor(plan_short_sequence=True) def test_seq2reward_with_preprocessor_plan_full_sequence(self): self._test_seq2reward_with_preprocessor(plan_short_sequence=False) def", "= eval_seq2reward_compress_model(eval_data, compress_model_trainer) assert compress_eval_mse_loss < 1e-5 assert torch.all(eval_q_values -", "= FakeSeq2RewardNetwork() state = torch.zeros(BATCH_SIZE, STATE_DIM) q_values = get_Q(seq2reward_network, state,", "compress_model_network = compress_net_builder.build_value_network( state_normalization_data, output_dim=NUM_ACTION, ) trainer_param = Seq2RewardTrainerParameters( learning_rate=0.0,", "1, 0], [1, 1, 1], ] ) self._test_gen_permutations(SEQ_LEN, NUM_ACTION, expected_outcome)", "batch_size, _ = state.shape return torch.ones(batch_size, self.look_ahead_steps).float() class FakeSeq2RewardNetwork(nn.Module): def", "for i in range(1, state_dim) } state_preprocessor = Preprocessor(state_normalization_parameters, False)", "continue all_step_reward = torch.Tensor(list(mdp[\"reward\"])) all_step_state = torch.Tensor([list(s.values()) for s in", "- 5) < 1.0 if filter_short_sequence: assert eval_mse_loss < 0.1", "to zero. 
assert eval_mse_loss < 10 compress_model_trainer = train_seq2reward_compress_model( training_data,", "1, ) num_batches = int(dataset_size / SEQ_LEN / NUM_MDP_PER_BATCH) batches", "import os import random import unittest from typing import Optional", "import numpy as np import pytorch_lightning as pl import torch", "num_epochs=5): SEQ_LEN, batch_size, NUM_ACTION = next( iter(training_data) ).action.float_features.shape assert SEQ_LEN", "initial_state, seq2reward_trainer.all_permut, ) ) total_mse_loss = 0 total_q_values = torch.zeros(NUM_ACTION)", "[5]]) self._test_gen_permutations(SEQ_LEN, NUM_ACTION, expected_outcome) def test_gen_permutations_seq_len_3_num_action_2(self): SEQ_LEN = 3 NUM_ACTION", "train_seq2reward_model(training_data) ( initial_state_q_values, eval_mse_loss, eval_q_values, eval_action_distribution, ) = eval_seq2reward_model(eval_data, seq2reward_trainer)", "eval_mse_loss, eval_q_values, eval_action_distribution, ) = eval_seq2reward_model(eval_data, seq2reward_trainer) assert abs(initial_state_q_values[0].item() -", "eval_action_distribution, ) = eval_seq2reward_model(eval_data, seq2reward_trainer) assert abs(initial_state_q_values[0].item() - 10) <", "eval_seq2reward_compress_model(eval_data, compress_model_trainer): SEQ_LEN, batch_size, NUM_ACTION = next(iter(eval_data)).action.float_features.shape total_mse_loss = 0", "0, -1).tile(NUM_MDP_PER_BATCH)[:, None] not_terminal = torch.transpose( torch.tril(torch.ones(SEQ_LEN, SEQ_LEN), diagonal=-1).tile( NUM_MDP_PER_BATCH,", "in range(num_batches)] batch_count, batch_seq_count = 0, 0 batch_reward = torch.zeros(SEQ_LEN,", "Preprocessor(state_normalization_parameters, False) if plan_short_sequence: step_prediction_model = FakeStepPredictionNetwork(seq_len) model_with_preprocessor = Seq2RewardPlanShortSeqWithPreprocessor(", "os import random import unittest from typing import Optional import", "in previous steps, so the trained network is not able", "compress_eval_mse_loss, compress_eval_q_values, 
compress_eval_action_distribution, ) = eval_seq2reward_compress_model(eval_data, compress_model_trainer) assert compress_eval_mse_loss <", "eval_seq2reward_compress_model(eval_data, compress_model_trainer) assert compress_eval_mse_loss < 1e-5 assert torch.all(eval_q_values - compress_eval_q_values", "next(iter(eval_data)).action.float_features.shape total_mse_loss = 0 total_q_values = torch.zeros(NUM_ACTION) total_action_distribution = torch.zeros(NUM_ACTION)", "compress_eval_q_values < 1e-5) assert torch.all( eval_action_distribution - compress_eval_action_distribution < 1e-5", "Facebook, Inc. and its affiliates. All rights reserved. import logging", "torch.zeros_like(all_step_state) state[: SEQ_LEN - j] = all_step_state[-(SEQ_LEN - j) :]", "0 STRING_GAME_TESTS = [(False,), (True,)] class FakeStepPredictionNetwork(nn.Module): def __init__(self, look_ahead_steps):", "mse_loss total_q_values += torch.tensor(q_values) total_action_distribution += torch.tensor(action_distribution) N_eval = len(eval_data)", "eval_action_distribution = total_action_distribution / N_eval return eval_mse_loss, eval_q_values, eval_action_distribution class", "torch.all(eval_q_values - compress_eval_q_values < 1e-5) assert torch.all( eval_action_distribution - compress_eval_action_distribution", "torch.ones(SEQ_LEN, batch_size) valid_step = SEQ_LEN * torch.ones(batch_size, dtype=torch.int64)[:, None] not_terminal", "eval_action_distribution = total_action_distribution / N_eval return ( initial_state_q_values, eval_mse_loss, eval_q_values,", "PERM_NUM, ACTION_DIM result = gen_permutations(SEQ_LEN, NUM_ACTION) assert result.shape == (SEQ_LEN,", "= 0 batch_reward = torch.zeros_like(batch_reward) batch_action = torch.zeros_like(batch_action) batch_state =", "= torch.zeros(NUM_ACTION) for idx, batch in enumerate(eval_data): ( mse_loss, _,", "# Same short sequences may have different total rewards due", "rlt.FeatureData, valid_reward_len: Optional[torch.Tensor] = None, ): \"\"\" Mimic I/O of", 
"N_eval eval_action_distribution = total_action_distribution / N_eval return eval_mse_loss, eval_q_values, eval_action_distribution", "# probabilities [0.33, 0.33, 0.33], we have [4, 41] expected_q_values", "== mdp_id].sort_values(\"sequence_number\", ascending=True) if len(mdp) != SEQ_LEN: continue all_step_reward =", "0, 0 batch_reward = torch.zeros(SEQ_LEN, batch_size) batch_action = torch.zeros(SEQ_LEN, batch_size,", "- j) :] batch_action[:, batch_seq_count] = action batch_seq_count += 1", "- j] = all_step_state[-(SEQ_LEN - j) :] batch_state[:, batch_seq_count] =", "batches[:num_training_batches], collate_fn=lambda x: x[0] ) eval_data = DataLoader(batches[num_training_batches:], collate_fn=lambda x:", "range(1, state_dim) } state_preprocessor = Preprocessor(state_normalization_parameters, False) if plan_short_sequence: step_prediction_model", "as pl import torch import torch.nn as nn from parameterized", "action_num = 2, acc_reward = tensor( [[ 0.], [ 1.],", "assert abs(initial_state_q_values[1].item() - 5) < 1.0 if filter_short_sequence: assert eval_mse_loss", "training_data, eval_data def train_seq2reward_model(training_data, learning_rate=0.01, num_epochs=5): SEQ_LEN, batch_size, NUM_ACTION =", "so the trained network is not able # to reduce", "output_dim=NUM_ACTION, ) trainer_param = Seq2RewardTrainerParameters( learning_rate=0.0, multi_steps=SEQ_LEN, action_names=[\"0\", \"1\"], compress_model_learning_rate=learning_rate,", "expected_outcome): # expected shape: SEQ_LEN, PERM_NUM, ACTION_DIM result = gen_permutations(SEQ_LEN,", "NUM_MDP_PER_BATCH time_diff = torch.ones(SEQ_LEN, batch_size) valid_step = SEQ_LEN * torch.ones(batch_size,", "look_ahead_steps) For the test purpose, it outputs fixed fake numbers", "torch.tensor([[11.0, 111.0]]) assert torch.all(expected_q_values == q_values) def test_get_Q(self): NUM_ACTION =", "Gym(env_name=\"StringGame-v0\", set_max_steps=SEQ_LEN) df = create_df_from_replay_buffer( env=env, 
problem_domain=ProblemDomain.DISCRETE_ACTION, desired_size=dataset_size, multi_steps=None, ds=\"2020-10-10\",", "def forward( self, state: rlt.FeatureData, action: rlt.FeatureData, valid_reward_len: Optional[torch.Tensor] =", "training_data, eval_data = create_string_game_data( filter_short_sequence=filter_short_sequence ) seq2reward_trainer = train_seq2reward_model(training_data) (", "NUM_ACTION) for mdp_id in sorted(set(df.mdp_id)): mdp = df[df[\"mdp_id\"] == mdp_id].sort_values(\"sequence_number\",", "np.random.seed(SEED) random.seed(SEED) torch.manual_seed(SEED) training_data, eval_data = create_string_game_data( filter_short_sequence=filter_short_sequence ) seq2reward_trainer", "SEQ_LEN, PERM_NUM, ACTION_DIM result = gen_permutations(SEQ_LEN, NUM_ACTION) assert result.shape ==", "time_diff = torch.ones(SEQ_LEN, batch_size) valid_step = SEQ_LEN * torch.ones(batch_size, dtype=torch.int64)[:,", "torch.Tensor( [0 if i == SEQ_LEN - 1 else 1", "fake, not used anyway not_terminal=not_terminal, time_diff=time_diff, valid_step=valid_step, step=None, ) batch_count", "pl.seed_everything(SEED) pl_trainer = pl.Trainer(max_epochs=num_epochs, deterministic=True) pl_trainer.fit(trainer, training_data) return trainer def", "= 2 STATE_DIM = 4 all_permut = gen_permutations(MULTI_STEPS, NUM_ACTION) seq2reward_network", "0, 1) else: batch_size = NUM_MDP_PER_BATCH * SEQ_LEN time_diff =", "torch.transpose( torch.tril(torch.ones(SEQ_LEN, SEQ_LEN), diagonal=-1).tile( NUM_MDP_PER_BATCH, 1 ), 0, 1, )", "for _ in range(num_batches)] batch_count, batch_seq_count = 0, 0 batch_reward", ":] batch_state[:, batch_seq_count] = state action = torch.zeros_like(all_step_action) action[: SEQ_LEN", "batch_seq_count += 1 if batch_seq_count == batch_size: batches[batch_count] = rlt.MemoryNetworkInput(", "batch_size, NUM_ACTION = next( iter(training_data) ).action.float_features.shape assert SEQ_LEN == 6", "3 steps ahead, # the expected q values are respectively:", "def __init__(self, 
look_ahead_steps): super().__init__() self.look_ahead_steps = look_ahead_steps def forward(self, state:", "if filter_short_sequence: batch_size = NUM_MDP_PER_BATCH time_diff = torch.ones(SEQ_LEN, batch_size) valid_step", "in mdp[\"state_features\"]]) all_step_action = torch.zeros_like(all_step_state) all_step_action[torch.arange(SEQ_LEN), [int(a) for a in", "reagent.training.utils import gen_permutations from reagent.training.world_model.compress_model_trainer import CompressModelTrainer from reagent.training.world_model.seq2reward_trainer import", "eval_mse_loss < 10 compress_model_trainer = train_seq2reward_compress_model( training_data, seq2reward_trainer.seq2reward_network ) (", "1], [1, 11], [11, 111] # Weighting the expected q", "and NUM_ACTION == 2 compress_net_builder = FullyConnected(sizes=[8, 8]) state_normalization_data =", "when seq_len = 3, batch_size = 1, action_num = 2,", "batch_seq_count = 0, 0 batch_reward = torch.zeros(SEQ_LEN, batch_size) batch_action =", "nn from parameterized import parameterized from reagent.core import types as", "look_ahead_steps def forward(self, state: torch.Tensor): \"\"\" Given the current state,", "training_data) return trainer def eval_seq2reward_model(eval_data, seq2reward_trainer): SEQ_LEN, batch_size, NUM_ACTION =", "batch_size) batch_action = torch.zeros(SEQ_LEN, batch_size, NUM_ACTION) batch_state = torch.zeros(SEQ_LEN, batch_size,", "df = create_df_from_replay_buffer( env=env, problem_domain=ProblemDomain.DISCRETE_ACTION, desired_size=dataset_size, multi_steps=None, ds=\"2020-10-10\", ) if", "action_distribution, _, ) = compress_model_trainer.validation_step(batch, idx) total_mse_loss += mse_loss total_q_values", "Seq2RewardPlanShortSeqWithPreprocessor( model, step_prediction_model, state_preprocessor, seq_len, action_dim, ) else: model_with_preprocessor =", "def test_gen_permutations_seq_len_1_action_6(self): SEQ_LEN = 1 NUM_ACTION = 6 expected_outcome =", "seq2reward_trainer.seq2reward_network, initial_state, 
seq2reward_trainer.all_permut, ) ) total_mse_loss = 0 total_q_values =", "torch import torch.nn as nn from parameterized import parameterized from", "= None, ): \"\"\" Mimic I/O of Seq2RewardNetwork but return", "import gen_permutations from reagent.training.world_model.compress_model_trainer import CompressModelTrainer from reagent.training.world_model.seq2reward_trainer import get_Q,", "= train_seq2reward_model(training_data) ( initial_state_q_values, eval_mse_loss, eval_q_values, eval_action_distribution, ) = eval_seq2reward_model(eval_data,", "= torch.ones(SEQ_LEN, batch_size) valid_step = SEQ_LEN * torch.ones(batch_size, dtype=torch.int64)[:, None]", "6 and NUM_ACTION == 2 seq2reward_network = Seq2RewardNetwork( state_dim=NUM_ACTION, action_dim=NUM_ACTION,", "from reagent.preprocessing.preprocessor import Preprocessor from reagent.training.utils import gen_permutations from reagent.training.world_model.compress_model_trainer", "seq2reward_network = Seq2RewardNetwork( state_dim=NUM_ACTION, action_dim=NUM_ACTION, num_hiddens=64, num_hidden_layers=2, ) trainer_param =", "reward state = torch.zeros_like(all_step_state) state[: SEQ_LEN - j] = all_step_state[-(SEQ_LEN", "all_permut) expected_q_values = torch.tensor([[11.0, 111.0], [11.0, 111.0]]) logger.info(f\"q_values: {q_values}\") assert", "* num_batches) training_data = DataLoader( batches[:num_training_batches], collate_fn=lambda x: x[0] )", "[100.], [101.], [110.], [111.]] ) Input action shape: seq_len, batch_size,", "batch_size = NUM_MDP_PER_BATCH time_diff = torch.ones(SEQ_LEN, batch_size) valid_step = SEQ_LEN", "import Optional import numpy as np import pytorch_lightning as pl", "def forward(self, state: torch.Tensor): \"\"\" Given the current state, predict", "# Weighting the expected q values by predicted step #", "python3 # Copyright (c) Facebook, Inc. and its affiliates. 
All", "torch.zeros(NUM_ACTION) for idx, batch in enumerate(eval_data): ( mse_loss, q_values, action_distribution,", "step_prediction_model, state_preprocessor, seq_len, action_dim, ) else: model_with_preprocessor = Seq2RewardWithPreprocessor( model,", "SEQ_LEN * torch.ones(batch_size, dtype=torch.int64)[:, None] not_terminal = torch.Tensor( [0 if", "( NormalizationData, NormalizationParameters, ProblemDomain, Seq2RewardTrainerParameters, ) from reagent.gym.envs import Gym", "q values are respectively: # [0, 1], [1, 11], [11,", "eval_data def train_seq2reward_model(training_data, learning_rate=0.01, num_epochs=5): SEQ_LEN, batch_size, NUM_ACTION = next(", "for idx, batch in enumerate(eval_data): ( mse_loss, q_values, action_distribution, _,", "it outputs fixed fake numbers \"\"\" batch_size, _ = state.shape", "self._test_gen_permutations(SEQ_LEN, NUM_ACTION, expected_outcome) def _test_gen_permutations(self, SEQ_LEN, NUM_ACTION, expected_outcome): # expected", "0, 1], [1, 1, 0], [1, 1, 1], ] )", "deterministic=True) pl_trainer.fit(trainer, training_data) return trainer def eval_seq2reward_compress_model(eval_data, compress_model_trainer): SEQ_LEN, batch_size,", "shape: seq_len, batch_size, num_action Output acc_reward shape: batch_size, 1 \"\"\"", "\"\"\" batch_size, _ = state.shape return torch.ones(batch_size, self.look_ahead_steps).float() class FakeSeq2RewardNetwork(nn.Module):", "!= SEQ_LEN: continue all_step_reward = torch.Tensor(list(mdp[\"reward\"])) all_step_state = torch.Tensor([list(s.values()) for", "= torch.zeros(NUM_ACTION) for idx, batch in enumerate(eval_data): ( mse_loss, q_values,", "Seq2RewardNetwork from reagent.net_builder.value.fully_connected import FullyConnected from reagent.prediction.predictor_wrapper import ( Seq2RewardWithPreprocessor,", "Preprocessor from reagent.training.utils import gen_permutations from reagent.training.world_model.compress_model_trainer import CompressModelTrainer from", "from reagent.training.utils import 
gen_permutations from reagent.training.world_model.compress_model_trainer import CompressModelTrainer from reagent.training.world_model.seq2reward_trainer", "= action batch_seq_count += 1 if batch_seq_count == batch_size: batches[batch_count]", "- j) :] batch_reward[:, batch_seq_count] = reward state = torch.zeros_like(all_step_state)", "eval_seq2reward_model(eval_data, seq2reward_trainer): SEQ_LEN, batch_size, NUM_ACTION = next(iter(eval_data)).action.float_features.shape initial_state = torch.Tensor([[0,", "torch.zeros(NUM_ACTION) for idx, batch in enumerate(eval_data): ( mse_loss, _, q_values,", "self._test_seq2reward_with_preprocessor(plan_short_sequence=True) def test_seq2reward_with_preprocessor_plan_full_sequence(self): self._test_seq2reward_with_preprocessor(plan_short_sequence=False) def _test_seq2reward_with_preprocessor(self, plan_short_sequence): state_dim = 4", "= gen_permutations(MULTI_STEPS, NUM_ACTION) seq2reward_network = FakeSeq2RewardNetwork() state = torch.zeros(BATCH_SIZE, STATE_DIM)", "0, 0], [0, 0, 1], [0, 1, 0], [0, 1,", "torch.Tensor([list(s.values()) for s in mdp[\"state_features\"]]) all_step_action = torch.zeros_like(all_step_state) all_step_action[torch.arange(SEQ_LEN), [int(a)", "rlt.MemoryNetworkInput( reward=batch_reward, action=rlt.FeatureData(float_features=batch_action), state=rlt.FeatureData(float_features=batch_state), next_state=rlt.FeatureData( float_features=torch.zeros_like(batch_state) ), # fake, not", "= create_string_game_data( filter_short_sequence=filter_short_sequence ) seq2reward_trainer = train_seq2reward_model(training_data) ( initial_state_q_values, eval_mse_loss,", "= action.float_features.transpose(0, 1) action_indices = torch.argmax(action, dim=2).tolist() acc_reward = torch.tensor(", "network is not able # to reduce the mse loss", "num_epochs=5 ): SEQ_LEN, batch_size, NUM_ACTION = next( iter(training_data) ).action.float_features.shape assert", "state, all_permut) expected_q_values = torch.tensor([[11.0, 
111.0], [11.0, 111.0]]) logger.info(f\"q_values: {q_values}\")", "model_with_preprocessor = Seq2RewardWithPreprocessor( model, state_preprocessor, seq_len, action_dim, ) input_prototype =", "model, state_preprocessor, seq_len, action_dim, ) input_prototype = rlt.ServingFeatureData( float_features_with_presence=state_preprocessor.input_prototype(), id_list_features=FAKE_STATE_ID_LIST_FEATURES,", "def eval_seq2reward_compress_model(eval_data, compress_model_trainer): SEQ_LEN, batch_size, NUM_ACTION = next(iter(eval_data)).action.float_features.shape total_mse_loss =", "create_string_game_data( filter_short_sequence=filter_short_sequence ) seq2reward_trainer = train_seq2reward_model(training_data) ( initial_state_q_values, eval_mse_loss, eval_q_values,", "\"\"\" # pyre-fixme[9]: action has type `FeatureData`; used as `Tensor`.", "we have [4, 41] expected_q_values = torch.tensor([[4.0, 41.0]]) else: expected_q_values", "NUM_ACTION = 2 NUM_MDP_PER_BATCH = 5 env = Gym(env_name=\"StringGame-v0\", set_max_steps=SEQ_LEN)", "self._test_gen_permutations(SEQ_LEN, NUM_ACTION, expected_outcome) def test_gen_permutations_seq_len_3_num_action_2(self): SEQ_LEN = 3 NUM_ACTION =", "= 1.0 for j in range(SEQ_LEN): if filter_short_sequence and j", "valid_step = torch.arange(SEQ_LEN, 0, -1).tile(NUM_MDP_PER_BATCH)[:, None] not_terminal = torch.transpose( torch.tril(torch.ones(SEQ_LEN,", "pl import torch import torch.nn as nn from parameterized import", "import ( Seq2RewardWithPreprocessor, Seq2RewardPlanShortSeqWithPreprocessor, FAKE_STATE_ID_LIST_FEATURES, FAKE_STATE_ID_SCORE_LIST_FEATURES, ) from reagent.preprocessing.identify_types import", "batch_seq_count = 0 batch_reward = torch.zeros_like(batch_reward) batch_action = torch.zeros_like(batch_action) batch_state", "from reagent.net_builder.value.fully_connected import FullyConnected from reagent.prediction.predictor_wrapper import ( Seq2RewardWithPreprocessor, Seq2RewardPlanShortSeqWithPreprocessor,", "get_Q, Seq2RewardTrainer from 
torch.utils.data import DataLoader logger = logging.getLogger(__name__) SEED", "iter(training_data) ).action.float_features.shape assert SEQ_LEN == 6 and NUM_ACTION == 2", "2 MULTI_STEPS = 3 BATCH_SIZE = 2 STATE_DIM = 4", "FAKE_STATE_ID_SCORE_LIST_FEATURES, ) from reagent.preprocessing.identify_types import DO_NOT_PREPROCESS from reagent.preprocessing.preprocessor import Preprocessor", "multi_steps=None, ds=\"2020-10-10\", ) if filter_short_sequence: batch_size = NUM_MDP_PER_BATCH time_diff =", "[3], [4], [5]]) self._test_gen_permutations(SEQ_LEN, NUM_ACTION, expected_outcome) def test_gen_permutations_seq_len_3_num_action_2(self): SEQ_LEN =", "== SEQ_LEN - 1 else 1 for i in range(SEQ_LEN)]", "import torch.nn as nn from parameterized import parameterized from reagent.core", "return trainer def eval_seq2reward_compress_model(eval_data, compress_model_trainer): SEQ_LEN, batch_size, NUM_ACTION = next(iter(eval_data)).action.float_features.shape", "view_q_value=True, ) trainer = CompressModelTrainer( compress_model_network=compress_model_network, seq2reward_network=seq2reward_network, params=trainer_param, ) pl.seed_everything(SEED)", "1, 0], [0, 1, 1], [1, 0, 0], [1, 0,", "action_names=[\"0\", \"1\"], compress_model_learning_rate=learning_rate, gamma=1.0, view_q_value=True, ) trainer = CompressModelTrainer( compress_model_network=compress_model_network,", ") trainer = Seq2RewardTrainer( seq2reward_network=seq2reward_network, params=trainer_param ) pl.seed_everything(SEED) pl_trainer =", "acc_reward = torch.tensor( list(map(lambda x: float(\"\".join(map(str, x))), action_indices)) ).reshape(-1, 1)", "FullyConnected from reagent.prediction.predictor_wrapper import ( Seq2RewardWithPreprocessor, Seq2RewardPlanShortSeqWithPreprocessor, FAKE_STATE_ID_LIST_FEATURES, FAKE_STATE_ID_SCORE_LIST_FEATURES, )", "batch_size = 1, action_num = 2, acc_reward = tensor( [[", "x))), action_indices)) ).reshape(-1, 1) logger.info(f\"acc_reward: {acc_reward}\") return 
rlt.Seq2RewardOutput(acc_reward=acc_reward) def create_string_game_data(", "forward( self, state: rlt.FeatureData, action: rlt.FeatureData, valid_reward_len: Optional[torch.Tensor] = None,", "logger.info(f\"acc_reward: {acc_reward}\") return rlt.Seq2RewardOutput(acc_reward=acc_reward) def create_string_game_data( dataset_size=10000, training_data_ratio=0.9, filter_short_sequence=False ):", "NUM_MDP_PER_BATCH = 5 env = Gym(env_name=\"StringGame-v0\", set_max_steps=SEQ_LEN) df = create_df_from_replay_buffer(", "j] = all_step_reward[-(SEQ_LEN - j) :] batch_reward[:, batch_seq_count] = reward", "total_q_values = torch.zeros(NUM_ACTION) total_action_distribution = torch.zeros(NUM_ACTION) for idx, batch in", "types as rlt from reagent.core.parameters import ( NormalizationData, NormalizationParameters, ProblemDomain,", ").action.float_features.shape assert SEQ_LEN == 6 and NUM_ACTION == 2 seq2reward_network", "steps (1 <=n <= look_ahead_steps) For the test purpose, it", "initial_state_q_values = torch.squeeze( get_Q( seq2reward_trainer.seq2reward_network, initial_state, seq2reward_trainer.all_permut, ) ) total_mse_loss", "+= torch.tensor(q_values) total_action_distribution += torch.tensor(action_distribution) N_eval = len(eval_data) eval_mse_loss =", "return ( initial_state_q_values, eval_mse_loss, eval_q_values, eval_action_distribution, ) def train_seq2reward_compress_model( training_data,", "-1).tile(NUM_MDP_PER_BATCH)[:, None] not_terminal = torch.transpose( torch.tril(torch.ones(SEQ_LEN, SEQ_LEN), diagonal=-1).tile( NUM_MDP_PER_BATCH, 1", "total_q_values += torch.tensor(q_values) total_action_distribution += torch.tensor(action_distribution) N_eval = len(eval_data) eval_mse_loss", "0: break reward = torch.zeros_like(all_step_reward) reward[: SEQ_LEN - j] =", "NormalizationData( dense_normalization_parameters={ 0: NormalizationParameters(feature_type=DO_NOT_PREPROCESS), 1: NormalizationParameters(feature_type=DO_NOT_PREPROCESS), } ) compress_model_network =", 
"FakeSeq2RewardNetwork() state_normalization_parameters = { i: NormalizationParameters( feature_type=DO_NOT_PREPROCESS, mean=0.0, stddev=1.0 )", "[0 if i == SEQ_LEN - 1 else 1 for", "sorted(set(df.mdp_id)): mdp = df[df[\"mdp_id\"] == mdp_id].sort_values(\"sequence_number\", ascending=True) if len(mdp) !=", "params=trainer_param ) pl.seed_everything(SEED) pl_trainer = pl.Trainer(max_epochs=num_epochs, deterministic=True) pl_trainer.fit(trainer, training_data) return", "params=trainer_param, ) pl.seed_everything(SEED) pl_trainer = pl.Trainer(max_epochs=num_epochs, deterministic=True) pl_trainer.fit(trainer, training_data) return", "= reward state = torch.zeros_like(all_step_state) state[: SEQ_LEN - j] =", "SEQ_LEN - 1 else 1 for i in range(SEQ_LEN)] )", "view_q_value=True, ) trainer = Seq2RewardTrainer( seq2reward_network=seq2reward_network, params=trainer_param ) pl.seed_everything(SEED) pl_trainer", "eval_q_values = total_q_values / N_eval eval_action_distribution = total_action_distribution / N_eval", "test on sandcastle.\") def test_seq2reward_on_string_game_v0(self, filter_short_sequence): np.random.seed(SEED) random.seed(SEED) torch.manual_seed(SEED) training_data,", "= 6 NUM_ACTION = 2 NUM_MDP_PER_BATCH = 5 env =", "= seq2reward_trainer.validation_step(batch, idx) total_mse_loss += mse_loss total_q_values += torch.tensor(q_values) total_action_distribution", "the missing # states and actions in previous steps, so", "logger = logging.getLogger(__name__) SEED = 0 STRING_GAME_TESTS = [(False,), (True,)]", "\"\"\" Mimic I/O of Seq2RewardNetwork but return fake reward Reward", "= 4 all_permut = gen_permutations(MULTI_STEPS, NUM_ACTION) seq2reward_network = FakeSeq2RewardNetwork() state", "NUM_ACTION = 6 expected_outcome = torch.tensor([[0], [1], [2], [3], [4],", "action batch_seq_count += 1 if batch_seq_count == batch_size: batches[batch_count] =", "reagent.core import types as rlt from reagent.core.parameters import ( NormalizationData,", "expected_q_values = 
torch.tensor([[11.0, 111.0]]) assert torch.all(expected_q_values == q_values) def test_get_Q(self):", "torch.ones(batch_size, self.look_ahead_steps).float() class FakeSeq2RewardNetwork(nn.Module): def forward( self, state: rlt.FeatureData, action:", "respectively: # [0, 1], [1, 11], [11, 111] # Weighting", "return torch.ones(batch_size, self.look_ahead_steps).float() class FakeSeq2RewardNetwork(nn.Module): def forward( self, state: rlt.FeatureData,", "in range(SEQ_LEN)] ) not_terminal = torch.transpose(not_terminal.tile(NUM_MDP_PER_BATCH, 1), 0, 1) else:", "[0, 1, 0], [0, 1, 1], [1, 0, 0], [1,", "seq_len, action_dim, ) input_prototype = rlt.ServingFeatureData( float_features_with_presence=state_preprocessor.input_prototype(), id_list_features=FAKE_STATE_ID_LIST_FEATURES, id_score_list_features=FAKE_STATE_ID_SCORE_LIST_FEATURES, )", "6 NUM_ACTION = 2 NUM_MDP_PER_BATCH = 5 env = Gym(env_name=\"StringGame-v0\",", "= torch.zeros_like(batch_action) batch_state = torch.zeros_like(batch_state) assert batch_count == num_batches num_training_batches", "i: NormalizationParameters( feature_type=DO_NOT_PREPROCESS, mean=0.0, stddev=1.0 ) for i in range(1,", "Seq2RewardNetwork but return fake reward Reward is the concatenation of", "plan_short_sequence: # When planning for 1, 2, and 3 steps", "[ 11.], [100.], [101.], [110.], [111.]] ) Input action shape:", "= torch.tensor([[11.0, 111.0]]) assert torch.all(expected_q_values == q_values) def test_get_Q(self): NUM_ACTION", "= all_step_state[-(SEQ_LEN - j) :] batch_state[:, batch_seq_count] = state action", "SEQ_LEN - j] = all_step_state[-(SEQ_LEN - j) :] batch_state[:, batch_seq_count]", "{ i: NormalizationParameters( feature_type=DO_NOT_PREPROCESS, mean=0.0, stddev=1.0 ) for i in", "not_terminal=not_terminal, time_diff=time_diff, valid_step=valid_step, step=None, ) batch_count += 1 batch_seq_count =", "idx) total_mse_loss += mse_loss total_q_values += torch.tensor(q_values) total_action_distribution += 
torch.tensor(action_distribution)", "torch.tensor(q_values) total_action_distribution += torch.tensor(action_distribution) N_eval = len(eval_data) eval_mse_loss = total_mse_loss", "= FullyConnected(sizes=[8, 8]) state_normalization_data = NormalizationData( dense_normalization_parameters={ 0: NormalizationParameters(feature_type=DO_NOT_PREPROCESS), 1:", "= 4 action_dim = 2 seq_len = 3 model =", "For the test purpose, it outputs fixed fake numbers \"\"\"", "/ NUM_MDP_PER_BATCH) batches = [None for _ in range(num_batches)] batch_count,", "torch.tensor([[0], [1], [2], [3], [4], [5]]) self._test_gen_permutations(SEQ_LEN, NUM_ACTION, expected_outcome) def", "total_action_distribution / N_eval return eval_mse_loss, eval_q_values, eval_action_distribution class TestSeq2Reward(unittest.TestCase): def", "[0, 0, 1], [0, 1, 0], [0, 1, 1], [1,", "id_score_list_features=FAKE_STATE_ID_SCORE_LIST_FEATURES, ) q_values = model_with_preprocessor(input_prototype) if plan_short_sequence: # When planning", "# Copyright (c) Facebook, Inc. and its affiliates. 
All rights", "expected q values are respectively: # [0, 1], [1, 11],", "able # to reduce the mse loss to values close", "= all_step_action[-(SEQ_LEN - j) :] batch_action[:, batch_seq_count] = action batch_seq_count", "seq2reward_network, learning_rate=0.1, num_epochs=5 ): SEQ_LEN, batch_size, NUM_ACTION = next( iter(training_data)", "batch_size: batches[batch_count] = rlt.MemoryNetworkInput( reward=batch_reward, action=rlt.FeatureData(float_features=batch_action), state=rlt.FeatureData(float_features=batch_state), next_state=rlt.FeatureData( float_features=torch.zeros_like(batch_state) ),", "= 2 MULTI_STEPS = 3 BATCH_SIZE = 2 STATE_DIM =", "# to reduce the mse loss to values close to", "actions in previous steps, so the trained network is not", "NUM_ACTION == 2 seq2reward_network = Seq2RewardNetwork( state_dim=NUM_ACTION, action_dim=NUM_ACTION, num_hiddens=64, num_hidden_layers=2,", "batch_action = torch.zeros(SEQ_LEN, batch_size, NUM_ACTION) batch_state = torch.zeros(SEQ_LEN, batch_size, NUM_ACTION)", "assert torch.all(expected_q_values == q_values) def test_get_Q(self): NUM_ACTION = 2 MULTI_STEPS", "= total_action_distribution / N_eval return eval_mse_loss, eval_q_values, eval_action_distribution class TestSeq2Reward(unittest.TestCase):", "= Seq2RewardPlanShortSeqWithPreprocessor( model, step_prediction_model, state_preprocessor, seq_len, action_dim, ) else: model_with_preprocessor", "all_step_state[-(SEQ_LEN - j) :] batch_state[:, batch_seq_count] = state action =", "step=None, ) batch_count += 1 batch_seq_count = 0 batch_reward =", "eval_q_values, eval_action_distribution, ) = eval_seq2reward_model(eval_data, seq2reward_trainer) assert abs(initial_state_q_values[0].item() - 10)", "state: torch.Tensor): \"\"\" Given the current state, predict the probability", "assert torch.all(outcome == expected_outcome) @parameterized.expand(STRING_GAME_TESTS) @unittest.skipIf(\"SANDCASTLE\" in os.environ, \"Skipping long", "j] = all_step_action[-(SEQ_LEN - j) :] 
batch_action[:, batch_seq_count] = action", "FAKE_STATE_ID_LIST_FEATURES, FAKE_STATE_ID_SCORE_LIST_FEATURES, ) from reagent.preprocessing.identify_types import DO_NOT_PREPROCESS from reagent.preprocessing.preprocessor import", "NUM_ACTION = 2 MULTI_STEPS = 3 BATCH_SIZE = 2 STATE_DIM", "steps, so the trained network is not able # to", "the test purpose, it outputs fixed fake numbers \"\"\" batch_size,", "1) action_indices = torch.argmax(action, dim=2).tolist() acc_reward = torch.tensor( list(map(lambda x:", "< 1.0 assert abs(initial_state_q_values[1].item() - 5) < 1.0 if filter_short_sequence:", "training_data, seq2reward_trainer.seq2reward_network ) ( compress_eval_mse_loss, compress_eval_q_values, compress_eval_action_distribution, ) = eval_seq2reward_compress_model(eval_data,", "q_values = model_with_preprocessor(input_prototype) if plan_short_sequence: # When planning for 1,", "= torch.tensor( list(map(lambda x: float(\"\".join(map(str, x))), action_indices)) ).reshape(-1, 1) logger.info(f\"acc_reward:", "action=rlt.FeatureData(float_features=batch_action), state=rlt.FeatureData(float_features=batch_state), next_state=rlt.FeatureData( float_features=torch.zeros_like(batch_state) ), # fake, not used anyway", "and j > 0: break reward = torch.zeros_like(all_step_reward) reward[: SEQ_LEN", "), 0, 1, ) num_batches = int(dataset_size / SEQ_LEN /", "/ N_eval eval_action_distribution = total_action_distribution / N_eval return eval_mse_loss, eval_q_values,", "expected q values by predicted step # probabilities [0.33, 0.33,", "= torch.argmax(result.transpose(0, 1), dim=-1) assert torch.all(outcome == expected_outcome) @parameterized.expand(STRING_GAME_TESTS) @unittest.skipIf(\"SANDCASTLE\"", "filter_short_sequence: batch_size = NUM_MDP_PER_BATCH time_diff = torch.ones(SEQ_LEN, batch_size) valid_step =", "time_diff = torch.ones(SEQ_LEN, batch_size) valid_step = torch.arange(SEQ_LEN, 0, -1).tile(NUM_MDP_PER_BATCH)[:, None]", "SEQ_LEN), diagonal=-1).tile( NUM_MDP_PER_BATCH, 
1 ), 0, 1, ) num_batches =", "0], [1, 0, 1], [1, 1, 0], [1, 1, 1],", "import get_Q, Seq2RewardTrainer from torch.utils.data import DataLoader logger = logging.getLogger(__name__)", "NUM_ACTION) assert result.shape == (SEQ_LEN, NUM_ACTION ** SEQ_LEN, NUM_ACTION) outcome", ").reshape(-1, 1) logger.info(f\"acc_reward: {acc_reward}\") return rlt.Seq2RewardOutput(acc_reward=acc_reward) def create_string_game_data( dataset_size=10000, training_data_ratio=0.9,", "import CompressModelTrainer from reagent.training.world_model.seq2reward_trainer import get_Q, Seq2RewardTrainer from torch.utils.data import", "= 6 expected_outcome = torch.tensor([[0], [1], [2], [3], [4], [5]])", "= next(iter(eval_data)).action.float_features.shape initial_state = torch.Tensor([[0, 0]]) initial_state_q_values = torch.squeeze( get_Q(", "for s in mdp[\"state_features\"]]) all_step_action = torch.zeros_like(all_step_state) all_step_action[torch.arange(SEQ_LEN), [int(a) for", "FakeStepPredictionNetwork(nn.Module): def __init__(self, look_ahead_steps): super().__init__() self.look_ahead_steps = look_ahead_steps def forward(self,", "compress_model_learning_rate=learning_rate, gamma=1.0, view_q_value=True, ) trainer = CompressModelTrainer( compress_model_network=compress_model_network, seq2reward_network=seq2reward_network, params=trainer_param,", "not_terminal = torch.transpose(not_terminal.tile(NUM_MDP_PER_BATCH, 1), 0, 1) else: batch_size = NUM_MDP_PER_BATCH", "= Preprocessor(state_normalization_parameters, False) if plan_short_sequence: step_prediction_model = FakeStepPredictionNetwork(seq_len) model_with_preprocessor =", "pl.Trainer(max_epochs=num_epochs, deterministic=True) pl_trainer.fit(trainer, training_data) return trainer def eval_seq2reward_model(eval_data, seq2reward_trainer): SEQ_LEN,", "eval_mse_loss = total_mse_loss / N_eval eval_q_values = total_q_values / N_eval", "learning_rate=0.1, num_epochs=5 ): SEQ_LEN, batch_size, NUM_ACTION = next( iter(training_data) 
).action.float_features.shape", "torch.all(outcome == expected_outcome) @parameterized.expand(STRING_GAME_TESTS) @unittest.skipIf(\"SANDCASTLE\" in os.environ, \"Skipping long test", "as `Tensor`. action = action.float_features.transpose(0, 1) action_indices = torch.argmax(action, dim=2).tolist()", "multi_steps=SEQ_LEN, action_names=[\"0\", \"1\"], compress_model_learning_rate=learning_rate, gamma=1.0, view_q_value=True, ) trainer = CompressModelTrainer(", "<reponame>dmitryvinn/ReAgent #!/usr/bin/env python3 # Copyright (c) Facebook, Inc. and its", "Same short sequences may have different total rewards due to", "is the concatenation of action indices, independent of state. For", "int(dataset_size / SEQ_LEN / NUM_MDP_PER_BATCH) batches = [None for _", "state_normalization_data, output_dim=NUM_ACTION, ) trainer_param = Seq2RewardTrainerParameters( learning_rate=0.0, multi_steps=SEQ_LEN, action_names=[\"0\", \"1\"],", "BATCH_SIZE = 2 STATE_DIM = 4 all_permut = gen_permutations(MULTI_STEPS, NUM_ACTION)", "indices, independent of state. 
For example, when seq_len = 3,", "total_mse_loss / N_eval eval_q_values = total_q_values / N_eval eval_action_distribution =", "eval_data = DataLoader(batches[num_training_batches:], collate_fn=lambda x: x[0]) return training_data, eval_data def", "compress_model_trainer.validation_step(batch, idx) total_mse_loss += mse_loss total_q_values += torch.tensor(q_values) total_action_distribution +=", "multi_steps=SEQ_LEN, action_names=[\"0\", \"1\"], gamma=1.0, view_q_value=True, ) trainer = Seq2RewardTrainer( seq2reward_network=seq2reward_network,", "example, when seq_len = 3, batch_size = 1, action_num =", "j] = all_step_state[-(SEQ_LEN - j) :] batch_state[:, batch_seq_count] = state", "= torch.zeros(BATCH_SIZE, STATE_DIM) q_values = get_Q(seq2reward_network, state, all_permut) expected_q_values =", "0.], [ 1.], [ 10.], [ 11.], [100.], [101.], [110.],", "x[0] ) eval_data = DataLoader(batches[num_training_batches:], collate_fn=lambda x: x[0]) return training_data,", "DO_NOT_PREPROCESS from reagent.preprocessing.preprocessor import Preprocessor from reagent.training.utils import gen_permutations from", "2 STATE_DIM = 4 all_permut = gen_permutations(MULTI_STEPS, NUM_ACTION) seq2reward_network =", "Seq2RewardWithPreprocessor( model, state_preprocessor, seq_len, action_dim, ) input_prototype = rlt.ServingFeatureData( float_features_with_presence=state_preprocessor.input_prototype(),", "Seq2RewardTrainerParameters, ) from reagent.gym.envs import Gym from reagent.gym.utils import create_df_from_replay_buffer", "ahead, # the expected q values are respectively: # [0,", "torch.zeros(SEQ_LEN, batch_size) batch_action = torch.zeros(SEQ_LEN, batch_size, NUM_ACTION) batch_state = torch.zeros(SEQ_LEN,", "0.33], we have [4, 41] expected_q_values = torch.tensor([[4.0, 41.0]]) else:", "next n steps (1 <=n <= look_ahead_steps) For the test", "parameterized import parameterized from reagent.core import types as rlt from", "CompressModelTrainer from 
reagent.training.world_model.seq2reward_trainer import get_Q, Seq2RewardTrainer from torch.utils.data import DataLoader", "reagent.gym.utils import create_df_from_replay_buffer from reagent.models.seq2reward_model import Seq2RewardNetwork from reagent.net_builder.value.fully_connected import", "else 1 for i in range(SEQ_LEN)] ) not_terminal = torch.transpose(not_terminal.tile(NUM_MDP_PER_BATCH,", "for j in range(SEQ_LEN): if filter_short_sequence and j > 0:", ") ) total_mse_loss = 0 total_q_values = torch.zeros(NUM_ACTION) total_action_distribution =", ") total_mse_loss = 0 total_q_values = torch.zeros(NUM_ACTION) total_action_distribution = torch.zeros(NUM_ACTION)", "{acc_reward}\") return rlt.Seq2RewardOutput(acc_reward=acc_reward) def create_string_game_data( dataset_size=10000, training_data_ratio=0.9, filter_short_sequence=False ): SEQ_LEN", "filter_short_sequence and j > 0: break reward = torch.zeros_like(all_step_reward) reward[:", "DataLoader( batches[:num_training_batches], collate_fn=lambda x: x[0] ) eval_data = DataLoader(batches[num_training_batches:], collate_fn=lambda", "def train_seq2reward_compress_model( training_data, seq2reward_network, learning_rate=0.1, num_epochs=5 ): SEQ_LEN, batch_size, NUM_ACTION", "CompressModelTrainer( compress_model_network=compress_model_network, seq2reward_network=seq2reward_network, params=trainer_param, ) pl.seed_everything(SEED) pl_trainer = pl.Trainer(max_epochs=num_epochs, deterministic=True)", "- j) :] batch_state[:, batch_seq_count] = state action = torch.zeros_like(all_step_action)", "11], [11, 111] # Weighting the expected q values by", "`Tensor`. 
action = action.float_features.transpose(0, 1) action_indices = torch.argmax(action, dim=2).tolist() acc_reward", "assert abs(initial_state_q_values[0].item() - 10) < 1.0 assert abs(initial_state_q_values[1].item() - 5)", "filter_short_sequence=filter_short_sequence ) seq2reward_trainer = train_seq2reward_model(training_data) ( initial_state_q_values, eval_mse_loss, eval_q_values, eval_action_distribution,", "[ 1.], [ 10.], [ 11.], [100.], [101.], [110.], [111.]]", "logging.getLogger(__name__) SEED = 0 STRING_GAME_TESTS = [(False,), (True,)] class FakeStepPredictionNetwork(nn.Module):", "import Gym from reagent.gym.utils import create_df_from_replay_buffer from reagent.models.seq2reward_model import Seq2RewardNetwork", "total_q_values / N_eval eval_action_distribution = total_action_distribution / N_eval return (", "= torch.zeros_like(batch_state) assert batch_count == num_batches num_training_batches = int(training_data_ratio *", "_test_seq2reward_with_preprocessor(self, plan_short_sequence): state_dim = 4 action_dim = 2 seq_len =", "= torch.Tensor([list(s.values()) for s in mdp[\"state_features\"]]) all_step_action = torch.zeros_like(all_step_state) all_step_action[torch.arange(SEQ_LEN),", "compress_net_builder = FullyConnected(sizes=[8, 8]) state_normalization_data = NormalizationData( dense_normalization_parameters={ 0: NormalizationParameters(feature_type=DO_NOT_PREPROCESS),", "in enumerate(eval_data): ( mse_loss, _, q_values, action_distribution, ) = seq2reward_trainer.validation_step(batch,", "= rlt.MemoryNetworkInput( reward=batch_reward, action=rlt.FeatureData(float_features=batch_action), state=rlt.FeatureData(float_features=batch_state), next_state=rlt.FeatureData( float_features=torch.zeros_like(batch_state) ), # fake,", "j in range(SEQ_LEN): if filter_short_sequence and j > 0: break", "- 10) < 1.0 assert abs(initial_state_q_values[1].item() - 5) < 1.0", "i in range(SEQ_LEN)] ) not_terminal = torch.transpose(not_terminal.tile(NUM_MDP_PER_BATCH, 1), 0, 
1)", "idx, batch in enumerate(eval_data): ( mse_loss, q_values, action_distribution, _, )", "_, ) = compress_model_trainer.validation_step(batch, idx) total_mse_loss += mse_loss total_q_values +=", "SEQ_LEN = 3 NUM_ACTION = 2 expected_outcome = torch.tensor( [", "Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.", "[int(a) for a in mdp[\"action\"]]] = 1.0 for j in", "1], [0, 1, 0], [0, 1, 1], [1, 0, 0],", "torch.zeros_like(batch_action) batch_state = torch.zeros_like(batch_state) assert batch_count == num_batches num_training_batches =", "action_indices)) ).reshape(-1, 1) logger.info(f\"acc_reward: {acc_reward}\") return rlt.Seq2RewardOutput(acc_reward=acc_reward) def create_string_game_data( dataset_size=10000,", "dataset_size=10000, training_data_ratio=0.9, filter_short_sequence=False ): SEQ_LEN = 6 NUM_ACTION = 2", "create_df_from_replay_buffer( env=env, problem_domain=ProblemDomain.DISCRETE_ACTION, desired_size=dataset_size, multi_steps=None, ds=\"2020-10-10\", ) if filter_short_sequence: batch_size", "= 3 model = FakeSeq2RewardNetwork() state_normalization_parameters = { i: NormalizationParameters(", "fake numbers \"\"\" batch_size, _ = state.shape return torch.ones(batch_size, self.look_ahead_steps).float()", "tensor( [[ 0.], [ 1.], [ 10.], [ 11.], [100.]," ]
[ "= Constraint(expr= m.x236 - 4.45628648004517*m.b602 <= 0) m.c123 = Constraint(expr=", "m.x410 = Var(within=Reals,bounds=(0,None),initialize=0) m.x411 = Var(within=Reals,bounds=(0,None),initialize=0) m.x412 = Var(within=Reals,bounds=(0,None),initialize=0) m.x413", "+ 0.999*m.b619) - log(1 + m.x271/(0.001 + 0.999*m.b619)))*(0.001 + 0.999*m.b619)", "1) m.c1178 = Constraint(expr= m.b723 + m.b724 <= 1) m.c1179", "<= 0) m.c554 = Constraint(expr= m.x449 + 9*m.b644 <= 9)", "0) m.c112 = Constraint(expr= m.x241 == 0) m.c113 = Constraint(expr=", "<= 0) m.c579 = Constraint(expr= m.x453 - 1.04900943706034*m.b648 <= 0)", "0.994083415506506) m.c862 = Constraint(expr= m.x535 + 0.994083415506506*m.b679 <= 0.994083415506506) m.c863", "6*m.b700 + m.x790 == 0) m.c938 = Constraint(expr= 7*m.b701 +", "- m.x247 == 0) m.c146 = Constraint(expr= m.x41 - m.x266", "Constraint(expr= m.b609 - m.b624 >= 0) m.c1426 = Constraint(expr= m.b610", "m.b628 - m.b655 >= 0) m.c1457 = Constraint(expr= - m.b653", "m.c626 = Constraint(expr= m.x410 - 3.04984759446376*m.b653 <= 0) m.c627 =", "m.c929 = Constraint(expr= 6*m.b692 + m.x782 == 0) m.c930 =", "= Constraint(expr= m.b608 - m.b698 <= 0) m.c1296 = Constraint(expr=", "Constraint(expr= m.x147 - m.x483 - m.x486 == 0) m.c652 =", "+ 0.999* m.b672) <= 0) m.c790 = Constraint(expr=(m.x568/(0.001 + 0.999*m.b673)", "m.c241 = Constraint(expr= m.x52 - m.x292 - m.x295 == 0)", "1) m.c1248 = Constraint(expr= m.b758 + m.b760 <= 1) m.c1249", "- m.x408 == 0) m.c595 = Constraint(expr= m.x106 - m.x406", "- m.x444 == 0) m.c517 = Constraint(expr= m.x124 - m.x442", "3.04984759446376*m.b650 <= 3.04984759446376) m.c603 = Constraint(expr= m.x408 + 3.04984759446376*m.b651 <=", "+ 0.690184503917672*m.b678 <= 0.690184503917672) m.c868 = Constraint(expr= m.x583 + 0.690184503917672*m.b679", "m.c1248 = Constraint(expr= m.b758 + m.b760 <= 1) m.c1249 =", "== 0) m.c990 = Constraint(expr= 8*m.b753 + m.x843 == 0)", "m.c1083 = Constraint(expr= m.b665 - m.b667 <= 0) m.c1084 =", "= 
Var(within=Reals,bounds=(0,None),initialize=0) m.x27 = Var(within=Reals,bounds=(0,None),initialize=0) m.x28 = Var(within=Reals,bounds=(0,None),initialize=0) m.x29 =", "m.c740 = Constraint(expr= m.x524 - 0.994083415506506*m.b665 <= 0) m.c741 =", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b690 = Var(within=Binary,bounds=(0,1),initialize=0) m.b691 = Var(within=Binary,bounds=(0,1),initialize=0) m.b692 =", "== 0) m.c10 = Constraint(expr= m.x19 - m.x22 - m.x25", "+ 0.999*m.b598) <= 0) m.c56 = Constraint(expr= m.x215 == 0)", "m.x343 == 0) m.c266 = Constraint(expr= m.x41 - m.x269 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x477 = Var(within=Reals,bounds=(0,None),initialize=0) m.x478 = Var(within=Reals,bounds=(0,None),initialize=0) m.x479 = Var(within=Reals,bounds=(0,None),initialize=0)", "- m.x366 == 0) m.c445 = Constraint(expr= m.x82 - m.x364", "m.b725 + m.b727 <= 1) m.c1186 = Constraint(expr= m.b726 +", "m.x502/(0.001 + 0.999*m.b667)))*(0.001 + 0.999* m.b667) <= 0) m.c722 =", "m.x450 == 0) m.c544 = Constraint(expr= m.x127 - m.x448 -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x290 = Var(within=Reals,bounds=(0,None),initialize=0) m.x291 = Var(within=Reals,bounds=(0,None),initialize=0) m.x292 =", "Constraint(expr= m.b614 - m.b615 <= 0) m.c1032 = Constraint(expr= m.b614", "= Constraint(expr= 6*m.b691 + m.x781 == 0) m.c929 = Constraint(expr=", "m.b749 + m.b750 <= 1) m.c1232 = Constraint(expr= m.b750 +", "m.b648 + m.b651 + m.b654 >= 0) m.c1402 = Constraint(expr=", "m.c1182 = Constraint(expr= m.b725 + m.b727 <= 1) m.c1183 =", "4.45628648004517*m.b600 <= 4.45628648004517) m.c106 = Constraint(expr= m.x235 + 4.45628648004517*m.b601 <=", "0.666992981045719*m.b673 <= 0.666992981045719) m.c815 = Constraint(expr=(m.x572/(0.001 + 0.999*m.b674) - 0.9*log(1", "+ 0.940066550763924*m.b659 <= 0.940066550763924) m.c690 = Constraint(expr= m.x492 + 0.940066550763924*m.b660", "m.x73 - m.x91 + m.x94 == 0) m.c26 = Constraint(expr=", "Constraint(expr= m.x425 + 
0.842233385663186*m.b632 <= 0.842233385663186) m.c432 = Constraint(expr= m.x426", "m.x285/(0.001 + 0.999*m.b612)))*(0.001 + 0.999* m.b612) <= 0) m.c205 =", "m.c1124 = Constraint(expr= m.b696 + m.b697 <= 1) m.c1125 =", "0) m.c1002 = Constraint(expr= 3*m.b765 + m.x855 == 0) m.c1003", "0) m.c1004 = Constraint(expr= 4*m.b767 + m.x857 == 0) m.c1005", "<= 0) m.c913 = Constraint(expr= m.x562 - 15*m.b685 <= 0)", "= Constraint(expr= m.b686 + m.b687 <= 1) m.c1104 = Constraint(expr=", "0) m.c381 = Constraint(expr= m.x63 - m.x318 - m.x324 ==", "+ m.b766 <= 1) m.c1264 = Constraint(expr= m.b765 + m.b766", "== 0) m.c149 = Constraint(expr= m.x242 - 4.45628648004517*m.b605 <= 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x491 = Var(within=Reals,bounds=(0,None),initialize=0) m.x492 = Var(within=Reals,bounds=(0,None),initialize=0) m.x493 = Var(within=Reals,bounds=(0,None),initialize=0)", "== 0) m.c855 = Constraint(expr= m.x204 - m.x579 - m.x582", "m.c1271 = Constraint(expr= m.b770 + m.b771 <= 1) m.c1272 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x218 = Var(within=Reals,bounds=(0,None),initialize=0) m.x219 = Var(within=Reals,bounds=(0,None),initialize=0) m.x220 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= m.x397 + 9*m.b646 <= 9) m.c551 = Constraint(expr=", "<= 1) m.c1279 = Constraint(expr= m.b773 + m.b774 <= 1)", "6*m.b769 - 2*m.b770 - m.b771 - 3*m.b772 - 8*m.b773 -", "m.x86 - m.x374 - m.x377 == 0) m.c384 = Constraint(expr=", "Constraint(expr= m.x486 == 0) m.c646 = Constraint(expr= m.x487 == 0)", "<= 13.5) m.c894 = Constraint(expr= m.x588 + 13.5*m.b681 <= 13.5)", "log(1 + m.x269/(0.001 + 0.999*m.b617)))*(0.001 + 0.999*m.b617) <= 0) m.c258", "Constraint(expr= 2*m.b733 + m.x823 == 0) m.c971 = Constraint(expr= 3*m.b734", "m.b617 - m.b707 <= 0) m.c1305 = Constraint(expr= - m.b617", "= Constraint(expr= 5*m.b707 + m.x797 == 0) m.c945 = Constraint(expr=", "+ 1.26558121681553*m.b618 <= 1.26558121681553) m.c283 = Constraint(expr= m.x343 + 
1.26558121681553*m.b619", "+ m.b718 <= 1) m.c1169 = Constraint(expr= m.b719 + m.b720", "m.b602 = Var(within=Binary,bounds=(0,1),initialize=0) m.b603 = Var(within=Binary,bounds=(0,1),initialize=0) m.b604 = Var(within=Binary,bounds=(0,1),initialize=0) m.b605", "0.999*m.b638) - 0.65*log(1 + m.x368/(0.001 + 0.999*m.b638)))*(0.001 + 0.999* m.b638)", "0) m.c960 = Constraint(expr= m.b723 + m.x813 == 0) m.c961", "- 1.83548069293539*m.b611 <= 0) m.c225 = Constraint(expr= m.x315 - 1.83548069293539*m.b612", "m.x414 == 0) m.c616 = Constraint(expr= m.x415 == 0) m.c617", "= Constraint(expr= m.b619 - m.b640 >= 0) m.c1442 = Constraint(expr=", "- m.b646 >= 0) m.c1448 = Constraint(expr= m.b626 - m.b647", ">= 0) m.c1466 = Constraint(expr= m.b653 - m.b656 >= 0)", "+ 0.999*m.b615)))*(0.001 + 0.999* m.b615) <= 0) m.c232 = Constraint(expr=(m.x328/(0.001", "m.x456 + 1.04900943706034*m.b648 <= 1.04900943706034) m.c583 = Constraint(expr= m.x457 +", "- m.x495 - m.x498 == 0) m.c703 = Constraint(expr= m.x163", "- 0.5*m.x255 + m.x279 == 0) m.c166 = Constraint(expr= -", "Constraint(expr= - m.b605 + m.b606 - m.b696 <= 0) m.c1294", "Constraint(expr= - m.b655 + m.b658 + m.b661 >= 0) m.c1460", "3.04984759446376) m.c578 = Constraint(expr= m.x452 - 1.04900943706034*m.b647 <= 0) m.c579", "0) m.c618 = Constraint(expr= m.x468 == 0) m.c619 = Constraint(expr=", "<= 0) m.c436 = Constraint(expr=(m.x430/(0.001 + 0.999*m.b637) - 0.7*log(1 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x377 = Var(within=Reals,bounds=(0,None),initialize=0) m.x378 = Var(within=Reals,bounds=(0,None),initialize=0) m.x379 =", "Var(within=Reals,bounds=(None,None),initialize=0) m.x810 = Var(within=Reals,bounds=(None,None),initialize=0) m.x811 = Var(within=Reals,bounds=(None,None),initialize=0) m.x812 = Var(within=Reals,bounds=(None,None),initialize=0)", "Constraint(expr= m.x583 + 0.690184503917672*m.b679 <= 0.690184503917672) m.c869 = Constraint(expr= -", "Var(within=Binary,bounds=(0,1),initialize=0) m.b625 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b626 = Var(within=Binary,bounds=(0,1),initialize=0) m.b627 = Var(within=Binary,bounds=(0,1),initialize=0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x302 = Var(within=Reals,bounds=(0,None),initialize=0) m.x303 = Var(within=Reals,bounds=(0,None),initialize=0) m.x304 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x349 + 13.5*m.b622 <= 13.5) m.c311 = Constraint(expr= -", "+ 0.999*m.b637)))*(0.001 + 0.999* m.b637) <= 0) m.c437 = Constraint(expr=", "m.x294 == 0) m.c241 = Constraint(expr= m.x52 - m.x292 -", "m.x293 + 3.34221486003388*m.b614 <= 3.34221486003388) m.c249 = Constraint(expr= m.x294 +", "<= 1) m.c1206 = Constraint(expr= m.b737 + m.b739 <= 1)", "m.b718 = Var(within=Binary,bounds=(0,1),initialize=0) m.b719 = Var(within=Binary,bounds=(0,1),initialize=0) m.b720 = Var(within=Binary,bounds=(0,1),initialize=0) m.b721", "m.x297 = Var(within=Reals,bounds=(0,None),initialize=0) m.x298 = Var(within=Reals,bounds=(0,None),initialize=0) m.x299 = Var(within=Reals,bounds=(0,None),initialize=0) m.x300", "0) m.c360 = Constraint(expr= m.x357 - 3.04984759446376*m.b627 <= 0) m.c361", "1) m.c1196 = Constraint(expr= m.b732 + m.b733 <= 1) m.c1197", "= Constraint(expr= m.x279 - 15*m.b609 <= 0) m.c199 = Constraint(expr=", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x780 = Var(within=Reals,bounds=(None,None),initialize=0) m.x781 = Var(within=Reals,bounds=(None,None),initialize=0) m.x782 =", "m.x585 - m.x588 == 0) m.c883 = Constraint(expr= m.x208 -", "m.b671 + m.b672 - m.b762 <= 0) m.c1360 = Constraint(expr=", "== 0) m.c46 = Constraint(expr= m.x160 - m.x163 - m.x166", "= Constraint(expr= - m.b665 + m.b677 >= 0) m.c1464 =", "m.x180 = Var(within=Reals,bounds=(0,None),initialize=0) m.x181 = Var(within=Reals,bounds=(0,None),initialize=0) m.x182 = Var(within=Reals,bounds=(0,None),initialize=0) m.x183", "Var(within=Binary,bounds=(0,1),initialize=0) m.b628 = Var(within=Binary,bounds=(0,1),initialize=0) 
m.b629 = Var(within=Binary,bounds=(0,1),initialize=0) m.b630 = Var(within=Binary,bounds=(0,1),initialize=0)", "- m.x273 == 0) m.c148 = Constraint(expr= m.x43 - m.x268", "m.x84 == 0) m.c22 = Constraint(expr= m.x70 - m.x82 -", "0.842233385663186*m.b634 <= 0.842233385663186) m.c434 = Constraint(expr=(m.x428/(0.001 + 0.999*m.b635) - 0.7*log(1", "m.c1043 = Constraint(expr= m.b626 - m.b627 <= 0) m.c1044 =", "m.x62 = Var(within=Reals,bounds=(0,None),initialize=0) m.x63 = Var(within=Reals,bounds=(0,None),initialize=0) m.x64 = Var(within=Reals,bounds=(0,None),initialize=0) m.x65", "m.x476 - 1.18887736200171*m.b659 <= 0) m.c681 = Constraint(expr= m.x477 -", "= Constraint(expr= - m.b641 + m.b642 - m.b732 <= 0)", "= Constraint(expr= m.x378 + 20*m.b630 <= 20) m.c400 = Constraint(expr=", "1.32154609891348*m.b632 <= 0) m.c423 = Constraint(expr= m.x330 - 1.32154609891348*m.b633 <=", "+ m.b724 <= 1) m.c1177 = Constraint(expr= m.b722 + m.b723", "= Constraint(expr= m.b770 + m.b772 <= 1) m.c1276 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x434 = Var(within=Reals,bounds=(0,None),initialize=0) m.x435 = Var(within=Reals,bounds=(0,None),initialize=0) m.x436 = Var(within=Reals,bounds=(0,None),initialize=0)", "0.78338879230327) m.c663 = Constraint(expr= m.x486 + 0.78338879230327*m.b657 <= 0.78338879230327) m.c664", "= Constraint(expr= 8*m.b773 + m.x863 == 0) m.c1011 = Constraint(expr=", "- 0.705049913072943*m.b674 <= 0) m.c831 = Constraint(expr= m.x549 - 0.705049913072943*m.b675", "+ m.x352 == 0) m.c314 = Constraint(expr= m.x305 == 0)", "Constraint(expr= m.b651 - m.b652 <= 0) m.c1070 = Constraint(expr= m.b653", "m.b771 - 3*m.b772 - 8*m.b773 - 3*m.b774 - 4*m.b775, sense=maximize)", "- m.x261 - m.x264 == 0) m.c121 = Constraint(expr= m.x40", "m.x384 + 33.5*m.b639 <= 33.5) m.c496 = Constraint(expr= m.x385 +", "<= 0) m.c1039 = Constraint(expr= m.b621 - m.b622 <= 0)", "= Constraint(expr= m.b771 + m.b772 <= 1) m.c1277 = Constraint(expr=", "- m.b660 >= 0) m.c1471 = 
Constraint(expr= m.b655 - m.b661", "= Constraint(expr= m.b645 - m.b646 <= 0) m.c1064 = Constraint(expr=", "m.b616) <= 0) m.c233 = Constraint(expr= m.x293 == 0) m.c234", "+ m.b757 <= 1) m.c1243 = Constraint(expr= m.b755 + m.b756", "m.x514 - m.x517 == 0) m.c767 = Constraint(expr= m.x179 -", "0) m.c1452 = Constraint(expr= m.b627 - m.b651 >= 0) m.c1453", "m.x847 == 0) m.c995 = Constraint(expr= 10*m.b758 + m.x848 ==", "+ m.x153 == 0) m.c40 = Constraint(expr= - m.x148 -", "m.c1420 = Constraint(expr= m.b607 - m.b619 >= 0) m.c1421 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x254 = Var(within=Reals,bounds=(0,None),initialize=0) m.x255 = Var(within=Reals,bounds=(0,None),initialize=0) m.x256 =", "= Constraint(expr= m.b602 - m.b603 <= 0) m.c1020 = Constraint(expr=", "m.x223 + 40*m.b601 <= 40) m.c101 = Constraint(expr= m.x230 -", "<= 1) m.c1141 = Constraint(expr= m.b704 + m.b705 <= 1)", "Var(within=Binary,bounds=(0,1),initialize=0) m.b714 = Var(within=Binary,bounds=(0,1),initialize=0) m.b715 = Var(within=Binary,bounds=(0,1),initialize=0) m.b716 = Var(within=Binary,bounds=(0,1),initialize=0)", "= Constraint(expr= m.x306 + 15*m.b624 <= 15) m.c331 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x476 = Var(within=Reals,bounds=(0,None),initialize=0) m.x477 = Var(within=Reals,bounds=(0,None),initialize=0) m.x478 =", "m.x491 == 0) m.c672 = Constraint(expr= m.x492 == 0) m.c673", "m.c736 = Constraint(expr= m.x502 - 0.940066550763924*m.b667 <= 0) m.c737 =", "Constraint(expr= m.x43 - m.x271 - m.x277 == 0) m.c269 =", "+ m.b744 <= 1) m.c1218 = Constraint(expr= m.b743 + m.b745", "Constraint(expr= 5*m.b747 + m.x837 == 0) m.c985 = Constraint(expr= 2*m.b748", "m.b712 <= 1) m.c1157 = Constraint(expr= m.b713 + m.b714 <=", "0.8*log(1 + m.x406/(0.001 + 0.999*m.b652)))*(0.001 + 0.999* m.b652) <= 0)", "<= 1) m.c1185 = Constraint(expr= m.b725 + m.b727 <= 1)", "0) m.c268 = Constraint(expr= m.x43 - m.x271 - m.x277 ==", "m.b649 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b650 = Var(within=Binary,bounds=(0,1),initialize=0) m.b651 = Var(within=Binary,bounds=(0,1),initialize=0) m.b652", "Constraint(expr=(m.x357/(0.001 + 0.999*m.b627) - 1.1*log(1 + m.x309/(0.001 + 0.999*m.b627)))*(0.001 +", "x b i s1s s2s sc si # Total cont", "m.x260 - m.x263 == 0) m.c120 = Constraint(expr= m.x39 -", "4.45628648004517) m.c105 = Constraint(expr= m.x234 + 4.45628648004517*m.b600 <= 4.45628648004517) m.c106", "= Var(within=Reals,bounds=(0,30),initialize=0) m.x172 = Var(within=Reals,bounds=(0,30),initialize=0) m.x173 = Var(within=Reals,bounds=(0,None),initialize=0) m.x174 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x519 = Var(within=Reals,bounds=(0,None),initialize=0) m.x520 = Var(within=Reals,bounds=(0,None),initialize=0) m.x521 =", "= Constraint(expr= m.x76 - m.x97 - m.x100 == 0) m.c29", "+ m.x329/(0.001 + 0.999*m.b632)))*(0.001 + 0.999*m.b632) <= 0) m.c408 =", "Constraint(expr= m.b654 - m.b660 >= 0) m.c1471 = Constraint(expr= m.b655", "<= 0) m.c708 = Constraint(expr= m.x495 - 0.940066550763924*m.b663 <= 0)", "0) m.c344 = Constraint(expr= m.x359 == 0) m.c345 = Constraint(expr=", "0) m.c1286 = Constraint(expr= m.b599 - m.b689 <= 0) m.c1287", "= Constraint(expr= m.x419 + 20*m.b629 <= 20) m.c405 = Constraint(expr=", "m.c888 = Constraint(expr= m.x558 + 15*m.b681 <= 15) m.c889 =", "m.c459 = Constraint(expr= m.x432 + 0.572481933717686*m.b636 <= 0.572481933717686) m.c460 =", "m.c1438 = Constraint(expr= m.b619 - m.b637 >= 0) m.c1439 =", "Constraint(expr= - m.b632 - m.b633 + m.b634 - m.b724 <=", "Constraint(expr= m.x136 - m.x466 - m.x469 == 0) m.c626 =", "0) m.c733 = Constraint(expr= m.x178 - m.x526 - m.x532 ==", "<= 1) m.c1255 = Constraint(expr= m.b761 + m.b762 <= 1)", "<= 9) m.c550 = Constraint(expr= m.x397 + 9*m.b646 <= 9)", "m.x458 - m.x461 == 0) m.c597 = Constraint(expr= m.x132 -", "== 0) m.c45 = Constraint(expr= m.x159 - m.x162 - m.x165", "1) m.c1112 = Constraint(expr= m.b690 + m.b691 <= 1) 
m.c1113", "= Constraint(expr= m.x406 - 3.04984759446376*m.b652 <= 0) m.c602 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x341 = Var(within=Reals,bounds=(0,None),initialize=0) m.x342 = Var(within=Reals,bounds=(0,None),initialize=0) m.x343 = Var(within=Reals,bounds=(0,None),initialize=0)", "Var(within=Binary,bounds=(0,1),initialize=0) m.b669 = Var(within=Binary,bounds=(0,1),initialize=0) m.b670 = Var(within=Binary,bounds=(0,1),initialize=0) m.b671 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.b750 = Var(within=Binary,bounds=(0,1),initialize=0) m.b751 = Var(within=Binary,bounds=(0,1),initialize=0) m.b752 = Var(within=Binary,bounds=(0,1),initialize=0) m.b753", "m.b686 + m.b687 <= 1) m.c1106 = Constraint(expr= m.b687 +", "m.b695 = Var(within=Binary,bounds=(0,1),initialize=0) m.b696 = Var(within=Binary,bounds=(0,1),initialize=0) m.b697 = Var(within=Binary,bounds=(0,1),initialize=0) m.b698", "<= 0) m.c466 = Constraint(expr=(m.x436/(0.001 + 0.999*m.b640) - 0.65*log(1 +", "<= 0) m.c865 = Constraint(expr= m.x580 - 0.690184503917672*m.b679 <= 0)", "7*m.b714 - 4*m.b715 - 3*m.b716 - 9*m.b717 - 3*m.b718 -", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x826 = Var(within=Reals,bounds=(None,None),initialize=0) m.x827 = Var(within=Reals,bounds=(None,None),initialize=0) m.x828 =", "3.71357206670431*m.b597 <= 0) m.c76 = Constraint(expr= m.x226 - 3.71357206670431*m.b598 <=", "m.b622 = Var(within=Binary,bounds=(0,1),initialize=0) m.b623 = Var(within=Binary,bounds=(0,1),initialize=0) m.b624 = Var(within=Binary,bounds=(0,1),initialize=0) m.b625", "- m.x85 == 0) m.c23 = Constraint(expr= - m.x71 -", "m.b603 - m.b693 <= 0) m.c1291 = Constraint(expr= - m.b602", "m.x141 - m.x144 == 0) m.c37 = Constraint(expr= m.x139 -", "m.c1038 = Constraint(expr= m.b620 - m.b622 <= 0) m.c1039 =", "m.c936 = Constraint(expr= 10*m.b699 + m.x789 == 0) m.c937 =", "m.x393 - m.x396 == 0) m.c541 = Constraint(expr= m.x100 -", "m.x33 - m.x249 - m.x252 == 0) m.c178 = 
Constraint(expr=", "= Constraint(expr= m.x556 - 15*m.b682 <= 0) m.c887 = Constraint(expr=", "m.b656 + m.b659 >= 0) m.c1458 = Constraint(expr= - m.b654", "m.b645 + m.b646 - m.b736 <= 0) m.c1334 = Constraint(expr=", "0.940066550763924) m.c774 = Constraint(expr= m.x510 + 0.940066550763924*m.b669 <= 0.940066550763924) m.c775", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b686 = Var(within=Binary,bounds=(0,1),initialize=0) m.b687 = Var(within=Binary,bounds=(0,1),initialize=0) m.b688 =", "- m.b684 <= 0) m.c1101 = Constraint(expr= m.b683 - m.b685", "m.b687 <= 1) m.c1104 = Constraint(expr= m.b686 + m.b688 <=", "m.b730 - 2*m.b731 - 5*m.b732 - 2*m.b733 - 3*m.b734 -", "1) m.c1110 = Constraint(expr= m.b689 + m.b691 <= 1) m.c1111", "- m.b715 <= 0) m.c1313 = Constraint(expr= m.b626 - m.b716", ">= 0) m.c1404 = Constraint(expr= m.b597 + m.b600 - m.b603", "0.999*m.b650)))*(0.001 + 0.999* m.b650) <= 0) m.c585 = Constraint(expr=(m.x459/(0.001 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x563 = Var(within=Reals,bounds=(0,None),initialize=0) m.x564 = Var(within=Reals,bounds=(0,None),initialize=0) m.x565 =", "== 0) m.c21 = Constraint(expr= m.x69 - m.x81 - m.x84", "- m.x533 == 0) m.c852 = Constraint(expr= m.x177 - m.x528", "+ m.x790 == 0) m.c938 = Constraint(expr= 7*m.b701 + m.x791", "m.x860 == 0) m.c1008 = Constraint(expr= m.b771 + m.x861 ==", "= Constraint(expr= m.x234 + 4.45628648004517*m.b600 <= 4.45628648004517) m.c106 = Constraint(expr=", "13.5) m.c895 = Constraint(expr= m.x589 + 13.5*m.b682 <= 13.5) m.c896", "= Constraint(expr= m.x39 - m.x48 - m.x51 == 0) m.c16", "<= 0.994083415506506) m.c863 = Constraint(expr= m.x578 - 0.690184503917672*m.b677 <= 0)", "== 0) m.c165 = Constraint(expr= - 0.5*m.x255 + m.x279 ==", "Constraint(expr= m.x205 - m.x580 - m.x583 == 0) m.c857 =", "<= 1) m.c1118 = Constraint(expr= m.b693 + m.b694 <= 1)", "<= 0) m.c1324 = Constraint(expr= - m.b635 - m.b636 +", "20*m.b629 <= 20) m.c399 = Constraint(expr= m.x378 + 20*m.b630 <=", "== 0) m.c801 = 
Constraint(expr= m.x198 - m.x567 - m.x570", "<= 0.480234946352917) m.c842 = Constraint(expr=(m.x578/(0.001 + 0.999*m.b677) - log(1 +", "0) m.c1344 = Constraint(expr= - m.b656 + m.b657 - m.b747", "Constraint(expr= m.b653 - m.b655 <= 0) m.c1072 = Constraint(expr= m.b654", "0) m.c28 = Constraint(expr= m.x76 - m.x97 - m.x100 ==", "- m.x189 - m.x192 - m.x195 == 0) m.c52 =", "m.c1183 = Constraint(expr= m.b725 + m.b726 <= 1) m.c1184 =", "s1s s2s sc si # Total cont binary integer sos1", "1) m.c1161 = Constraint(expr= m.b713 + m.b715 <= 1) m.c1162", "- m.x221 == 0) m.c90 = Constraint(expr= m.x9 - m.x219", "m.x293 = Var(within=Reals,bounds=(0,None),initialize=0) m.x294 = Var(within=Reals,bounds=(0,None),initialize=0) m.x295 = Var(within=Reals,bounds=(0,None),initialize=0) m.x296", "<= 0) m.c1304 = Constraint(expr= m.b617 - m.b707 <= 0)", "Constraint(expr= m.x439 == 0) m.c476 = Constraint(expr= m.x83 - m.x368", "m.x418 == 0) m.c368 = Constraint(expr= - m.x374 + m.x416", "== 0) m.c343 = Constraint(expr= m.x313 == 0) m.c344 =", "m.x273 == 0) m.c142 = Constraint(expr= m.x274 == 0) m.c143", "0) m.c681 = Constraint(expr= m.x477 - 1.18887736200171*m.b660 <= 0) m.c682", "= Constraint(expr= m.x300 == 0) m.c289 = Constraint(expr= m.x301 ==", "Var(within=Reals,bounds=(0,None),initialize=0) m.x449 = Var(within=Reals,bounds=(0,None),initialize=0) m.x450 = Var(within=Reals,bounds=(0,None),initialize=0) m.x451 = Var(within=Reals,bounds=(0,None),initialize=0)", "1) m.c1139 = Constraint(expr= m.b704 + m.b705 <= 1) m.c1140", "m.b617 - m.b635 >= 0) m.c1437 = Constraint(expr= m.b618 -", "Constraint(expr= m.x243 - 4.45628648004517*m.b606 <= 0) m.c151 = Constraint(expr= m.x244", "= Constraint(expr= m.x417 - 20*m.b630 <= 0) m.c403 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x128 = Var(within=Reals,bounds=(0,None),initialize=0) m.x129 = Var(within=Reals,bounds=(0,None),initialize=0) m.x130 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c215 = Constraint(expr= 
m.x62 - m.x314 - m.x320 ==", "+ 4.45628648004517*m.b606 <= 4.45628648004517) m.c154 = Constraint(expr= m.x247 + 4.45628648004517*m.b607", "m.x78 - m.x102 - m.x105 - m.x108 == 0) m.c31", "- 0.940066550763924*m.b669 <= 0) m.c772 = Constraint(expr= m.x508 - 0.940066550763924*m.b670", "0.999*m.b671)))*(0.001 + 0.999* m.b671) <= 0) m.c789 = Constraint(expr=(m.x567/(0.001 +", "+ m.x781 == 0) m.c929 = Constraint(expr= 6*m.b692 + m.x782", "Constraint(expr= m.b625 - m.b643 >= 0) m.c1445 = Constraint(expr= m.b623", "2.54515263975353) m.c159 = Constraint(expr= m.x273 + 2.54515263975353*m.b606 <= 2.54515263975353) m.c160", "m.c662 = Constraint(expr= m.x485 + 0.78338879230327*m.b656 <= 0.78338879230327) m.c663 =", "+ 0.999*m.b597) <= 0) m.c55 = Constraint(expr=(m.x226/(0.001 + 0.999*m.b598) -", "0) m.c953 = Constraint(expr= 3*m.b716 + m.x806 == 0) m.c954", "0) m.c380 = Constraint(expr= m.x62 - m.x317 - m.x323 ==", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x320 = Var(within=Reals,bounds=(0,None),initialize=0) m.x321 = Var(within=Reals,bounds=(0,None),initialize=0) m.x322 =", "1.18887736200171*m.b659 <= 1.18887736200171) m.c684 = Constraint(expr= m.x480 + 1.18887736200171*m.b660 <=", "m.x387 = Var(within=Reals,bounds=(0,None),initialize=0) m.x388 = Var(within=Reals,bounds=(0,None),initialize=0) m.x389 = Var(within=Reals,bounds=(0,None),initialize=0) m.x390", "0.999*m.b672)))*(0.001 + 0.999* m.b672) <= 0) m.c790 = Constraint(expr=(m.x568/(0.001 +", "+ 0.999*m.b674)))*(0.001 + 0.999* m.b674) <= 0) m.c816 = Constraint(expr=(m.x573/(0.001", "m.x172 - m.x514 - m.x517 == 0) m.c767 = Constraint(expr=", "m.x568 - 0.666992981045719*m.b673 <= 0) m.c812 = Constraint(expr= m.x569 +", "20*m.x133 + 30*m.x155 + 40*m.x156 + 40*m.x157 - m.x170 -", "m.x292 - m.x295 == 0) m.c242 = Constraint(expr= m.x65 -", "Constraint(expr= - m.x386 + m.x440 == 0) m.c504 = Constraint(expr=", "<= 0) m.c1036 = Constraint(expr= m.b618 - m.b619 <= 0)", "m.b611 - m.b629 >= 0) m.c1431 = Constraint(expr= m.b612 
-", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x782 = Var(within=Reals,bounds=(None,None),initialize=0) m.x783 = Var(within=Reals,bounds=(None,None),initialize=0) m.x784 =", "m.c1316 = Constraint(expr= m.b629 - m.b719 <= 0) m.c1317 =", "= Constraint(expr= m.x552 == 0) m.c820 = Constraint(expr= m.x553 ==", "m.b761 + m.b763 <= 1) m.c1255 = Constraint(expr= m.b761 +", "- m.b703 <= 0) m.c1301 = Constraint(expr= m.b614 - m.b704", "m.b659 >= 0) m.c1470 = Constraint(expr= m.b654 - m.b660 >=", "Constraint(expr= 3*m.b705 + m.x795 == 0) m.c943 = Constraint(expr= 2*m.b706", "0) m.c1471 = Constraint(expr= m.b655 - m.b661 >= 0) m.c1472", "m.b724 = Var(within=Binary,bounds=(0,1),initialize=0) m.b725 = Var(within=Binary,bounds=(0,1),initialize=0) m.b726 = Var(within=Binary,bounds=(0,1),initialize=0) m.b727", "1.18887736200171*m.b661 <= 0) m.c683 = Constraint(expr= m.x479 + 1.18887736200171*m.b659 <=", "- m.x515 == 0) m.c765 = Constraint(expr= m.x171 - m.x513", "= Constraint(expr= 6*m.b726 + m.x816 == 0) m.c964 = Constraint(expr=", "- m.b637 >= 0) m.c1439 = Constraint(expr= m.b617 - m.b638", "3*m.b736 + m.x826 == 0) m.c974 = Constraint(expr= 5*m.b737 +", "m.x479 + 1.18887736200171*m.b659 <= 1.18887736200171) m.c684 = Constraint(expr= m.x480 +", "Var(within=Reals,bounds=(0,None),initialize=0) m.x288 = Var(within=Reals,bounds=(0,None),initialize=0) m.x289 = Var(within=Reals,bounds=(0,None),initialize=0) m.x290 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c198 = Constraint(expr= m.x279 - 15*m.b609 <= 0) m.c199", "m.x306 + 15*m.b624 <= 15) m.c331 = Constraint(expr= m.x307 +", "m.x370 - 1.26558121681553*m.b640 <= 0) m.c488 = Constraint(expr= m.x371 +", "0) m.c857 = Constraint(expr= m.x527 - 0.994083415506506*m.b677 <= 0) m.c858", "= Constraint(expr= m.b747 + m.b748 <= 1) m.c1229 = Constraint(expr=", "= Constraint(expr= m.x42 - m.x267 - m.x273 == 0) m.c148", "m.b756 <= 1) m.c1244 = Constraint(expr= m.b756 + m.b757 <=", "1) m.c1279 = Constraint(expr= m.b773 + 
m.b774 <= 1) m.c1280", ">= 0) m.c1437 = Constraint(expr= m.b618 - m.b636 >= 0)", "- m.b686 <= 0) m.c1284 = Constraint(expr= - m.b596 +", "0) m.c592 = Constraint(expr= m.x463 == 0) m.c593 = Constraint(expr=", "m.b622 <= 0) m.c1040 = Constraint(expr= m.b623 - m.b624 <=", "m.x263 == 0) m.c114 = Constraint(expr= m.x264 == 0) m.c115", "m.x517 + 30*m.b670 <= 30) m.c782 = Constraint(expr= m.x536 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x244 = Var(within=Reals,bounds=(0,None),initialize=0) m.x245 = Var(within=Reals,bounds=(0,None),initialize=0) m.x246 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.b731 + m.b732 <= 1) m.c1194 = Constraint(expr= m.b731", "m.b726 <= 0) m.c1324 = Constraint(expr= - m.b635 - m.b636", "m.b600) <= 0) m.c82 = Constraint(expr=(m.x232/(0.001 + 0.999*m.b601) - 1.2*log(1", "m.x300 + 15*m.b621 <= 15) m.c304 = Constraint(expr= m.x301 +", "Constraint(expr= 4*m.b742 + m.x832 == 0) m.c980 = Constraint(expr= m.b743", "0) m.c172 = Constraint(expr= m.x259 == 0) m.c173 = Constraint(expr=", "0.999*m.b658)))*(0.001 + 0.999*m.b658) <= 0) m.c641 = Constraint(expr= m.x473 ==", "m.x523 = Var(within=Reals,bounds=(0,None),initialize=0) m.x524 = Var(within=Reals,bounds=(0,None),initialize=0) m.x525 = Var(within=Reals,bounds=(0,None),initialize=0) m.x526", "m.x309 = Var(within=Reals,bounds=(0,None),initialize=0) m.x310 = Var(within=Reals,bounds=(0,None),initialize=0) m.x311 = Var(within=Reals,bounds=(0,None),initialize=0) m.x312", "Constraint(expr= m.b737 + m.b739 <= 1) m.c1207 = Constraint(expr= m.b737", "0.999*m.b628) - 1.1*log(1 + m.x310/(0.001 + 0.999*m.b628)))*(0.001 + 0.999* m.b628)", "0.999*m.b650) - 0.8*log(1 + m.x404/(0.001 + 0.999*m.b650)))*(0.001 + 0.999* m.b650)", "Constraint(expr= m.x410 - 3.04984759446376*m.b653 <= 0) m.c627 = Constraint(expr= m.x411", "- 4*m.b742 - m.b743 - 4*m.b744 - m.b745 - 2*m.b746", "m.x564 == 0) m.c901 = Constraint(expr= m.x565 == 0) m.c902", "Constraint(expr= m.b692 + m.b693 <= 1) m.c1116 = 
Constraint(expr= m.b692", "m.x167 - m.x506 - m.x509 == 0) m.c762 = Constraint(expr=", "Constraint(expr= m.b771 + m.b772 <= 1) m.c1275 = Constraint(expr= m.b770", "m.x288 = Var(within=Reals,bounds=(0,None),initialize=0) m.x289 = Var(within=Reals,bounds=(0,None),initialize=0) m.x290 = Var(within=Reals,bounds=(0,None),initialize=0) m.x291", "- m.x376 + m.x418 == 0) m.c371 = Constraint(expr= m.x323", "0) m.c1435 = Constraint(expr= m.b616 - m.b634 >= 0) m.c1436", "m.c34 = Constraint(expr= m.x136 - m.x139 == 0) m.c35 =", "m.c417 = Constraint(expr= m.x66 - m.x330 - m.x336 == 0)", "- m.x60 == 0) m.c19 = Constraint(expr= m.x46 - m.x55", "= Constraint(expr= m.b609 - m.b627 >= 0) m.c1429 = Constraint(expr=", "m.x482 - m.x485 == 0) m.c651 = Constraint(expr= m.x147 -", "m.x49 - m.x286 - m.x289 == 0) m.c215 = Constraint(expr=", ">= 0) m.c1433 = Constraint(expr= m.b614 - m.b632 >= 0)", "+ m.b658 + m.b661 >= 0) m.c1460 = Constraint(expr= -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x273 = Var(within=Reals,bounds=(0,None),initialize=0) m.x274 = Var(within=Reals,bounds=(0,None),initialize=0) m.x275 =", "1) m.c1231 = Constraint(expr= m.b749 + m.b750 <= 1) m.c1232", "0) m.c706 = Constraint(expr= m.x175 - m.x520 - m.x523 ==", "m.x852 = Var(within=Reals,bounds=(None,None),initialize=0) m.x853 = Var(within=Reals,bounds=(None,None),initialize=0) m.x854 = Var(within=Reals,bounds=(None,None),initialize=0) m.x855", "0) m.c884 = Constraint(expr= m.x554 - 15*m.b680 <= 0) m.c885", "0.999*m.b636)))*(0.001 + 0.999* m.b636) <= 0) m.c436 = Constraint(expr=(m.x430/(0.001 +", "- m.x49 - m.x52 == 0) m.c17 = Constraint(expr= m.x44", "- m.x222 == 0) m.c91 = Constraint(expr= m.x10 - m.x220", "m.x381 = Var(within=Reals,bounds=(0,None),initialize=0) m.x382 = Var(within=Reals,bounds=(0,None),initialize=0) m.x383 = Var(within=Reals,bounds=(0,None),initialize=0) m.x384", "Constraint(expr= m.x498 + 0.940066550763924*m.b663 <= 0.940066550763924) m.c712 = Constraint(expr= m.x499", "m.x56 - m.x302 - 
m.x305 == 0) m.c321 = Constraint(expr=", "m.x495 - m.x498 == 0) m.c703 = Constraint(expr= m.x163 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x535 = Var(within=Reals,bounds=(0,None),initialize=0) m.x536 = Var(within=Reals,bounds=(0,None),initialize=0) m.x537 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x373 == 0) m.c470 = Constraint(expr= m.x383 == 0) m.c471", "1.26558121681553*m.b640 <= 0) m.c488 = Constraint(expr= m.x371 + 1.26558121681553*m.b638 <=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x435 = Var(within=Reals,bounds=(0,None),initialize=0) m.x436 = Var(within=Reals,bounds=(0,None),initialize=0) m.x437 = Var(within=Reals,bounds=(0,None),initialize=0)", "+ 1.26558121681553*m.b636 <= 1.26558121681553) m.c454 = Constraint(expr= m.x367 + 1.26558121681553*m.b637", "m.x468 + 1.18887736200171*m.b654 <= 1.18887736200171) m.c637 = Constraint(expr= m.x469 +", "m.b708 <= 1) m.c1148 = Constraint(expr= m.b708 + m.b709 <=", "+ m.x310/(0.001 + 0.999*m.b628)))*(0.001 + 0.999* m.b628) <= 0) m.c341", "Constraint(expr= - m.b641 - m.b642 + m.b643 - m.b733 <=", "+ m.x861 == 0) m.c1009 = Constraint(expr= 3*m.b772 + m.x862", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x352 = Var(within=Reals,bounds=(0,None),initialize=0) m.x353 = Var(within=Reals,bounds=(0,None),initialize=0) m.x354 =", "Var(within=Reals,bounds=(None,None),initialize=0) m.x837 = Var(within=Reals,bounds=(None,None),initialize=0) m.x838 = Var(within=Reals,bounds=(None,None),initialize=0) m.x839 = Var(within=Reals,bounds=(None,None),initialize=0)", "m.c127 = Constraint(expr= m.x241 + 4.45628648004517*m.b604 <= 4.45628648004517) m.c128 =", "m.x341 == 0) m.c270 = Constraint(expr= m.x69 - m.x339 -", "0) m.c1377 = Constraint(expr= - m.b603 + m.b612 + m.b615", "m.b703 <= 1) m.c1135 = Constraint(expr= m.b701 + m.b702 <=", "m.x593 == 0) m.c903 = Constraint(expr= m.x594 == 0) m.c904", "m.b665 - m.b677 >= 0) m.c1479 = Constraint(expr= m.b666 -", "= Constraint(expr= m.b639 - m.b640 <= 0) 
m.c1058 = Constraint(expr=", "- 0.75*m.x236 + m.x260 == 0) m.c108 = Constraint(expr= -", "m.c814 = Constraint(expr= m.x571 + 0.666992981045719*m.b673 <= 0.666992981045719) m.c815 =", "Constraint(expr= m.x264 == 0) m.c115 = Constraint(expr= m.x265 == 0)", "m.c976 = Constraint(expr= 6*m.b739 + m.x829 == 0) m.c977 =", "+ m.b766 <= 1) m.c1265 = Constraint(expr= m.b767 + m.b768", "m.b709 <= 1) m.c1150 = Constraint(expr= m.b708 + m.b709 <=", "= Constraint(expr= m.x376 - 20*m.b631 <= 0) m.c398 = Constraint(expr=", "Constraint(expr= m.x99 - m.x393 - m.x396 == 0) m.c541 =", "Constraint(expr= m.b701 + m.b702 <= 1) m.c1134 = Constraint(expr= m.b701", "<= 0) m.c1030 = Constraint(expr= m.b612 - m.b613 <= 0)", "0) m.c795 = Constraint(expr= m.x570 == 0) m.c796 = Constraint(expr=", "= Constraint(expr= - 0.9*m.x556 + m.x586 == 0) m.c872 =", "m.x340 = Var(within=Reals,bounds=(0,None),initialize=0) m.x341 = Var(within=Reals,bounds=(0,None),initialize=0) m.x342 = Var(within=Reals,bounds=(0,None),initialize=0) m.x343", "Var(within=Reals,bounds=(0,None),initialize=0) m.x561 = Var(within=Reals,bounds=(0,None),initialize=0) m.x562 = Var(within=Reals,bounds=(0,None),initialize=0) m.x563 = Var(within=Reals,bounds=(0,None),initialize=0)", "== 0) m.c84 = Constraint(expr= m.x222 == 0) m.c85 =", "== 0) m.c854 = Constraint(expr= m.x203 - m.x578 - m.x581", "= Constraint(expr= m.b734 + m.b735 <= 1) m.c1200 = Constraint(expr=", "15*m.b610 <= 15) m.c203 = Constraint(expr=(m.x314/(0.001 + 0.999*m.b611) - 1.25*log(1", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x528 = Var(within=Reals,bounds=(0,None),initialize=0) m.x529 = Var(within=Reals,bounds=(0,None),initialize=0) m.x530 =", "+ m.b732 <= 1) m.c1194 = Constraint(expr= m.b731 + m.b733", "Var(within=Reals,bounds=(0,None),initialize=0) m.x458 = Var(within=Reals,bounds=(0,None),initialize=0) m.x459 = Var(within=Reals,bounds=(0,None),initialize=0) m.x460 = Var(within=Reals,bounds=(0,None),initialize=0)", "= 
Var(within=Reals,bounds=(0,None),initialize=0) m.x592 = Var(within=Reals,bounds=(0,None),initialize=0) m.x593 = Var(within=Reals,bounds=(0,None),initialize=0) m.x594 =", "Constraint(expr= m.x138 - m.x141 - m.x144 == 0) m.c37 =", "m.x279 == 0) m.c166 = Constraint(expr= - 0.5*m.x256 + m.x280", "== 0) m.c261 = Constraint(expr= m.x276 == 0) m.c262 =", "+ m.x472/(0.001 + 0.999*m.b658)))*(0.001 + 0.999*m.b658) <= 0) m.c641 =", "= Constraint(expr= m.x478 - 1.18887736200171*m.b661 <= 0) m.c683 = Constraint(expr=", "m.x204 - m.x579 - m.x582 == 0) m.c856 = Constraint(expr=", "m.b619 <= 0) m.c1036 = Constraint(expr= m.b618 - m.b619 <=", "m.c300 = Constraint(expr= m.x297 - 15*m.b621 <= 0) m.c301 =", "m.x570 = Var(within=Reals,bounds=(0,None),initialize=0) m.x571 = Var(within=Reals,bounds=(0,None),initialize=0) m.x572 = Var(within=Reals,bounds=(0,None),initialize=0) m.x573", "= Constraint(expr= m.x414 == 0) m.c616 = Constraint(expr= m.x415 ==", "m.b630 - m.b720 <= 0) m.c1318 = Constraint(expr= - m.b629", "Var(within=Binary,bounds=(0,1),initialize=0) m.b609 = Var(within=Binary,bounds=(0,1),initialize=0) m.b610 = Var(within=Binary,bounds=(0,1),initialize=0) m.b611 = Var(within=Binary,bounds=(0,1),initialize=0)", "0) m.c1437 = Constraint(expr= m.b618 - m.b636 >= 0) m.c1438", "m.x298 - 15*m.b622 <= 0) m.c302 = Constraint(expr= m.x299 +", "m.c742 = Constraint(expr= m.x526 - 0.994083415506506*m.b667 <= 0) m.c743 =", "= Constraint(expr= m.x192 - m.x561 - m.x564 == 0) m.c907", "+ m.b600 - m.b609 >= 0) m.c1411 = Constraint(expr= m.b598", "- m.b610 >= 0) m.c1412 = Constraint(expr= m.b602 - m.b611", "5*m.x22 - 2*m.x35 - m.x36 - 2*m.x37 - 10*m.x86 -", "Constraint(expr= m.x451 == 0) m.c539 = Constraint(expr= m.x98 - m.x392", "+ m.x585 == 0) m.c871 = Constraint(expr= - 0.9*m.x556 +", "Constraint(expr= m.x577 == 0) m.c824 = Constraint(expr= m.x185 - m.x548", "<= 0) m.c520 = Constraint(expr= m.x388 - 9*m.b643 <= 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x29 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x30 = Var(within=Reals,bounds=(0,None),initialize=0) m.x31 =", "- 0.705049913072943*m.b664 <= 0) m.c716 = Constraint(expr= m.x521 + 0.705049913072943*m.b662", "- m.b622 + m.b640 >= 0) m.c1397 = Constraint(expr= -", "0.999* m.b639) <= 0) m.c466 = Constraint(expr=(m.x436/(0.001 + 0.999*m.b640) -", "m.b765 <= 0) m.c1363 = Constraint(expr= - m.b674 - m.b675", "- m.b673 <= 0) m.c1090 = Constraint(expr= m.b672 - m.b673", "+ 25*m.x115 + 10*m.x116 + 30*m.x117 + 40*m.x118 + 30*m.x119", "<= 0) m.c333 = Constraint(expr= m.x351 - 9*m.b624 <= 0)", "+ 0.999*m.b655) - 0.85*log(1 + m.x412/(0.001 + 0.999*m.b655)))*(0.001 + 0.999*", "0.999*m.b633)))*(0.001 + 0.999*m.b633) <= 0) m.c409 = Constraint(expr=(m.x424/(0.001 + 0.999*m.b634)", "= Constraint(expr= m.b720 + m.b721 <= 1) m.c1173 = Constraint(expr=", "<= 0) m.c1299 = Constraint(expr= - m.b611 + m.b612 -", "Constraint(expr= - m.b664 + m.b673 + m.b676 >= 0) m.c1463", "m.x76 - m.x97 - m.x100 == 0) m.c29 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x401 = Var(within=Reals,bounds=(0,None),initialize=0) m.x402 = Var(within=Reals,bounds=(0,None),initialize=0) m.x403 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x130 - m.x454 - m.x457 == 0) m.c572 =", "0) m.c827 = Constraint(expr= m.x200 - m.x572 - m.x575 ==", "Constraint(expr= m.x593 == 0) m.c903 = Constraint(expr= m.x594 == 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x396 = Var(within=Reals,bounds=(0,None),initialize=0) m.x397 = Var(within=Reals,bounds=(0,None),initialize=0) m.x398 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b652 - m.b742 <= 0) m.c1340 = Constraint(expr= m.b653 -", "m.x859 == 0) m.c1007 = Constraint(expr= 2*m.b770 + m.x860 ==", "Constraint(expr= - m.b674 + m.b675 - m.b765 <= 0) m.c1363", "= Constraint(expr= m.b659 - m.b749 <= 0) m.c1347 = Constraint(expr=", "== 0) m.c180 = Constraint(expr= m.x36 - m.x255 - m.x258", "- m.x243 - m.x246 == 0) m.c145 = 
Constraint(expr= m.x31", "m.x285 - 3.34221486003388*m.b612 <= 0) m.c220 = Constraint(expr= m.x286 -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x305 = Var(within=Reals,bounds=(0,None),initialize=0) m.x306 = Var(within=Reals,bounds=(0,None),initialize=0) m.x307 =", "m.c1279 = Constraint(expr= m.b773 + m.b774 <= 1) m.c1280 =", "+ 15*m.b621 <= 15) m.c304 = Constraint(expr= m.x301 + 15*m.b622", "m.b600 - m.b609 >= 0) m.c1411 = Constraint(expr= m.b598 +", "- m.b702 <= 0) m.c1300 = Constraint(expr= - m.b611 -", "- m.x300 == 0) m.c295 = Constraint(expr= m.x55 - m.x298", "+ m.x92 == 0) m.c24 = Constraint(expr= - m.x72 -", "Constraint(expr=(m.x423/(0.001 + 0.999*m.b633) - log(1 + m.x330/(0.001 + 0.999*m.b633)))*(0.001 +", "== 0) m.c927 = Constraint(expr= 7*m.b690 + m.x780 == 0)", "<= 2.54515263975353) m.c278 = Constraint(expr= m.x338 - 1.26558121681553*m.b617 <= 0)", "m.x278 - m.x281 == 0) m.c183 = Constraint(expr= m.x45 -", "= Constraint(expr= m.b744 + m.b745 <= 1) m.c1221 = Constraint(expr=", "m.x443 == 0) m.c516 = Constraint(expr= m.x123 - m.x441 -", "<= 33.5) m.c495 = Constraint(expr= m.x384 + 33.5*m.b639 <= 33.5)", "m.x252 = Var(within=Reals,bounds=(0,None),initialize=0) m.x253 = Var(within=Reals,bounds=(0,None),initialize=0) m.x254 = Var(within=Reals,bounds=(0,None),initialize=0) m.x255", "Constraint(expr= - m.x12 - m.x15 + m.x18 == 0) m.c7", "m.x390 == 0) m.c514 = Constraint(expr= m.x97 - m.x388 -", "Constraint(expr= m.x448 - 9*m.b646 <= 0) m.c554 = Constraint(expr= m.x449", "m.x351 = Var(within=Reals,bounds=(0,None),initialize=0) m.x352 = Var(within=Reals,bounds=(0,None),initialize=0) m.x353 = Var(within=Reals,bounds=(0,None),initialize=0) m.x354", "m.x383 = Var(within=Reals,bounds=(0,None),initialize=0) m.x384 = Var(within=Reals,bounds=(0,None),initialize=0) m.x385 = Var(within=Reals,bounds=(0,None),initialize=0) m.x386", "Constraint(expr= m.x160 - m.x163 - m.x166 - m.x169 == 0)", "Constraint(expr= m.x445 == 0) m.c512 = Constraint(expr= m.x95 - m.x386", 
"- 5*m.x90 - 5*m.x91 + 40*m.x110 + 30*m.x111 + 15*m.x112", "== 0) m.c162 = Constraint(expr= - m.x249 + m.x279 ==", "= Constraint(expr= m.b730 + m.x820 == 0) m.c968 = Constraint(expr=", "- m.b697 <= 0) m.c1295 = Constraint(expr= m.b608 - m.b698", "== 0) m.c518 = Constraint(expr= m.x386 - 9*m.b641 <= 0)", ">= 0) m.c1472 = Constraint(expr= m.b662 - m.b671 >= 0)", "0.78338879230327*m.b657 <= 0.78338879230327) m.c664 = Constraint(expr= m.x487 + 0.78338879230327*m.b658 <=", "Constraint(expr= m.b723 + m.b724 <= 1) m.c1179 = Constraint(expr= m.b722", "from pyomo.environ import * model = m = ConcreteModel() m.x2", "m.x411 = Var(within=Reals,bounds=(0,None),initialize=0) m.x412 = Var(within=Reals,bounds=(0,None),initialize=0) m.x413 = Var(within=Reals,bounds=(0,None),initialize=0) m.x414", "m.x858 = Var(within=Reals,bounds=(None,None),initialize=0) m.x859 = Var(within=Reals,bounds=(None,None),initialize=0) m.x860 = Var(within=Reals,bounds=(None,None),initialize=0) m.x861", "- m.x227 == 0) m.c66 = Constraint(expr= m.x12 - m.x225", "m.c1129 = Constraint(expr= m.b698 + m.b699 <= 1) m.c1130 =", "Constraint(expr= m.x588 == 0) m.c877 = Constraint(expr= m.x589 == 0)", "m.c1170 = Constraint(expr= m.b719 + m.b721 <= 1) m.c1171 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x589 = Var(within=Reals,bounds=(0,None),initialize=0) m.x590 = Var(within=Reals,bounds=(0,None),initialize=0) m.x591 =", "Constraint(expr= m.b746 + m.b747 <= 1) m.c1226 = Constraint(expr= m.b747", "m.c398 = Constraint(expr= m.x377 + 20*m.b629 <= 20) m.c399 =", "0.999*m.b658) <= 0) m.c641 = Constraint(expr= m.x473 == 0) m.c642", "m.c668 = Constraint(expr= m.x479 == 0) m.c669 = Constraint(expr= m.x480", "m.b600 - m.b606 >= 0) m.c1408 = Constraint(expr= m.b598 +", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b619 = Var(within=Binary,bounds=(0,1),initialize=0) m.b620 = Var(within=Binary,bounds=(0,1),initialize=0) m.b621 =", "m.c1020 = Constraint(expr= m.b602 - m.b604 <= 0) m.c1021 =", "1) m.c1211 = 
Constraint(expr= m.b740 + m.b741 <= 1) m.c1212", "m.x299 + 15*m.b620 <= 15) m.c303 = Constraint(expr= m.x300 +", "<= 0) m.c1052 = Constraint(expr= m.b635 - m.b636 <= 0)", "= Constraint(expr=(m.x488/(0.001 + 0.999*m.b659) - 1.2*log(1 + m.x476/(0.001 + 0.999*m.b659)))*(0.001", "= Constraint(expr= - m.b659 + m.b660 - m.b750 <= 0)", "m.b610 >= 0) m.c1412 = Constraint(expr= m.b602 - m.b611 >=", "m.b696 <= 1) m.c1122 = Constraint(expr= m.b695 + m.b697 <=", "import * model = m = ConcreteModel() m.x2 = Var(within=Reals,bounds=(0,40),initialize=0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x130 = Var(within=Reals,bounds=(0,None),initialize=0) m.x131 = Var(within=Reals,bounds=(0,None),initialize=0) m.x132 = Var(within=Reals,bounds=(0,None),initialize=0)", "- m.b745 - 2*m.b746 - 5*m.b747 - 2*m.b748 - 9*m.b749", "- 20*m.b629 <= 0) m.c396 = Constraint(expr= m.x375 - 20*m.b630", "m.x339 - 1.26558121681553*m.b618 <= 0) m.c280 = Constraint(expr= m.x340 -", "m.x474 == 0) m.c643 = Constraint(expr= m.x475 == 0) m.c644", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x277 = Var(within=Reals,bounds=(0,None),initialize=0) m.x278 = Var(within=Reals,bounds=(0,None),initialize=0) m.x279 =", "+ 0.999*m.b653)))*(0.001 + 0.999* m.b653) <= 0) m.c612 = Constraint(expr=(m.x465/(0.001", "15*m.b670 <= 15) m.c788 = Constraint(expr=(m.x566/(0.001 + 0.999*m.b671) - 1.25*log(1", "m.x184 - m.x544 - m.x547 == 0) m.c800 = Constraint(expr=", "Constraint(expr= m.x103 - m.x400 - m.x403 == 0) m.c569 =", "1) m.c1277 = Constraint(expr= m.b773 + m.b774 <= 1) m.c1278", "Constraint(expr= m.x14 - m.x230 - m.x233 == 0) m.c93 =", "m.x381 - 33.5*m.b639 <= 0) m.c493 = Constraint(expr= m.x382 -", "m.b750 + m.b751 <= 1) m.c1233 = Constraint(expr= m.b749 +", "Var(within=Reals,bounds=(0,None),initialize=0) m.x109 = Var(within=Reals,bounds=(0,None),initialize=0) m.x110 = Var(within=Reals,bounds=(0,None),initialize=0) m.x111 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b762 + m.b763 <= 1) m.c1257 = 
Constraint(expr= m.b761 +", "m.b607 = Var(within=Binary,bounds=(0,1),initialize=0) m.b608 = Var(within=Binary,bounds=(0,1),initialize=0) m.b609 = Var(within=Binary,bounds=(0,1),initialize=0) m.b610", "<= 0) m.c71 = Constraint(expr= m.x215 + 40*m.b596 <= 40)", "+ m.x784 == 0) m.c932 = Constraint(expr= 10*m.b695 + m.x785", "- m.x583 == 0) m.c857 = Constraint(expr= m.x527 - 0.994083415506506*m.b677", "m.x469 == 0) m.c620 = Constraint(expr= m.x107 - m.x410 -", "= Constraint(expr= m.x355 + 9*m.b625 <= 9) m.c338 = Constraint(expr=(m.x356/(0.001", "m.b641 - m.b642 + m.b643 - m.b733 <= 0) m.c1331", "1) m.c1113 = Constraint(expr= m.b689 + m.b691 <= 1) m.c1114", "m.c288 = Constraint(expr= m.x300 == 0) m.c289 = Constraint(expr= m.x301", "m.x336 == 0) m.c418 = Constraint(expr= m.x67 - m.x331 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x184 = Var(within=Reals,bounds=(0,None),initialize=0) m.x185 = Var(within=Reals,bounds=(0,None),initialize=0) m.x186 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= - 0.5*m.x254 + m.x278 == 0) m.c165 =", "Constraint(expr=(m.x453/(0.001 + 0.999*m.b648) - 0.75*log(1 + m.x399/(0.001 + 0.999*m.b648)))*(0.001 +", "<= 1) m.c1275 = Constraint(expr= m.b770 + m.b772 <= 1)", "0) m.c1443 = Constraint(expr= m.b624 - m.b642 >= 0) m.c1444", "0) m.c1334 = Constraint(expr= m.b647 - m.b737 <= 0) m.c1335", "m.b677 >= 0) m.c1464 = Constraint(expr= - m.b666 + m.b678", "m.b617 + m.b618 - m.b708 <= 0) m.c1306 = Constraint(expr=", "m.x855 = Var(within=Reals,bounds=(None,None),initialize=0) m.x856 = Var(within=Reals,bounds=(None,None),initialize=0) m.x857 = Var(within=Reals,bounds=(None,None),initialize=0) m.x858", "Var(within=Reals,bounds=(0,None),initialize=0) m.x417 = Var(within=Reals,bounds=(0,None),initialize=0) m.x418 = Var(within=Reals,bounds=(0,None),initialize=0) m.x419 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= m.x46 - m.x280 - m.x283 == 0) m.c185", "Constraint(expr= m.x303 - 15*m.b624 <= 0) m.c328 = 
Constraint(expr= m.x304", "1) m.c1233 = Constraint(expr= m.b749 + m.b751 <= 1) m.c1234", "- m.x558 == 0) m.c880 = Constraint(expr= m.x190 - m.x556", "+ m.x799 == 0) m.c947 = Constraint(expr= 2*m.b710 + m.x800", "m.b693 <= 0) m.c1291 = Constraint(expr= - m.b602 - m.b603", "m.x212 = Var(within=Reals,bounds=(0,None),initialize=0) m.x213 = Var(within=Reals,bounds=(0,None),initialize=0) m.x214 = Var(within=Reals,bounds=(0,None),initialize=0) m.x215", "m.x242 - 4.45628648004517*m.b605 <= 0) m.c150 = Constraint(expr= m.x243 -", "+ 0.999*m.b657) - log(1 + m.x471/(0.001 + 0.999*m.b657)))*(0.001 + 0.999*m.b657)", "= Constraint(expr= m.b753 + m.b754 <= 1) m.c1239 = Constraint(expr=", "m.c37 = Constraint(expr= m.x139 - m.x142 - m.x145 == 0)", "m.x861 = Var(within=Reals,bounds=(None,None),initialize=0) m.x862 = Var(within=Reals,bounds=(None,None),initialize=0) m.x863 = Var(within=Reals,bounds=(None,None),initialize=0) m.x864", "= Constraint(expr= m.x41 - m.x266 - m.x272 == 0) m.c147", "= Constraint(expr= - m.b659 - m.b660 + m.b661 - m.b751", "+ 0.999*m.b650)))*(0.001 + 0.999* m.b650) <= 0) m.c585 = Constraint(expr=(m.x459/(0.001", "i s1s s2s sc si # Total cont binary integer", "m.x542 - 0.705049913072943*m.b671 <= 0) m.c804 = Constraint(expr= m.x543 -", "= Constraint(expr= m.b749 + m.b750 <= 1) m.c1230 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x308 = Var(within=Reals,bounds=(0,None),initialize=0) m.x309 = Var(within=Reals,bounds=(0,None),initialize=0) m.x310 =", "m.x834 = Var(within=Reals,bounds=(None,None),initialize=0) m.x835 = Var(within=Reals,bounds=(None,None),initialize=0) m.x836 = Var(within=Reals,bounds=(None,None),initialize=0) m.x837", "- m.x241 == 0) m.c119 = Constraint(expr= m.x38 - m.x260", "m.x862 == 0) m.c1010 = Constraint(expr= 8*m.b773 + m.x863 ==", "Var(within=Reals,bounds=(0,None),initialize=0) m.x383 = Var(within=Reals,bounds=(0,None),initialize=0) m.x384 = Var(within=Reals,bounds=(0,None),initialize=0) m.x385 = 
Var(within=Reals,bounds=(0,None),initialize=0)", "+ 9*m.b645 <= 9) m.c550 = Constraint(expr= m.x397 + 9*m.b646", "15*m.b685 <= 0) m.c914 = Constraint(expr= m.x563 + 15*m.b683 <=", "30*m.b668 <= 30) m.c780 = Constraint(expr= m.x516 + 30*m.b669 <=", "m.x38 - m.x260 - m.x263 == 0) m.c120 = Constraint(expr=", "m.c828 = Constraint(expr= m.x201 - m.x573 - m.x576 == 0)", "3*m.b774 - 4*m.b775, sense=maximize) m.c2 = Constraint(expr= m.x2 - m.x5", "m.x191 - m.x194 == 0) m.c51 = Constraint(expr= m.x180 -", "m.c179 = Constraint(expr= m.x35 - m.x254 - m.x257 == 0)", "0) m.c474 = Constraint(expr= m.x438 == 0) m.c475 = Constraint(expr=", "m.x370 = Var(within=Reals,bounds=(0,None),initialize=0) m.x371 = Var(within=Reals,bounds=(0,None),initialize=0) m.x372 = Var(within=Reals,bounds=(0,None),initialize=0) m.x373", "0.940066550763924*m.b661 <= 0.940066550763924) m.c692 = Constraint(expr= - 0.75*m.x494 + m.x518", "Constraint(expr= m.x571 == 0) m.c797 = Constraint(expr= m.x182 - m.x542", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x83 = Var(within=Reals,bounds=(0,None),initialize=0) m.x84 = Var(within=Reals,bounds=(0,None),initialize=0) m.x85 =", "0) m.c151 = Constraint(expr= m.x244 - 4.45628648004517*m.b607 <= 0) m.c152", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x838 = Var(within=Reals,bounds=(None,None),initialize=0) m.x839 = Var(within=Reals,bounds=(None,None),initialize=0) m.x840 =", "Constraint(expr= m.b702 + m.b703 <= 1) m.c1137 = Constraint(expr= m.b701", "= Constraint(expr= m.b758 + m.b760 <= 1) m.c1252 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x391 = Var(within=Reals,bounds=(0,None),initialize=0) m.x392 = Var(within=Reals,bounds=(0,None),initialize=0) m.x393 =", "= Constraint(expr= m.b627 - m.b648 >= 0) m.c1450 = Constraint(expr=", "- m.x467 == 0) m.c624 = Constraint(expr= m.x135 - m.x465", "m.b627 + m.b648 + m.b651 + m.b654 >= 0) m.c1402", "<= 15) m.c786 = Constraint(expr= m.x540 + 15*m.b669 <= 15)", "- m.b641 - m.b642 + 
m.b643 - m.b733 <= 0)", "m.x476 = Var(within=Reals,bounds=(0,None),initialize=0) m.x477 = Var(within=Reals,bounds=(0,None),initialize=0) m.x478 = Var(within=Reals,bounds=(0,None),initialize=0) m.x479", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b705 = Var(within=Binary,bounds=(0,1),initialize=0) m.b706 = Var(within=Binary,bounds=(0,1),initialize=0) m.b707 =", "0) m.c145 = Constraint(expr= m.x31 - m.x244 - m.x247 ==", "m.x427 == 0) m.c416 = Constraint(expr= m.x65 - m.x329 -", "m.x285 - m.x288 == 0) m.c214 = Constraint(expr= m.x49 -", "m.b675 <= 0) m.c1092 = Constraint(expr= m.b674 - m.b676 <=", "= Constraint(expr= m.x245 + 4.45628648004517*m.b605 <= 4.45628648004517) m.c153 = Constraint(expr=", "0) m.c403 = Constraint(expr= m.x418 - 20*m.b631 <= 0) m.c404", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x217 = Var(within=Reals,bounds=(0,None),initialize=0) m.x218 = Var(within=Reals,bounds=(0,None),initialize=0) m.x219 =", "0.999*m.b677) - log(1 + m.x527/(0.001 + 0.999*m.b677)))*(0.001 + 0.999*m.b677) <=", "+ 0.999*m.b640) - 0.65*log(1 + m.x370/(0.001 + 0.999*m.b640)))*(0.001 + 0.999*", "Var(within=Binary,bounds=(0,1),initialize=0) m.b718 = Var(within=Binary,bounds=(0,1),initialize=0) m.b719 = Var(within=Binary,bounds=(0,1),initialize=0) m.b720 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.x161 = Var(within=Reals,bounds=(0,None),initialize=0) m.x162 = Var(within=Reals,bounds=(0,None),initialize=0) m.x163 = Var(within=Reals,bounds=(0,None),initialize=0) m.x164", "0) m.c911 = Constraint(expr= m.x560 - 15*m.b683 <= 0) m.c912", "0) m.c499 = Constraint(expr= m.x436 - 2.30162356062425*m.b640 <= 0) m.c500", "m.x206 = Var(within=Reals,bounds=(0,None),initialize=0) m.x207 = Var(within=Reals,bounds=(0,None),initialize=0) m.x208 = Var(within=Reals,bounds=(0,None),initialize=0) m.x209", "1) m.c1144 = Constraint(expr= m.b705 + m.b706 <= 1) m.c1145", "0.999*m.b615) - 0.9*log(1 + m.x291/(0.001 + 0.999*m.b615)))*(0.001 + 0.999* m.b615)", "m.b705 <= 1) m.c1140 = 
Constraint(expr= m.b704 + m.b706 <=", "- 1.83548069293539*m.b630 <= 0) m.c391 = Constraint(expr= m.x319 - 1.83548069293539*m.b631", "Var(within=Reals,bounds=(0,None),initialize=0) m.x521 = Var(within=Reals,bounds=(0,None),initialize=0) m.x522 = Var(within=Reals,bounds=(0,None),initialize=0) m.x523 = Var(within=Reals,bounds=(0,None),initialize=0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x144 = Var(within=Reals,bounds=(0,None),initialize=0) m.x145 = Var(within=Reals,bounds=(0,None),initialize=0) m.x146 = Var(within=Reals,bounds=(0,None),initialize=0)", "<= 0) m.c136 = Constraint(expr=(m.x268/(0.001 + 0.999*m.b607) - 1.5*log(1 +", "m.x547 + 0.705049913072943*m.b673 <= 0.705049913072943) m.c809 = Constraint(expr= m.x566 -", "== 0) m.c412 = Constraint(expr= m.x337 == 0) m.c413 =", "m.b713 + m.b714 <= 1) m.c1158 = Constraint(expr= m.b713 +", "m.x512 = Var(within=Reals,bounds=(0,None),initialize=0) m.x513 = Var(within=Reals,bounds=(0,None),initialize=0) m.x514 = Var(within=Reals,bounds=(0,None),initialize=0) m.x515", "+ 40*m.b599 <= 40) m.c99 = Constraint(expr= m.x222 + 40*m.b600", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x398 = Var(within=Reals,bounds=(0,None),initialize=0) m.x399 = Var(within=Reals,bounds=(0,None),initialize=0) m.x400 =", "m.c130 = Constraint(expr= m.x262 - 3.34221486003388*m.b604 <= 0) m.c131 =", "m.x472/(0.001 + 0.999*m.b658)))*(0.001 + 0.999*m.b658) <= 0) m.c641 = Constraint(expr=", "= Constraint(expr= m.b609 - m.b624 >= 0) m.c1426 = Constraint(expr=", "= Constraint(expr= m.x475 == 0) m.c644 = Constraint(expr= m.x485 ==", "Constraint(expr=(m.x466/(0.001 + 0.999*m.b655) - 0.85*log(1 + m.x412/(0.001 + 0.999*m.b655)))*(0.001 +", "1.11894339953103*m.b651 <= 1.11894339953103) m.c610 = Constraint(expr= m.x463 + 1.11894339953103*m.b652 <=", "<= 0) m.c1040 = Constraint(expr= m.b623 - m.b624 <= 0)", "+ 25*m.x128 + 50*m.x129 + 10*m.x130 + 15*m.x131 + 20*m.x132", "1.32154609891348) m.c257 = Constraint(expr=(m.x338/(0.001 + 0.999*m.b617) - 
log(1 + m.x269/(0.001", "Constraint(expr= m.b641 - m.b731 <= 0) m.c1329 = Constraint(expr= -", "m.b600 = Var(within=Binary,bounds=(0,1),initialize=0) m.b601 = Var(within=Binary,bounds=(0,1),initialize=0) m.b602 = Var(within=Binary,bounds=(0,1),initialize=0) m.b603", "0) m.c974 = Constraint(expr= 5*m.b737 + m.x827 == 0) m.c975", "- m.b675 <= 0) m.c1092 = Constraint(expr= m.b674 - m.b676", "0) m.c1464 = Constraint(expr= - m.b666 + m.b678 >= 0)", "m.x114 = Var(within=Reals,bounds=(0,None),initialize=0) m.x115 = Var(within=Reals,bounds=(0,None),initialize=0) m.x116 = Var(within=Reals,bounds=(0,None),initialize=0) m.x117", "si # Total cont binary integer sos1 sos2 scont sint", "0.999*m.b626)))*(0.001 + 0.999* m.b626) <= 0) m.c339 = Constraint(expr=(m.x357/(0.001 +", "= Constraint(expr= m.x482 - 0.78338879230327*m.b656 <= 0) m.c660 = Constraint(expr=", "= Constraint(expr= m.b596 + m.b599 - m.b608 >= 0) m.c1410", "0) m.c1368 = Constraint(expr= - m.b680 + m.b681 - m.b771", "m.c1014 = Constraint(expr= m.b596 - m.b598 <= 0) m.c1015 =", "= Constraint(expr= m.x260 - 3.34221486003388*m.b602 <= 0) m.c129 = Constraint(expr=", "m.c797 = Constraint(expr= m.x182 - m.x542 - m.x545 == 0)", "== 0) m.c971 = Constraint(expr= 3*m.b734 + m.x824 == 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x560 = Var(within=Reals,bounds=(0,None),initialize=0) m.x561 = Var(within=Reals,bounds=(0,None),initialize=0) m.x562 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x158 - m.x161 - m.x164 - m.x167 == 0) m.c45", "1.18887736200171*m.b653 <= 1.18887736200171) m.c636 = Constraint(expr= m.x468 + 1.18887736200171*m.b654 <=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x367 = Var(within=Reals,bounds=(0,None),initialize=0) m.x368 = Var(within=Reals,bounds=(0,None),initialize=0) m.x369 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x334 == 0) m.c239 = Constraint(expr= m.x50 - m.x290", "m.x554 - m.x557 == 0) m.c879 = Constraint(expr= m.x189 -", "m.c703 = 
Constraint(expr= m.x163 - m.x496 - m.x499 == 0)", "<= 15) m.c888 = Constraint(expr= m.x558 + 15*m.b681 <= 15)", "3.34221486003388*m.b614 <= 3.34221486003388) m.c249 = Constraint(expr= m.x294 + 3.34221486003388*m.b615 <=", "m.b625 - m.b646 >= 0) m.c1448 = Constraint(expr= m.b626 -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x128 = Var(within=Reals,bounds=(0,None),initialize=0) m.x129 = Var(within=Reals,bounds=(0,None),initialize=0) m.x130 =", "m.x313 + 15*m.b628 <= 15) m.c359 = Constraint(expr= m.x356 -", "<= 0.940066550763924) m.c692 = Constraint(expr= - 0.75*m.x494 + m.x518 ==", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x119 = Var(within=Reals,bounds=(0,None),initialize=0) m.x120 = Var(within=Reals,bounds=(0,None),initialize=0) m.x121 =", "= Constraint(expr= m.x356 - 3.04984759446376*m.b626 <= 0) m.c360 = Constraint(expr=", "40*m.b598 <= 40) m.c74 = Constraint(expr= m.x224 - 3.71357206670431*m.b596 <=", "m.x365 = Var(within=Reals,bounds=(0,None),initialize=0) m.x366 = Var(within=Reals,bounds=(0,None),initialize=0) m.x367 = Var(within=Reals,bounds=(0,None),initialize=0) m.x368", "m.x418 - 20*m.b631 <= 0) m.c404 = Constraint(expr= m.x419 +", "+ m.b714 <= 1) m.c1158 = Constraint(expr= m.b713 + m.b715", "m.c537 = Constraint(expr= m.x450 == 0) m.c538 = Constraint(expr= m.x451", "1) m.c1281 = Constraint(expr= m.b773 + m.b775 <= 1) m.c1282", "m.x569 + 0.666992981045719*m.b671 <= 0.666992981045719) m.c813 = Constraint(expr= m.x570 +", "m.b754 <= 0) m.c1352 = Constraint(expr= m.b665 - m.b755 <=", "== 0) m.c534 = Constraint(expr= m.x396 == 0) m.c535 =", "m.x262 == 0) m.c110 = Constraint(expr= m.x239 == 0) m.c111", "m.b625 <= 0) m.c1043 = Constraint(expr= m.b626 - m.b627 <=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x226 = Var(within=Reals,bounds=(0,None),initialize=0) m.x227 = Var(within=Reals,bounds=(0,None),initialize=0) m.x228 =", "Constraint(expr= m.x329 - 1.32154609891348*m.b632 <= 0) m.c423 = Constraint(expr= m.x330", "m.c712 = 
Constraint(expr= m.x499 + 0.940066550763924*m.b664 <= 0.940066550763924) m.c713 =", "m.b701 + m.b703 <= 1) m.c1138 = Constraint(expr= m.b702 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x199 = Var(within=Reals,bounds=(0,None),initialize=0) m.x200 = Var(within=Reals,bounds=(0,None),initialize=0) m.x201 =", "2.54515263975353*m.b607 <= 2.54515263975353) m.c161 = Constraint(expr= - m.x248 + m.x278", "Constraint(expr= m.x586 - 13.5*m.b682 <= 0) m.c893 = Constraint(expr= m.x587", "m.x190 = Var(within=Reals,bounds=(0,None),initialize=0) m.x191 = Var(within=Reals,bounds=(0,None),initialize=0) m.x192 = Var(within=Reals,bounds=(0,None),initialize=0) m.x193", "+ m.x783 == 0) m.c931 = Constraint(expr= 4*m.b694 + m.x784", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b723 = Var(within=Binary,bounds=(0,1),initialize=0) m.b724 = Var(within=Binary,bounds=(0,1),initialize=0) m.b725 =", "GAMS Convert at 01/15/21 11:37:33 # # Equation counts #", "Constraint(expr= m.b615 - m.b616 <= 0) m.c1034 = Constraint(expr= m.b617", "<= 20) m.c405 = Constraint(expr= m.x420 + 20*m.b630 <= 20)", "8*m.b753 + m.x843 == 0) m.c991 = Constraint(expr= 4*m.b754 +", "Var(within=Reals,bounds=(0,None),initialize=0) m.x235 = Var(within=Reals,bounds=(0,None),initialize=0) m.x236 = Var(within=Reals,bounds=(0,None),initialize=0) m.x237 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x35 - m.x254 - m.x257 == 0) m.c180 = Constraint(expr=", "= Constraint(expr= m.b710 + m.b712 <= 1) m.c1156 = Constraint(expr=", "0) m.c57 = Constraint(expr= m.x216 == 0) m.c58 = Constraint(expr=", "Constraint(expr= m.x456 == 0) m.c565 = Constraint(expr= m.x457 == 0)", "m.x479 == 0) m.c675 = Constraint(expr= m.x144 - m.x477 -", "m.x340 - 1.26558121681553*m.b619 <= 0) m.c281 = Constraint(expr= m.x341 +", "Constraint(expr= m.x311 == 0) m.c342 = Constraint(expr= m.x312 == 0)", "== 0) m.c370 = Constraint(expr= - m.x376 + m.x418 ==", "0) m.c621 = Constraint(expr= m.x108 - m.x411 - m.x414 ==", "<= 13.5) m.c895 = 
Constraint(expr= m.x589 + 13.5*m.b682 <= 13.5)", "= Constraint(expr= - m.b611 + m.b612 - m.b702 <= 0)", "m.b599 - m.b605 >= 0) m.c1407 = Constraint(expr= m.b597 +", "m.b765 + m.b766 <= 1) m.c1265 = Constraint(expr= m.b767 +", "0) m.c192 = Constraint(expr= m.x255 - 30*m.b609 <= 0) m.c193", "+ 0.999* m.b611) <= 0) m.c204 = Constraint(expr=(m.x315/(0.001 + 0.999*m.b612)", "<= 0.705049913072943) m.c807 = Constraint(expr= m.x546 + 0.705049913072943*m.b672 <= 0.705049913072943)", "Var(within=Reals,bounds=(0,30),initialize=0) m.x37 = Var(within=Reals,bounds=(0,30),initialize=0) m.x38 = Var(within=Reals,bounds=(0,None),initialize=0) m.x39 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x165 - m.x501 - m.x504 == 0) m.c730 = Constraint(expr=", "m.x560 - m.x563 == 0) m.c906 = Constraint(expr= m.x192 -", "Constraint(expr= 4*m.b715 + m.x805 == 0) m.c953 = Constraint(expr= 3*m.b716", "= Constraint(expr= m.x176 - m.x524 - m.x530 == 0) m.c732", "m.c543 = Constraint(expr= m.x126 - m.x447 - m.x450 == 0)", "<= 0) m.c354 = Constraint(expr= m.x309 - 15*m.b627 <= 0)", "m.c722 = Constraint(expr= m.x503 == 0) m.c723 = Constraint(expr= m.x504", "0) m.c65 = Constraint(expr= m.x11 - m.x224 - m.x227 ==", "0) m.c71 = Constraint(expr= m.x215 + 40*m.b596 <= 40) m.c72", "m.x471 - 1.18887736200171*m.b657 <= 0) m.c655 = Constraint(expr= m.x472 -", "= Constraint(expr= m.x576 + 0.480234946352917*m.b675 <= 0.480234946352917) m.c841 = Constraint(expr=", "m.x168 = Var(within=Reals,bounds=(0,None),initialize=0) m.x169 = Var(within=Reals,bounds=(0,None),initialize=0) m.x170 = Var(within=Reals,bounds=(0,30),initialize=0) m.x171", "= Constraint(expr=(m.x578/(0.001 + 0.999*m.b677) - log(1 + m.x527/(0.001 + 0.999*m.b677)))*(0.001", "m.x174 = Var(within=Reals,bounds=(0,None),initialize=0) m.x175 = Var(within=Reals,bounds=(0,None),initialize=0) m.x176 = Var(within=Reals,bounds=(0,None),initialize=0) m.x177", "m.c1449 = Constraint(expr= m.b627 - m.b648 >= 0) m.c1450 =", "m.x82 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x83 = Var(within=Reals,bounds=(0,None),initialize=0) m.x84 = Var(within=Reals,bounds=(0,None),initialize=0) m.x85", "m.x395 + 9*m.b644 <= 9) m.c549 = Constraint(expr= m.x396 +", "m.b655 + m.b658 + m.b661 >= 0) m.c1460 = Constraint(expr=", "- m.x385 == 0) m.c482 = Constraint(expr= m.x119 - m.x434", "m.c926 = Constraint(expr= 8*m.b689 + m.x779 == 0) m.c927 =", "= Constraint(expr= m.b627 - m.b654 >= 0) m.c1456 = Constraint(expr=", "m.b662 - m.b671 >= 0) m.c1473 = Constraint(expr= m.b663 -", "m.b626 - m.b650 >= 0) m.c1452 = Constraint(expr= m.b627 -", "7*m.b702 - 4*m.b703 - 4*m.b704 - 3*m.b705 - 2*m.b706 -", "m.c1026 = Constraint(expr= m.b608 - m.b610 <= 0) m.c1027 =", "- m.x12 - m.x15 + m.x18 == 0) m.c7 =", "m.c545 = Constraint(expr= m.x392 - 9*m.b644 <= 0) m.c546 =", "0.6*m.x304 + m.x352 == 0) m.c314 = Constraint(expr= m.x305 ==", "1) m.c1116 = Constraint(expr= m.b692 + m.b694 <= 1) m.c1117", "m.b774 <= 1) m.c1278 = Constraint(expr= m.b773 + m.b775 <=", "1.83548069293539) m.c230 = Constraint(expr=(m.x326/(0.001 + 0.999*m.b614) - 0.9*log(1 + m.x290/(0.001", "m.x176 - m.x524 - m.x530 == 0) m.c732 = Constraint(expr=", "m.x587 == 0) m.c876 = Constraint(expr= m.x588 == 0) m.c877", "m.x216 == 0) m.c58 = Constraint(expr= m.x217 == 0) m.c59", "m.x26 = Var(within=Reals,bounds=(0,None),initialize=0) m.x27 = Var(within=Reals,bounds=(0,None),initialize=0) m.x28 = Var(within=Reals,bounds=(0,None),initialize=0) m.x29", "= Constraint(expr= - 0.9*m.x554 + m.x584 == 0) m.c870 =", "m.c1340 = Constraint(expr= m.b653 - m.b743 <= 0) m.c1341 =", "<= 0) m.c600 = Constraint(expr= m.x405 - 3.04984759446376*m.b651 <= 0)", "Var(within=Reals,bounds=(None,None),initialize=0) m.x839 = Var(within=Reals,bounds=(None,None),initialize=0) m.x840 = Var(within=Reals,bounds=(None,None),initialize=0) m.x841 = Var(within=Reals,bounds=(None,None),initialize=0)", "<= 0) m.c859 = Constraint(expr= m.x529 - 0.994083415506506*m.b679 <= 0)", "0) m.c1054 = 
Constraint(expr= m.b636 - m.b637 <= 0) m.c1055", "- 9*m.b645 <= 0) m.c547 = Constraint(expr= m.x394 - 9*m.b646", "- 9*m.b643 <= 0) m.c527 = Constraint(expr= m.x443 + 9*m.b641", "m.x343 == 0) m.c272 = Constraint(expr= m.x269 - 2.54515263975353*m.b617 <=", "0) m.c193 = Constraint(expr= m.x256 - 30*m.b610 <= 0) m.c194", "m.x425 == 0) m.c420 = Constraint(expr= m.x114 - m.x423 -", "m.x337 == 0) m.c419 = Constraint(expr= m.x113 - m.x422 -", "m.b726 <= 1) m.c1184 = Constraint(expr= m.b726 + m.b727 <=", "m.c301 = Constraint(expr= m.x298 - 15*m.b622 <= 0) m.c302 =", "Constraint(expr= m.x217 + 40*m.b598 <= 40) m.c74 = Constraint(expr= m.x224", "m.c447 = Constraint(expr= m.x117 - m.x429 - m.x432 == 0)", "m.c638 = Constraint(expr=(m.x482/(0.001 + 0.999*m.b656) - log(1 + m.x470/(0.001 +", "m.x387 - 9*m.b642 <= 0) m.c520 = Constraint(expr= m.x388 -", "= Constraint(expr= m.x324 == 0) m.c373 = Constraint(expr= m.x325 ==", "Var(within=Reals,bounds=(0,None),initialize=0) m.x528 = Var(within=Reals,bounds=(0,None),initialize=0) m.x529 = Var(within=Reals,bounds=(0,None),initialize=0) m.x530 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x837 = Var(within=Reals,bounds=(None,None),initialize=0) m.x838 = Var(within=Reals,bounds=(None,None),initialize=0) m.x839 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x523 = Var(within=Reals,bounds=(0,None),initialize=0) m.x524 = Var(within=Reals,bounds=(0,None),initialize=0) m.x525 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c50 = Constraint(expr= m.x179 - m.x188 - m.x191 -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x423 = Var(within=Reals,bounds=(0,None),initialize=0) m.x424 = Var(within=Reals,bounds=(0,None),initialize=0) m.x425 =", "Constraint(expr= m.x473 + 1.18887736200171*m.b656 <= 1.18887736200171) m.c657 = Constraint(expr= m.x474", "- m.b627 <= 0) m.c1044 = Constraint(expr= m.b626 - m.b628", "m.c968 = Constraint(expr= 2*m.b731 + m.x821 == 0) m.c969 =", 
"m.c353 = Constraint(expr= m.x308 - 15*m.b626 <= 0) m.c354 =", "m.b704 + m.b706 <= 1) m.c1141 = Constraint(expr= m.b704 +", "m.c1460 = Constraint(expr= - m.b662 + m.b671 + m.b674 >=", "+ 0.999* m.b638) <= 0) m.c465 = Constraint(expr=(m.x435/(0.001 + 0.999*m.b639)", "<= 0) m.c575 = Constraint(expr= m.x401 + 3.04984759446376*m.b647 <= 3.04984759446376)", "0.940066550763924*m.b663 <= 0) m.c709 = Constraint(expr= m.x496 - 0.940066550763924*m.b664 <=", "m.c1214 = Constraint(expr= m.b741 + m.b742 <= 1) m.c1215 =", "m.x106 = Var(within=Reals,bounds=(0,None),initialize=0) m.x107 = Var(within=Reals,bounds=(0,None),initialize=0) m.x108 = Var(within=Reals,bounds=(0,None),initialize=0) m.x109", "= Constraint(expr= m.x82 - m.x364 - m.x367 == 0) m.c446", "Constraint(expr= m.x517 + 30*m.b670 <= 30) m.c782 = Constraint(expr= m.x536", "0.690184503917672*m.b677 <= 0) m.c864 = Constraint(expr= m.x579 - 0.690184503917672*m.b678 <=", "= Constraint(expr= 2*m.b706 + m.x796 == 0) m.c944 = Constraint(expr=", "Constraint(expr= m.b695 + m.b697 <= 1) m.c1123 = Constraint(expr= m.b695", "- m.x337 == 0) m.c419 = Constraint(expr= m.x113 - m.x422", "+ m.b760 <= 1) m.c1249 = Constraint(expr= m.b758 + m.b759", "- m.b654 + m.b655 - m.b745 <= 0) m.c1343 =", "0) m.c437 = Constraint(expr= m.x365 == 0) m.c438 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x150 = Var(within=Reals,bounds=(0,None),initialize=0) m.x151 = Var(within=Reals,bounds=(0,None),initialize=0) m.x152 =", "+ m.b769 <= 1) m.c1267 = Constraint(expr= m.b767 + m.b768", "Var(within=Reals,bounds=(0,None),initialize=0) m.x289 = Var(within=Reals,bounds=(0,None),initialize=0) m.x290 = Var(within=Reals,bounds=(0,None),initialize=0) m.x291 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x224 - m.x227 == 0) m.c66 = Constraint(expr= m.x12 -", "m.x19 = Var(within=Reals,bounds=(0,None),initialize=0) m.x20 = Var(within=Reals,bounds=(0,None),initialize=0) m.x21 = Var(within=Reals,bounds=(0,None),initialize=0) m.x22", 
"m.x274 == 0) m.c143 = Constraint(expr= m.x29 - m.x242 -", "m.x9 == 0) m.c4 = Constraint(expr= m.x4 - m.x7 -", "+ m.b637 + m.b640 >= 0) m.c1391 = Constraint(expr= -", "m.x300 == 0) m.c289 = Constraint(expr= m.x301 == 0) m.c290", "1) m.c1126 = Constraint(expr= m.b696 + m.b697 <= 1) m.c1127", "3.04984759446376) m.c632 = Constraint(expr= m.x464 - 1.18887736200171*m.b653 <= 0) m.c633", "= Constraint(expr= m.x516 + 30*m.b669 <= 30) m.c781 = Constraint(expr=", "m.b615 >= 0) m.c1417 = Constraint(expr= m.b604 - m.b616 >=", "= Constraint(expr= m.x45 - m.x279 - m.x282 == 0) m.c184", "<= 0) m.c639 = Constraint(expr=(m.x483/(0.001 + 0.999*m.b657) - log(1 +", "= Constraint(expr= m.x335 + 1.32154609891348*m.b632 <= 1.32154609891348) m.c426 = Constraint(expr=", "0) m.c1363 = Constraint(expr= - m.b674 - m.b675 + m.b676", "Var(within=Reals,bounds=(0,None),initialize=0) m.x253 = Var(within=Reals,bounds=(0,None),initialize=0) m.x254 = Var(within=Reals,bounds=(0,None),initialize=0) m.x255 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.b771 + m.x861 == 0) m.c1009 = Constraint(expr= 3*m.b772", "m.c917 = Constraint(expr= m.x590 - 9*m.b683 <= 0) m.c918 =", "Constraint(expr= m.x475 + 1.18887736200171*m.b658 <= 1.18887736200171) m.c659 = Constraint(expr= m.x482", "- m.x254 - m.x257 == 0) m.c180 = Constraint(expr= m.x36", "m.x6 = Var(within=Reals,bounds=(0,None),initialize=0) m.x7 = Var(within=Reals,bounds=(0,None),initialize=0) m.x8 = Var(within=Reals,bounds=(0,None),initialize=0) m.x9", "<= 0) m.c810 = Constraint(expr= m.x567 - 0.666992981045719*m.b672 <= 0)", "m.x145 - m.x478 - m.x481 == 0) m.c677 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x404 = Var(within=Reals,bounds=(0,None),initialize=0) m.x405 = Var(within=Reals,bounds=(0,None),initialize=0) m.x406 =", "m.x811 == 0) m.c959 = Constraint(expr= 3*m.b722 + m.x812 ==", "= Constraint(expr= m.x126 - m.x447 - m.x450 == 0) m.c544", "- 2*m.x35 - m.x36 - 2*m.x37 - 10*m.x86 - 5*m.x87", "= 
Constraint(expr= - 0.75*m.x236 + m.x260 == 0) m.c108 =", "<= 0) m.c137 = Constraint(expr= m.x245 == 0) m.c138 =", "m.x221 == 0) m.c84 = Constraint(expr= m.x222 == 0) m.c85", "m.c951 = Constraint(expr= 7*m.b714 + m.x804 == 0) m.c952 =", "3.04984759446376*m.b647 <= 3.04984759446376) m.c576 = Constraint(expr= m.x402 + 3.04984759446376*m.b648 <=", "m.x172 + 80*m.x194 + 90*m.x195 + 120*m.x196 + 285*m.x197 +", "m.x39 = Var(within=Reals,bounds=(0,None),initialize=0) m.x40 = Var(within=Reals,bounds=(0,None),initialize=0) m.x41 = Var(within=Reals,bounds=(0,None),initialize=0) m.x42", "m.b619 - m.b709 <= 0) m.c1307 = Constraint(expr= m.b620 -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x468 = Var(within=Reals,bounds=(0,None),initialize=0) m.x469 = Var(within=Reals,bounds=(0,None),initialize=0) m.x470 =", "m.b699 = Var(within=Binary,bounds=(0,1),initialize=0) m.b700 = Var(within=Binary,bounds=(0,1),initialize=0) m.b701 = Var(within=Binary,bounds=(0,1),initialize=0) m.b702", "<= 0) m.c186 = Constraint(expr= m.x249 - 4.45628648004517*m.b609 <= 0)", "0.999*m.b640) - 0.65*log(1 + m.x370/(0.001 + 0.999*m.b640)))*(0.001 + 0.999* m.b640)", "Constraint(expr= m.x373 == 0) m.c470 = Constraint(expr= m.x383 == 0)", "+ 30*m.b610 <= 30) m.c197 = Constraint(expr= m.x278 - 15*m.b608", "= Constraint(expr= m.x96 - m.x387 - m.x390 == 0) m.c514", "- m.b761 <= 0) m.c1359 = Constraint(expr= - m.b671 +", "Var(within=Reals,bounds=(0,None),initialize=0) m.x473 = Var(within=Reals,bounds=(0,None),initialize=0) m.x474 = Var(within=Reals,bounds=(0,None),initialize=0) m.x475 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c89 = Constraint(expr= m.x8 - m.x218 - m.x221 == 0)", "m.x363 = Var(within=Reals,bounds=(0,None),initialize=0) m.x364 = Var(within=Reals,bounds=(0,None),initialize=0) m.x365 = Var(within=Reals,bounds=(0,None),initialize=0) m.x366", "= Constraint(expr= m.x193 - m.x562 - m.x565 == 0) m.c908", "m.b660 + m.b661 - m.b751 <= 0) m.c1349 = Constraint(expr=", "Constraint(expr= 
m.x425 == 0) m.c414 = Constraint(expr= m.x426 == 0)", "1.11894339953103*m.b651 <= 0) m.c607 = Constraint(expr= m.x460 - 1.11894339953103*m.b652 <=", "m.x511 == 0) m.c755 = Constraint(expr= m.x515 == 0) m.c756", "m.b730 + m.x820 == 0) m.c968 = Constraint(expr= 2*m.b731 +", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b624 = Var(within=Binary,bounds=(0,1),initialize=0) m.b625 = Var(within=Binary,bounds=(0,1),initialize=0) m.b626 =", "- 3*m.b765 - 9*m.b766 - 4*m.b767 - 8*m.b768 - 6*m.b769", "= Constraint(expr= m.x583 + 0.690184503917672*m.b679 <= 0.690184503917672) m.c869 = Constraint(expr=", "+ m.b685 - m.b775 <= 0) m.c1373 = Constraint(expr= m.b596", "m.b597 = Var(within=Binary,bounds=(0,1),initialize=0) m.b598 = Var(within=Binary,bounds=(0,1),initialize=0) m.b599 = Var(within=Binary,bounds=(0,1),initialize=0) m.b600", "m.b694 <= 1) m.c1121 = Constraint(expr= m.b695 + m.b696 <=", "- 0.9*m.x555 + m.x585 == 0) m.c871 = Constraint(expr= -", "1) m.c1166 = Constraint(expr= m.b717 + m.b718 <= 1) m.c1167", "0) m.c695 = Constraint(expr= m.x497 == 0) m.c696 = Constraint(expr=", "+ m.x330/(0.001 + 0.999*m.b633)))*(0.001 + 0.999*m.b633) <= 0) m.c409 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x79 = Var(within=Reals,bounds=(0,None),initialize=0) m.x80 = Var(within=Reals,bounds=(0,None),initialize=0) m.x81 =", "0) m.c771 = Constraint(expr= m.x507 - 0.940066550763924*m.b669 <= 0) m.c772", "- 2.54515263975353*m.b617 <= 0) m.c273 = Constraint(expr= m.x270 - 2.54515263975353*m.b618", "- m.b669 + m.b670 - m.b760 <= 0) m.c1358 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x577 = Var(within=Reals,bounds=(0,None),initialize=0) m.x578 = Var(within=Reals,bounds=(0,None),initialize=0) m.x579 =", "0) m.c906 = Constraint(expr= m.x192 - m.x561 - m.x564 ==", "<= 0) m.c1100 = Constraint(expr= m.b683 - m.b684 <= 0)", "- m.b610 + m.b622 + m.b625 + m.b628 >= 0)", "- m.x401 == 0) m.c567 = Constraint(expr= m.x102 - m.x399", "= Var(within=Reals,bounds=(0,None),initialize=0) 
m.x175 = Var(within=Reals,bounds=(0,None),initialize=0) m.x176 = Var(within=Reals,bounds=(0,None),initialize=0) m.x177 =", "+ m.x286/(0.001 + 0.999*m.b613)))*(0.001 + 0.999* m.b613) <= 0) m.c206", "m.x27 = Var(within=Reals,bounds=(0,None),initialize=0) m.x28 = Var(within=Reals,bounds=(0,None),initialize=0) m.x29 = Var(within=Reals,bounds=(0,None),initialize=0) m.x30", "m.x806 = Var(within=Reals,bounds=(None,None),initialize=0) m.x807 = Var(within=Reals,bounds=(None,None),initialize=0) m.x808 = Var(within=Reals,bounds=(None,None),initialize=0) m.x809", "m.x282 == 0) m.c184 = Constraint(expr= m.x46 - m.x280 -", "= Constraint(expr= m.b671 - m.b673 <= 0) m.c1090 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x169 = Var(within=Reals,bounds=(0,None),initialize=0) m.x170 = Var(within=Reals,bounds=(0,30),initialize=0) m.x171 =", "m.x559 + 15*m.b682 <= 15) m.c890 = Constraint(expr= m.x584 -", "0.85*log(1 + m.x412/(0.001 + 0.999*m.b655)))*(0.001 + 0.999* m.b655) <= 0)", "<= 0) m.c574 = Constraint(expr= m.x400 - 3.04984759446376*m.b649 <= 0)", "0.994083415506506*m.b677 <= 0) m.c858 = Constraint(expr= m.x528 - 0.994083415506506*m.b678 <=", "+ 40*m.x156 + 40*m.x157 - m.x170 - m.x171 - m.x172", "0) m.c217 = Constraint(expr= m.x64 - m.x316 - m.x322 ==", "m.c114 = Constraint(expr= m.x264 == 0) m.c115 = Constraint(expr= m.x265", "m.x154 == 0) m.c41 = Constraint(expr= m.x152 - m.x155 -", "m.x87 - m.x375 - m.x378 == 0) m.c385 = Constraint(expr=", "Constraint(expr= m.x173 - m.x518 - m.x521 == 0) m.c705 =", "0) m.c984 = Constraint(expr= 5*m.b747 + m.x837 == 0) m.c985", "+ m.b629 >= 0) m.c1380 = Constraint(expr= - m.b612 +", "Constraint(expr= m.b654 - m.b657 >= 0) m.c1468 = Constraint(expr= m.b655", "m.x161 - m.x494 - m.x497 == 0) m.c702 = Constraint(expr=", "m.x858 == 0) m.c1006 = Constraint(expr= 6*m.b769 + m.x859 ==", "== 0) m.c531 = Constraint(expr= - m.x393 + m.x447 ==", "m.b752 + m.b753 <= 1) m.c1236 = Constraint(expr= m.b752 +", "Constraint(expr= m.x276 
== 0) m.c262 = Constraint(expr= m.x277 == 0)", "m.x378 == 0) m.c385 = Constraint(expr= m.x88 - m.x376 -", "m.b724 <= 1) m.c1181 = Constraint(expr= m.b725 + m.b726 <=", "m.x585 = Var(within=Reals,bounds=(0,None),initialize=0) m.x586 = Var(within=Reals,bounds=(0,None),initialize=0) m.x587 = Var(within=Reals,bounds=(0,None),initialize=0) m.x588", "m.b700 <= 0) m.c1298 = Constraint(expr= m.b611 - m.b701 <=", "4*m.b687 + m.x777 == 0) m.c925 = Constraint(expr= 6*m.b688 +", "<= 4.45628648004517) m.c106 = Constraint(expr= m.x235 + 4.45628648004517*m.b601 <= 4.45628648004517)", "<= 1) m.c1254 = Constraint(expr= m.b761 + m.b763 <= 1)", "= Constraint(expr= m.x69 - m.x339 - m.x342 == 0) m.c271", "+ m.x520 == 0) m.c695 = Constraint(expr= m.x497 == 0)", "m.x339 - m.x342 == 0) m.c271 = Constraint(expr= m.x70 -", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b689 = Var(within=Binary,bounds=(0,1),initialize=0) m.b690 = Var(within=Binary,bounds=(0,1),initialize=0) m.b691 =", "0) m.c464 = Constraint(expr=(m.x434/(0.001 + 0.999*m.b638) - 0.65*log(1 + m.x380/(0.001", "m.b649) <= 0) m.c560 = Constraint(expr= m.x401 == 0) m.c561", "m.b673 <= 0) m.c1090 = Constraint(expr= m.b672 - m.b673 <=", "+ m.b667 - m.b757 <= 0) m.c1355 = Constraint(expr= m.b668", "Constraint(expr= m.x264 + 3.34221486003388*m.b603 <= 3.34221486003388) m.c133 = Constraint(expr= m.x265", "1) m.c1216 = Constraint(expr= m.b741 + m.b742 <= 1) m.c1217", "= Constraint(expr= m.x259 == 0) m.c173 = Constraint(expr= m.x281 ==", "m.c565 = Constraint(expr= m.x457 == 0) m.c566 = Constraint(expr= m.x101", "0) m.c378 = Constraint(expr= m.x420 == 0) m.c379 = Constraint(expr=", "0.999*m.b675)))*(0.001 + 0.999* m.b675) <= 0) m.c817 = Constraint(expr=(m.x574/(0.001 +", "<= 15) m.c889 = Constraint(expr= m.x559 + 15*m.b682 <= 15)", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b759 = Var(within=Binary,bounds=(0,1),initialize=0) m.b760 = Var(within=Binary,bounds=(0,1),initialize=0) m.b761 =", "+ m.b672 + m.b675 >= 0) m.c1462 = 
Constraint(expr= -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x467 = Var(within=Reals,bounds=(0,None),initialize=0) m.x468 = Var(within=Reals,bounds=(0,None),initialize=0) m.x469 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x264 = Var(within=Reals,bounds=(0,None),initialize=0) m.x265 = Var(within=Reals,bounds=(0,None),initialize=0) m.x266 =", "0) m.c46 = Constraint(expr= m.x160 - m.x163 - m.x166 -", "- m.b640 <= 0) m.c1057 = Constraint(expr= m.b639 - m.b640", "Var(within=Binary,bounds=(0,1),initialize=0) m.b622 = Var(within=Binary,bounds=(0,1),initialize=0) m.b623 = Var(within=Binary,bounds=(0,1),initialize=0) m.b624 = Var(within=Binary,bounds=(0,1),initialize=0)", "- m.b624 + m.b625 - m.b715 <= 0) m.c1313 =", "m.x105 = Var(within=Reals,bounds=(0,None),initialize=0) m.x106 = Var(within=Reals,bounds=(0,None),initialize=0) m.x107 = Var(within=Reals,bounds=(0,None),initialize=0) m.x108", "m.x467 = Var(within=Reals,bounds=(0,None),initialize=0) m.x468 = Var(within=Reals,bounds=(0,None),initialize=0) m.x469 = Var(within=Reals,bounds=(0,None),initialize=0) m.x470", "<= 0.705049913072943) m.c809 = Constraint(expr= m.x566 - 0.666992981045719*m.b671 <= 0)", "m.c786 = Constraint(expr= m.x540 + 15*m.b669 <= 15) m.c787 =", "Constraint(expr= m.x246 + 4.45628648004517*m.b606 <= 4.45628648004517) m.c154 = Constraint(expr= m.x247", "= Constraint(expr= m.x350 - 9*m.b623 <= 0) m.c333 = Constraint(expr=", "m.x568 - m.x571 == 0) m.c803 = Constraint(expr= m.x542 -", "Constraint(expr= m.x523 + 0.705049913072943*m.b664 <= 0.705049913072943) m.c719 = Constraint(expr=(m.x524/(0.001 +", "+ m.b715 <= 1) m.c1162 = Constraint(expr= m.b714 + m.b715", "- m.b721 <= 0) m.c1319 = Constraint(expr= m.b632 - m.b722", "0) m.c91 = Constraint(expr= m.x10 - m.x220 - m.x223 ==", "- 30*m.b668 <= 0) m.c777 = Constraint(expr= m.x513 - 30*m.b669", "Constraint(expr= m.b731 + m.b733 <= 1) m.c1198 = Constraint(expr= m.b732", "+ m.b622 - m.b712 <= 0) m.c1310 = Constraint(expr= m.b623", "= 
Var(within=Reals,bounds=(None,None),initialize=0) m.x791 = Var(within=Reals,bounds=(None,None),initialize=0) m.x792 = Var(within=Reals,bounds=(None,None),initialize=0) m.x793 =", "0) m.c793 = Constraint(expr= m.x547 == 0) m.c794 = Constraint(expr=", "= Constraint(expr= m.b707 + m.b709 <= 1) m.c1147 = Constraint(expr=", "m.x453 = Var(within=Reals,bounds=(0,None),initialize=0) m.x454 = Var(within=Reals,bounds=(0,None),initialize=0) m.x455 = Var(within=Reals,bounds=(0,None),initialize=0) m.x456", "0) m.c170 = Constraint(expr= m.x257 == 0) m.c171 = Constraint(expr=", "m.c293 = Constraint(expr= m.x53 - m.x296 - m.x299 == 0)", "m.c704 = Constraint(expr= m.x173 - m.x518 - m.x521 == 0)", "<= 0) m.c772 = Constraint(expr= m.x508 - 0.940066550763924*m.b670 <= 0)", "0) m.c1473 = Constraint(expr= m.b663 - m.b672 >= 0) m.c1474", "0) m.c236 = Constraint(expr= m.x332 == 0) m.c237 = Constraint(expr=", "m.b743 + m.b745 <= 1) m.c1219 = Constraint(expr= m.b743 +", "Constraint(expr= m.x13 - m.x226 - m.x229 == 0) m.c68 =", "m.c390 = Constraint(expr= m.x318 - 1.83548069293539*m.b630 <= 0) m.c391 =", "0) m.c422 = Constraint(expr= m.x329 - 1.32154609891348*m.b632 <= 0) m.c423", "- m.x510 == 0) m.c763 = Constraint(expr= m.x169 - m.x508", "== 0) m.c8 = Constraint(expr= m.x17 - m.x20 - m.x23", "= Constraint(expr= m.x552 + 0.705049913072943*m.b675 <= 0.705049913072943) m.c835 = Constraint(expr=", "0) m.c1016 = Constraint(expr= m.b599 - m.b600 <= 0) m.c1017", "Var(within=Reals,bounds=(None,None),initialize=0) m.x842 = Var(within=Reals,bounds=(None,None),initialize=0) m.x843 = Var(within=Reals,bounds=(None,None),initialize=0) m.x844 = Var(within=Reals,bounds=(None,None),initialize=0)", "m.b672 = Var(within=Binary,bounds=(0,1),initialize=0) m.b673 = Var(within=Binary,bounds=(0,1),initialize=0) m.b674 = Var(within=Binary,bounds=(0,1),initialize=0) m.b675", "m.x60 - m.x309 - m.x312 == 0) m.c349 = Constraint(expr=", "+ 4.45628648004517*m.b599 <= 4.45628648004517) m.c105 = Constraint(expr= m.x234 + 
4.45628648004517*m.b600", "= Constraint(expr=(m.x482/(0.001 + 0.999*m.b656) - log(1 + m.x470/(0.001 + 0.999*m.b656)))*(0.001", "9*m.b645 <= 9) m.c550 = Constraint(expr= m.x397 + 9*m.b646 <=", "0) m.c864 = Constraint(expr= m.x579 - 0.690184503917672*m.b678 <= 0) m.c865", "= Constraint(expr= m.b609 - m.b610 <= 0) m.c1028 = Constraint(expr=", "m.x560 - 15*m.b683 <= 0) m.c912 = Constraint(expr= m.x561 -", "Constraint(expr= m.b689 + m.b690 <= 1) m.c1110 = Constraint(expr= m.b689", "== 0) m.c33 = Constraint(expr= m.x135 - m.x138 == 0)", "Constraint(expr= m.x249 - 4.45628648004517*m.b609 <= 0) m.c187 = Constraint(expr= m.x250", "Var(within=Binary,bounds=(0,1),initialize=0) m.b657 = Var(within=Binary,bounds=(0,1),initialize=0) m.b658 = Var(within=Binary,bounds=(0,1),initialize=0) m.b659 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.b642 + m.b645 >= 0) m.c1399 = Constraint(expr= - m.b625", "m.c74 = Constraint(expr= m.x224 - 3.71357206670431*m.b596 <= 0) m.c75 =", "m.c1298 = Constraint(expr= m.b611 - m.b701 <= 0) m.c1299 =", "Var(within=Reals,bounds=(None,None),initialize=0) m.x826 = Var(within=Reals,bounds=(None,None),initialize=0) m.x827 = Var(within=Reals,bounds=(None,None),initialize=0) m.x828 = Var(within=Reals,bounds=(None,None),initialize=0)", "Constraint(expr= m.x511 + 0.940066550763924*m.b670 <= 0.940066550763924) m.c776 = Constraint(expr= m.x512", "Constraint(expr= - m.b616 + m.b634 >= 0) m.c1385 = Constraint(expr=", "m.x339 = Var(within=Reals,bounds=(0,None),initialize=0) m.x340 = Var(within=Reals,bounds=(0,None),initialize=0) m.x341 = Var(within=Reals,bounds=(0,None),initialize=0) m.x342", "== 0) m.c901 = Constraint(expr= m.x565 == 0) m.c902 =", "- m.x229 == 0) m.c68 = Constraint(expr= m.x212 - 40*m.b596", "<= 9) m.c522 = Constraint(expr= m.x390 + 9*m.b642 <= 9)", "m.b736 <= 1) m.c1204 = Constraint(expr= m.b735 + m.b736 <=", "= Constraint(expr= m.x134 - m.x464 - m.x467 == 0) m.c624", "Constraint(expr= m.x241 + 4.45628648004517*m.b604 <= 4.45628648004517) 
m.c128 = Constraint(expr= m.x260", "+ m.x791 == 0) m.c939 = Constraint(expr= 7*m.b702 + m.x792", "m.x53 = Var(within=Reals,bounds=(0,None),initialize=0) m.x54 = Var(within=Reals,bounds=(0,None),initialize=0) m.x55 = Var(within=Reals,bounds=(0,None),initialize=0) m.x56", "= Constraint(expr= m.b744 + m.b745 <= 1) m.c1223 = Constraint(expr=", "- m.x194 == 0) m.c51 = Constraint(expr= m.x180 - m.x189", "Var(within=Reals,bounds=(None,None),initialize=0) m.x788 = Var(within=Reals,bounds=(None,None),initialize=0) m.x789 = Var(within=Reals,bounds=(None,None),initialize=0) m.x790 = Var(within=Reals,bounds=(None,None),initialize=0)", "m.b715 <= 1) m.c1162 = Constraint(expr= m.b714 + m.b715 <=", ">= 0) m.c1486 = Constraint(expr= m.b670 - m.b685 >= 0)", "1) m.c1123 = Constraint(expr= m.b695 + m.b696 <= 1) m.c1124", "m.c955 = Constraint(expr= 3*m.b718 + m.x808 == 0) m.c956 =", "m.x357 - m.x360 == 0) m.c352 = Constraint(expr= m.x79 -", "m.c501 = Constraint(expr= m.x438 + 2.30162356062425*m.b639 <= 2.30162356062425) m.c502 =", "== 0) m.c947 = Constraint(expr= 2*m.b710 + m.x800 == 0)", "- m.x537 - m.x540 == 0) m.c769 = Constraint(expr= m.x181", ">= 0) m.c1395 = Constraint(expr= - m.b621 + m.b639 >=", "4.45628648004517*m.b601 <= 0) m.c104 = Constraint(expr= m.x233 + 4.45628648004517*m.b599 <=", "= Constraint(expr=(m.x434/(0.001 + 0.999*m.b638) - 0.65*log(1 + m.x368/(0.001 + 0.999*m.b638)))*(0.001", "<= 1.04900943706034) m.c582 = Constraint(expr= m.x456 + 1.04900943706034*m.b648 <= 1.04900943706034)", "- m.x488 - m.x491 == 0) m.c678 = Constraint(expr= m.x150", "Constraint(expr= - 0.6*m.x302 + m.x350 == 0) m.c312 = Constraint(expr=", "m.x531 + 0.994083415506506*m.b666 <= 0.994083415506506) m.c745 = Constraint(expr= m.x532 +", ">= 0) m.c1377 = Constraint(expr= - m.b603 + m.b612 +", "3.04984759446376) m.c630 = Constraint(expr= m.x414 + 3.04984759446376*m.b654 <= 3.04984759446376) m.c631", "0) m.c356 = Constraint(expr= m.x311 + 15*m.b626 <= 15) m.c357", "0) m.c452 = Constraint(expr= 
m.x365 + 1.26558121681553*m.b635 <= 1.26558121681553) m.c453", "Var(within=Reals,bounds=(0,None),initialize=0) m.x282 = Var(within=Reals,bounds=(0,None),initialize=0) m.x283 = Var(within=Reals,bounds=(0,None),initialize=0) m.x284 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c187 = Constraint(expr= m.x250 - 4.45628648004517*m.b610 <= 0) m.c188 =", "m.c749 = Constraint(expr= - 0.5*m.x512 + m.x536 == 0) m.c750", "m.c794 = Constraint(expr= m.x569 == 0) m.c795 = Constraint(expr= m.x570", "= Constraint(expr= m.x393 - 9*m.b645 <= 0) m.c547 = Constraint(expr=", "m.x288 == 0) m.c214 = Constraint(expr= m.x49 - m.x286 -", "m.b654 - m.b657 >= 0) m.c1468 = Constraint(expr= m.b655 -", "m.c478 = Constraint(expr= m.x85 - m.x370 - m.x373 == 0)", "m.x505 + 0.940066550763924*m.b667 <= 0.940066550763924) m.c740 = Constraint(expr= m.x524 -", "m.x27 - m.x30 - m.x33 == 0) m.c13 = Constraint(expr=", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x853 = Var(within=Reals,bounds=(None,None),initialize=0) m.x854 = Var(within=Reals,bounds=(None,None),initialize=0) m.x855 =", "m.c277 = Constraint(expr= m.x277 + 2.54515263975353*m.b619 <= 2.54515263975353) m.c278 =", "m.c542 = Constraint(expr= m.x125 - m.x446 - m.x449 == 0)", "+ 240*m.x208 + 350*m.x209 + 250*m.x210 + 300*m.x211 - 5*m.b686", "Var(within=Reals,bounds=(0,None),initialize=0) m.x275 = Var(within=Reals,bounds=(0,None),initialize=0) m.x276 = Var(within=Reals,bounds=(0,None),initialize=0) m.x277 = Var(within=Reals,bounds=(0,None),initialize=0)", "- 9*m.b641 <= 0) m.c525 = Constraint(expr= m.x441 - 9*m.b642", "1) m.c1375 = Constraint(expr= m.b598 + m.b601 == 1) m.c1376", "m.x403 = Var(within=Reals,bounds=(0,None),initialize=0) m.x404 = Var(within=Reals,bounds=(0,None),initialize=0) m.x405 = Var(within=Reals,bounds=(0,None),initialize=0) m.x406", "m.c491 = Constraint(expr= m.x380 - 33.5*m.b638 <= 0) m.c492 =", "m.b635 - m.b636 <= 0) m.c1053 = Constraint(expr= m.b635 -", "0.999* m.b666) <= 0) m.c721 = 
Constraint(expr=(m.x526/(0.001 + 0.999*m.b667) -", "<= 1) m.c1163 = Constraint(expr= m.b716 + m.b717 <= 1)", "+ 0.999* m.b648) <= 0) m.c559 = Constraint(expr=(m.x454/(0.001 + 0.999*m.b649)", "m.x223 = Var(within=Reals,bounds=(0,None),initialize=0) m.x224 = Var(within=Reals,bounds=(0,None),initialize=0) m.x225 = Var(within=Reals,bounds=(0,None),initialize=0) m.x226", "= Constraint(expr= m.x158 - m.x161 - m.x164 - m.x167 ==", "m.c1405 = Constraint(expr= m.b598 + m.b601 - m.b604 >= 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x126 = Var(within=Reals,bounds=(0,None),initialize=0) m.x127 = Var(within=Reals,bounds=(0,None),initialize=0) m.x128 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x140 - m.x470 - m.x473 == 0) m.c648 =", "6*m.b769 + m.x859 == 0) m.c1007 = Constraint(expr= 2*m.b770 +", "m.x267 - m.x273 == 0) m.c148 = Constraint(expr= m.x43 -", "m.c1368 = Constraint(expr= - m.b680 + m.b681 - m.b771 <=", "1) m.c1227 = Constraint(expr= m.b746 + m.b748 <= 1) m.c1228", "m.b635 >= 0) m.c1437 = Constraint(expr= m.b618 - m.b636 >=", "2.54515263975353*m.b605 <= 2.54515263975353) m.c159 = Constraint(expr= m.x273 + 2.54515263975353*m.b606 <=", "m.b687 = Var(within=Binary,bounds=(0,1),initialize=0) m.b688 = Var(within=Binary,bounds=(0,1),initialize=0) m.b689 = Var(within=Binary,bounds=(0,1),initialize=0) m.b690", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x429 = Var(within=Reals,bounds=(0,None),initialize=0) m.x430 = Var(within=Reals,bounds=(0,None),initialize=0) m.x431 =", "== 0) m.c177 = Constraint(expr= m.x33 - m.x249 - m.x252", "0.9*m.x555 + m.x585 == 0) m.c871 = Constraint(expr= - 0.9*m.x556", "0) m.c412 = Constraint(expr= m.x337 == 0) m.c413 = Constraint(expr=", "Constraint(expr= 10*m.b695 + m.x785 == 0) m.c933 = Constraint(expr= 9*m.b696", "Var(within=Binary,bounds=(0,1),initialize=0) m.b613 = Var(within=Binary,bounds=(0,1),initialize=0) m.b614 = Var(within=Binary,bounds=(0,1),initialize=0) m.b615 = 
Var(within=Binary,bounds=(0,1),initialize=0)", "+ m.b688 <= 1) m.c1109 = Constraint(expr= m.b689 + m.b690", "= Constraint(expr= m.x190 - m.x556 - m.x559 == 0) m.c881", "= Constraint(expr= m.b630 - m.b631 <= 0) m.c1049 = Constraint(expr=", "- log(1 + m.x213/(0.001 + 0.999*m.b597)))*(0.001 + 0.999*m.b597) <= 0)", "m.b653 = Var(within=Binary,bounds=(0,1),initialize=0) m.b654 = Var(within=Binary,bounds=(0,1),initialize=0) m.b655 = Var(within=Binary,bounds=(0,1),initialize=0) m.b656", "0) m.c85 = Constraint(expr= m.x223 == 0) m.c86 = Constraint(expr=", "Constraint(expr= m.x62 - m.x317 - m.x323 == 0) m.c381 =", "0) m.c594 = Constraint(expr= m.x105 - m.x405 - m.x408 ==", "m.x358 = Var(within=Reals,bounds=(0,None),initialize=0) m.x359 = Var(within=Reals,bounds=(0,None),initialize=0) m.x360 = Var(within=Reals,bounds=(0,None),initialize=0) m.x361", "0) m.c997 = Constraint(expr= 3*m.b760 + m.x850 == 0) m.c998", "m.c1039 = Constraint(expr= m.b621 - m.b622 <= 0) m.c1040 =", "<= 1) m.c1125 = Constraint(expr= m.b695 + m.b697 <= 1)", "m.x250 = Var(within=Reals,bounds=(0,None),initialize=0) m.x251 = Var(within=Reals,bounds=(0,None),initialize=0) m.x252 = Var(within=Reals,bounds=(0,None),initialize=0) m.x253", "m.x511 + 0.940066550763924*m.b670 <= 0.940066550763924) m.c776 = Constraint(expr= m.x512 -", "<= 0) m.c430 = Constraint(expr= m.x424 - 0.842233385663186*m.b634 <= 0)", ">= 0) m.c1485 = Constraint(expr= m.b669 - m.b684 >= 0)", "= Constraint(expr= m.x363 - 1.26558121681553*m.b636 <= 0) m.c451 = Constraint(expr=", "m.x318 = Var(within=Reals,bounds=(0,None),initialize=0) m.x319 = Var(within=Reals,bounds=(0,None),initialize=0) m.x320 = Var(within=Reals,bounds=(0,None),initialize=0) m.x321", "= Constraint(expr= m.x212 - 40*m.b596 <= 0) m.c69 = Constraint(expr=", "= Constraint(expr= m.x487 == 0) m.c647 = Constraint(expr= m.x140 -", "0.999*m.b637)))*(0.001 + 0.999* m.b637) <= 0) m.c437 = Constraint(expr= m.x365", "Constraint(expr= m.x515 + 30*m.b668 <= 30) m.c780 = Constraint(expr= 
m.x516", "3.34221486003388*m.b602 <= 0) m.c129 = Constraint(expr= m.x261 - 3.34221486003388*m.b603 <=", "0) m.c710 = Constraint(expr= m.x497 + 0.940066550763924*m.b662 <= 0.940066550763924) m.c711", "m.x329 = Var(within=Reals,bounds=(0,None),initialize=0) m.x330 = Var(within=Reals,bounds=(0,None),initialize=0) m.x331 = Var(within=Reals,bounds=(0,None),initialize=0) m.x332", "m.x591 - m.x594 == 0) m.c910 = Constraint(expr= m.x211 -", "m.x274 == 0) m.c149 = Constraint(expr= m.x242 - 4.45628648004517*m.b605 <=", "m.x225 = Var(within=Reals,bounds=(0,None),initialize=0) m.x226 = Var(within=Reals,bounds=(0,None),initialize=0) m.x227 = Var(within=Reals,bounds=(0,None),initialize=0) m.x228", "m.x309 - 15*m.b627 <= 0) m.c355 = Constraint(expr= m.x310 -", "<= 1.18887736200171) m.c636 = Constraint(expr= m.x468 + 1.18887736200171*m.b654 <= 1.18887736200171)", "0.7*log(1 + m.x363/(0.001 + 0.999*m.b636)))*(0.001 + 0.999* m.b636) <= 0)", "- m.b663 + m.b672 + m.b675 >= 0) m.c1462 =", "m.c6 = Constraint(expr= - m.x12 - m.x15 + m.x18 ==", "m.c71 = Constraint(expr= m.x215 + 40*m.b596 <= 40) m.c72 =", "= Constraint(expr= m.b659 - m.b661 <= 0) m.c1078 = Constraint(expr=", "= Constraint(expr= m.x114 - m.x423 - m.x426 == 0) m.c421", "15*m.b668 <= 15) m.c786 = Constraint(expr= m.x540 + 15*m.b669 <=", "+ m.b637 - m.b727 <= 0) m.c1325 = Constraint(expr= m.b638", "= Constraint(expr= - 0.6*m.x303 + m.x351 == 0) m.c313 =", "0) m.c446 = Constraint(expr= m.x116 - m.x428 - m.x431 ==", "= Var(within=Reals,bounds=(0,30),initialize=0) m.x173 = Var(within=Reals,bounds=(0,None),initialize=0) m.x174 = Var(within=Reals,bounds=(0,None),initialize=0) m.x175 =", "= Constraint(expr= m.b701 + m.b703 <= 1) m.c1135 = Constraint(expr=", "m.c1082 = Constraint(expr= m.b665 - m.b666 <= 0) m.c1083 =", "m.c1193 = Constraint(expr= m.b731 + m.b732 <= 1) m.c1194 =", "0) m.c1029 = Constraint(expr= m.b611 - m.b613 <= 0) m.c1030", "0) m.c1427 = Constraint(expr= m.b608 - m.b626 >= 0) m.c1428", "m.c1160 = Constraint(expr= 
m.b714 + m.b715 <= 1) m.c1161 =", "+ 0.999*m.b650) - 0.8*log(1 + m.x404/(0.001 + 0.999*m.b650)))*(0.001 + 0.999*", "0) m.c1403 = Constraint(expr= m.b596 + m.b599 - m.b602 >=", "m.c811 = Constraint(expr= m.x568 - 0.666992981045719*m.b673 <= 0) m.c812 =", "m.x16 - m.x232 - m.x235 == 0) m.c95 = Constraint(expr=", "0) m.c1005 = Constraint(expr= 8*m.b768 + m.x858 == 0) m.c1006", "m.c1349 = Constraint(expr= m.b662 - m.b752 <= 0) m.c1350 =", "m.b774 <= 0) m.c1372 = Constraint(expr= - m.b683 - m.b684", "= Constraint(expr= m.x263 == 0) m.c114 = Constraint(expr= m.x264 ==", "m.x342 == 0) m.c265 = Constraint(expr= m.x343 == 0) m.c266", "m.x272 == 0) m.c141 = Constraint(expr= m.x273 == 0) m.c142", ">= 0) m.c1482 = Constraint(expr= m.b669 - m.b681 >= 0)", "Constraint(expr= 8*m.b689 + m.x779 == 0) m.c927 = Constraint(expr= 7*m.b690", "= Constraint(expr= 8*m.b768 + m.x858 == 0) m.c1006 = Constraint(expr=", "Constraint(expr= m.b598 + m.b601 == 1) m.c1376 = Constraint(expr= -", "m.b726 + m.b727 <= 1) m.c1185 = Constraint(expr= m.b725 +", "Constraint(expr= - m.b615 + m.b633 >= 0) m.c1384 = Constraint(expr=", "Constraint(expr= m.b756 + m.b757 <= 1) m.c1245 = Constraint(expr= m.b755", "Constraint(expr= m.x81 - m.x363 - m.x366 == 0) m.c445 =", "= Constraint(expr= m.x241 == 0) m.c113 = Constraint(expr= m.x263 ==", "+ 4.45628648004517*m.b609 <= 4.45628648004517) m.c190 = Constraint(expr= m.x253 + 4.45628648004517*m.b610", "m.x400 - 3.04984759446376*m.b649 <= 0) m.c575 = Constraint(expr= m.x401 +", "<= 0) m.c1294 = Constraint(expr= - m.b605 - m.b606 +", "Constraint(expr= m.x333 + 1.32154609891348*m.b615 <= 1.32154609891348) m.c256 = Constraint(expr= m.x334", "0) m.c1020 = Constraint(expr= m.b602 - m.b604 <= 0) m.c1021", "Var(within=Reals,bounds=(0,None),initialize=0) m.x415 = Var(within=Reals,bounds=(0,None),initialize=0) m.x416 = Var(within=Reals,bounds=(0,None),initialize=0) m.x417 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x591 - 9*m.b684 <= 0) m.c919 = Constraint(expr= 
m.x592 -", "m.b655) <= 0) m.c614 = Constraint(expr= m.x413 == 0) m.c615", "= Constraint(expr= m.b596 + m.b599 - m.b605 >= 0) m.c1407", "m.b692 + m.b694 <= 1) m.c1117 = Constraint(expr= m.b692 +", "0) m.c1000 = Constraint(expr= 7*m.b763 + m.x853 == 0) m.c1001", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b648 = Var(within=Binary,bounds=(0,1),initialize=0) m.b649 = Var(within=Binary,bounds=(0,1),initialize=0) m.b650 =", "+ 35*m.x127 + 25*m.x128 + 50*m.x129 + 10*m.x130 + 15*m.x131", "3.04984759446376*m.b655 <= 0) m.c629 = Constraint(expr= m.x413 + 3.04984759446376*m.b653 <=", "15*m.b680 <= 15) m.c888 = Constraint(expr= m.x558 + 15*m.b681 <=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x134 = Var(within=Reals,bounds=(0,None),initialize=0) m.x135 = Var(within=Reals,bounds=(0,None),initialize=0) m.x136 =", "Constraint(expr= m.b662 - m.b671 >= 0) m.c1473 = Constraint(expr= m.b663", "3.04984759446376*m.b627 <= 3.04984759446376) m.c364 = Constraint(expr= m.x361 + 3.04984759446376*m.b628 <=", "+ m.b703 <= 1) m.c1138 = Constraint(expr= m.b702 + m.b703", "<= 0) m.c1074 = Constraint(expr= m.b656 - m.b658 <= 0)", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x819 = Var(within=Reals,bounds=(None,None),initialize=0) m.x820 = Var(within=Reals,bounds=(None,None),initialize=0) m.x821 =", "m.b628 <= 0) m.c1046 = Constraint(expr= m.b629 - m.b630 <=", "m.x242 - m.x245 == 0) m.c144 = Constraint(expr= m.x30 -", "- m.x168 == 0) m.c46 = Constraint(expr= m.x160 - m.x163", "m.c405 = Constraint(expr= m.x420 + 20*m.b630 <= 20) m.c406 =", "- m.x547 == 0) m.c800 = Constraint(expr= m.x197 - m.x566", "Constraint(expr=(m.x424/(0.001 + 0.999*m.b634) - log(1 + m.x331/(0.001 + 0.999*m.b634)))*(0.001 +", "Constraint(expr= m.x321 + 1.83548069293539*m.b612 <= 1.83548069293539) m.c229 = Constraint(expr= m.x322", "+ 0.705049913072943*m.b672 <= 0.705049913072943) m.c808 = Constraint(expr= m.x547 + 0.705049913072943*m.b673", "3.04984759446376*m.b627 <= 0) m.c361 = Constraint(expr= 
m.x358 - 3.04984759446376*m.b628 <=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x65 = Var(within=Reals,bounds=(0,None),initialize=0) m.x66 = Var(within=Reals,bounds=(0,None),initialize=0) m.x67 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c921 = Constraint(expr= m.x594 + 9*m.b684 <= 9) m.c922 =", "+ m.b636 + m.b639 >= 0) m.c1390 = Constraint(expr= -", "- log(1 + m.x269/(0.001 + 0.999*m.b617)))*(0.001 + 0.999*m.b617) <= 0)", "0) m.c627 = Constraint(expr= m.x411 - 3.04984759446376*m.b654 <= 0) m.c628", "m.c365 = Constraint(expr= - 0.9*m.x317 + m.x416 == 0) m.c366", "0) m.c918 = Constraint(expr= m.x591 - 9*m.b684 <= 0) m.c919", "Constraint(expr= 7*m.b719 + m.x809 == 0) m.c957 = Constraint(expr= 2*m.b720", "m.b612 = Var(within=Binary,bounds=(0,1),initialize=0) m.b613 = Var(within=Binary,bounds=(0,1),initialize=0) m.b614 = Var(within=Binary,bounds=(0,1),initialize=0) m.b615", "m.c368 = Constraint(expr= - m.x374 + m.x416 == 0) m.c369", "0) m.c1309 = Constraint(expr= - m.b620 - m.b621 + m.b622", "<= 13.5) m.c309 = Constraint(expr= m.x348 + 13.5*m.b621 <= 13.5)", "15) m.c359 = Constraint(expr= m.x356 - 3.04984759446376*m.b626 <= 0) m.c360", "Constraint(expr= m.x260 - 3.34221486003388*m.b602 <= 0) m.c129 = Constraint(expr= m.x261", "m.b638 >= 0) m.c1440 = Constraint(expr= m.b618 - m.b639 >=", "<= 0.842233385663186) m.c434 = Constraint(expr=(m.x428/(0.001 + 0.999*m.b635) - 0.7*log(1 +", "Constraint(expr= - 0.6*m.x561 + m.x591 == 0) m.c898 = Constraint(expr=", "- 3*m.b756 - 8*m.b757 - 10*m.b758 - 6*m.b759 - 3*m.b760", "Constraint(expr= m.x569 == 0) m.c795 = Constraint(expr= m.x570 == 0)", "= Constraint(expr= m.x463 + 1.11894339953103*m.b652 <= 1.11894339953103) m.c611 = Constraint(expr=(m.x464/(0.001", "<= 0) m.c608 = Constraint(expr= m.x461 + 1.11894339953103*m.b650 <= 1.11894339953103)", "m.x584 == 0) m.c870 = Constraint(expr= - 0.9*m.x555 + m.x585", "+ 0.999*m.b648) - 0.75*log(1 + m.x399/(0.001 + 0.999*m.b648)))*(0.001 + 0.999*", "- m.b664 <= 0) m.c1081 = 
Constraint(expr= m.b663 - m.b664", "- 0.994083415506506*m.b667 <= 0) m.c743 = Constraint(expr= m.x530 + 0.994083415506506*m.b665", "m.c1409 = Constraint(expr= m.b596 + m.b599 - m.b608 >= 0)", "Constraint(expr= m.x581 == 0) m.c849 = Constraint(expr= m.x582 == 0)", "== 0) m.c314 = Constraint(expr= m.x305 == 0) m.c315 =", "= Constraint(expr= - 0.6*m.x304 + m.x352 == 0) m.c314 =", "= Constraint(expr= m.x560 - 15*m.b683 <= 0) m.c912 = Constraint(expr=", "- m.x589 == 0) m.c884 = Constraint(expr= m.x554 - 15*m.b680", "+ m.x780 == 0) m.c928 = Constraint(expr= 6*m.b691 + m.x781", "- 4.45628648004517*m.b608 <= 0) m.c186 = Constraint(expr= m.x249 - 4.45628648004517*m.b609", "+ m.b612 - m.b702 <= 0) m.c1300 = Constraint(expr= -", "+ m.b642 - m.b732 <= 0) m.c1330 = Constraint(expr= -", "m.x90 + m.x93 == 0) m.c25 = Constraint(expr= - m.x73", "Var(within=Reals,bounds=(0,None),initialize=0) m.x164 = Var(within=Reals,bounds=(0,None),initialize=0) m.x165 = Var(within=Reals,bounds=(0,None),initialize=0) m.x166 = Var(within=Reals,bounds=(0,None),initialize=0)", "+ 0.999*m.b628) - 1.1*log(1 + m.x310/(0.001 + 0.999*m.b628)))*(0.001 + 0.999*", "m.x207 = Var(within=Reals,bounds=(0,None),initialize=0) m.x208 = Var(within=Reals,bounds=(0,None),initialize=0) m.x209 = Var(within=Reals,bounds=(0,None),initialize=0) m.x210", "m.x572 - 0.480234946352917*m.b674 <= 0) m.c837 = Constraint(expr= m.x573 -", "0) m.c370 = Constraint(expr= - m.x376 + m.x418 == 0)", "= Constraint(expr= m.x253 + 4.45628648004517*m.b610 <= 4.45628648004517) m.c191 = Constraint(expr=", "Constraint(expr= m.x325 == 0) m.c374 = Constraint(expr= m.x377 == 0)", "m.x539 + 15*m.b668 <= 15) m.c786 = Constraint(expr= m.x540 +", "+ m.b738 <= 1) m.c1208 = Constraint(expr= m.b738 + m.b739", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x230 = Var(within=Reals,bounds=(0,None),initialize=0) m.x231 = Var(within=Reals,bounds=(0,None),initialize=0) m.x232 =", "0) m.c707 = Constraint(expr= m.x494 - 0.940066550763924*m.b662 <= 0) 
m.c708", "= Constraint(expr= m.b704 + m.b706 <= 1) m.c1144 = Constraint(expr=", "<= 3.04984759446376) m.c363 = Constraint(expr= m.x360 + 3.04984759446376*m.b627 <= 3.04984759446376)", "Constraint(expr= m.b617 - m.b619 <= 0) m.c1036 = Constraint(expr= m.b618", "m.x414 = Var(within=Reals,bounds=(0,None),initialize=0) m.x415 = Var(within=Reals,bounds=(0,None),initialize=0) m.x416 = Var(within=Reals,bounds=(0,None),initialize=0) m.x417", "Constraint(expr= m.b692 + m.b694 <= 1) m.c1120 = Constraint(expr= m.b693", "== 0) m.c991 = Constraint(expr= 4*m.b754 + m.x844 == 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x307 = Var(within=Reals,bounds=(0,None),initialize=0) m.x308 = Var(within=Reals,bounds=(0,None),initialize=0) m.x309 =", "m.c762 = Constraint(expr= m.x168 - m.x507 - m.x510 == 0)", "m.x176 - m.x527 - m.x533 == 0) m.c852 = Constraint(expr=", "<= 1) m.c1136 = Constraint(expr= m.b702 + m.b703 <= 1)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x375 = Var(within=Reals,bounds=(0,None),initialize=0) m.x376 = Var(within=Reals,bounds=(0,None),initialize=0) m.x377 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c430 = Constraint(expr= m.x424 - 0.842233385663186*m.b634 <= 0) m.c431", "0.75*log(1 + m.x399/(0.001 + 0.999*m.b648)))*(0.001 + 0.999* m.b648) <= 0)", "m.b755 + m.b757 <= 1) m.c1243 = Constraint(expr= m.b755 +", "Constraint(expr= m.b764 + m.b766 <= 1) m.c1264 = Constraint(expr= m.b765", "Constraint(expr= m.b596 + m.b599 - m.b605 >= 0) m.c1407 =", "m.b657 - m.b747 <= 0) m.c1345 = Constraint(expr= - m.b656", "m.x187 = Var(within=Reals,bounds=(0,None),initialize=0) m.x188 = Var(within=Reals,bounds=(0,None),initialize=0) m.x189 = Var(within=Reals,bounds=(0,None),initialize=0) m.x190", "- m.b712 <= 0) m.c1310 = Constraint(expr= m.b623 - m.b713", "+ m.x840 == 0) m.c988 = Constraint(expr= 9*m.b751 + m.x841", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x360 = Var(within=Reals,bounds=(0,None),initialize=0) m.x361 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x362 =", "m.x541 = Var(within=Reals,bounds=(0,None),initialize=0) m.x542 = Var(within=Reals,bounds=(0,None),initialize=0) m.x543 = Var(within=Reals,bounds=(0,None),initialize=0) m.x544", "m.c176 = Constraint(expr= m.x32 - m.x248 - m.x251 == 0)", "0.999*m.b654) - 0.85*log(1 + m.x411/(0.001 + 0.999*m.b654)))*(0.001 + 0.999* m.b654)", "0.65*log(1 + m.x368/(0.001 + 0.999*m.b638)))*(0.001 + 0.999* m.b638) <= 0)", "m.x474 + 1.18887736200171*m.b657 <= 1.18887736200171) m.c658 = Constraint(expr= m.x475 +", "= Constraint(expr= m.b759 + m.b760 <= 1) m.c1253 = Constraint(expr=", "= Constraint(expr= m.b701 + m.b703 <= 1) m.c1138 = Constraint(expr=", "Constraint(expr= m.x516 == 0) m.c757 = Constraint(expr= m.x517 == 0)", "= Constraint(expr= m.x276 == 0) m.c262 = Constraint(expr= m.x277 ==", "= Constraint(expr= m.x497 + 0.940066550763924*m.b662 <= 0.940066550763924) m.c711 = Constraint(expr=", "<= 0) m.c1047 = Constraint(expr= m.b629 - m.b631 <= 0)", "+ m.x292/(0.001 + 0.999*m.b616)))*(0.001 + 0.999* m.b616) <= 0) m.c233", "33.5*m.b640 <= 0) m.c494 = Constraint(expr= m.x383 + 33.5*m.b638 <=", "Constraint(expr= m.x221 == 0) m.c84 = Constraint(expr= m.x222 == 0)", "m.b764 <= 0) m.c1362 = Constraint(expr= - m.b674 + m.b675", "- 0.480234946352917*m.b674 <= 0) m.c837 = Constraint(expr= m.x573 - 0.480234946352917*m.b675", "Var(within=Reals,bounds=(0,None),initialize=0) m.x433 = Var(within=Reals,bounds=(0,None),initialize=0) m.x434 = Var(within=Reals,bounds=(0,None),initialize=0) m.x435 = Var(within=Reals,bounds=(0,None),initialize=0)", "- 2*m.b750 - 9*m.b751 - 5*m.b752 - 8*m.b753 - 4*m.b754", "0) m.c470 = Constraint(expr= m.x383 == 0) m.c471 = Constraint(expr=", "m.x448 - 9*m.b646 <= 0) m.c554 = Constraint(expr= m.x449 +", "0.999*m.b655) - 0.85*log(1 + m.x412/(0.001 + 0.999*m.b655)))*(0.001 + 0.999* m.b655)", "<= 1) m.c1210 = Constraint(expr= m.b738 + m.b739 <= 1)", "G L N X C B # 1486 571 111", "m.x573 - 0.480234946352917*m.b675 <= 0) 
m.c838 = Constraint(expr= m.x574 -", "m.x137 - m.x140 - m.x143 == 0) m.c36 = Constraint(expr=", "+ 13.5*m.b682 <= 13.5) m.c896 = Constraint(expr= - 0.6*m.x560 +", "0) m.c314 = Constraint(expr= m.x305 == 0) m.c315 = Constraint(expr=", "m.x311 + 15*m.b626 <= 15) m.c357 = Constraint(expr= m.x312 +", "== 0) m.c474 = Constraint(expr= m.x438 == 0) m.c475 =", "- m.x431 == 0) m.c447 = Constraint(expr= m.x117 - m.x429", "m.b701 = Var(within=Binary,bounds=(0,1),initialize=0) m.b702 = Var(within=Binary,bounds=(0,1),initialize=0) m.b703 = Var(within=Binary,bounds=(0,1),initialize=0) m.b704", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x359 = Var(within=Reals,bounds=(0,None),initialize=0) m.x360 = Var(within=Reals,bounds=(0,None),initialize=0) m.x361 =", "= Constraint(expr= m.x38 - m.x47 - m.x50 == 0) m.c15", "- m.b619 >= 0) m.c1421 = Constraint(expr= m.b608 - m.b620", "= Constraint(expr= m.x31 - m.x244 - m.x247 == 0) m.c146", "0.6*m.x560 + m.x590 == 0) m.c897 = Constraint(expr= - 0.6*m.x561", "<= 9) m.c338 = Constraint(expr=(m.x356/(0.001 + 0.999*m.b626) - 1.1*log(1 +", "0.85*log(1 + m.x410/(0.001 + 0.999*m.b653)))*(0.001 + 0.999* m.b653) <= 0)", "Constraint(expr= m.x250 - 4.45628648004517*m.b610 <= 0) m.c188 = Constraint(expr= m.x251", "Constraint(expr= m.b761 + m.b763 <= 1) m.c1255 = Constraint(expr= m.b761", "m.c1269 = Constraint(expr= m.b767 + m.b769 <= 1) m.c1270 =", "Constraint(expr= m.x232 - 4.45628648004517*m.b601 <= 0) m.c104 = Constraint(expr= m.x233", "m.b662 + m.b663 - m.b753 <= 0) m.c1351 = Constraint(expr=", "+ m.x442 == 0) m.c506 = Constraint(expr= m.x389 == 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x30 = Var(within=Reals,bounds=(0,None),initialize=0) m.x31 = Var(within=Reals,bounds=(0,None),initialize=0) m.x32 =", "m.x52 = Var(within=Reals,bounds=(0,None),initialize=0) m.x53 = Var(within=Reals,bounds=(0,None),initialize=0) m.x54 = Var(within=Reals,bounds=(0,None),initialize=0) m.x55", "= Var(within=Reals,bounds=(None,None),initialize=0) 
m.x830 = Var(within=Reals,bounds=(None,None),initialize=0) m.x831 = Var(within=Reals,bounds=(None,None),initialize=0) m.x832 =", "0) m.c281 = Constraint(expr= m.x341 + 1.26558121681553*m.b617 <= 1.26558121681553) m.c282", "0) m.c598 = Constraint(expr= m.x133 - m.x460 - m.x463 ==", "= Constraint(expr= m.b695 + m.b697 <= 1) m.c1123 = Constraint(expr=", "- m.x50 == 0) m.c15 = Constraint(expr= m.x39 - m.x48", "m.b601 <= 0) m.c1018 = Constraint(expr= m.b600 - m.b601 <=", "log(1 + m.x214/(0.001 + 0.999*m.b598)))*(0.001 + 0.999*m.b598) <= 0) m.c56", "m.c800 = Constraint(expr= m.x197 - m.x566 - m.x569 == 0)", "<= 0) m.c1359 = Constraint(expr= - m.b671 + m.b672 -", "- 4.45628648004517*m.b606 <= 0) m.c151 = Constraint(expr= m.x244 - 4.45628648004517*m.b607", "m.b646 <= 0) m.c1063 = Constraint(expr= m.b645 - m.b646 <=", "m.x857 == 0) m.c1005 = Constraint(expr= 8*m.b768 + m.x858 ==", "+ m.x859 == 0) m.c1007 = Constraint(expr= 2*m.b770 + m.x860", "m.b623 - m.b713 <= 0) m.c1311 = Constraint(expr= - m.b623", "Var(within=Reals,bounds=(0,None),initialize=0) m.x26 = Var(within=Reals,bounds=(0,None),initialize=0) m.x27 = Var(within=Reals,bounds=(0,None),initialize=0) m.x28 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x341 = Var(within=Reals,bounds=(0,None),initialize=0) m.x342 = Var(within=Reals,bounds=(0,None),initialize=0) m.x343 = Var(within=Reals,bounds=(0,None),initialize=0) m.x344", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x488 = Var(within=Reals,bounds=(0,None),initialize=0) m.x489 = Var(within=Reals,bounds=(0,None),initialize=0) m.x490 =", ">= 0) m.c1440 = Constraint(expr= m.b618 - m.b639 >= 0)", "m.c799 = Constraint(expr= m.x184 - m.x544 - m.x547 == 0)", "m.x409 == 0) m.c596 = Constraint(expr= m.x131 - m.x458 -", "0) m.c849 = Constraint(expr= m.x582 == 0) m.c850 = Constraint(expr=", "- m.b612 + m.b630 >= 0) m.c1381 = Constraint(expr= -", "== 0) m.c871 = Constraint(expr= - 0.9*m.x556 + m.x586 ==", "m.b629 - m.b631 <= 0) m.c1048 = Constraint(expr= m.b630 
-", "1) m.c1180 = Constraint(expr= m.b723 + m.b724 <= 1) m.c1181", "m.c1105 = Constraint(expr= m.b686 + m.b687 <= 1) m.c1106 =", "m.b634 <= 0) m.c1051 = Constraint(expr= m.b633 - m.b634 <=", "m.c298 = Constraint(expr= m.x73 - m.x346 - m.x349 == 0)", "0.705049913072943*m.b664 <= 0) m.c716 = Constraint(expr= m.x521 + 0.705049913072943*m.b662 <=", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b765 = Var(within=Binary,bounds=(0,1),initialize=0) m.b766 = Var(within=Binary,bounds=(0,1),initialize=0) m.b767 =", "= Constraint(expr= m.x384 == 0) m.c472 = Constraint(expr= m.x385 ==", "m.b616 <= 0) m.c1034 = Constraint(expr= m.b617 - m.b618 <=", "m.x444 = Var(within=Reals,bounds=(0,None),initialize=0) m.x445 = Var(within=Reals,bounds=(0,None),initialize=0) m.x446 = Var(within=Reals,bounds=(0,None),initialize=0) m.x447", "Var(within=Reals,bounds=(0,None),initialize=0) m.x210 = Var(within=Reals,bounds=(0,None),initialize=0) m.x211 = Var(within=Reals,bounds=(0,None),initialize=0) m.x212 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c1101 = Constraint(expr= m.b683 - m.b685 <= 0) m.c1102", "1) m.c1219 = Constraint(expr= m.b743 + m.b744 <= 1) m.c1220", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x821 = Var(within=Reals,bounds=(None,None),initialize=0) m.x822 = Var(within=Reals,bounds=(None,None),initialize=0) m.x823 =", "+ 33.5*m.b638 <= 33.5) m.c495 = Constraint(expr= m.x384 + 33.5*m.b639", "0.666992981045719*m.b673 <= 0) m.c812 = Constraint(expr= m.x569 + 0.666992981045719*m.b671 <=", "m.c240 = Constraint(expr= m.x51 - m.x291 - m.x294 == 0)", "m.x405/(0.001 + 0.999*m.b651)))*(0.001 + 0.999* m.b651) <= 0) m.c586 =", "m.b650 - m.b651 <= 0) m.c1068 = Constraint(expr= m.b650 -", "= Constraint(expr= m.b713 + m.b714 <= 1) m.c1160 = Constraint(expr=", "Constraint(expr=(m.x465/(0.001 + 0.999*m.b654) - 0.85*log(1 + m.x411/(0.001 + 0.999*m.b654)))*(0.001 +", "- m.x460 - m.x463 == 0) m.c599 = Constraint(expr= m.x404", "Constraint(expr= m.x239 + 
4.45628648004517*m.b602 <= 4.45628648004517) m.c126 = Constraint(expr= m.x240", "m.b770 <= 0) m.c1368 = Constraint(expr= - m.b680 + m.b681", "0.9*m.x298 + m.x346 == 0) m.c287 = Constraint(expr= m.x299 ==", "m.c997 = Constraint(expr= 3*m.b760 + m.x850 == 0) m.c998 =", "m.b596 + m.b599 == 1) m.c1374 = Constraint(expr= m.b597 +", "m.x586 - 13.5*m.b682 <= 0) m.c893 = Constraint(expr= m.x587 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x255 = Var(within=Reals,bounds=(0,None),initialize=0) m.x256 = Var(within=Reals,bounds=(0,None),initialize=0) m.x257 =", "- 40*m.b599 <= 0) m.c96 = Constraint(expr= m.x219 - 40*m.b600", "# FX 0 0 0 0 0 0 0 0", "m.c1021 = Constraint(expr= m.b603 - m.b604 <= 0) m.c1022 =", "- m.b648 + m.b649 - m.b739 <= 0) m.c1337 =", "m.c985 = Constraint(expr= 2*m.b748 + m.x838 == 0) m.c986 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x224 = Var(within=Reals,bounds=(0,None),initialize=0) m.x225 = Var(within=Reals,bounds=(0,None),initialize=0) m.x226 =", "<= 1) m.c1192 = Constraint(expr= m.b729 + m.b730 <= 1)", "m.c902 = Constraint(expr= m.x593 == 0) m.c903 = Constraint(expr= m.x594", "15) m.c305 = Constraint(expr= m.x344 - 13.5*m.b620 <= 0) m.c306", "0) m.c509 = Constraint(expr= m.x443 == 0) m.c510 = Constraint(expr=", "m.x319 - m.x325 == 0) m.c383 = Constraint(expr= m.x86 -", "m.c84 = Constraint(expr= m.x222 == 0) m.c85 = Constraint(expr= m.x223", "m.x549 - m.x552 == 0) m.c826 = Constraint(expr= m.x187 -", "0) m.c15 = Constraint(expr= m.x39 - m.x48 - m.x51 ==", "m.c376 = Constraint(expr= m.x379 == 0) m.c377 = Constraint(expr= m.x419", "m.b771 + m.b772 <= 1) m.c1275 = Constraint(expr= m.b770 +", "m.b613 >= 0) m.c1415 = Constraint(expr= m.b602 - m.b614 >=", "<= 0) m.c1050 = Constraint(expr= m.b632 - m.b634 <= 0)", "= Constraint(expr= m.b725 + m.b726 <= 1) m.c1184 = Constraint(expr=", "= Constraint(expr= m.x321 == 0) m.c211 = Constraint(expr= m.x322 ==", "m.x558 == 0) m.c874 = Constraint(expr= m.x559 == 0) m.c875", "Constraint(expr= 
m.x46 - m.x55 - m.x58 - m.x61 == 0)", "m.c741 = Constraint(expr= m.x525 - 0.994083415506506*m.b666 <= 0) m.c742 =", "Constraint(expr= - 0.5*m.x513 + m.x537 == 0) m.c751 = Constraint(expr=", "m.c1154 = Constraint(expr= m.b711 + m.b712 <= 1) m.c1155 =", "<= 1) m.c1223 = Constraint(expr= m.b746 + m.b747 <= 1)", "1.5*log(1 + m.x242/(0.001 + 0.999*m.b605)))*(0.001 + 0.999* m.b605) <= 0)", "0) m.c148 = Constraint(expr= m.x43 - m.x268 - m.x274 ==", "m.c152 = Constraint(expr= m.x245 + 4.45628648004517*m.b605 <= 4.45628648004517) m.c153 =", "Constraint(expr= m.x579 - 0.690184503917672*m.b678 <= 0) m.c865 = Constraint(expr= m.x580", "m.x13 - m.x16 + m.x19 == 0) m.c8 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x299 = Var(within=Reals,bounds=(0,None),initialize=0) m.x300 = Var(within=Reals,bounds=(0,None),initialize=0) m.x301 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= m.x294 + 3.34221486003388*m.b615 <= 3.34221486003388) m.c250 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x523 = Var(within=Reals,bounds=(0,None),initialize=0) m.x524 = Var(within=Reals,bounds=(0,None),initialize=0) m.x525 =", "m.x187 == 0) m.c50 = Constraint(expr= m.x179 - m.x188 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x490 = Var(within=Reals,bounds=(0,None),initialize=0) m.x491 = Var(within=Reals,bounds=(0,None),initialize=0) m.x492 = Var(within=Reals,bounds=(0,None),initialize=0)", "3.04984759446376*m.b626 <= 3.04984759446376) m.c363 = Constraint(expr= m.x360 + 3.04984759446376*m.b627 <=", "Constraint(expr= m.x104 - m.x404 - m.x407 == 0) m.c594 =", "<= 1) m.c1227 = Constraint(expr= m.b746 + m.b748 <= 1)", "= Constraint(expr= m.x65 - m.x326 - m.x332 == 0) m.c243", "= Constraint(expr= - m.b655 + m.b658 + m.b661 >= 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x555 = Var(within=Reals,bounds=(0,None),initialize=0) m.x556 = Var(within=Reals,bounds=(0,None),initialize=0) m.x557 = 
Var(within=Reals,bounds=(0,None),initialize=0)", "m.x592 - 9*m.b685 <= 0) m.c920 = Constraint(expr= m.x593 +", "4*m.b694 + m.x784 == 0) m.c932 = Constraint(expr= 10*m.b695 +", "m.x352 - 9*m.b625 <= 0) m.c335 = Constraint(expr= m.x353 +", "m.c12 = Constraint(expr= m.x24 - m.x27 - m.x30 - m.x33", "+ 30*m.x117 + 40*m.x118 + 30*m.x119 + 20*m.x120 + 20*m.x121", "Constraint(expr= m.x500 - 0.940066550763924*m.b665 <= 0) m.c735 = Constraint(expr= m.x501", "= Constraint(expr= 4*m.b744 + m.x834 == 0) m.c982 = Constraint(expr=", "m.b689 + m.b691 <= 1) m.c1114 = Constraint(expr= m.b690 +", "m.x67 = Var(within=Reals,bounds=(0,None),initialize=0) m.x68 = Var(within=Reals,bounds=(0,None),initialize=0) m.x69 = Var(within=Reals,bounds=(0,None),initialize=0) m.x70", "m.b644 - m.b645 + m.b646 - m.b736 <= 0) m.c1334", "m.b721 <= 1) m.c1173 = Constraint(expr= m.b719 + m.b721 <=", "= Constraint(expr= m.x365 == 0) m.c438 = Constraint(expr= m.x366 ==", "m.b691 <= 1) m.c1115 = Constraint(expr= m.b692 + m.b693 <=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x54 = Var(within=Reals,bounds=(0,None),initialize=0) m.x55 = Var(within=Reals,bounds=(0,None),initialize=0) m.x56 = Var(within=Reals,bounds=(0,None),initialize=0)", "+ 0.999* m.b650) <= 0) m.c585 = Constraint(expr=(m.x459/(0.001 + 0.999*m.b651)", "+ 0.999*m.b654) - 0.85*log(1 + m.x411/(0.001 + 0.999*m.b654)))*(0.001 + 0.999*", "= Constraint(expr= 3*m.b774 + m.x864 == 0) m.c1012 = Constraint(expr=", "- m.b601 <= 0) m.c1019 = Constraint(expr= m.b602 - m.b603", "m.b638) <= 0) m.c465 = Constraint(expr=(m.x435/(0.001 + 0.999*m.b639) - 0.65*log(1", "m.c1310 = Constraint(expr= m.b623 - m.b713 <= 0) m.c1311 =", "- 1.11894339953103*m.b651 <= 0) m.c607 = Constraint(expr= m.x460 - 1.11894339953103*m.b652", "Var(within=Reals,bounds=(0,40),initialize=0) m.x5 = Var(within=Reals,bounds=(0,None),initialize=0) m.x6 = Var(within=Reals,bounds=(0,None),initialize=0) m.x7 = Var(within=Reals,bounds=(0,None),initialize=0)", "- m.x270 - m.x276 == 0) 
m.c268 = Constraint(expr= m.x43", "m.c10 = Constraint(expr= m.x19 - m.x22 - m.x25 == 0)", "m.x478 = Var(within=Reals,bounds=(0,None),initialize=0) m.x479 = Var(within=Reals,bounds=(0,None),initialize=0) m.x480 = Var(within=Reals,bounds=(0,None),initialize=0) m.x481", "<= 0) m.c402 = Constraint(expr= m.x417 - 20*m.b630 <= 0)", "0) m.c599 = Constraint(expr= m.x404 - 3.04984759446376*m.b650 <= 0) m.c600", "m.x217 == 0) m.c65 = Constraint(expr= m.x11 - m.x224 -", "0) m.c69 = Constraint(expr= m.x213 - 40*m.b597 <= 0) m.c70", "= Constraint(expr= - m.b609 + m.b621 + m.b624 + m.b627", "0.999*m.b628)))*(0.001 + 0.999* m.b628) <= 0) m.c341 = Constraint(expr= m.x311", "m.c670 = Constraint(expr= m.x481 == 0) m.c671 = Constraint(expr= m.x491", "Var(within=Reals,bounds=(0,None),initialize=0) m.x530 = Var(within=Reals,bounds=(0,None),initialize=0) m.x531 = Var(within=Reals,bounds=(0,None),initialize=0) m.x532 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x786 = Var(within=Reals,bounds=(None,None),initialize=0) m.x787 = Var(within=Reals,bounds=(None,None),initialize=0) m.x788 = Var(within=Reals,bounds=(None,None),initialize=0) m.x789", "m.c311 = Constraint(expr= - 0.6*m.x302 + m.x350 == 0) m.c312", "33.5*m.b638 <= 33.5) m.c495 = Constraint(expr= m.x384 + 33.5*m.b639 <=", "Constraint(expr= m.x558 + 15*m.b681 <= 15) m.c889 = Constraint(expr= m.x559", "0.999*m.b656) - log(1 + m.x470/(0.001 + 0.999*m.b656)))*(0.001 + 0.999*m.b656) <=", "= Constraint(expr= - m.b632 - m.b633 + m.b634 - m.b724", "0) m.c213 = Constraint(expr= m.x48 - m.x285 - m.x288 ==", "+ 285*m.x197 + 390*m.x198 + 350*m.x199 + 290*m.x200 + 405*m.x201", "Constraint(expr= m.b696 + m.b697 <= 1) m.c1125 = Constraint(expr= m.b695", "= Constraint(expr= 9*m.b724 + m.x814 == 0) m.c962 = Constraint(expr=", "<= 0) m.c1339 = Constraint(expr= - m.b650 - m.b651 +", "<= 0) m.c716 = Constraint(expr= m.x521 + 0.705049913072943*m.b662 <= 0.705049913072943)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x205 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x206 = Var(within=Reals,bounds=(0,None),initialize=0) m.x207 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b765 + m.b766 <= 1) m.c1263 = Constraint(expr= m.b764 +", "m.c958 = Constraint(expr= 9*m.b721 + m.x811 == 0) m.c959 =", "4.45628648004517) m.c153 = Constraint(expr= m.x246 + 4.45628648004517*m.b606 <= 4.45628648004517) m.c154", "+ m.b769 <= 1) m.c1271 = Constraint(expr= m.b770 + m.b771", "m.b617 - m.b618 + m.b619 - m.b709 <= 0) m.c1307", "m.c366 = Constraint(expr= - 0.9*m.x318 + m.x417 == 0) m.c367", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x594 = Var(within=Reals,bounds=(0,None),initialize=0) m.x595 = Var(within=Reals,bounds=(0,None),initialize=0) m.b596 =", "- 15*m.b609 <= 0) m.c199 = Constraint(expr= m.x280 - 15*m.b610", "m.x52 - m.x292 - m.x295 == 0) m.c242 = Constraint(expr=", "0) m.c173 = Constraint(expr= m.x281 == 0) m.c174 = Constraint(expr=", "m.c338 = Constraint(expr=(m.x356/(0.001 + 0.999*m.b626) - 1.1*log(1 + m.x308/(0.001 +", "== 0) m.c650 = Constraint(expr= m.x146 - m.x482 - m.x485", "Constraint(expr= m.x322 == 0) m.c212 = Constraint(expr= m.x47 - m.x284", "m.x436 - 2.30162356062425*m.b640 <= 0) m.c500 = Constraint(expr= m.x437 +", "m.x272 = Var(within=Reals,bounds=(0,None),initialize=0) m.x273 = Var(within=Reals,bounds=(0,None),initialize=0) m.x274 = Var(within=Reals,bounds=(0,None),initialize=0) m.x275", "Constraint(expr= m.x16 - m.x232 - m.x235 == 0) m.c95 =", "+ 0.999* m.b614) <= 0) m.c231 = Constraint(expr=(m.x327/(0.001 + 0.999*m.b615)", "- 0.572481933717686*m.b635 <= 0) m.c456 = Constraint(expr= m.x429 - 0.572481933717686*m.b636", "0.999*m.b647)))*(0.001 + 0.999* m.b647) <= 0) m.c558 = Constraint(expr=(m.x453/(0.001 +", "+ m.x822 == 0) m.c970 = Constraint(expr= 2*m.b733 + m.x823", "+ m.b741 <= 1) m.c1212 = Constraint(expr= m.b740 + m.b742", "<= 1) m.c1258 = Constraint(expr= m.b762 + m.b763 <= 1)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x170 = 
Var(within=Reals,bounds=(0,30),initialize=0) m.x171 = Var(within=Reals,bounds=(0,30),initialize=0) m.x172 = Var(within=Reals,bounds=(0,30),initialize=0)", "m.c540 = Constraint(expr= m.x99 - m.x393 - m.x396 == 0)", "= Constraint(expr= 9*m.b766 + m.x856 == 0) m.c1004 = Constraint(expr=", "m.b713 + m.b715 <= 1) m.c1159 = Constraint(expr= m.b713 +", "m.c1270 = Constraint(expr= m.b768 + m.b769 <= 1) m.c1271 =", "- 4.45628648004517*m.b609 <= 0) m.c187 = Constraint(expr= m.x250 - 4.45628648004517*m.b610", "= Constraint(expr= m.x55 - m.x298 - m.x301 == 0) m.c296", "m.x425 = Var(within=Reals,bounds=(0,None),initialize=0) m.x426 = Var(within=Reals,bounds=(0,None),initialize=0) m.x427 = Var(within=Reals,bounds=(0,None),initialize=0) m.x428", "- m.x156 - m.x159 == 0) m.c43 = Constraint(expr= m.x154", "m.x142 - m.x472 - m.x475 == 0) m.c650 = Constraint(expr=", "<= 0) m.c279 = Constraint(expr= m.x339 - 1.26558121681553*m.b618 <= 0)", "<= 9) m.c555 = Constraint(expr= m.x450 + 9*m.b645 <= 9)", "m.c991 = Constraint(expr= 4*m.b754 + m.x844 == 0) m.c992 =", "<= 0) m.c1356 = Constraint(expr= - m.b668 + m.b669 -", "m.c521 = Constraint(expr= m.x389 + 9*m.b641 <= 9) m.c522 =", "- m.x103 - m.x106 - m.x109 == 0) m.c32 =", "<= 15) m.c890 = Constraint(expr= m.x584 - 13.5*m.b680 <= 0)", "m.x332 == 0) m.c237 = Constraint(expr= m.x333 == 0) m.c238", "0) m.c462 = Constraint(expr=(m.x435/(0.001 + 0.999*m.b639) - 0.65*log(1 + m.x369/(0.001", "== 0) m.c414 = Constraint(expr= m.x426 == 0) m.c415 =", "0) m.c1478 = Constraint(expr= m.b665 - m.b677 >= 0) m.c1479", "m.x279 - 15*m.b609 <= 0) m.c199 = Constraint(expr= m.x280 -", "<= 1.83548069293539) m.c229 = Constraint(expr= m.x322 + 1.83548069293539*m.b613 <= 1.83548069293539)", "m.b635 - m.b637 <= 0) m.c1054 = Constraint(expr= m.b636 -", "0) m.c343 = Constraint(expr= m.x313 == 0) m.c344 = Constraint(expr=", "m.x251 = Var(within=Reals,bounds=(0,None),initialize=0) m.x252 = Var(within=Reals,bounds=(0,None),initialize=0) m.x253 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x254", "3.34221486003388*m.b603 <= 3.34221486003388) m.c133 = Constraint(expr= m.x265 + 3.34221486003388*m.b604 <=", "m.c832 = Constraint(expr= m.x550 - 0.705049913072943*m.b676 <= 0) m.c833 =", "m.c1145 = Constraint(expr= m.b707 + m.b708 <= 1) m.c1146 =", "9*m.b721 + m.x811 == 0) m.c959 = Constraint(expr= 3*m.b722 +", "Constraint(expr= m.x183 - m.x543 - m.x546 == 0) m.c799 =", "m.c596 = Constraint(expr= m.x131 - m.x458 - m.x461 == 0)", "+ m.b660 - m.b750 <= 0) m.c1348 = Constraint(expr= -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x414 = Var(within=Reals,bounds=(0,None),initialize=0) m.x415 = Var(within=Reals,bounds=(0,None),initialize=0) m.x416 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x228 + 3.71357206670431*m.b597 <= 3.71357206670431) m.c79 = Constraint(expr= m.x229", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x302 = Var(within=Reals,bounds=(0,None),initialize=0) m.x303 = Var(within=Reals,bounds=(0,None),initialize=0) m.x304 =", "- 7*m.b738 - 6*m.b739 - 2*m.b740 - 8*m.b741 - 4*m.b742", "== 0) m.c313 = Constraint(expr= - 0.6*m.x304 + m.x352 ==", "9*m.b644 <= 9) m.c555 = Constraint(expr= m.x450 + 9*m.b645 <=", "- m.b689 <= 0) m.c1287 = Constraint(expr= - m.b599 +", "m.b623 + m.b624 - m.b714 <= 0) m.c1312 = Constraint(expr=", "= Constraint(expr= m.x211 - m.x592 - m.x595 == 0) m.c911", "= Constraint(expr= m.b746 + m.b747 <= 1) m.c1224 = Constraint(expr=", "m.x499 = Var(within=Reals,bounds=(0,None),initialize=0) m.x500 = Var(within=Reals,bounds=(0,None),initialize=0) m.x501 = Var(within=Reals,bounds=(0,None),initialize=0) m.x502", "= Constraint(expr= m.x283 + 15*m.b610 <= 15) m.c203 = Constraint(expr=(m.x314/(0.001", "m.x442 - 9*m.b643 <= 0) m.c527 = Constraint(expr= m.x443 +", "m.x226 = Var(within=Reals,bounds=(0,None),initialize=0) m.x227 = Var(within=Reals,bounds=(0,None),initialize=0) m.x228 = Var(within=Reals,bounds=(0,None),initialize=0) m.x229", "4*m.b713 + m.x803 
== 0) m.c951 = Constraint(expr= 7*m.b714 +", "m.b612 + m.b615 >= 0) m.c1378 = Constraint(expr= - m.b604", "0.999* m.b650) <= 0) m.c585 = Constraint(expr=(m.x459/(0.001 + 0.999*m.b651) -", "m.x361 + 3.04984759446376*m.b628 <= 3.04984759446376) m.c365 = Constraint(expr= - 0.9*m.x317", "= Constraint(expr= m.x499 == 0) m.c698 = Constraint(expr= m.x521 ==", "m.b598 <= 0) m.c1015 = Constraint(expr= m.b597 - m.b598 <=", "m.x48 - m.x285 - m.x288 == 0) m.c214 = Constraint(expr=", "m.x291 = Var(within=Reals,bounds=(0,None),initialize=0) m.x292 = Var(within=Reals,bounds=(0,None),initialize=0) m.x293 = Var(within=Reals,bounds=(0,None),initialize=0) m.x294", "Var(within=Reals,bounds=(0,None),initialize=0) m.x502 = Var(within=Reals,bounds=(0,None),initialize=0) m.x503 = Var(within=Reals,bounds=(0,None),initialize=0) m.x504 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x356 - 3.04984759446376*m.b626 <= 0) m.c360 = Constraint(expr= m.x357", "+ m.b750 <= 1) m.c1230 = Constraint(expr= m.b749 + m.b751", "m.b662 - m.b663 + m.b664 - m.b754 <= 0) m.c1352", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x464 = Var(within=Reals,bounds=(0,None),initialize=0) m.x465 = Var(within=Reals,bounds=(0,None),initialize=0) m.x466 =", "= Constraint(expr= m.b741 + m.b742 <= 1) m.c1217 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x195 = Var(within=Reals,bounds=(0,None),initialize=0) m.x196 = Var(within=Reals,bounds=(0,None),initialize=0) m.x197 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x39 - m.x261 - m.x264 == 0) m.c121 = Constraint(expr=", "<= 0) m.c526 = Constraint(expr= m.x442 - 9*m.b643 <= 0)", "= Constraint(expr= m.b689 + m.b690 <= 1) m.c1110 = Constraint(expr=", "m.x146 = Var(within=Reals,bounds=(0,None),initialize=0) m.x147 = Var(within=Reals,bounds=(0,None),initialize=0) m.x148 = Var(within=Reals,bounds=(0,None),initialize=0) m.x149", "m.x402 + 3.04984759446376*m.b648 <= 3.04984759446376) m.c577 = Constraint(expr= m.x403 +", 
"m.x578 - m.x581 == 0) m.c855 = Constraint(expr= m.x204 -", "= Constraint(expr= m.b743 + m.b744 <= 1) m.c1218 = Constraint(expr=", "- 1.18887736200171*m.b656 <= 0) m.c654 = Constraint(expr= m.x471 - 1.18887736200171*m.b657", "Var(within=Reals,bounds=(0,None),initialize=0) m.x446 = Var(within=Reals,bounds=(0,None),initialize=0) m.x447 = Var(within=Reals,bounds=(0,None),initialize=0) m.x448 = Var(within=Reals,bounds=(0,None),initialize=0)", "Var(within=Reals,bounds=(None,None),initialize=0) m.x820 = Var(within=Reals,bounds=(None,None),initialize=0) m.x821 = Var(within=Reals,bounds=(None,None),initialize=0) m.x822 = Var(within=Reals,bounds=(None,None),initialize=0)", "<= 1) m.c1272 = Constraint(expr= m.b770 + m.b772 <= 1)", "Constraint(expr= m.x77 - m.x101 - m.x104 - m.x107 == 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x480 = Var(within=Reals,bounds=(0,None),initialize=0) m.x481 = Var(within=Reals,bounds=(0,None),initialize=0) m.x482 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c1462 = Constraint(expr= - m.b664 + m.b673 + m.b676", "11:37:33 # # Equation counts # Total E G L", "m.c798 = Constraint(expr= m.x183 - m.x543 - m.x546 == 0)", "Var(within=Binary,bounds=(0,1),initialize=0) m.b761 = Var(within=Binary,bounds=(0,1),initialize=0) m.b762 = Var(within=Binary,bounds=(0,1),initialize=0) m.b763 = Var(within=Binary,bounds=(0,1),initialize=0)", "= Constraint(expr= m.x412 - 3.04984759446376*m.b655 <= 0) m.c629 = Constraint(expr=", "m.c496 = Constraint(expr= m.x385 + 33.5*m.b640 <= 33.5) m.c497 =", "= Var(within=Reals,bounds=(0,40),initialize=0) m.x4 = Var(within=Reals,bounds=(0,40),initialize=0) m.x5 = Var(within=Reals,bounds=(0,None),initialize=0) m.x6 =", "0) m.c500 = Constraint(expr= m.x437 + 2.30162356062425*m.b638 <= 2.30162356062425) m.c501", "m.x96 - m.x387 - m.x390 == 0) m.c514 = Constraint(expr=", "<= 0) m.c667 = Constraint(expr=(m.x490/(0.001 + 0.999*m.b661) - 1.2*log(1 +", "m.b653 - m.b654 <= 0) m.c1071 = Constraint(expr= m.b653 -", "- 
m.x333 == 0) m.c244 = Constraint(expr= m.x67 - m.x328", "m.x55 - m.x298 - m.x301 == 0) m.c296 = Constraint(expr=", "Var(within=Binary,bounds=(0,1),initialize=0) m.b656 = Var(within=Binary,bounds=(0,1),initialize=0) m.b657 = Var(within=Binary,bounds=(0,1),initialize=0) m.b658 = Var(within=Binary,bounds=(0,1),initialize=0)", "9*m.b642 <= 0) m.c526 = Constraint(expr= m.x442 - 9*m.b643 <=", "1) m.c1257 = Constraint(expr= m.b761 + m.b763 <= 1) m.c1258", "- m.b657 <= 0) m.c1074 = Constraint(expr= m.b656 - m.b658", "- 1.5*log(1 + m.x502/(0.001 + 0.999*m.b667)))*(0.001 + 0.999* m.b667) <=", "Var(within=Binary,bounds=(0,1),initialize=0) m.b705 = Var(within=Binary,bounds=(0,1),initialize=0) m.b706 = Var(within=Binary,bounds=(0,1),initialize=0) m.b707 = Var(within=Binary,bounds=(0,1),initialize=0)", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b662 = Var(within=Binary,bounds=(0,1),initialize=0) m.b663 = Var(within=Binary,bounds=(0,1),initialize=0) m.b664 =", "= Constraint(expr= m.x385 == 0) m.c473 = Constraint(expr= m.x437 ==", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x77 = Var(within=Reals,bounds=(0,None),initialize=0) m.x78 = Var(within=Reals,bounds=(0,None),initialize=0) m.x79 =", "m.x510 + 0.940066550763924*m.b669 <= 0.940066550763924) m.c775 = Constraint(expr= m.x511 +", "= Constraint(expr= m.x504 + 0.940066550763924*m.b666 <= 0.940066550763924) m.c739 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x448 = Var(within=Reals,bounds=(0,None),initialize=0) m.x449 = Var(within=Reals,bounds=(0,None),initialize=0) m.x450 =", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b756 = Var(within=Binary,bounds=(0,1),initialize=0) m.b757 = Var(within=Binary,bounds=(0,1),initialize=0) m.b758 =", "m.c145 = Constraint(expr= m.x31 - m.x244 - m.x247 == 0)", "- 0.75*m.x496 + m.x520 == 0) m.c695 = Constraint(expr= m.x497", "= Constraint(expr= m.b726 + m.b727 <= 1) m.c1185 = Constraint(expr=", "Constraint(expr= - m.b654 + m.b657 + m.b660 >= 0) 
m.c1459", "- m.b622 >= 0) m.c1424 = Constraint(expr= m.b608 - m.b623", "m.b714 <= 0) m.c1312 = Constraint(expr= - m.b623 - m.b624", "- m.b652 >= 0) m.c1454 = Constraint(expr= m.b626 - m.b653", "0.999*m.b675) - 0.9*log(1 + m.x549/(0.001 + 0.999*m.b675)))*(0.001 + 0.999* m.b675)", "= Constraint(expr= m.x321 + 1.83548069293539*m.b612 <= 1.83548069293539) m.c229 = Constraint(expr=", "= Constraint(expr= m.x343 + 1.26558121681553*m.b619 <= 1.26558121681553) m.c284 = Constraint(expr=", "Constraint(expr=(m.x422/(0.001 + 0.999*m.b632) - log(1 + m.x329/(0.001 + 0.999*m.b632)))*(0.001 +", "m.x545 == 0) m.c792 = Constraint(expr= m.x546 == 0) m.c793", "Constraint(expr= m.x553 + 0.705049913072943*m.b676 <= 0.705049913072943) m.c836 = Constraint(expr= m.x572", "m.b715 <= 1) m.c1159 = Constraint(expr= m.b713 + m.b714 <=", "4.45628648004517*m.b609 <= 0) m.c187 = Constraint(expr= m.x250 - 4.45628648004517*m.b610 <=", "3.04984759446376) m.c605 = Constraint(expr= m.x458 - 1.11894339953103*m.b650 <= 0) m.c606", "- m.x80 - m.x83 == 0) m.c21 = Constraint(expr= m.x69", "- log(1 + m.x270/(0.001 + 0.999*m.b618)))*(0.001 + 0.999*m.b618) <= 0)", "0.999* m.b600) <= 0) m.c82 = Constraint(expr=(m.x232/(0.001 + 0.999*m.b601) -", "- m.x265 == 0) m.c122 = Constraint(expr= m.x236 - 4.45628648004517*m.b602", "+ m.b759 <= 1) m.c1250 = Constraint(expr= m.b759 + m.b760", "m.b600 - m.b690 <= 0) m.c1288 = Constraint(expr= - m.b599", "m.b650 + m.b653 >= 0) m.c1401 = Constraint(expr= - m.b627", "Constraint(expr= m.x479 == 0) m.c669 = Constraint(expr= m.x480 == 0)", "Constraint(expr= m.x465 - 1.18887736200171*m.b654 <= 0) m.c634 = Constraint(expr= m.x466", "m.c666 = Constraint(expr=(m.x489/(0.001 + 0.999*m.b660) - 1.2*log(1 + m.x477/(0.001 +", "m.c782 = Constraint(expr= m.x536 - 15*m.b668 <= 0) m.c783 =", "Constraint(expr= m.x447 - 9*m.b645 <= 0) m.c553 = Constraint(expr= m.x448", "= Constraint(expr= m.x261 - 3.34221486003388*m.b603 <= 0) m.c130 = Constraint(expr=", 
"Var(within=Binary,bounds=(0,1),initialize=0) m.b653 = Var(within=Binary,bounds=(0,1),initialize=0) m.b654 = Var(within=Binary,bounds=(0,1),initialize=0) m.b655 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.x411/(0.001 + 0.999*m.b654)))*(0.001 + 0.999* m.b654) <= 0) m.c613 =", "m.c1068 = Constraint(expr= m.b650 - m.b652 <= 0) m.c1069 =", "m.x500 - 0.940066550763924*m.b665 <= 0) m.c735 = Constraint(expr= m.x501 -", "Constraint(expr= m.x247 + 4.45628648004517*m.b607 <= 4.45628648004517) m.c155 = Constraint(expr= m.x266", "<= 13.5) m.c896 = Constraint(expr= - 0.6*m.x560 + m.x590 ==", "0) m.c955 = Constraint(expr= 3*m.b718 + m.x808 == 0) m.c956", "0) m.c1300 = Constraint(expr= - m.b611 - m.b612 + m.b613", "m.b651 >= 0) m.c1453 = Constraint(expr= m.b628 - m.b652 >=", "== 0) m.c348 = Constraint(expr= m.x60 - m.x309 - m.x312", "0) m.c818 = Constraint(expr= m.x551 == 0) m.c819 = Constraint(expr=", "m.c1023 = Constraint(expr= m.b605 - m.b607 <= 0) m.c1024 =", "<= 0) m.c1367 = Constraint(expr= m.b680 - m.b770 <= 0)", "1.83548069293539*m.b631 <= 1.83548069293539) m.c395 = Constraint(expr= m.x374 - 20*m.b629 <=", "<= 0) m.c789 = Constraint(expr=(m.x567/(0.001 + 0.999*m.b672) - 1.25*log(1 +", "0) m.c977 = Constraint(expr= 2*m.b740 + m.x830 == 0) m.c978", "0.999*m.b639)))*(0.001 + 0.999* m.b639) <= 0) m.c463 = Constraint(expr=(m.x436/(0.001 +", "0.940066550763924*m.b660 <= 0.940066550763924) m.c691 = Constraint(expr= m.x493 + 0.940066550763924*m.b661 <=", "- m.x249 - m.x252 == 0) m.c178 = Constraint(expr= m.x34", "1.18887736200171*m.b659 <= 0) m.c681 = Constraint(expr= m.x477 - 1.18887736200171*m.b660 <=", "+ m.x592 == 0) m.c899 = Constraint(expr= m.x563 == 0)", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x793 = Var(within=Reals,bounds=(None,None),initialize=0) m.x794 = Var(within=Reals,bounds=(None,None),initialize=0) m.x795 =", "m.c819 = Constraint(expr= m.x552 == 0) m.c820 = Constraint(expr= m.x553", "- 2*m.b725 - 6*m.b726 - 3*m.b727 - 4*m.b728 - 8*m.b729", 
"Constraint(expr= m.x446 - 9*m.b644 <= 0) m.c552 = Constraint(expr= m.x447", "m.c718 = Constraint(expr= m.x523 + 0.705049913072943*m.b664 <= 0.705049913072943) m.c719 =", "0) m.c1321 = Constraint(expr= - m.b632 - m.b633 + m.b634", "m.c1361 = Constraint(expr= m.b674 - m.b764 <= 0) m.c1362 =", "== 0) m.c94 = Constraint(expr= m.x16 - m.x232 - m.x235", "= Constraint(expr= m.x49 - m.x286 - m.x289 == 0) m.c215", "m.x417 == 0) m.c367 = Constraint(expr= - 0.9*m.x319 + m.x418", "- m.x440 - m.x443 == 0) m.c516 = Constraint(expr= m.x123", "m.c783 = Constraint(expr= m.x537 - 15*m.b669 <= 0) m.c784 =", "m.c401 = Constraint(expr= m.x416 - 20*m.b629 <= 0) m.c402 =", "m.c509 = Constraint(expr= m.x443 == 0) m.c510 = Constraint(expr= m.x444", ">= 0) m.c1430 = Constraint(expr= m.b611 - m.b629 >= 0)", "m.c618 = Constraint(expr= m.x468 == 0) m.c619 = Constraint(expr= m.x469", "m.b717 = Var(within=Binary,bounds=(0,1),initialize=0) m.b718 = Var(within=Binary,bounds=(0,1),initialize=0) m.b719 = Var(within=Binary,bounds=(0,1),initialize=0) m.b720", "4*m.b715 + m.x805 == 0) m.c953 = Constraint(expr= 3*m.b716 +", "= Constraint(expr= m.x252 + 4.45628648004517*m.b609 <= 4.45628648004517) m.c190 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x557 = Var(within=Reals,bounds=(0,None),initialize=0) m.x558 = Var(within=Reals,bounds=(0,None),initialize=0) m.x559 =", "m.x142 - m.x145 == 0) m.c38 = Constraint(expr= - m.x146", "+ 40*m.x118 + 30*m.x119 + 20*m.x120 + 20*m.x121 + 35*m.x122", "== 0) m.c352 = Constraint(expr= m.x79 - m.x358 - m.x361", "Var(within=Reals,bounds=(0,None),initialize=0) m.x344 = Var(within=Reals,bounds=(0,None),initialize=0) m.x345 = Var(within=Reals,bounds=(0,None),initialize=0) m.x346 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x472 - 1.18887736200171*m.b658 <= 0) m.c656 = Constraint(expr= m.x473 +", "0) m.c865 = Constraint(expr= m.x580 - 0.690184503917672*m.b679 <= 0) m.c866", "m.c891 = Constraint(expr= m.x585 - 13.5*m.b681 <= 0) m.c892 =", 
"m.c345 = Constraint(expr= m.x360 == 0) m.c346 = Constraint(expr= m.x361", "Constraint(expr= 9*m.b749 + m.x839 == 0) m.c987 = Constraint(expr= 2*m.b750", "m.x429 - m.x432 == 0) m.c448 = Constraint(expr= m.x118 -", "+ 0.690184503917672*m.b677 <= 0.690184503917672) m.c867 = Constraint(expr= m.x582 + 0.690184503917672*m.b678", "m.x504 == 0) m.c724 = Constraint(expr= m.x505 == 0) m.c725", "0.999*m.b666) - 1.5*log(1 + m.x501/(0.001 + 0.999*m.b666)))*(0.001 + 0.999* m.b666)", "0.705049913072943) m.c718 = Constraint(expr= m.x523 + 0.705049913072943*m.b664 <= 0.705049913072943) m.c719", "m.x152 == 0) m.c39 = Constraint(expr= - m.x147 - m.x150", "Constraint(expr= m.b753 + m.b754 <= 1) m.c1239 = Constraint(expr= m.b752", "m.x259 == 0) m.c182 = Constraint(expr= m.x44 - m.x278 -", "m.c361 = Constraint(expr= m.x358 - 3.04984759446376*m.b628 <= 0) m.c362 =", "- m.x363 - m.x366 == 0) m.c445 = Constraint(expr= m.x82", "<= 0) m.c463 = Constraint(expr=(m.x436/(0.001 + 0.999*m.b640) - 0.65*log(1 +", "Constraint(expr= m.x450 + 9*m.b645 <= 9) m.c556 = Constraint(expr= m.x451", "+ m.b760 <= 1) m.c1253 = Constraint(expr= m.b761 + m.b762", "= Constraint(expr= m.x34 - m.x250 - m.x253 == 0) m.c179", "m.b679 - m.b769 <= 0) m.c1367 = Constraint(expr= m.b680 -", "m.x515 == 0) m.c765 = Constraint(expr= m.x171 - m.x513 -", "0) m.c1297 = Constraint(expr= - m.b608 - m.b609 + m.b610", "+ m.x586 == 0) m.c872 = Constraint(expr= m.x557 == 0)", "0) m.c1017 = Constraint(expr= m.b599 - m.b601 <= 0) m.c1018", "m.c914 = Constraint(expr= m.x563 + 15*m.b683 <= 15) m.c915 =", "= Constraint(expr= m.x12 - m.x225 - m.x228 == 0) m.c67", "- 0.940066550763924*m.b664 <= 0) m.c710 = Constraint(expr= m.x497 + 0.940066550763924*m.b662", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x231 = Var(within=Reals,bounds=(0,None),initialize=0) m.x232 = Var(within=Reals,bounds=(0,None),initialize=0) m.x233 =", "m.b684 + m.b685 - m.b775 <= 0) m.c1373 = Constraint(expr=", "- m.x429 - m.x432 == 0) m.c448 = Constraint(expr= 
m.x118", "= Constraint(expr= m.b723 + m.b724 <= 1) m.c1179 = Constraint(expr=", "m.x43 - m.x271 - m.x277 == 0) m.c269 = Constraint(expr=", "m.c652 = Constraint(expr= m.x148 - m.x484 - m.x487 == 0)", "= Constraint(expr= 2*m.b770 + m.x860 == 0) m.c1008 = Constraint(expr=", "== 0) m.c90 = Constraint(expr= m.x9 - m.x219 - m.x222", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b683 = Var(within=Binary,bounds=(0,1),initialize=0) m.b684 = Var(within=Binary,bounds=(0,1),initialize=0) m.b685 =", "== 0) m.c533 = Constraint(expr= m.x395 == 0) m.c534 =", "1.18887736200171*m.b655 <= 0) m.c635 = Constraint(expr= m.x467 + 1.18887736200171*m.b653 <=", "== 0) m.c417 = Constraint(expr= m.x66 - m.x330 - m.x336", "m.x328 - 1.32154609891348*m.b616 <= 0) m.c254 = Constraint(expr= m.x332 +", "m.x281 == 0) m.c174 = Constraint(expr= m.x282 == 0) m.c175", "= Constraint(expr= m.x164 - m.x500 - m.x503 == 0) m.c729", "= Constraint(expr= m.x337 + 1.32154609891348*m.b634 <= 1.32154609891348) m.c428 = Constraint(expr=", ">= 0) m.c1383 = Constraint(expr= - m.b615 + m.b633 >=", "= Constraint(expr= - m.b606 + m.b618 >= 0) m.c1387 =", "<= 0) m.c1341 = Constraint(expr= - m.b653 + m.b654 -", "<= 1) m.c1252 = Constraint(expr= m.b759 + m.b760 <= 1)", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b735 = Var(within=Binary,bounds=(0,1),initialize=0) m.b736 = Var(within=Binary,bounds=(0,1),initialize=0) m.b737 =", "= Constraint(expr= m.x278 - 15*m.b608 <= 0) m.c198 = Constraint(expr=", "m.c351 = Constraint(expr= m.x78 - m.x357 - m.x360 == 0)", "<= 0) m.c548 = Constraint(expr= m.x395 + 9*m.b644 <= 9)", "m.c548 = Constraint(expr= m.x395 + 9*m.b644 <= 9) m.c549 =", "0) m.c674 = Constraint(expr= m.x143 - m.x476 - m.x479 ==", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x136 = Var(within=Reals,bounds=(0,None),initialize=0) m.x137 = Var(within=Reals,bounds=(0,None),initialize=0) m.x138 =", "= Constraint(expr= 9*m.b721 + m.x811 == 0) m.c959 = Constraint(expr=", "m.b714 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b715 = Var(within=Binary,bounds=(0,1),initialize=0) m.b716 = Var(within=Binary,bounds=(0,1),initialize=0) m.b717", "m.b652 + m.b655 >= 0) m.c1403 = Constraint(expr= m.b596 +", "m.c623 = Constraint(expr= m.x134 - m.x464 - m.x467 == 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x94 = Var(within=Reals,bounds=(0,None),initialize=0) m.x95 = Var(within=Reals,bounds=(0,None),initialize=0) m.x96 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x378 + 20*m.b630 <= 20) m.c400 = Constraint(expr= m.x379", "0.705049913072943) m.c719 = Constraint(expr=(m.x524/(0.001 + 0.999*m.b665) - 1.5*log(1 + m.x500/(0.001", "m.c935 = Constraint(expr= 6*m.b698 + m.x788 == 0) m.c936 =", "Constraint(expr= m.b689 + m.b691 <= 1) m.c1114 = Constraint(expr= m.b690", "= Constraint(expr= m.x282 == 0) m.c175 = Constraint(expr= m.x283 ==", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b606 = Var(within=Binary,bounds=(0,1),initialize=0) m.b607 = Var(within=Binary,bounds=(0,1),initialize=0) m.b608 =", "m.b721 <= 1) m.c1171 = Constraint(expr= m.b719 + m.b720 <=", "m.x412 = Var(within=Reals,bounds=(0,None),initialize=0) m.x413 = Var(within=Reals,bounds=(0,None),initialize=0) m.x414 = Var(within=Reals,bounds=(0,None),initialize=0) m.x415", "= Constraint(expr= m.x120 - m.x435 - m.x438 == 0) m.c484", "0) m.c613 = Constraint(expr=(m.x466/(0.001 + 0.999*m.b655) - 0.85*log(1 + m.x412/(0.001", "0) m.c5 = Constraint(expr= - m.x11 - m.x14 + m.x17", "= Constraint(expr= m.x440 - 9*m.b641 <= 0) m.c525 = Constraint(expr=", "+ m.x290/(0.001 + 0.999*m.b614)))*(0.001 + 0.999* m.b614) <= 0) m.c231", "- m.b690 <= 0) m.c1288 = Constraint(expr= - m.b599 -", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b734 = Var(within=Binary,bounds=(0,1),initialize=0) m.b735 = Var(within=Binary,bounds=(0,1),initialize=0) m.b736 =", "0) m.c21 = Constraint(expr= m.x69 - m.x81 - m.x84 ==", "= Constraint(expr= - m.b662 - m.b663 + m.b664 - m.b754", "= 
Var(within=Reals,bounds=(0,None),initialize=0) m.x374 = Var(within=Reals,bounds=(0,None),initialize=0) m.x375 = Var(within=Reals,bounds=(0,None),initialize=0) m.x376 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x546 = Var(within=Reals,bounds=(0,None),initialize=0) m.x547 = Var(within=Reals,bounds=(0,None),initialize=0) m.x548 =", "= Constraint(expr= m.b662 - m.b663 <= 0) m.c1080 = Constraint(expr=", "1.5*log(1 + m.x501/(0.001 + 0.999*m.b666)))*(0.001 + 0.999* m.b666) <= 0)", "= Constraint(expr= m.b743 + m.b744 <= 1) m.c1220 = Constraint(expr=", "m.b764 + m.b766 <= 1) m.c1261 = Constraint(expr= m.b764 +", "= Constraint(expr= m.x413 + 3.04984759446376*m.b653 <= 3.04984759446376) m.c630 = Constraint(expr=", "m.c1027 = Constraint(expr= m.b609 - m.b610 <= 0) m.c1028 =", "0.999*m.b614)))*(0.001 + 0.999* m.b614) <= 0) m.c231 = Constraint(expr=(m.x327/(0.001 +", "m.x415 + 3.04984759446376*m.b655 <= 3.04984759446376) m.c632 = Constraint(expr= m.x464 -", "m.x401 == 0) m.c567 = Constraint(expr= m.x102 - m.x399 -", "m.c1254 = Constraint(expr= m.b761 + m.b763 <= 1) m.c1255 =", "9) m.c923 = Constraint(expr= 5*m.b686 + m.x776 == 0) m.c924", "Constraint(expr= m.b626 - m.b650 >= 0) m.c1452 = Constraint(expr= m.b627", "== 0) m.c286 = Constraint(expr= - 0.9*m.x298 + m.x346 ==", "== 0) m.c320 = Constraint(expr= m.x56 - m.x302 - m.x305", "Var(within=Binary,bounds=(0,1),initialize=0) m.b726 = Var(within=Binary,bounds=(0,1),initialize=0) m.b727 = Var(within=Binary,bounds=(0,1),initialize=0) m.b728 = Var(within=Binary,bounds=(0,1),initialize=0)", "+ m.b750 <= 1) m.c1232 = Constraint(expr= m.b750 + m.b751", "0) m.c1482 = Constraint(expr= m.b669 - m.b681 >= 0) m.c1483", "Constraint(expr= - m.b602 + m.b611 + m.b614 >= 0) m.c1377", "Var(within=Binary,bounds=(0,1),initialize=0) m.b691 = Var(within=Binary,bounds=(0,1),initialize=0) m.b692 = Var(within=Binary,bounds=(0,1),initialize=0) m.b693 = Var(within=Binary,bounds=(0,1),initialize=0)", "<= 0.940066550763924) m.c739 = 
Constraint(expr= m.x505 + 0.940066550763924*m.b667 <= 0.940066550763924)", "+ m.x846 == 0) m.c994 = Constraint(expr= 8*m.b757 + m.x847", "m.x449 + 9*m.b644 <= 9) m.c555 = Constraint(expr= m.x450 +", "m.c354 = Constraint(expr= m.x309 - 15*m.b627 <= 0) m.c355 =", "+ 0.999* m.b659) <= 0) m.c666 = Constraint(expr=(m.x489/(0.001 + 0.999*m.b660)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x221 = Var(within=Reals,bounds=(0,None),initialize=0) m.x222 = Var(within=Reals,bounds=(0,None),initialize=0) m.x223 = Var(within=Reals,bounds=(0,None),initialize=0)", "- 1.18887736200171*m.b661 <= 0) m.c683 = Constraint(expr= m.x479 + 1.18887736200171*m.b659", "m.c1002 = Constraint(expr= 3*m.b765 + m.x855 == 0) m.c1003 =", "<= 1) m.c1268 = Constraint(expr= m.b768 + m.b769 <= 1)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x347 = Var(within=Reals,bounds=(0,None),initialize=0) m.x348 = Var(within=Reals,bounds=(0,None),initialize=0) m.x349 =", "0) m.c157 = Constraint(expr= m.x268 - 2.54515263975353*m.b607 <= 0) m.c158", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x149 = Var(within=Reals,bounds=(0,None),initialize=0) m.x150 = Var(within=Reals,bounds=(0,None),initialize=0) m.x151 =", "3*m.b736 - 5*m.b737 - 7*m.b738 - 6*m.b739 - 2*m.b740 -", "Var(within=Reals,bounds=(None,None),initialize=0) m.x840 = Var(within=Reals,bounds=(None,None),initialize=0) m.x841 = Var(within=Reals,bounds=(None,None),initialize=0) m.x842 = Var(within=Reals,bounds=(None,None),initialize=0)", "Constraint(expr= - 0.75*m.x495 + m.x519 == 0) m.c694 = Constraint(expr=", "== 0) m.c172 = Constraint(expr= m.x259 == 0) m.c173 =", "<= 0) m.c397 = Constraint(expr= m.x376 - 20*m.b631 <= 0)", "Constraint(expr= m.x592 - 9*m.b685 <= 0) m.c920 = Constraint(expr= m.x593", "m.c600 = Constraint(expr= m.x405 - 3.04984759446376*m.b651 <= 0) m.c601 =", "Constraint(expr= m.x491 == 0) m.c672 = Constraint(expr= m.x492 == 0)", "6*m.b759 - 3*m.b760 - 4*m.b761 - 8*m.b762 - 7*m.b763 -", "+ m.b771 <= 1) m.c1274 = 
Constraint(expr= m.b771 + m.b772", "= Constraint(expr= - m.b608 - m.b609 + m.b610 - m.b700", "m.b667 <= 0) m.c1085 = Constraint(expr= m.b668 - m.b669 <=", "m.c163 = Constraint(expr= - m.x250 + m.x280 == 0) m.c164", "Var(within=Reals,bounds=(0,None),initialize=0) m.x55 = Var(within=Reals,bounds=(0,None),initialize=0) m.x56 = Var(within=Reals,bounds=(0,None),initialize=0) m.x57 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr=(m.x579/(0.001 + 0.999*m.b678) - log(1 + m.x528/(0.001 + 0.999*m.b678)))*(0.001 +", "m.c35 = Constraint(expr= m.x137 - m.x140 - m.x143 == 0)", "= Constraint(expr=(m.x340/(0.001 + 0.999*m.b619) - log(1 + m.x271/(0.001 + 0.999*m.b619)))*(0.001", "m.c219 = Constraint(expr= m.x285 - 3.34221486003388*m.b612 <= 0) m.c220 =", "m.x544/(0.001 + 0.999*m.b673)))*(0.001 + 0.999* m.b673) <= 0) m.c791 =", "Var(within=Binary,bounds=(0,1),initialize=0) m.b759 = Var(within=Binary,bounds=(0,1),initialize=0) m.b760 = Var(within=Binary,bounds=(0,1),initialize=0) m.b761 = Var(within=Binary,bounds=(0,1),initialize=0)", "= Constraint(expr= m.b621 - m.b622 <= 0) m.c1040 = Constraint(expr=", "= Constraint(expr= m.x179 - m.x188 - m.x191 - m.x194 ==", "0) m.c970 = Constraint(expr= 2*m.b733 + m.x823 == 0) m.c971", "m.c859 = Constraint(expr= m.x529 - 0.994083415506506*m.b679 <= 0) m.c860 =", "0) m.c180 = Constraint(expr= m.x36 - m.x255 - m.x258 ==", "- 6*m.b700 - 7*m.b701 - 7*m.b702 - 4*m.b703 - 4*m.b704", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x50 = Var(within=Reals,bounds=(0,None),initialize=0) m.x51 = Var(within=Reals,bounds=(0,None),initialize=0) m.x52 =", "Constraint(expr= m.x268 - 2.54515263975353*m.b607 <= 0) m.c158 = Constraint(expr= m.x272", "m.x420 == 0) m.c388 = Constraint(expr= m.x112 - m.x418 -", "== 0) m.c752 = Constraint(expr= m.x509 == 0) m.c753 =", "- m.b610 <= 0) m.c1027 = Constraint(expr= m.b609 - m.b610", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x31 = Var(within=Reals,bounds=(0,None),initialize=0) m.x32 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x33 =", "= Constraint(expr= m.x443 == 0) m.c510 = Constraint(expr= m.x444 ==", "m.b728 = Var(within=Binary,bounds=(0,1),initialize=0) m.b729 = Var(within=Binary,bounds=(0,1),initialize=0) m.b730 = Var(within=Binary,bounds=(0,1),initialize=0) m.b731", "- 8*m.b773 - 3*m.b774 - 4*m.b775, sense=maximize) m.c2 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x328 = Var(within=Reals,bounds=(0,None),initialize=0) m.x329 = Var(within=Reals,bounds=(0,None),initialize=0) m.x330 =", "m.x169 - m.x508 - m.x511 == 0) m.c764 = Constraint(expr=", "m.c826 = Constraint(expr= m.x187 - m.x550 - m.x553 == 0)", "m.b761 + m.b762 <= 1) m.c1254 = Constraint(expr= m.b761 +", "0) m.c929 = Constraint(expr= 6*m.b692 + m.x782 == 0) m.c930", "m.c453 = Constraint(expr= m.x366 + 1.26558121681553*m.b636 <= 1.26558121681553) m.c454 =", "m.x117 = Var(within=Reals,bounds=(0,None),initialize=0) m.x118 = Var(within=Reals,bounds=(0,None),initialize=0) m.x119 = Var(within=Reals,bounds=(0,None),initialize=0) m.x120", "Var(within=Reals,bounds=(None,None),initialize=0) m.obj = Objective(expr= - m.x2 - m.x3 - m.x4", "m.x386 + m.x440 == 0) m.c504 = Constraint(expr= - m.x387", "m.x514 = Var(within=Reals,bounds=(0,None),initialize=0) m.x515 = Var(within=Reals,bounds=(0,None),initialize=0) m.x516 = Var(within=Reals,bounds=(0,None),initialize=0) m.x517", "m.c494 = Constraint(expr= m.x383 + 33.5*m.b638 <= 33.5) m.c495 =", "m.x193 = Var(within=Reals,bounds=(0,None),initialize=0) m.x194 = Var(within=Reals,bounds=(0,None),initialize=0) m.x195 = Var(within=Reals,bounds=(0,None),initialize=0) m.x196", "0.999*m.b627)))*(0.001 + 0.999* m.b627) <= 0) m.c340 = Constraint(expr=(m.x358/(0.001 +", "Constraint(expr= m.x595 == 0) m.c905 = Constraint(expr= m.x191 - m.x560", "Var(within=Reals,bounds=(0,None),initialize=0) m.x191 = Var(within=Reals,bounds=(0,None),initialize=0) m.x192 = Var(within=Reals,bounds=(0,None),initialize=0) m.x193 = 
Var(within=Reals,bounds=(0,None),initialize=0)", "m.x177 - m.x528 - m.x534 == 0) m.c853 = Constraint(expr=", "- m.x544 - m.x547 == 0) m.c800 = Constraint(expr= m.x197", "= Constraint(expr= m.b747 + m.b748 <= 1) m.c1227 = Constraint(expr=", "== 0) m.c350 = Constraint(expr= m.x77 - m.x356 - m.x359", "Constraint(expr= m.b710 + m.b712 <= 1) m.c1156 = Constraint(expr= m.b711", "- m.x491 == 0) m.c678 = Constraint(expr= m.x150 - m.x489", "= Constraint(expr= m.x345 - 13.5*m.b621 <= 0) m.c307 = Constraint(expr=", "- 0.9*log(1 + m.x291/(0.001 + 0.999*m.b615)))*(0.001 + 0.999* m.b615) <=", "m.x558 = Var(within=Reals,bounds=(0,None),initialize=0) m.x559 = Var(within=Reals,bounds=(0,None),initialize=0) m.x560 = Var(within=Reals,bounds=(0,None),initialize=0) m.x561", "m.c1067 = Constraint(expr= m.b650 - m.b651 <= 0) m.c1068 =", "= Constraint(expr= m.b740 + m.b741 <= 1) m.c1214 = Constraint(expr=", "m.c25 = Constraint(expr= - m.x73 - m.x91 + m.x94 ==", "m.b623 + m.b641 + m.b644 >= 0) m.c1398 = Constraint(expr=", "<= 1) m.c1238 = Constraint(expr= m.b753 + m.b754 <= 1)", "= Constraint(expr= m.x104 - m.x404 - m.x407 == 0) m.c594", "0) m.c1036 = Constraint(expr= m.b618 - m.b619 <= 0) m.c1037", "- m.b773 <= 0) m.c1371 = Constraint(expr= - m.b683 +", "+ 15*m.b685 <= 15) m.c917 = Constraint(expr= m.x590 - 9*m.b683", "Constraint(expr= m.x76 - m.x352 - m.x355 == 0) m.c326 =", "0) m.c806 = Constraint(expr= m.x545 + 0.705049913072943*m.b671 <= 0.705049913072943) m.c807", "m.x104 = Var(within=Reals,bounds=(0,None),initialize=0) m.x105 = Var(within=Reals,bounds=(0,None),initialize=0) m.x106 = Var(within=Reals,bounds=(0,None),initialize=0) m.x107", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x361 = Var(within=Reals,bounds=(0,None),initialize=0) m.x362 = Var(within=Reals,bounds=(0,None),initialize=0) m.x363 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x391 = Var(within=Reals,bounds=(0,None),initialize=0) m.x392 = Var(within=Reals,bounds=(0,None),initialize=0) m.x393 = 
Var(within=Reals,bounds=(0,None),initialize=0)", "m.b621 - m.b711 <= 0) m.c1309 = Constraint(expr= - m.b620", "= Constraint(expr= m.b650 - m.b651 <= 0) m.c1068 = Constraint(expr=", "- 0.85*log(1 + m.x411/(0.001 + 0.999*m.b654)))*(0.001 + 0.999* m.b654) <=", "- m.x97 - m.x100 == 0) m.c29 = Constraint(expr= m.x77", "0) m.c1061 = Constraint(expr= m.b644 - m.b645 <= 0) m.c1062", "m.x150 - m.x489 - m.x492 == 0) m.c679 = Constraint(expr=", "Constraint(expr= m.x316 - 1.83548069293539*m.b613 <= 0) m.c227 = Constraint(expr= m.x320", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x862 = Var(within=Reals,bounds=(None,None),initialize=0) m.x863 = Var(within=Reals,bounds=(None,None),initialize=0) m.x864 =", "+ m.x278 == 0) m.c165 = Constraint(expr= - 0.5*m.x255 +", "Constraint(expr= 5*m.b697 + m.x787 == 0) m.c935 = Constraint(expr= 6*m.b698", "m.x258 == 0) m.c172 = Constraint(expr= m.x259 == 0) m.c173", "Constraint(expr= m.x179 - m.x536 - m.x539 == 0) m.c768 =", "m.b664 - m.b754 <= 0) m.c1352 = Constraint(expr= m.b665 -", "m.c94 = Constraint(expr= m.x16 - m.x232 - m.x235 == 0)", "m.x350 - 9*m.b623 <= 0) m.c333 = Constraint(expr= m.x351 -", "== 0) m.c903 = Constraint(expr= m.x594 == 0) m.c904 =", "- m.x141 - m.x144 == 0) m.c37 = Constraint(expr= m.x139", "m.x346 - m.x349 == 0) m.c299 = Constraint(expr= m.x296 -", "Constraint(expr= m.x499 == 0) m.c698 = Constraint(expr= m.x521 == 0)", "== 0) m.c908 = Constraint(expr= m.x209 - m.x590 - m.x593", "0.6*m.x302 + m.x350 == 0) m.c312 = Constraint(expr= - 0.6*m.x303", "m.b712 <= 0) m.c1310 = Constraint(expr= m.b623 - m.b713 <=", "m.b663 - m.b753 <= 0) m.c1351 = Constraint(expr= - m.b662", "Var(within=Reals,bounds=(0,None),initialize=0) m.x49 = Var(within=Reals,bounds=(0,None),initialize=0) m.x50 = Var(within=Reals,bounds=(0,None),initialize=0) m.x51 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b739 = Var(within=Binary,bounds=(0,1),initialize=0) m.b740 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b741 =", "+ m.b623 + m.b626 >= 0) m.c1392 = Constraint(expr= -", "= Constraint(expr= m.x228 + 3.71357206670431*m.b597 <= 3.71357206670431) m.c79 = Constraint(expr=", "- m.b623 + m.b641 + m.b644 >= 0) m.c1398 =", "m.x54 - m.x297 - m.x300 == 0) m.c295 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x114 = Var(within=Reals,bounds=(0,None),initialize=0) m.x115 = Var(within=Reals,bounds=(0,None),initialize=0) m.x116 = Var(within=Reals,bounds=(0,None),initialize=0)", "<= 0) m.c254 = Constraint(expr= m.x332 + 1.32154609891348*m.b614 <= 1.32154609891348)", "- m.b648 <= 0) m.c1065 = Constraint(expr= m.b647 - m.b649", "Var(within=Reals,bounds=(0,None),initialize=0) m.x546 = Var(within=Reals,bounds=(0,None),initialize=0) m.x547 = Var(within=Reals,bounds=(0,None),initialize=0) m.x548 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b640 >= 0) m.c1397 = Constraint(expr= - m.b623 + m.b641", "m.b655 <= 0) m.c1073 = Constraint(expr= m.b656 - m.b657 <=", "m.c1066 = Constraint(expr= m.b648 - m.b649 <= 0) m.c1067 =", "Constraint(expr= m.x182 - m.x542 - m.x545 == 0) m.c798 =", "Constraint(expr= m.b626 - m.b628 <= 0) m.c1045 = Constraint(expr= m.b627", "0) m.c485 = Constraint(expr= m.x368 - 1.26558121681553*m.b638 <= 0) m.c486", "== 0) m.c755 = Constraint(expr= m.x515 == 0) m.c756 =", "Constraint(expr= m.b773 + m.b775 <= 1) m.c1279 = Constraint(expr= m.b773", "m.c823 = Constraint(expr= m.x577 == 0) m.c824 = Constraint(expr= m.x185", "== 0) m.c562 = Constraint(expr= m.x403 == 0) m.c563 =", "0.994083415506506*m.b666 <= 0) m.c742 = Constraint(expr= m.x526 - 0.994083415506506*m.b667 <=", "m.x479 = Var(within=Reals,bounds=(0,None),initialize=0) m.x480 = Var(within=Reals,bounds=(0,None),initialize=0) m.x481 = Var(within=Reals,bounds=(0,None),initialize=0) m.x482", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x11 = Var(within=Reals,bounds=(0,None),initialize=0) m.x12 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x13 =", "m.c404 = Constraint(expr= m.x419 + 20*m.b629 <= 20) m.c405 =", "m.x253 == 0) m.c179 = Constraint(expr= m.x35 - m.x254 -", "m.c1267 = Constraint(expr= m.b767 + m.b768 <= 1) m.c1268 =", "= Constraint(expr= m.x468 + 1.18887736200171*m.b654 <= 1.18887736200171) m.c637 = Constraint(expr=", "Constraint(expr= m.b680 - m.b681 <= 0) m.c1098 = Constraint(expr= m.b680", "m.b627 >= 0) m.c1429 = Constraint(expr= m.b610 - m.b628 >=", "m.x68 = Var(within=Reals,bounds=(0,None),initialize=0) m.x69 = Var(within=Reals,bounds=(0,None),initialize=0) m.x70 = Var(within=Reals,bounds=(0,None),initialize=0) m.x71", "= Constraint(expr= m.x71 - m.x344 - m.x347 == 0) m.c297", "- 9*m.b749 - 2*m.b750 - 9*m.b751 - 5*m.b752 - 8*m.b753", "= Constraint(expr= - m.b617 - m.b618 + m.b619 - m.b709", "m.x224 - 3.71357206670431*m.b596 <= 0) m.c75 = Constraint(expr= m.x225 -", "m.c582 = Constraint(expr= m.x456 + 1.04900943706034*m.b648 <= 1.04900943706034) m.c583 =", "m.b630 + m.b631 - m.b721 <= 0) m.c1319 = Constraint(expr=", "- m.b682 >= 0) m.c1484 = Constraint(expr= m.b668 - m.b683", "0) m.c353 = Constraint(expr= m.x308 - 15*m.b626 <= 0) m.c354", "Var(within=Reals,bounds=(0,None),initialize=0) m.x246 = Var(within=Reals,bounds=(0,None),initialize=0) m.x247 = Var(within=Reals,bounds=(0,None),initialize=0) m.x248 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b612 + m.b613 - m.b703 <= 0) m.c1301 = Constraint(expr=", ">= 0) m.c1435 = Constraint(expr= m.b616 - m.b634 >= 0)", "m.c531 = Constraint(expr= - m.x393 + m.x447 == 0) m.c532", "m.x408 = Var(within=Reals,bounds=(0,None),initialize=0) m.x409 = Var(within=Reals,bounds=(0,None),initialize=0) m.x410 = Var(within=Reals,bounds=(0,None),initialize=0) m.x411", "0) m.c879 = Constraint(expr= m.x189 - m.x555 - m.x558 ==", "Constraint(expr= m.b752 + m.b753 <= 1) m.c1236 = Constraint(expr= m.b752", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b751 = Var(within=Binary,bounds=(0,1),initialize=0) 
m.b752 = Var(within=Binary,bounds=(0,1),initialize=0) m.b753 =", "0) m.c56 = Constraint(expr= m.x215 == 0) m.c57 = Constraint(expr=", "0) m.c722 = Constraint(expr= m.x503 == 0) m.c723 = Constraint(expr=", "== 0) m.c32 = Constraint(expr= m.x134 - m.x137 == 0)", "sc si # Total cont binary integer sos1 sos2 scont", "+ m.b724 <= 1) m.c1179 = Constraint(expr= m.b722 + m.b724", "= Constraint(expr= m.b773 + m.b774 <= 1) m.c1278 = Constraint(expr=", "- m.x165 - m.x168 == 0) m.c46 = Constraint(expr= m.x160", "Constraint(expr= m.x58 - m.x304 - m.x307 == 0) m.c323 =", "= Constraint(expr= m.x496 - 0.940066550763924*m.b664 <= 0) m.c710 = Constraint(expr=", "0) m.c1285 = Constraint(expr= - m.b596 - m.b597 + m.b598", "0) m.c130 = Constraint(expr= m.x262 - 3.34221486003388*m.b604 <= 0) m.c131", "+ 405*m.x201 + 190*m.x202 + 280*m.x203 + 400*m.x204 + 430*m.x205", "Var(within=Reals,bounds=(0,None),initialize=0) m.x315 = Var(within=Reals,bounds=(0,None),initialize=0) m.x316 = Var(within=Reals,bounds=(0,None),initialize=0) m.x317 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x445 = Var(within=Reals,bounds=(0,None),initialize=0) m.x446 = Var(within=Reals,bounds=(0,None),initialize=0) m.x447 =", "== 0) m.c175 = Constraint(expr= m.x283 == 0) m.c176 =", "<= 0) m.c1293 = Constraint(expr= - m.b605 + m.b606 -", "= Constraint(expr= - m.b620 + m.b621 - m.b711 <= 0)", "m.b611 - m.b612 + m.b613 - m.b703 <= 0) m.c1301", "m.b663 + m.b664 - m.b754 <= 0) m.c1352 = Constraint(expr=", "m.b678 + m.b679 - m.b769 <= 0) m.c1367 = Constraint(expr=", "m.b644 + m.b645 - m.b735 <= 0) m.c1333 = Constraint(expr=", "== 0) m.c140 = Constraint(expr= m.x272 == 0) m.c141 =", "0) m.c830 = Constraint(expr= m.x548 - 0.705049913072943*m.b674 <= 0) m.c831", "<= 1) m.c1148 = Constraint(expr= m.b708 + m.b709 <= 1)", "Constraint(expr= m.b614 - m.b632 >= 0) m.c1434 = Constraint(expr= m.b615", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x259 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x260 = Var(within=Reals,bounds=(0,None),initialize=0) m.x261 =", "m.c412 = Constraint(expr= m.x337 == 0) m.c413 = Constraint(expr= m.x425", "Constraint(expr= m.b663 - m.b664 <= 0) m.c1082 = Constraint(expr= m.b665", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x833 = Var(within=Reals,bounds=(None,None),initialize=0) m.x834 = Var(within=Reals,bounds=(None,None),initialize=0) m.x835 =", "Constraint(expr= 9*m.b724 + m.x814 == 0) m.c962 = Constraint(expr= 2*m.b725", "m.c1371 = Constraint(expr= - m.b683 + m.b684 - m.b774 <=", "m.x327 - m.x333 == 0) m.c244 = Constraint(expr= m.x67 -", "m.x134 = Var(within=Reals,bounds=(0,None),initialize=0) m.x135 = Var(within=Reals,bounds=(0,None),initialize=0) m.x136 = Var(within=Reals,bounds=(0,None),initialize=0) m.x137", "= Constraint(expr= m.x431 + 0.572481933717686*m.b635 <= 0.572481933717686) m.c459 = Constraint(expr=", "= Constraint(expr= m.x500 - 0.940066550763924*m.b665 <= 0) m.c735 = Constraint(expr=", "m.c1119 = Constraint(expr= m.b692 + m.b694 <= 1) m.c1120 =", "+ 430*m.x205 + 290*m.x206 + 300*m.x207 + 240*m.x208 + 350*m.x209", "= Constraint(expr= m.x208 - m.x586 - m.x589 == 0) m.c884", "Constraint(expr= - 0.9*m.x297 + m.x345 == 0) m.c286 = Constraint(expr=", "<= 0) m.c458 = Constraint(expr= m.x431 + 0.572481933717686*m.b635 <= 0.572481933717686)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x534 = Var(within=Reals,bounds=(0,None),initialize=0) m.x535 = Var(within=Reals,bounds=(0,None),initialize=0) m.x536 = Var(within=Reals,bounds=(0,None),initialize=0)", "== 0) m.c295 = Constraint(expr= m.x55 - m.x298 - m.x301", "- 0.9*log(1 + m.x290/(0.001 + 0.999*m.b614)))*(0.001 + 0.999* m.b614) <=", "<= 1.32154609891348) m.c426 = Constraint(expr= m.x336 + 1.32154609891348*m.b633 <= 1.32154609891348)", "= Constraint(expr= m.b761 + m.b763 <= 1) m.c1255 = Constraint(expr=", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b767 = Var(within=Binary,bounds=(0,1),initialize=0) 
m.b768 = Var(within=Binary,bounds=(0,1),initialize=0) m.b769 =", "1.26558121681553*m.b635 <= 1.26558121681553) m.c453 = Constraint(expr= m.x366 + 1.26558121681553*m.b636 <=", "m.x145 == 0) m.c38 = Constraint(expr= - m.x146 - m.x149", "9*m.b642 <= 0) m.c520 = Constraint(expr= m.x388 - 9*m.b643 <=", "m.b702 + m.b703 <= 1) m.c1139 = Constraint(expr= m.b704 +", "0) m.c1416 = Constraint(expr= m.b603 - m.b615 >= 0) m.c1417", "4*m.b687 - 6*m.b688 - 8*m.b689 - 7*m.b690 - 6*m.b691 -", "= Constraint(expr= m.x99 - m.x393 - m.x396 == 0) m.c541", "m.b634 >= 0) m.c1436 = Constraint(expr= m.b617 - m.b635 >=", "m.x306 = Var(within=Reals,bounds=(0,None),initialize=0) m.x307 = Var(within=Reals,bounds=(0,None),initialize=0) m.x308 = Var(within=Reals,bounds=(0,None),initialize=0) m.x309", "- m.b605 >= 0) m.c1407 = Constraint(expr= m.b597 + m.b600", "- m.b617 + m.b618 - m.b708 <= 0) m.c1306 =", "0) m.c980 = Constraint(expr= m.b743 + m.x833 == 0) m.c981", "Constraint(expr= - m.b617 + m.b618 - m.b708 <= 0) m.c1306", "= Constraint(expr= m.x384 + 33.5*m.b639 <= 33.5) m.c496 = Constraint(expr=", "- m.x325 == 0) m.c383 = Constraint(expr= m.x86 - m.x374", "m.x157 - m.x160 == 0) m.c44 = Constraint(expr= m.x158 -", "1.18887736200171*m.b657 <= 1.18887736200171) m.c658 = Constraint(expr= m.x475 + 1.18887736200171*m.b658 <=", "m.x422 = Var(within=Reals,bounds=(0,None),initialize=0) m.x423 = Var(within=Reals,bounds=(0,None),initialize=0) m.x424 = Var(within=Reals,bounds=(0,None),initialize=0) m.x425", "m.x322 == 0) m.c218 = Constraint(expr= m.x284 - 3.34221486003388*m.b611 <=", "m.b755 + m.b757 <= 1) m.c1246 = Constraint(expr= m.b756 +", "+ m.b630 >= 0) m.c1381 = Constraint(expr= - m.b613 +", "= Constraint(expr= m.x533 == 0) m.c846 = Constraint(expr= m.x534 ==", "2*m.b725 + m.x815 == 0) m.c963 = Constraint(expr= 6*m.b726 +", "<= 0) m.c124 = Constraint(expr= m.x238 - 4.45628648004517*m.b604 <= 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x72 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x73 = Var(within=Reals,bounds=(0,None),initialize=0) m.x74 =", "+ m.x17 == 0) m.c6 = Constraint(expr= - m.x12 -", "7*m.b690 + m.x780 == 0) m.c928 = Constraint(expr= 6*m.b691 +", "m.c1282 = Constraint(expr= m.b774 + m.b775 <= 1) m.c1283 =", "m.b724 <= 1) m.c1180 = Constraint(expr= m.b723 + m.b724 <=", "m.c1190 = Constraint(expr= m.b729 + m.b730 <= 1) m.c1191 =", "m.c1113 = Constraint(expr= m.b689 + m.b691 <= 1) m.c1114 =", "<= 0) m.c1351 = Constraint(expr= - m.b662 - m.b663 +", "0) m.c672 = Constraint(expr= m.x492 == 0) m.c673 = Constraint(expr=", "10*m.x86 - 5*m.x87 - 5*m.x88 - 5*m.x89 - 5*m.x90 -", "m.x536 - m.x539 == 0) m.c768 = Constraint(expr= m.x180 -", "= Constraint(expr= m.x427 == 0) m.c416 = Constraint(expr= m.x65 -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x201 = Var(within=Reals,bounds=(0,None),initialize=0) m.x202 = Var(within=Reals,bounds=(0,None),initialize=0) m.x203 =", "- 9*m.b644 <= 0) m.c546 = Constraint(expr= m.x393 - 9*m.b645", "m.c624 = Constraint(expr= m.x135 - m.x465 - m.x468 == 0)", "m.c1135 = Constraint(expr= m.b701 + m.b702 <= 1) m.c1136 =", "Var(within=Binary,bounds=(0,1),initialize=0) m.b644 = Var(within=Binary,bounds=(0,1),initialize=0) m.b645 = Var(within=Binary,bounds=(0,1),initialize=0) m.b646 = Var(within=Binary,bounds=(0,1),initialize=0)", "= Constraint(expr= - 0.5*m.x255 + m.x279 == 0) m.c166 =", "0) m.c1388 = Constraint(expr= - m.b617 + m.b635 + m.b638", "= Constraint(expr= - 0.75*m.x238 + m.x262 == 0) m.c110 =", "m.x215 + 40*m.b596 <= 40) m.c72 = Constraint(expr= m.x216 +", "m.x348 == 0) m.c298 = Constraint(expr= m.x73 - m.x346 -", "m.c966 = Constraint(expr= 8*m.b729 + m.x819 == 0) m.c967 =", "m.b613 <= 0) m.c1030 = Constraint(expr= m.b612 - m.b613 <=", "+ 0.999*m.b656)))*(0.001 + 0.999*m.b656) <= 0) m.c639 = Constraint(expr=(m.x483/(0.001 +", "m.b639) <= 0) m.c466 = Constraint(expr=(m.x436/(0.001 + 0.999*m.b640) - 0.65*log(1", "m.x800 == 0) m.c948 = Constraint(expr= 
5*m.b711 + m.x801 ==", "m.x849 == 0) m.c997 = Constraint(expr= 3*m.b760 + m.x850 ==", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b633 = Var(within=Binary,bounds=(0,1),initialize=0) m.b634 = Var(within=Binary,bounds=(0,1),initialize=0) m.b635 =", "<= 0) m.c553 = Constraint(expr= m.x448 - 9*m.b646 <= 0)", "m.x484 = Var(within=Reals,bounds=(0,None),initialize=0) m.x485 = Var(within=Reals,bounds=(0,None),initialize=0) m.x486 = Var(within=Reals,bounds=(0,None),initialize=0) m.x487", "0) m.c285 = Constraint(expr= - 0.9*m.x297 + m.x345 == 0)", "Var(within=Binary,bounds=(0,1),initialize=0) m.b736 = Var(within=Binary,bounds=(0,1),initialize=0) m.b737 = Var(within=Binary,bounds=(0,1),initialize=0) m.b738 = Var(within=Binary,bounds=(0,1),initialize=0)", "0) m.c386 = Constraint(expr= m.x110 - m.x416 - m.x419 ==", "- m.x485 == 0) m.c651 = Constraint(expr= m.x147 - m.x483", "Var(within=Binary,bounds=(0,1),initialize=0) m.b771 = Var(within=Binary,bounds=(0,1),initialize=0) m.b772 = Var(within=Binary,bounds=(0,1),initialize=0) m.b773 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.x397 + 9*m.b646 <= 9) m.c551 = Constraint(expr= m.x446 -", "m.x261 == 0) m.c109 = Constraint(expr= - 0.75*m.x238 + m.x262", "- m.b600 <= 0) m.c1017 = Constraint(expr= m.b599 - m.b601", "= Constraint(expr= m.b692 + m.b694 <= 1) m.c1117 = Constraint(expr=", "= Constraint(expr= m.b635 - m.b637 <= 0) m.c1054 = Constraint(expr=", "Constraint(expr= m.x584 - 13.5*m.b680 <= 0) m.c891 = Constraint(expr= m.x585", "0) m.c174 = Constraint(expr= m.x282 == 0) m.c175 = Constraint(expr=", "Constraint(expr= m.b596 - m.b686 <= 0) m.c1284 = Constraint(expr= -", "= Constraint(expr= - m.b629 + m.b630 - m.b720 <= 0)", "m.x233 + 4.45628648004517*m.b599 <= 4.45628648004517) m.c105 = Constraint(expr= m.x234 +", "m.c1274 = Constraint(expr= m.b771 + m.b772 <= 1) m.c1275 =", "+ 3.04984759446376*m.b652 <= 3.04984759446376) m.c605 = Constraint(expr= m.x458 - 1.11894339953103*m.b650", "- m.x418 - m.x421 == 0) m.c389 = 
Constraint(expr= m.x317", "m.b762 = Var(within=Binary,bounds=(0,1),initialize=0) m.b763 = Var(within=Binary,bounds=(0,1),initialize=0) m.b764 = Var(within=Binary,bounds=(0,1),initialize=0) m.b765", "= Constraint(expr= 2*m.b740 + m.x830 == 0) m.c978 = Constraint(expr=", "m.x427 = Var(within=Reals,bounds=(0,None),initialize=0) m.x428 = Var(within=Reals,bounds=(0,None),initialize=0) m.x429 = Var(within=Reals,bounds=(0,None),initialize=0) m.x430", "5*m.b752 - 8*m.b753 - 4*m.b754 - 2*m.b755 - 3*m.b756 -", "<= 0) m.c1053 = Constraint(expr= m.b635 - m.b637 <= 0)", "= Constraint(expr= m.x485 + 0.78338879230327*m.b656 <= 0.78338879230327) m.c663 = Constraint(expr=", "0) m.c1007 = Constraint(expr= 2*m.b770 + m.x860 == 0) m.c1008", "= Constraint(expr= m.b737 + m.b739 <= 1) m.c1210 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x12 = Var(within=Reals,bounds=(0,None),initialize=0) m.x13 = Var(within=Reals,bounds=(0,None),initialize=0) m.x14 =", "= Constraint(expr= m.b756 + m.b757 <= 1) m.c1245 = Constraint(expr=", "0 0 0 # # Variable counts # x b", "m.c22 = Constraint(expr= m.x70 - m.x82 - m.x85 == 0)", "== 0) m.c324 = Constraint(expr= m.x75 - m.x351 - m.x354", "m.x564 = Var(within=Reals,bounds=(0,None),initialize=0) m.x565 = Var(within=Reals,bounds=(0,None),initialize=0) m.x566 = Var(within=Reals,bounds=(0,None),initialize=0) m.x567", "m.x424 - m.x427 == 0) m.c422 = Constraint(expr= m.x329 -", ">= 0) m.c1431 = Constraint(expr= m.b612 - m.b630 >= 0)", "Constraint(expr= m.x345 - 13.5*m.b621 <= 0) m.c307 = Constraint(expr= m.x346", "m.c1213 = Constraint(expr= m.b740 + m.b741 <= 1) m.c1214 =", "m.x170 = Var(within=Reals,bounds=(0,30),initialize=0) m.x171 = Var(within=Reals,bounds=(0,30),initialize=0) m.x172 = Var(within=Reals,bounds=(0,30),initialize=0) m.x173", "0.705049913072943*m.b672 <= 0) m.c805 = Constraint(expr= m.x544 - 0.705049913072943*m.b673 <=", "0) m.c274 = Constraint(expr= m.x271 - 2.54515263975353*m.b619 <= 0) m.c275", "= Constraint(expr= 
m.x570 + 0.666992981045719*m.b672 <= 0.666992981045719) m.c814 = Constraint(expr=", "m.c1181 = Constraint(expr= m.b725 + m.b726 <= 1) m.c1182 =", "m.x399/(0.001 + 0.999*m.b648)))*(0.001 + 0.999* m.b648) <= 0) m.c559 =", "2*m.b710 + m.x800 == 0) m.c948 = Constraint(expr= 5*m.b711 +", "= Constraint(expr= m.b753 + m.b754 <= 1) m.c1241 = Constraint(expr=", "Var(within=Binary,bounds=(0,1),initialize=0) m.b607 = Var(within=Binary,bounds=(0,1),initialize=0) m.b608 = Var(within=Binary,bounds=(0,1),initialize=0) m.b609 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.b766 <= 1) m.c1264 = Constraint(expr= m.b765 + m.b766 <=", "+ 0.999*m.b657) <= 0) m.c640 = Constraint(expr=(m.x484/(0.001 + 0.999*m.b658) -", "# Reformulation has removed 1 variable and 1 equation from", "= Constraint(expr= m.b695 + m.b696 <= 1) m.c1122 = Constraint(expr=", "m.c549 = Constraint(expr= m.x396 + 9*m.b645 <= 9) m.c550 =", "Var(within=Binary,bounds=(0,1),initialize=0) m.b716 = Var(within=Binary,bounds=(0,1),initialize=0) m.b717 = Var(within=Binary,bounds=(0,1),initialize=0) m.b718 = Var(within=Binary,bounds=(0,1),initialize=0)", "1.32154609891348) m.c426 = Constraint(expr= m.x336 + 1.32154609891348*m.b633 <= 1.32154609891348) m.c427", "m.c1097 = Constraint(expr= m.b680 - m.b681 <= 0) m.c1098 =", "m.b763 <= 0) m.c1361 = Constraint(expr= m.b674 - m.b764 <=", "Constraint(expr= m.x121 - m.x436 - m.x439 == 0) m.c485 =", "m.b743 <= 0) m.c1341 = Constraint(expr= - m.b653 + m.b654", "+ m.x814 == 0) m.c962 = Constraint(expr= 2*m.b725 + m.x815", "<= 40) m.c100 = Constraint(expr= m.x223 + 40*m.b601 <= 40)", "+ 0.999*m.b597) - log(1 + m.x213/(0.001 + 0.999*m.b597)))*(0.001 + 0.999*m.b597)", "Constraint(expr= m.b740 + m.b741 <= 1) m.c1214 = Constraint(expr= m.b741", "m.b772 <= 0) m.c1370 = Constraint(expr= m.b683 - m.b773 <=", "+ 250*m.x210 + 300*m.x211 - 5*m.b686 - 4*m.b687 - 6*m.b688", "m.x320 = Var(within=Reals,bounds=(0,None),initialize=0) m.x321 = Var(within=Reals,bounds=(0,None),initialize=0) m.x322 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x323", "0) m.c1068 = Constraint(expr= m.b650 - m.b652 <= 0) m.c1069", "+ m.b772 <= 1) m.c1276 = Constraint(expr= m.b771 + m.b772", "0) m.c1365 = Constraint(expr= - m.b677 + m.b678 - m.b768", "m.x292 = Var(within=Reals,bounds=(0,None),initialize=0) m.x293 = Var(within=Reals,bounds=(0,None),initialize=0) m.x294 = Var(within=Reals,bounds=(0,None),initialize=0) m.x295", "+ m.b620 + m.b623 + m.b626 >= 0) m.c1392 =", "- 6*m.b692 - 9*m.b693 - 4*m.b694 - 10*m.b695 - 9*m.b696", "Constraint(expr= m.x66 - m.x330 - m.x336 == 0) m.c418 =", "= Constraint(expr= m.x492 + 0.940066550763924*m.b660 <= 0.940066550763924) m.c691 = Constraint(expr=", "+ m.b624 + m.b627 >= 0) m.c1393 = Constraint(expr= -", "m.x244 - m.x247 == 0) m.c146 = Constraint(expr= m.x41 -", "m.b683 - m.b684 + m.b685 - m.b775 <= 0) m.c1373", "= Constraint(expr= m.x319 - 1.83548069293539*m.b631 <= 0) m.c392 = Constraint(expr=", "m.c469 = Constraint(expr= m.x373 == 0) m.c470 = Constraint(expr= m.x383", "<= 0) m.c559 = Constraint(expr=(m.x454/(0.001 + 0.999*m.b649) - 0.75*log(1 +", "== 0) m.c470 = Constraint(expr= m.x383 == 0) m.c471 =", "m.c530 = Constraint(expr= - m.x392 + m.x446 == 0) m.c531", "1.18887736200171*m.b653 <= 0) m.c633 = Constraint(expr= m.x465 - 1.18887736200171*m.b654 <=", "0.999*m.b672) - 1.25*log(1 + m.x543/(0.001 + 0.999*m.b672)))*(0.001 + 0.999* m.b672)", "m.x794 == 0) m.c942 = Constraint(expr= 3*m.b705 + m.x795 ==", "0) m.c883 = Constraint(expr= m.x208 - m.x586 - m.x589 ==", "m.x25 - m.x28 - m.x31 - m.x34 == 0) m.c14", "m.x87 = Var(within=Reals,bounds=(0,20),initialize=0) m.x88 = Var(within=Reals,bounds=(0,20),initialize=0) m.x89 = Var(within=Reals,bounds=(0,20),initialize=0) m.x90", "= Constraint(expr= 6*m.b698 + m.x788 == 0) m.c936 = Constraint(expr=", "m.c805 = Constraint(expr= m.x544 - 0.705049913072943*m.b673 <= 0) m.c806 =", ">= 0) m.c1398 = Constraint(expr= - m.b624 + m.b642 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x363 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x364 = Var(within=Reals,bounds=(0,None),initialize=0) m.x365 =", "- m.x426 == 0) m.c421 = Constraint(expr= m.x115 - m.x424", "<= 0) m.c456 = Constraint(expr= m.x429 - 0.572481933717686*m.b636 <= 0)", "m.c1189 = Constraint(expr= m.b728 + m.b729 <= 1) m.c1190 =", "0) m.c783 = Constraint(expr= m.x537 - 15*m.b669 <= 0) m.c784", "m.x559 = Var(within=Reals,bounds=(0,None),initialize=0) m.x560 = Var(within=Reals,bounds=(0,None),initialize=0) m.x561 = Var(within=Reals,bounds=(0,None),initialize=0) m.x562", "m.c445 = Constraint(expr= m.x82 - m.x364 - m.x367 == 0)", "m.c751 = Constraint(expr= - 0.5*m.x514 + m.x538 == 0) m.c752", "m.c1179 = Constraint(expr= m.b722 + m.b724 <= 1) m.c1180 =", "m.c100 = Constraint(expr= m.x223 + 40*m.b601 <= 40) m.c101 =", "<= 0) m.c498 = Constraint(expr= m.x435 - 2.30162356062425*m.b639 <= 0)", "m.c1485 = Constraint(expr= m.b669 - m.b684 >= 0) m.c1486 =", "Constraint(expr= - m.b599 - m.b600 + m.b601 - m.b691 <=", "Var(within=Binary,bounds=(0,1),initialize=0) m.b747 = Var(within=Binary,bounds=(0,1),initialize=0) m.b748 = Var(within=Binary,bounds=(0,1),initialize=0) m.b749 = Var(within=Binary,bounds=(0,1),initialize=0)", "Constraint(expr= m.b693 + m.b694 <= 1) m.c1119 = Constraint(expr= m.b692", "m.b637 = Var(within=Binary,bounds=(0,1),initialize=0) m.b638 = Var(within=Binary,bounds=(0,1),initialize=0) m.b639 = Var(within=Binary,bounds=(0,1),initialize=0) m.b640", "m.x110 - m.x416 - m.x419 == 0) m.c387 = Constraint(expr=", "Var(within=Binary,bounds=(0,1),initialize=0) m.b649 = Var(within=Binary,bounds=(0,1),initialize=0) m.b650 = Var(within=Binary,bounds=(0,1),initialize=0) m.b651 = Var(within=Binary,bounds=(0,1),initialize=0)", "- m.x435 - m.x438 == 0) m.c484 = Constraint(expr= m.x121", "+ m.x811 == 0) m.c959 = Constraint(expr= 3*m.b722 + m.x812", "+ m.b772 <= 1) m.c1273 = Constraint(expr= m.b770 + m.b771", "0) m.c144 = Constraint(expr= m.x30 - m.x243 - m.x246 ==", "0) m.c316 = Constraint(expr= 
m.x307 == 0) m.c317 = Constraint(expr=", "m.x346 - 13.5*m.b622 <= 0) m.c308 = Constraint(expr= m.x347 +", "m.b751 <= 1) m.c1231 = Constraint(expr= m.b749 + m.b750 <=", "Constraint(expr= m.b699 + m.b700 <= 1) m.c1133 = Constraint(expr= m.b701", "= Constraint(expr= - m.b650 - m.b651 + m.b652 - m.b742", "0.572481933717686) m.c459 = Constraint(expr= m.x432 + 0.572481933717686*m.b636 <= 0.572481933717686) m.c460", "0.85*log(1 + m.x411/(0.001 + 0.999*m.b654)))*(0.001 + 0.999* m.b654) <= 0)", "Constraint(expr= m.x539 == 0) m.c759 = Constraint(expr= m.x540 == 0)", "Constraint(expr= m.x335 + 1.32154609891348*m.b632 <= 1.32154609891348) m.c426 = Constraint(expr= m.x336", "Var(within=Reals,bounds=(0,30),initialize=0) m.x36 = Var(within=Reals,bounds=(0,30),initialize=0) m.x37 = Var(within=Reals,bounds=(0,30),initialize=0) m.x38 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= m.x418 - 20*m.b631 <= 0) m.c404 = Constraint(expr=", "m.x346 == 0) m.c287 = Constraint(expr= m.x299 == 0) m.c288", "m.x70 = Var(within=Reals,bounds=(0,None),initialize=0) m.x71 = Var(within=Reals,bounds=(0,None),initialize=0) m.x72 = Var(within=Reals,bounds=(0,None),initialize=0) m.x73", "m.x408 == 0) m.c595 = Constraint(expr= m.x106 - m.x406 -", "m.c373 = Constraint(expr= m.x325 == 0) m.c374 = Constraint(expr= m.x377", "m.x488 = Var(within=Reals,bounds=(0,None),initialize=0) m.x489 = Var(within=Reals,bounds=(0,None),initialize=0) m.x490 = Var(within=Reals,bounds=(0,None),initialize=0) m.x491", "= Constraint(expr= m.b653 - m.b654 <= 0) m.c1071 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x131 = Var(within=Reals,bounds=(0,None),initialize=0) m.x132 = Var(within=Reals,bounds=(0,None),initialize=0) m.x133 =", "Constraint(expr= m.x175 - m.x520 - m.x523 == 0) m.c707 =", "0) m.c1330 = Constraint(expr= - m.b641 - m.b642 + m.b643", "m.x569 = Var(within=Reals,bounds=(0,None),initialize=0) m.x570 = Var(within=Reals,bounds=(0,None),initialize=0) m.x571 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x572", "<= 0) m.c55 = Constraint(expr=(m.x226/(0.001 + 0.999*m.b598) - log(1 +", "Var(within=Binary,bounds=(0,1),initialize=0) m.b744 = Var(within=Binary,bounds=(0,1),initialize=0) m.b745 = Var(within=Binary,bounds=(0,1),initialize=0) m.b746 = Var(within=Binary,bounds=(0,1),initialize=0)", "Constraint(expr= m.b693 + m.b694 <= 1) m.c1121 = Constraint(expr= m.b695", "+ 0.994083415506506*m.b665 <= 0.994083415506506) m.c744 = Constraint(expr= m.x531 + 0.994083415506506*m.b666", "<= 1) m.c1164 = Constraint(expr= m.b716 + m.b718 <= 1)", "m.b753 + m.b754 <= 1) m.c1239 = Constraint(expr= m.b752 +", "<= 0) m.c1327 = Constraint(expr= - m.b638 - m.b639 +", "- 0.705049913072943*m.b672 <= 0) m.c805 = Constraint(expr= m.x544 - 0.705049913072943*m.b673", "m.b748 <= 0) m.c1346 = Constraint(expr= m.b659 - m.b749 <=", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x832 = Var(within=Reals,bounds=(None,None),initialize=0) m.x833 = Var(within=Reals,bounds=(None,None),initialize=0) m.x834 =", "2.54515263975353*m.b607 <= 0) m.c158 = Constraint(expr= m.x272 + 2.54515263975353*m.b605 <=", "= Constraint(expr= m.x180 - m.x189 - m.x192 - m.x195 ==", "m.c590 = Constraint(expr= m.x461 == 0) m.c591 = Constraint(expr= m.x462", "m.b775 = Var(within=Binary,bounds=(0,1),initialize=0) m.x776 = Var(within=Reals,bounds=(None,None),initialize=0) m.x777 = Var(within=Reals,bounds=(None,None),initialize=0) m.x778", "= Constraint(expr= m.x139 - m.x142 - m.x145 == 0) m.c38", "Var(within=Reals,bounds=(0,None),initialize=0) m.x337 = Var(within=Reals,bounds=(0,None),initialize=0) m.x338 = Var(within=Reals,bounds=(0,None),initialize=0) m.x339 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x430 = Var(within=Reals,bounds=(0,None),initialize=0) m.x431 = Var(within=Reals,bounds=(0,None),initialize=0) m.x432 =", "<= 1.32154609891348) m.c257 = Constraint(expr=(m.x338/(0.001 + 0.999*m.b617) - log(1 +", "= 
Constraint(expr= - 0.6*m.x560 + m.x590 == 0) m.c897 =", "== 0) m.c471 = Constraint(expr= m.x384 == 0) m.c472 =", "m.c558 = Constraint(expr=(m.x453/(0.001 + 0.999*m.b648) - 0.75*log(1 + m.x399/(0.001 +", "+ m.x797 == 0) m.c945 = Constraint(expr= 6*m.b708 + m.x798", "m.x195 == 0) m.c52 = Constraint(expr= m.x181 - m.x190 -", "m.c429 = Constraint(expr= m.x423 - 0.842233385663186*m.b633 <= 0) m.c430 =", "Constraint(expr= m.x382 - 33.5*m.b640 <= 0) m.c494 = Constraint(expr= m.x383", "= Constraint(expr= m.b711 + m.b712 <= 1) m.c1155 = Constraint(expr=", "m.b771 <= 1) m.c1272 = Constraint(expr= m.b770 + m.b772 <=", "9) m.c336 = Constraint(expr= m.x354 + 9*m.b624 <= 9) m.c337", "m.x438 == 0) m.c475 = Constraint(expr= m.x439 == 0) m.c476", "m.b734 + m.b735 <= 1) m.c1202 = Constraint(expr= m.b735 +", "0) m.c669 = Constraint(expr= m.x480 == 0) m.c670 = Constraint(expr=", "m.x371 + 1.26558121681553*m.b638 <= 1.26558121681553) m.c489 = Constraint(expr= m.x372 +", "m.x379 == 0) m.c377 = Constraint(expr= m.x419 == 0) m.c378", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x182 = Var(within=Reals,bounds=(0,None),initialize=0) m.x183 = Var(within=Reals,bounds=(0,None),initialize=0) m.x184 =", "m.x226 - m.x229 == 0) m.c68 = Constraint(expr= m.x212 -", "<= 0) m.c521 = Constraint(expr= m.x389 + 9*m.b641 <= 9)", "+ 0.78338879230327*m.b658 <= 0.78338879230327) m.c665 = Constraint(expr=(m.x488/(0.001 + 0.999*m.b659) -", "== 0) m.c731 = Constraint(expr= m.x176 - m.x524 - m.x530", "m.c1435 = Constraint(expr= m.b616 - m.b634 >= 0) m.c1436 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x248 = Var(within=Reals,bounds=(0,None),initialize=0) m.x249 = Var(within=Reals,bounds=(0,None),initialize=0) m.x250 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x75 - m.x96 - m.x99 == 0) m.c28 =", "<= 0) m.c1365 = Constraint(expr= - m.b677 + m.b678 -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x558 = Var(within=Reals,bounds=(0,None),initialize=0) m.x559 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x560 =", "== 0) m.c271 = Constraint(expr= m.x70 - m.x340 - m.x343", "m.x301 == 0) m.c290 = Constraint(expr= m.x347 == 0) m.c291", "m.c1384 = Constraint(expr= - m.b616 + m.b634 >= 0) m.c1385", "<= 0.666992981045719) m.c814 = Constraint(expr= m.x571 + 0.666992981045719*m.b673 <= 0.666992981045719)", "0) m.c368 = Constraint(expr= - m.x374 + m.x416 == 0)", "m.c27 = Constraint(expr= m.x75 - m.x96 - m.x99 == 0)", "m.x378 + 20*m.b630 <= 20) m.c400 = Constraint(expr= m.x379 +", "- m.x392 + m.x446 == 0) m.c531 = Constraint(expr= -", "6*m.b708 + m.x798 == 0) m.c946 = Constraint(expr= 7*m.b709 +", "m.c330 = Constraint(expr= m.x306 + 15*m.b624 <= 15) m.c331 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x495 = Var(within=Reals,bounds=(0,None),initialize=0) m.x496 = Var(within=Reals,bounds=(0,None),initialize=0) m.x497 =", "Constraint(expr= m.x8 - m.x218 - m.x221 == 0) m.c90 =", "- m.x104 - m.x107 == 0) m.c30 = Constraint(expr= m.x78", "- m.b718 <= 0) m.c1316 = Constraint(expr= m.b629 - m.b719", "m.x496 = Var(within=Reals,bounds=(0,None),initialize=0) m.x497 = Var(within=Reals,bounds=(0,None),initialize=0) m.x498 = Var(within=Reals,bounds=(0,None),initialize=0) m.x499", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x854 = Var(within=Reals,bounds=(None,None),initialize=0) m.x855 = Var(within=Reals,bounds=(None,None),initialize=0) m.x856 =", "0) m.c996 = Constraint(expr= 6*m.b759 + m.x849 == 0) m.c997", "+ 0.999*m.b658) - log(1 + m.x472/(0.001 + 0.999*m.b658)))*(0.001 + 0.999*m.b658)", "1) m.c1114 = Constraint(expr= m.b690 + m.b691 <= 1) m.c1115", "Constraint(expr= m.x474 == 0) m.c643 = Constraint(expr= m.x475 == 0)", "<= 0) m.c1326 = Constraint(expr= - m.b638 + m.b639 -", "0.999*m.b596)))*(0.001 + 0.999*m.b596) <= 0) m.c54 = Constraint(expr=(m.x225/(0.001 + 0.999*m.b597)", "m.c767 = Constraint(expr= m.x179 - m.x536 - m.x539 == 0)", "m.x345 - 13.5*m.b621 <= 0) m.c307 = Constraint(expr= m.x346 -", "Constraint(expr= 
m.b598 + m.b601 - m.b604 >= 0) m.c1406 =", "= Constraint(expr= - m.b624 + m.b642 + m.b645 >= 0)", "m.b642 - m.b643 <= 0) m.c1061 = Constraint(expr= m.b644 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x82 = Var(within=Reals,bounds=(0,None),initialize=0) m.x83 = Var(within=Reals,bounds=(0,None),initialize=0) m.x84 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= 8*m.b689 + m.x779 == 0) m.c927 = Constraint(expr=", "1.2*log(1 + m.x218/(0.001 + 0.999*m.b599)))*(0.001 + 0.999* m.b599) <= 0)", "<= 0) m.c1029 = Constraint(expr= m.b611 - m.b613 <= 0)", "m.x244/(0.001 + 0.999*m.b607)))*(0.001 + 0.999* m.b607) <= 0) m.c137 =", "+ m.x219/(0.001 + 0.999*m.b600)))*(0.001 + 0.999* m.b600) <= 0) m.c82", "Constraint(expr= 5*m.b707 + m.x797 == 0) m.c945 = Constraint(expr= 6*m.b708", "+ m.b762 <= 1) m.c1256 = Constraint(expr= m.b762 + m.b763", "Constraint(expr= 3*m.b756 + m.x846 == 0) m.c994 = Constraint(expr= 8*m.b757", "<= 0) m.c1308 = Constraint(expr= - m.b620 + m.b621 -", "Constraint(expr= m.b698 + m.b700 <= 1) m.c1132 = Constraint(expr= m.b699", "9*m.b624 <= 0) m.c334 = Constraint(expr= m.x352 - 9*m.b625 <=", "- 3*m.b774 - 4*m.b775, sense=maximize) m.c2 = Constraint(expr= m.x2 -", "- m.b670 <= 0) m.c1088 = Constraint(expr= m.b671 - m.b672", "= Constraint(expr= - m.b632 + m.b633 - m.b723 <= 0)", "+ m.b640 - m.b730 <= 0) m.c1328 = Constraint(expr= m.b641", "Var(within=Reals,bounds=(0,None),initialize=0) m.x287 = Var(within=Reals,bounds=(0,None),initialize=0) m.x288 = Var(within=Reals,bounds=(0,None),initialize=0) m.x289 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c715 = Constraint(expr= m.x520 - 0.705049913072943*m.b664 <= 0) m.c716 =", "3.71357206670431*m.b597 <= 3.71357206670431) m.c79 = Constraint(expr= m.x229 + 3.71357206670431*m.b598 <=", "m.x465 - 1.18887736200171*m.b654 <= 0) m.c634 = Constraint(expr= m.x466 -", "Constraint(expr= m.b620 - m.b621 <= 0) m.c1038 = Constraint(expr= m.b620", "= 
Var(within=Reals,bounds=(0,None),initialize=0) m.x332 = Var(within=Reals,bounds=(0,None),initialize=0) m.x333 = Var(within=Reals,bounds=(0,None),initialize=0) m.x334 =", "m.c769 = Constraint(expr= m.x181 - m.x538 - m.x541 == 0)", "20*m.x132 + 20*m.x133 + 30*m.x155 + 40*m.x156 + 40*m.x157 -", "0) m.c448 = Constraint(expr= m.x118 - m.x430 - m.x433 ==", "- m.b626 - m.b627 + m.b628 - m.b718 <= 0)", ">= 0) m.c1428 = Constraint(expr= m.b609 - m.b627 >= 0)", "m.x192 = Var(within=Reals,bounds=(0,None),initialize=0) m.x193 = Var(within=Reals,bounds=(0,None),initialize=0) m.x194 = Var(within=Reals,bounds=(0,None),initialize=0) m.x195", "Constraint(expr= - 0.5*m.x255 + m.x279 == 0) m.c166 = Constraint(expr=", "m.c474 = Constraint(expr= m.x438 == 0) m.c475 = Constraint(expr= m.x439", ">= 0) m.c1415 = Constraint(expr= m.b602 - m.b614 >= 0)", "m.x587 == 0) m.c882 = Constraint(expr= m.x207 - m.x585 -", "Constraint(expr= m.b755 + m.b756 <= 1) m.c1244 = Constraint(expr= m.b756", "= Constraint(expr= m.b670 - m.b682 >= 0) m.c1484 = Constraint(expr=", "+ 13.5*m.b620 <= 13.5) m.c309 = Constraint(expr= m.x348 + 13.5*m.b621", "m.x202 = Var(within=Reals,bounds=(0,None),initialize=0) m.x203 = Var(within=Reals,bounds=(0,None),initialize=0) m.x204 = Var(within=Reals,bounds=(0,None),initialize=0) m.x205", "m.b618 <= 0) m.c1035 = Constraint(expr= m.b617 - m.b619 <=", "Constraint(expr= m.b698 + m.b699 <= 1) m.c1130 = Constraint(expr= m.b699", "0 # FX 0 0 0 0 0 0 0", "m.c139 = Constraint(expr= m.x247 == 0) m.c140 = Constraint(expr= m.x272", "m.x421 + 20*m.b631 <= 20) m.c407 = Constraint(expr=(m.x422/(0.001 + 0.999*m.b632)", "1) m.c1185 = Constraint(expr= m.b725 + m.b727 <= 1) m.c1186", "+ 15*m.x113 + 20*m.x114 + 25*m.x115 + 10*m.x116 + 30*m.x117", "Constraint(expr= m.x407 + 3.04984759446376*m.b650 <= 3.04984759446376) m.c603 = Constraint(expr= m.x408", "- m.x482 - m.x485 == 0) m.c651 = Constraint(expr= m.x147", "<= 20) m.c401 = Constraint(expr= m.x416 - 20*m.b629 <= 0)", "m.x776 = 
Var(within=Reals,bounds=(None,None),initialize=0) m.x777 = Var(within=Reals,bounds=(None,None),initialize=0) m.x778 = Var(within=Reals,bounds=(None,None),initialize=0) m.x779", "- m.x150 + m.x153 == 0) m.c40 = Constraint(expr= -", "0.999*m.b648) - 0.75*log(1 + m.x399/(0.001 + 0.999*m.b648)))*(0.001 + 0.999* m.b648)", "0.7*log(1 + m.x364/(0.001 + 0.999*m.b637)))*(0.001 + 0.999* m.b637) <= 0)", "m.c285 = Constraint(expr= - 0.9*m.x297 + m.x345 == 0) m.c286", "m.b756 = Var(within=Binary,bounds=(0,1),initialize=0) m.b757 = Var(within=Binary,bounds=(0,1),initialize=0) m.b758 = Var(within=Binary,bounds=(0,1),initialize=0) m.b759", "m.b618 >= 0) m.c1420 = Constraint(expr= m.b607 - m.b619 >=", "0) m.c297 = Constraint(expr= m.x72 - m.x345 - m.x348 ==", "<= 0) m.c1093 = Constraint(expr= m.b675 - m.b676 <= 0)", "Constraint(expr= m.x267 - 2.54515263975353*m.b606 <= 0) m.c157 = Constraint(expr= m.x268", "= Constraint(expr= m.x468 == 0) m.c619 = Constraint(expr= m.x469 ==", "m.x356 = Var(within=Reals,bounds=(0,None),initialize=0) m.x357 = Var(within=Reals,bounds=(0,None),initialize=0) m.x358 = Var(within=Reals,bounds=(0,None),initialize=0) m.x359", "- m.x411 - m.x414 == 0) m.c622 = Constraint(expr= m.x109", "Constraint(expr= m.x565 == 0) m.c902 = Constraint(expr= m.x593 == 0)", "0.705049913072943*m.b671 <= 0) m.c804 = Constraint(expr= m.x543 - 0.705049913072943*m.b672 <=", "- 9*m.b685 <= 0) m.c920 = Constraint(expr= m.x593 + 9*m.b683", "0) m.c1329 = Constraint(expr= - m.b641 + m.b642 - m.b732", "m.c83 = Constraint(expr= m.x221 == 0) m.c84 = Constraint(expr= m.x222", "m.x518 = Var(within=Reals,bounds=(0,None),initialize=0) m.x519 = Var(within=Reals,bounds=(0,None),initialize=0) m.x520 = Var(within=Reals,bounds=(0,None),initialize=0) m.x521", "+ 4.45628648004517*m.b605 <= 4.45628648004517) m.c153 = Constraint(expr= m.x246 + 4.45628648004517*m.b606", "m.c1428 = Constraint(expr= m.b609 - m.b627 >= 0) m.c1429 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x388 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x389 = Var(within=Reals,bounds=(0,None),initialize=0) m.x390 =", "Var(within=Binary,bounds=(0,1),initialize=0) m.b640 = Var(within=Binary,bounds=(0,1),initialize=0) m.b641 = Var(within=Binary,bounds=(0,1),initialize=0) m.b642 = Var(within=Binary,bounds=(0,1),initialize=0)", "- 20*m.b629 <= 0) m.c402 = Constraint(expr= m.x417 - 20*m.b630", ">= 0) m.c1457 = Constraint(expr= - m.b653 + m.b656 +", "m.c942 = Constraint(expr= 3*m.b705 + m.x795 == 0) m.c943 =", "+ m.x470/(0.001 + 0.999*m.b656)))*(0.001 + 0.999*m.b656) <= 0) m.c639 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x321 = Var(within=Reals,bounds=(0,None),initialize=0) m.x322 = Var(within=Reals,bounds=(0,None),initialize=0) m.x323 =", "<= 9) m.c921 = Constraint(expr= m.x594 + 9*m.b684 <= 9)", "m.b636 - m.b637 <= 0) m.c1055 = Constraint(expr= m.b638 -", "m.b616 = Var(within=Binary,bounds=(0,1),initialize=0) m.b617 = Var(within=Binary,bounds=(0,1),initialize=0) m.b618 = Var(within=Binary,bounds=(0,1),initialize=0) m.b619", "= Constraint(expr= m.x479 == 0) m.c669 = Constraint(expr= m.x480 ==", "0) m.c463 = Constraint(expr=(m.x436/(0.001 + 0.999*m.b640) - 0.65*log(1 + m.x370/(0.001", "1) m.c1224 = Constraint(expr= m.b746 + m.b748 <= 1) m.c1225", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x816 = Var(within=Reals,bounds=(None,None),initialize=0) m.x817 = Var(within=Reals,bounds=(None,None),initialize=0) m.x818 =", "m.c675 = Constraint(expr= m.x144 - m.x477 - m.x480 == 0)", "m.x821 == 0) m.c969 = Constraint(expr= 5*m.b732 + m.x822 ==", "m.obj = Objective(expr= - m.x2 - m.x3 - m.x4 +", "4.45628648004517*m.b605 <= 0) m.c150 = Constraint(expr= m.x243 - 4.45628648004517*m.b606 <=", "- 2*m.b706 - 5*m.b707 - 6*m.b708 - 7*m.b709 - 2*m.b710", "Constraint(expr= m.x274 + 2.54515263975353*m.b607 <= 2.54515263975353) m.c161 = Constraint(expr= -", "- 0.572481933717686*m.b637 <= 0) m.c458 = Constraint(expr= m.x431 + 0.572481933717686*m.b635", "m.x380 - 
33.5*m.b638 <= 0) m.c492 = Constraint(expr= m.x381 -", "m.c583 = Constraint(expr= m.x457 + 1.04900943706034*m.b649 <= 1.04900943706034) m.c584 =", "+ 0.705049913072943*m.b673 <= 0.705049913072943) m.c809 = Constraint(expr= m.x566 - 0.666992981045719*m.b671", "- m.b674 + m.b675 - m.b765 <= 0) m.c1363 =", "+ m.x851 == 0) m.c999 = Constraint(expr= 8*m.b762 + m.x852", "m.b662 - m.b752 <= 0) m.c1350 = Constraint(expr= - m.b662", "Var(within=Reals,bounds=(0,None),initialize=0) m.x105 = Var(within=Reals,bounds=(0,None),initialize=0) m.x106 = Var(within=Reals,bounds=(0,None),initialize=0) m.x107 = Var(within=Reals,bounds=(0,None),initialize=0)", "+ m.x865 == 0) m.c1013 = Constraint(expr= m.b596 - m.b597", "0) m.c904 = Constraint(expr= m.x595 == 0) m.c905 = Constraint(expr=", "<= 0) m.c423 = Constraint(expr= m.x330 - 1.32154609891348*m.b633 <= 0)", "Constraint(expr= m.x424 - 0.842233385663186*m.b634 <= 0) m.c431 = Constraint(expr= m.x425", "m.c879 = Constraint(expr= m.x189 - m.x555 - m.x558 == 0)", "= Constraint(expr= m.x77 - m.x101 - m.x104 - m.x107 ==", "m.x558 == 0) m.c880 = Constraint(expr= m.x190 - m.x556 -", "Constraint(expr= m.b731 + m.b733 <= 1) m.c1195 = Constraint(expr= m.b731", "Constraint(expr= m.x336 + 1.32154609891348*m.b633 <= 1.32154609891348) m.c427 = Constraint(expr= m.x337", "Var(within=Reals,bounds=(None,None),initialize=0) m.x859 = Var(within=Reals,bounds=(None,None),initialize=0) m.x860 = Var(within=Reals,bounds=(None,None),initialize=0) m.x861 = Var(within=Reals,bounds=(None,None),initialize=0)", "m.c987 = Constraint(expr= 2*m.b750 + m.x840 == 0) m.c988 =", "+ m.b756 <= 1) m.c1244 = Constraint(expr= m.b756 + m.b757", "3*m.b705 - 2*m.b706 - 5*m.b707 - 6*m.b708 - 7*m.b709 -", "<= 0) m.c253 = Constraint(expr= m.x328 - 1.32154609891348*m.b616 <= 0)", "m.c1114 = Constraint(expr= m.b690 + m.b691 <= 1) m.c1115 =", "Constraint(expr= - 0.6*m.x562 + m.x592 == 0) m.c899 = Constraint(expr=", "m.b699 <= 1) m.c1130 = Constraint(expr= m.b699 + m.b700 <=", "= 
Constraint(expr= m.b599 - m.b689 <= 0) m.c1287 = Constraint(expr=", "0.999*m.b619)))*(0.001 + 0.999*m.b619) <= 0) m.c260 = Constraint(expr= m.x275 ==", "m.x582 = Var(within=Reals,bounds=(0,None),initialize=0) m.x583 = Var(within=Reals,bounds=(0,None),initialize=0) m.x584 = Var(within=Reals,bounds=(0,None),initialize=0) m.x585", "= Constraint(expr= - m.b653 + m.b654 - m.b744 <= 0)", "0) m.c208 = Constraint(expr= m.x289 == 0) m.c209 = Constraint(expr=", "m.x39 - m.x48 - m.x51 == 0) m.c16 = Constraint(expr=", "= Constraint(expr= m.x66 - m.x330 - m.x336 == 0) m.c418", "== 0) m.c733 = Constraint(expr= m.x178 - m.x526 - m.x532", "- m.b600 + m.b601 - m.b691 <= 0) m.c1289 =", "m.x563 = Var(within=Reals,bounds=(0,None),initialize=0) m.x564 = Var(within=Reals,bounds=(0,None),initialize=0) m.x565 = Var(within=Reals,bounds=(0,None),initialize=0) m.x566", "m.b607 <= 0) m.c1024 = Constraint(expr= m.b606 - m.b607 <=", "m.x498 + 0.940066550763924*m.b663 <= 0.940066550763924) m.c712 = Constraint(expr= m.x499 +", "- m.x535 == 0) m.c854 = Constraint(expr= m.x203 - m.x578", "m.b703 <= 1) m.c1139 = Constraint(expr= m.b704 + m.b705 <=", "= Constraint(expr= m.x112 - m.x418 - m.x421 == 0) m.c389", "1) m.c1107 = Constraint(expr= m.b686 + m.b688 <= 1) m.c1108", "6*m.b691 - 6*m.b692 - 9*m.b693 - 4*m.b694 - 10*m.b695 -", "m.x457 = Var(within=Reals,bounds=(0,None),initialize=0) m.x458 = Var(within=Reals,bounds=(0,None),initialize=0) m.x459 = Var(within=Reals,bounds=(0,None),initialize=0) m.x460", "Var(within=Binary,bounds=(0,1),initialize=0) m.b614 = Var(within=Binary,bounds=(0,1),initialize=0) m.b615 = Var(within=Binary,bounds=(0,1),initialize=0) m.b616 = Var(within=Binary,bounds=(0,1),initialize=0)", "Constraint(expr= m.x26 - m.x236 - m.x239 == 0) m.c117 =", "- 1.32154609891348*m.b616 <= 0) m.c254 = Constraint(expr= m.x332 + 1.32154609891348*m.b614", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x413 = Var(within=Reals,bounds=(0,None),initialize=0) m.x414 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x415 =", "m.x555 - m.x558 == 0) m.c880 = Constraint(expr= m.x190 -", "m.c887 = Constraint(expr= m.x557 + 15*m.b680 <= 15) m.c888 =", "Constraint(expr= m.x349 == 0) m.c293 = Constraint(expr= m.x53 - m.x296", "m.x864 == 0) m.c1012 = Constraint(expr= 4*m.b775 + m.x865 ==", "m.b677 - m.b767 <= 0) m.c1365 = Constraint(expr= - m.b677", "m.b707 <= 0) m.c1305 = Constraint(expr= - m.b617 + m.b618", "- m.b691 <= 0) m.c1289 = Constraint(expr= m.b602 - m.b692", "m.c186 = Constraint(expr= m.x249 - 4.45628648004517*m.b609 <= 0) m.c187 =", "m.c205 = Constraint(expr=(m.x316/(0.001 + 0.999*m.b613) - 1.25*log(1 + m.x286/(0.001 +", "m.b696 <= 1) m.c1124 = Constraint(expr= m.b696 + m.b697 <=", "1) m.c1179 = Constraint(expr= m.b722 + m.b724 <= 1) m.c1180", "m.c201 = Constraint(expr= m.x282 + 15*m.b609 <= 15) m.c202 =", "m.c637 = Constraint(expr= m.x469 + 1.18887736200171*m.b655 <= 1.18887736200171) m.c638 =", "Constraint(expr= m.b750 + m.b751 <= 1) m.c1233 = Constraint(expr= m.b749", "m.x335 == 0) m.c411 = Constraint(expr= m.x336 == 0) m.c412", "Var(within=Reals,bounds=(0,None),initialize=0) m.x285 = Var(within=Reals,bounds=(0,None),initialize=0) m.x286 = Var(within=Reals,bounds=(0,None),initialize=0) m.x287 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b738 + m.b739 <= 1) m.c1211 = Constraint(expr= m.b740 +", "m.c1345 = Constraint(expr= - m.b656 - m.b657 + m.b658 -", "0) m.c1392 = Constraint(expr= - m.b609 + m.b621 + m.b624", "0) m.c392 = Constraint(expr= m.x323 + 1.83548069293539*m.b629 <= 1.83548069293539) m.c393", "== 0) m.c475 = Constraint(expr= m.x439 == 0) m.c476 =", "m.c559 = Constraint(expr=(m.x454/(0.001 + 0.999*m.b649) - 0.75*log(1 + m.x400/(0.001 +", "m.b656 - m.b746 <= 0) m.c1344 = Constraint(expr= - m.b656", "<= 0) m.c1071 = Constraint(expr= m.b653 - m.b655 <= 0)", "0.78338879230327*m.b658 <= 0) m.c662 = Constraint(expr= m.x485 + 0.78338879230327*m.b656 <=", "m.c844 = Constraint(expr=(m.x580/(0.001 + 0.999*m.b679) - 
log(1 + m.x529/(0.001 +", "m.c327 = Constraint(expr= m.x303 - 15*m.b624 <= 0) m.c328 =", "- m.x406 - m.x409 == 0) m.c596 = Constraint(expr= m.x131", "m.c263 = Constraint(expr= m.x341 == 0) m.c264 = Constraint(expr= m.x342", "m.x778 = Var(within=Reals,bounds=(None,None),initialize=0) m.x779 = Var(within=Reals,bounds=(None,None),initialize=0) m.x780 = Var(within=Reals,bounds=(None,None),initialize=0) m.x781", "0) m.c31 = Constraint(expr= m.x79 - m.x103 - m.x106 -", "0.572481933717686*m.b637 <= 0.572481933717686) m.c461 = Constraint(expr=(m.x434/(0.001 + 0.999*m.b638) - 0.65*log(1", "- 3.34221486003388*m.b615 <= 0) m.c247 = Constraint(expr= m.x292 - 3.34221486003388*m.b616", "0) m.c469 = Constraint(expr= m.x373 == 0) m.c470 = Constraint(expr=", "m.x451 == 0) m.c545 = Constraint(expr= m.x392 - 9*m.b644 <=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x590 = Var(within=Reals,bounds=(0,None),initialize=0) m.x591 = Var(within=Reals,bounds=(0,None),initialize=0) m.x592 =", "0) m.c94 = Constraint(expr= m.x16 - m.x232 - m.x235 ==", "- m.b615 <= 0) m.c1032 = Constraint(expr= m.b614 - m.b616", "m.x401 = Var(within=Reals,bounds=(0,None),initialize=0) m.x402 = Var(within=Reals,bounds=(0,None),initialize=0) m.x403 = Var(within=Reals,bounds=(0,None),initialize=0) m.x404", "- m.x2 - m.x3 - m.x4 + 5*m.x20 + 10*m.x21", "m.c1219 = Constraint(expr= m.b743 + m.b744 <= 1) m.c1220 =", "DLL # 3373 3193 180 0 # # Reformulation has", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x531 = Var(within=Reals,bounds=(0,None),initialize=0) m.x532 = Var(within=Reals,bounds=(0,None),initialize=0) m.x533 =", "m.x492 == 0) m.c679 = Constraint(expr= m.x151 - m.x490 -", "- 0.5*m.x513 + m.x537 == 0) m.c751 = Constraint(expr= -", "= Constraint(expr= m.b681 - m.b682 <= 0) m.c1100 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x406 = Var(within=Reals,bounds=(0,None),initialize=0) m.x407 = Var(within=Reals,bounds=(0,None),initialize=0) m.x408 = 
Var(within=Reals,bounds=(0,None),initialize=0)", "+ 3.71357206670431*m.b597 <= 3.71357206670431) m.c79 = Constraint(expr= m.x229 + 3.71357206670431*m.b598", "m.b679 <= 0) m.c1097 = Constraint(expr= m.b680 - m.b681 <=", "<= 0) m.c356 = Constraint(expr= m.x311 + 15*m.b626 <= 15)", "0) m.c293 = Constraint(expr= m.x53 - m.x296 - m.x299 ==", "m.c676 = Constraint(expr= m.x145 - m.x478 - m.x481 == 0)", "= Constraint(expr= m.x336 == 0) m.c412 = Constraint(expr= m.x337 ==", "+ m.b712 <= 1) m.c1153 = Constraint(expr= m.b710 + m.b711", "+ m.b672 - m.b762 <= 0) m.c1360 = Constraint(expr= -", "m.x263 = Var(within=Reals,bounds=(0,None),initialize=0) m.x264 = Var(within=Reals,bounds=(0,None),initialize=0) m.x265 = Var(within=Reals,bounds=(0,None),initialize=0) m.x266", "m.c470 = Constraint(expr= m.x383 == 0) m.c471 = Constraint(expr= m.x384", "m.c983 = Constraint(expr= 2*m.b746 + m.x836 == 0) m.c984 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x570 = Var(within=Reals,bounds=(0,None),initialize=0) m.x571 = Var(within=Reals,bounds=(0,None),initialize=0) m.x572 = Var(within=Reals,bounds=(0,None),initialize=0)", "<= 0) m.c721 = Constraint(expr=(m.x526/(0.001 + 0.999*m.b667) - 1.5*log(1 +", "+ m.x845 == 0) m.c993 = Constraint(expr= 3*m.b756 + m.x846", "0) m.c1299 = Constraint(expr= - m.b611 + m.b612 - m.b702", "+ m.b613 - m.b703 <= 0) m.c1301 = Constraint(expr= m.b614", "Var(within=Reals,bounds=(0,None),initialize=0) m.x579 = Var(within=Reals,bounds=(0,None),initialize=0) m.x580 = Var(within=Reals,bounds=(0,None),initialize=0) m.x581 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= m.x448 - 9*m.b646 <= 0) m.c554 = Constraint(expr=", "- 5*m.b752 - 8*m.b753 - 4*m.b754 - 2*m.b755 - 3*m.b756", "== 0) m.c538 = Constraint(expr= m.x451 == 0) m.c539 =", "Constraint(expr= m.x210 - m.x591 - m.x594 == 0) m.c910 =", "== 0) m.c181 = Constraint(expr= m.x37 - m.x256 - m.x259", "m.x543/(0.001 + 0.999*m.b672)))*(0.001 + 0.999* m.b672) <= 0) m.c790 =", 
"Var(within=Reals,bounds=(0,None),initialize=0) m.x333 = Var(within=Reals,bounds=(0,None),initialize=0) m.x334 = Var(within=Reals,bounds=(0,None),initialize=0) m.x335 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b769 <= 1) m.c1269 = Constraint(expr= m.b767 + m.b769 <=", "== 0) m.c147 = Constraint(expr= m.x42 - m.x267 - m.x273", "m.x416 == 0) m.c369 = Constraint(expr= - m.x375 + m.x417", "Var(within=Reals,bounds=(0,None),initialize=0) m.x237 = Var(within=Reals,bounds=(0,None),initialize=0) m.x238 = Var(within=Reals,bounds=(0,None),initialize=0) m.x239 = Var(within=Reals,bounds=(0,None),initialize=0)", "== 0) m.c479 = Constraint(expr= m.x92 - m.x380 - m.x383", "+ 0.999* m.b655) <= 0) m.c614 = Constraint(expr= m.x413 ==", "= Constraint(expr= m.x277 + 2.54515263975353*m.b619 <= 2.54515263975353) m.c278 = Constraint(expr=", "m.b711 + m.b712 <= 1) m.c1157 = Constraint(expr= m.b713 +", "= Constraint(expr= m.x150 - m.x489 - m.x492 == 0) m.c679", "- 1.26558121681553*m.b635 <= 0) m.c450 = Constraint(expr= m.x363 - 1.26558121681553*m.b636", "m.c1081 = Constraint(expr= m.b663 - m.b664 <= 0) m.c1082 =", "+ m.b711 <= 1) m.c1154 = Constraint(expr= m.b711 + m.b712", "Var(within=Binary,bounds=(0,1),initialize=0) m.b722 = Var(within=Binary,bounds=(0,1),initialize=0) m.b723 = Var(within=Binary,bounds=(0,1),initialize=0) m.b724 = Var(within=Binary,bounds=(0,1),initialize=0)", "- m.b611 - m.b612 + m.b613 - m.b703 <= 0)", "= Constraint(expr= m.b624 - m.b642 >= 0) m.c1444 = Constraint(expr=", "m.x473 = Var(within=Reals,bounds=(0,None),initialize=0) m.x474 = Var(within=Reals,bounds=(0,None),initialize=0) m.x475 = Var(within=Reals,bounds=(0,None),initialize=0) m.x476", "m.b681 + m.b682 - m.b772 <= 0) m.c1370 = Constraint(expr=", "+ m.b723 <= 1) m.c1178 = Constraint(expr= m.b723 + m.b724", "m.x402 == 0) m.c568 = Constraint(expr= m.x103 - m.x400 -", "0) m.c1382 = Constraint(expr= - m.b614 + m.b632 >= 0)", "m.x284 - 3.34221486003388*m.b611 <= 0) m.c219 = Constraint(expr= m.x285 -", 
"m.c1227 = Constraint(expr= m.b746 + m.b748 <= 1) m.c1228 =", "+ 0.705049913072943*m.b671 <= 0.705049913072943) m.c807 = Constraint(expr= m.x546 + 0.705049913072943*m.b672", "Constraint(expr= m.x467 + 1.18887736200171*m.b653 <= 1.18887736200171) m.c636 = Constraint(expr= m.x468", "<= 0) m.c194 = Constraint(expr= m.x257 + 30*m.b608 <= 30)", "m.b674 = Var(within=Binary,bounds=(0,1),initialize=0) m.b675 = Var(within=Binary,bounds=(0,1),initialize=0) m.b676 = Var(within=Binary,bounds=(0,1),initialize=0) m.b677", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x799 = Var(within=Reals,bounds=(None,None),initialize=0) m.x800 = Var(within=Reals,bounds=(None,None),initialize=0) m.x801 =", "3.04984759446376*m.b654 <= 3.04984759446376) m.c631 = Constraint(expr= m.x415 + 3.04984759446376*m.b655 <=", "+ m.x818 == 0) m.c966 = Constraint(expr= 8*m.b729 + m.x819", "m.b752 + m.b753 <= 1) m.c1238 = Constraint(expr= m.b753 +", "- 3*m.b705 - 2*m.b706 - 5*m.b707 - 6*m.b708 - 7*m.b709", "+ 1.26558121681553*m.b639 <= 1.26558121681553) m.c490 = Constraint(expr= m.x373 + 1.26558121681553*m.b640", "Constraint(expr= m.x422 - 0.842233385663186*m.b632 <= 0) m.c429 = Constraint(expr= m.x423", "9*m.b623 <= 0) m.c333 = Constraint(expr= m.x351 - 9*m.b624 <=", "m.b609 - m.b699 <= 0) m.c1297 = Constraint(expr= - m.b608", "0) m.c312 = Constraint(expr= - 0.6*m.x303 + m.x351 == 0)", "m.x571 + 0.666992981045719*m.b673 <= 0.666992981045719) m.c815 = Constraint(expr=(m.x572/(0.001 + 0.999*m.b674)", "== 0) m.c982 = Constraint(expr= m.b745 + m.x835 == 0)", "m.b732 <= 1) m.c1196 = Constraint(expr= m.b732 + m.b733 <=", "m.x434 - 2.30162356062425*m.b638 <= 0) m.c498 = Constraint(expr= m.x435 -", "= Constraint(expr= m.x63 - m.x318 - m.x324 == 0) m.c382", "m.x809 = Var(within=Reals,bounds=(None,None),initialize=0) m.x810 = Var(within=Reals,bounds=(None,None),initialize=0) m.x811 = Var(within=Reals,bounds=(None,None),initialize=0) m.x812", "m.c1470 = Constraint(expr= m.b654 - m.b660 >= 0) m.c1471 =", "= 
Var(within=Binary,bounds=(0,1),initialize=0) m.b713 = Var(within=Binary,bounds=(0,1),initialize=0) m.b714 = Var(within=Binary,bounds=(0,1),initialize=0) m.b715 =", "= Constraint(expr= m.x111 - m.x417 - m.x420 == 0) m.c388", "Constraint(expr=(m.x428/(0.001 + 0.999*m.b635) - 0.7*log(1 + m.x362/(0.001 + 0.999*m.b635)))*(0.001 +", "0.994083415506506*m.b679 <= 0) m.c860 = Constraint(expr= m.x533 + 0.994083415506506*m.b677 <=", "= Constraint(expr= m.b617 - m.b635 >= 0) m.c1437 = Constraint(expr=", "m.c102 = Constraint(expr= m.x231 - 4.45628648004517*m.b600 <= 0) m.c103 =", "= Constraint(expr= m.b627 - m.b651 >= 0) m.c1453 = Constraint(expr=", "- m.x437 == 0) m.c483 = Constraint(expr= m.x120 - m.x435", "0.999*m.b659) - 1.2*log(1 + m.x476/(0.001 + 0.999*m.b659)))*(0.001 + 0.999* m.b659)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x495 = Var(within=Reals,bounds=(0,None),initialize=0) m.x496 = Var(within=Reals,bounds=(0,None),initialize=0) m.x497 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x579 = Var(within=Reals,bounds=(0,None),initialize=0) m.x580 = Var(within=Reals,bounds=(0,None),initialize=0) m.x581 =", "+ 0.999*m.b619)))*(0.001 + 0.999*m.b619) <= 0) m.c260 = Constraint(expr= m.x275", "- m.b634 <= 0) m.c1051 = Constraint(expr= m.b633 - m.b634", "1.5*log(1 + m.x244/(0.001 + 0.999*m.b607)))*(0.001 + 0.999* m.b607) <= 0)", "1.5*log(1 + m.x500/(0.001 + 0.999*m.b665)))*(0.001 + 0.999* m.b665) <= 0)", "0) m.c843 = Constraint(expr=(m.x579/(0.001 + 0.999*m.b678) - log(1 + m.x528/(0.001", "m.c158 = Constraint(expr= m.x272 + 2.54515263975353*m.b605 <= 2.54515263975353) m.c159 =", "Constraint(expr=(m.x338/(0.001 + 0.999*m.b617) - log(1 + m.x269/(0.001 + 0.999*m.b617)))*(0.001 +", "- 1.5*log(1 + m.x244/(0.001 + 0.999*m.b607)))*(0.001 + 0.999* m.b607) <=", "= Constraint(expr= m.x119 - m.x434 - m.x437 == 0) m.c483", "= Constraint(expr=(m.x489/(0.001 + 0.999*m.b660) - 1.2*log(1 + m.x477/(0.001 + 
0.999*m.b660)))*(0.001", "Var(within=Binary,bounds=(0,1),initialize=0) m.b665 = Var(within=Binary,bounds=(0,1),initialize=0) m.b666 = Var(within=Binary,bounds=(0,1),initialize=0) m.b667 = Var(within=Binary,bounds=(0,1),initialize=0)", "Constraint(expr= m.x508 - 0.940066550763924*m.b670 <= 0) m.c773 = Constraint(expr= m.x509", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b746 = Var(within=Binary,bounds=(0,1),initialize=0) m.b747 = Var(within=Binary,bounds=(0,1),initialize=0) m.b748 =", "m.b711 <= 1) m.c1154 = Constraint(expr= m.b711 + m.b712 <=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x156 = Var(within=Reals,bounds=(0,None),initialize=0) m.x157 = Var(within=Reals,bounds=(0,None),initialize=0) m.x158 =", "m.x31 - m.x244 - m.x247 == 0) m.c146 = Constraint(expr=", "- 1.04900943706034*m.b649 <= 0) m.c581 = Constraint(expr= m.x455 + 1.04900943706034*m.b647", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x544 = Var(within=Reals,bounds=(0,None),initialize=0) m.x545 = Var(within=Reals,bounds=(0,None),initialize=0) m.x546 =", "Constraint(expr= m.x77 - m.x356 - m.x359 == 0) m.c351 =", "0.705049913072943*m.b676 <= 0.705049913072943) m.c836 = Constraint(expr= m.x572 - 0.480234946352917*m.b674 <=", "Constraint(expr= m.x533 == 0) m.c846 = Constraint(expr= m.x534 == 0)", "= Constraint(expr= m.x359 + 3.04984759446376*m.b626 <= 3.04984759446376) m.c363 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x372 = Var(within=Reals,bounds=(0,None),initialize=0) m.x373 = Var(within=Reals,bounds=(0,None),initialize=0) m.x374 =", "m.x421 = Var(within=Reals,bounds=(0,None),initialize=0) m.x422 = Var(within=Reals,bounds=(0,None),initialize=0) m.x423 = Var(within=Reals,bounds=(0,None),initialize=0) m.x424", "Var(within=Reals,bounds=(0,None),initialize=0) m.x267 = Var(within=Reals,bounds=(0,None),initialize=0) m.x268 = Var(within=Reals,bounds=(0,None),initialize=0) m.x269 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c178 = Constraint(expr= 
m.x34 - m.x250 - m.x253 == 0)", "m.x364 - m.x367 == 0) m.c446 = Constraint(expr= m.x116 -", "m.b716 = Var(within=Binary,bounds=(0,1),initialize=0) m.b717 = Var(within=Binary,bounds=(0,1),initialize=0) m.b718 = Var(within=Binary,bounds=(0,1),initialize=0) m.b719", "Constraint(expr= m.x498 == 0) m.c697 = Constraint(expr= m.x499 == 0)", "+ 15*m.x131 + 20*m.x132 + 20*m.x133 + 30*m.x155 + 40*m.x156", "- m.b607 <= 0) m.c1025 = Constraint(expr= m.b608 - m.b609", "m.b609 - m.b610 <= 0) m.c1028 = Constraint(expr= m.b611 -", "m.x310/(0.001 + 0.999*m.b628)))*(0.001 + 0.999* m.b628) <= 0) m.c341 =", "<= 1) m.c1187 = Constraint(expr= m.b728 + m.b729 <= 1)", "m.c1346 = Constraint(expr= m.b659 - m.b749 <= 0) m.c1347 =", "m.c1416 = Constraint(expr= m.b603 - m.b615 >= 0) m.c1417 =", "m.c140 = Constraint(expr= m.x272 == 0) m.c141 = Constraint(expr= m.x273", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x20 = Var(within=Reals,bounds=(0,None),initialize=0) m.x21 = Var(within=Reals,bounds=(0,None),initialize=0) m.x22 =", "- 0.9*log(1 + m.x550/(0.001 + 0.999*m.b676)))*(0.001 + 0.999* m.b676) <=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x273 = Var(within=Reals,bounds=(0,None),initialize=0) m.x274 = Var(within=Reals,bounds=(0,None),initialize=0) m.x275 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c111 = Constraint(expr= m.x240 == 0) m.c112 = Constraint(expr= m.x241", "== 0) m.c693 = Constraint(expr= - 0.75*m.x495 + m.x519 ==", "Constraint(expr= m.x293 == 0) m.c234 = Constraint(expr= m.x294 == 0)", "m.x592 == 0) m.c899 = Constraint(expr= m.x563 == 0) m.c900", "m.x227 == 0) m.c60 = Constraint(expr= m.x228 == 0) m.c61", "m.b610 - m.b622 >= 0) m.c1424 = Constraint(expr= m.b608 -", "0) m.c1294 = Constraint(expr= - m.b605 - m.b606 + m.b607", "- m.x572 - m.x575 == 0) m.c828 = Constraint(expr= m.x201", "= Constraint(expr= m.x510 + 0.940066550763924*m.b669 <= 0.940066550763924) m.c775 = Constraint(expr=", "m.b767 + m.b768 <= 1) m.c1266 = Constraint(expr= m.b767 +", 
"Var(within=Reals,bounds=(None,None),initialize=0) m.x846 = Var(within=Reals,bounds=(None,None),initialize=0) m.x847 = Var(within=Reals,bounds=(None,None),initialize=0) m.x848 = Var(within=Reals,bounds=(None,None),initialize=0)", "m.x841 == 0) m.c989 = Constraint(expr= 5*m.b752 + m.x842 ==", "m.b639 = Var(within=Binary,bounds=(0,1),initialize=0) m.b640 = Var(within=Binary,bounds=(0,1),initialize=0) m.b641 = Var(within=Binary,bounds=(0,1),initialize=0) m.b642", "- m.x23 == 0) m.c9 = Constraint(expr= m.x18 - m.x21", "== 0) m.c47 = Constraint(expr= m.x173 - m.x182 - m.x185", "== 0) m.c174 = Constraint(expr= m.x282 == 0) m.c175 =", "m.x449 == 0) m.c537 = Constraint(expr= m.x450 == 0) m.c538", "0) m.c979 = Constraint(expr= 4*m.b742 + m.x832 == 0) m.c980", "Var(within=Binary,bounds=(0,1),initialize=0) m.b611 = Var(within=Binary,bounds=(0,1),initialize=0) m.b612 = Var(within=Binary,bounds=(0,1),initialize=0) m.b613 = Var(within=Binary,bounds=(0,1),initialize=0)", "- m.b649 >= 0) m.c1451 = Constraint(expr= m.b626 - m.b650", "Constraint(expr= m.b612 - m.b630 >= 0) m.c1432 = Constraint(expr= m.b613", "m.b774 + m.b775 <= 1) m.c1283 = Constraint(expr= m.b596 -", "m.b761 <= 0) m.c1359 = Constraint(expr= - m.b671 + m.b672", "15*m.b628 <= 0) m.c356 = Constraint(expr= m.x311 + 15*m.b626 <=", "Constraint(expr= m.b774 + m.b775 <= 1) m.c1283 = Constraint(expr= m.b596", "== 0) m.c564 = Constraint(expr= m.x456 == 0) m.c565 =", "0.999* m.b606) <= 0) m.c136 = Constraint(expr=(m.x268/(0.001 + 0.999*m.b607) -", "m.x258 = Var(within=Reals,bounds=(0,None),initialize=0) m.x259 = Var(within=Reals,bounds=(0,None),initialize=0) m.x260 = Var(within=Reals,bounds=(0,None),initialize=0) m.x261", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b676 = Var(within=Binary,bounds=(0,1),initialize=0) m.b677 = Var(within=Binary,bounds=(0,1),initialize=0) m.b678 =", "m.x428 = Var(within=Reals,bounds=(0,None),initialize=0) m.x429 = Var(within=Reals,bounds=(0,None),initialize=0) m.x430 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x431", "Var(within=Reals,bounds=(0,None),initialize=0) m.x511 = Var(within=Reals,bounds=(0,None),initialize=0) m.x512 = Var(within=Reals,bounds=(0,None),initialize=0) m.x513 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c106 = Constraint(expr= m.x235 + 4.45628648004517*m.b601 <= 4.45628648004517) m.c107 =", "m.c729 = Constraint(expr= m.x165 - m.x501 - m.x504 == 0)", "Constraint(expr= m.b770 + m.b772 <= 1) m.c1276 = Constraint(expr= m.b771", "m.b712 <= 1) m.c1156 = Constraint(expr= m.b711 + m.b712 <=", "- m.x368 - m.x371 == 0) m.c477 = Constraint(expr= m.x84", "m.c524 = Constraint(expr= m.x440 - 9*m.b641 <= 0) m.c525 =", "<= 0) m.c1098 = Constraint(expr= m.b680 - m.b682 <= 0)", "m.x213/(0.001 + 0.999*m.b597)))*(0.001 + 0.999*m.b597) <= 0) m.c55 = Constraint(expr=(m.x226/(0.001", "<= 0) m.c464 = Constraint(expr=(m.x434/(0.001 + 0.999*m.b638) - 0.65*log(1 +", "m.x527 - m.x533 == 0) m.c852 = Constraint(expr= m.x177 -", "- m.x400 - m.x403 == 0) m.c569 = Constraint(expr= m.x128", "m.x270 = Var(within=Reals,bounds=(0,None),initialize=0) m.x271 = Var(within=Reals,bounds=(0,None),initialize=0) m.x272 = Var(within=Reals,bounds=(0,None),initialize=0) m.x273", "Var(within=Reals,bounds=(0,None),initialize=0) m.x213 = Var(within=Reals,bounds=(0,None),initialize=0) m.x214 = Var(within=Reals,bounds=(0,None),initialize=0) m.x215 = Var(within=Reals,bounds=(0,None),initialize=0)", "- m.x483 - m.x486 == 0) m.c652 = Constraint(expr= m.x148", "0) m.c1343 = Constraint(expr= m.b656 - m.b746 <= 0) m.c1344", "Constraint(expr= m.b611 - m.b629 >= 0) m.c1431 = Constraint(expr= m.b612", "== 0) m.c592 = Constraint(expr= m.x463 == 0) m.c593 =", "1) m.c1137 = Constraint(expr= m.b701 + m.b703 <= 1) m.c1138", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x813 = Var(within=Reals,bounds=(None,None),initialize=0) m.x814 = Var(within=Reals,bounds=(None,None),initialize=0) m.x815 =", "- 0.705049913072943*m.b675 <= 0) m.c832 = 
Constraint(expr= m.x550 - 0.705049913072943*m.b676", "m.x384 == 0) m.c481 = Constraint(expr= m.x94 - m.x382 -", "= Constraint(expr= m.b596 - m.b598 <= 0) m.c1015 = Constraint(expr=", "m.c1259 = Constraint(expr= m.b764 + m.b765 <= 1) m.c1260 =", "m.x508 - m.x511 == 0) m.c764 = Constraint(expr= m.x170 -", "m.x506 - m.x509 == 0) m.c762 = Constraint(expr= m.x168 -", "0) m.c958 = Constraint(expr= 9*m.b721 + m.x811 == 0) m.c959", "m.b654 + m.b657 + m.b660 >= 0) m.c1459 = Constraint(expr=", "+ m.x404/(0.001 + 0.999*m.b650)))*(0.001 + 0.999* m.b650) <= 0) m.c585", "+ 1.26558121681553*m.b640 <= 1.26558121681553) m.c491 = Constraint(expr= m.x380 - 33.5*m.b638", "m.x820 == 0) m.c968 = Constraint(expr= 2*m.b731 + m.x821 ==", "== 0) m.c146 = Constraint(expr= m.x41 - m.x266 - m.x272", "= Constraint(expr= m.b650 - m.b740 <= 0) m.c1338 = Constraint(expr=", "m.c434 = Constraint(expr=(m.x428/(0.001 + 0.999*m.b635) - 0.7*log(1 + m.x362/(0.001 +", "m.x124 = Var(within=Reals,bounds=(0,None),initialize=0) m.x125 = Var(within=Reals,bounds=(0,None),initialize=0) m.x126 = Var(within=Reals,bounds=(0,None),initialize=0) m.x127", "0.705049913072943) m.c835 = Constraint(expr= m.x553 + 0.705049913072943*m.b676 <= 0.705049913072943) m.c836", "= Constraint(expr= m.b680 - m.b682 <= 0) m.c1099 = Constraint(expr=", "m.b719 + m.b721 <= 1) m.c1171 = Constraint(expr= m.b719 +", "= Constraint(expr= m.x314 - 1.83548069293539*m.b611 <= 0) m.c225 = Constraint(expr=", "0) m.c418 = Constraint(expr= m.x67 - m.x331 - m.x337 ==", "m.c757 = Constraint(expr= m.x517 == 0) m.c758 = Constraint(expr= m.x539", "Var(within=Binary,bounds=(0,1),initialize=0) m.b639 = Var(within=Binary,bounds=(0,1),initialize=0) m.b640 = Var(within=Binary,bounds=(0,1),initialize=0) m.b641 = Var(within=Binary,bounds=(0,1),initialize=0)", "- m.x389 == 0) m.c513 = Constraint(expr= m.x96 - m.x387", "m.x136 - m.x466 - m.x469 == 0) m.c626 = Constraint(expr=", "0) m.c886 = Constraint(expr= m.x556 - 15*m.b682 <= 0) m.c887", "<= 1) m.c1108 = 
Constraint(expr= m.b687 + m.b688 <= 1)", "m.c1218 = Constraint(expr= m.b743 + m.b745 <= 1) m.c1219 =", "15) m.c890 = Constraint(expr= m.x584 - 13.5*m.b680 <= 0) m.c891", "m.b674 >= 0) m.c1461 = Constraint(expr= - m.b663 + m.b672", "m.b682 = Var(within=Binary,bounds=(0,1),initialize=0) m.b683 = Var(within=Binary,bounds=(0,1),initialize=0) m.b684 = Var(within=Binary,bounds=(0,1),initialize=0) m.b685", "- m.x414 == 0) m.c622 = Constraint(expr= m.x109 - m.x412", "m.b671) <= 0) m.c789 = Constraint(expr=(m.x567/(0.001 + 0.999*m.b672) - 1.25*log(1", "0) m.c188 = Constraint(expr= m.x251 + 4.45628648004517*m.b608 <= 4.45628648004517) m.c189", "0.75*log(1 + m.x398/(0.001 + 0.999*m.b647)))*(0.001 + 0.999* m.b647) <= 0)", "Constraint(expr= m.b743 + m.b745 <= 1) m.c1219 = Constraint(expr= m.b743", "0) m.c597 = Constraint(expr= m.x132 - m.x459 - m.x462 ==", "- m.x550 - m.x553 == 0) m.c827 = Constraint(expr= m.x200", "m.x353 == 0) m.c318 = Constraint(expr= m.x354 == 0) m.c319", "= Constraint(expr= m.b668 - m.b758 <= 0) m.c1356 = Constraint(expr=", "0) m.c1417 = Constraint(expr= m.b604 - m.b616 >= 0) m.c1418", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x219 = Var(within=Reals,bounds=(0,None),initialize=0) m.x220 = Var(within=Reals,bounds=(0,None),initialize=0) m.x221 =", "<= 0) m.c1298 = Constraint(expr= m.b611 - m.b701 <= 0)", "m.x467 == 0) m.c618 = Constraint(expr= m.x468 == 0) m.c619", "7*m.b690 - 6*m.b691 - 6*m.b692 - 9*m.b693 - 4*m.b694 -", "Constraint(expr= m.x235 + 4.45628648004517*m.b601 <= 4.45628648004517) m.c107 = Constraint(expr= -", "0) m.c1291 = Constraint(expr= - m.b602 - m.b603 + m.b604", "m.x303 - 15*m.b624 <= 0) m.c328 = Constraint(expr= m.x304 -", "Constraint(expr= m.b773 + m.b774 <= 1) m.c1280 = Constraint(expr= m.b774", "m.b633 >= 0) m.c1435 = Constraint(expr= m.b616 - m.b634 >=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x419 = Var(within=Reals,bounds=(0,None),initialize=0) m.x420 = Var(within=Reals,bounds=(0,None),initialize=0) m.x421 = 
Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= m.x271 - 2.54515263975353*m.b619 <= 0) m.c275 = Constraint(expr=", "m.x208 - m.x586 - m.x589 == 0) m.c884 = Constraint(expr=", "m.c895 = Constraint(expr= m.x589 + 13.5*m.b682 <= 13.5) m.c896 =", "Constraint(expr= m.b722 + m.b724 <= 1) m.c1180 = Constraint(expr= m.b723", "0) m.c340 = Constraint(expr=(m.x358/(0.001 + 0.999*m.b628) - 1.1*log(1 + m.x310/(0.001", "= Constraint(expr=(m.x434/(0.001 + 0.999*m.b638) - 0.65*log(1 + m.x380/(0.001 + 0.999*m.b638)))*(0.001", "<= 0) m.c1102 = Constraint(expr= m.b684 - m.b685 <= 0)", "= Constraint(expr= m.b698 + m.b700 <= 1) m.c1129 = Constraint(expr=", "m.c815 = Constraint(expr=(m.x572/(0.001 + 0.999*m.b674) - 0.9*log(1 + m.x548/(0.001 +", "+ m.x792 == 0) m.c940 = Constraint(expr= 4*m.b703 + m.x793", "= Constraint(expr= m.b746 + m.b747 <= 1) m.c1226 = Constraint(expr=", "= Constraint(expr= m.x449 + 9*m.b644 <= 9) m.c555 = Constraint(expr=", "- m.x259 == 0) m.c182 = Constraint(expr= m.x44 - m.x278", "m.b773 + m.b775 <= 1) m.c1279 = Constraint(expr= m.b773 +", "0) m.c36 = Constraint(expr= m.x138 - m.x141 - m.x144 ==", "m.c857 = Constraint(expr= m.x527 - 0.994083415506506*m.b677 <= 0) m.c858 =", "Constraint(expr= m.x340 - 1.26558121681553*m.b619 <= 0) m.c281 = Constraint(expr= m.x341", "<= 0) m.c1323 = Constraint(expr= - m.b635 + m.b636 -", "m.x214 = Var(within=Reals,bounds=(0,None),initialize=0) m.x215 = Var(within=Reals,bounds=(0,None),initialize=0) m.x216 = Var(within=Reals,bounds=(0,None),initialize=0) m.x217", "m.c454 = Constraint(expr= m.x367 + 1.26558121681553*m.b637 <= 1.26558121681553) m.c455 =", "= Constraint(expr= m.x64 - m.x319 - m.x325 == 0) m.c383", "m.x511 == 0) m.c764 = Constraint(expr= m.x170 - m.x512 -", "m.x231 - 4.45628648004517*m.b600 <= 0) m.c103 = Constraint(expr= m.x232 -", "= Constraint(expr= m.b752 + m.b753 <= 1) m.c1236 = Constraint(expr=", "+ m.b638 >= 0) m.c1395 = Constraint(expr= - m.b621 +", "m.b694 <= 1) m.c1117 = Constraint(expr= 
m.b692 + m.b693 <=", "= Constraint(expr= m.x14 - m.x230 - m.x233 == 0) m.c93", "m.x842 = Var(within=Reals,bounds=(None,None),initialize=0) m.x843 = Var(within=Reals,bounds=(None,None),initialize=0) m.x844 = Var(within=Reals,bounds=(None,None),initialize=0) m.x845", "== 0) m.c768 = Constraint(expr= m.x180 - m.x537 - m.x540", "Constraint(expr= m.x32 - m.x248 - m.x251 == 0) m.c177 =", "= Constraint(expr= m.b656 - m.b658 <= 0) m.c1075 = Constraint(expr=", "m.c560 = Constraint(expr= m.x401 == 0) m.c561 = Constraint(expr= m.x402", "m.x469 == 0) m.c626 = Constraint(expr= m.x410 - 3.04984759446376*m.b653 <=", "== 0) m.c822 = Constraint(expr= m.x576 == 0) m.c823 =", "Var(within=Binary,bounds=(0,1),initialize=0) m.b683 = Var(within=Binary,bounds=(0,1),initialize=0) m.b684 = Var(within=Binary,bounds=(0,1),initialize=0) m.b685 = Var(within=Binary,bounds=(0,1),initialize=0)", "Constraint(expr= m.x234 + 4.45628648004517*m.b600 <= 4.45628648004517) m.c106 = Constraint(expr= m.x235", "0) m.c300 = Constraint(expr= m.x297 - 15*m.b621 <= 0) m.c301", "+ m.b757 <= 1) m.c1247 = Constraint(expr= m.b758 + m.b759", "m.x345 = Var(within=Reals,bounds=(0,None),initialize=0) m.x346 = Var(within=Reals,bounds=(0,None),initialize=0) m.x347 = Var(within=Reals,bounds=(0,None),initialize=0) m.x348", "== 0) m.c139 = Constraint(expr= m.x247 == 0) m.c140 =", "0) m.c7 = Constraint(expr= - m.x13 - m.x16 + m.x19", "<= 0) m.c546 = Constraint(expr= m.x393 - 9*m.b645 <= 0)", "- m.x577 == 0) m.c830 = Constraint(expr= m.x548 - 0.705049913072943*m.b674", "1) m.c1122 = Constraint(expr= m.b695 + m.b697 <= 1) m.c1123", "Var(within=Reals,bounds=(None,None),initialize=0) m.x790 = Var(within=Reals,bounds=(None,None),initialize=0) m.x791 = Var(within=Reals,bounds=(None,None),initialize=0) m.x792 = Var(within=Reals,bounds=(None,None),initialize=0)", "m.b744 + m.b745 <= 1) m.c1223 = Constraint(expr= m.b746 +", "Var(within=Binary,bounds=(0,1),initialize=0) m.b703 = Var(within=Binary,bounds=(0,1),initialize=0) m.b704 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b705 = Var(within=Binary,bounds=(0,1),initialize=0)", "== 0) m.c110 = Constraint(expr= m.x239 == 0) m.c111 =", "= Constraint(expr= m.x136 - m.x466 - m.x469 == 0) m.c626", "= Constraint(expr= m.x372 == 0) m.c469 = Constraint(expr= m.x373 ==", "0) m.c901 = Constraint(expr= m.x565 == 0) m.c902 = Constraint(expr=", "15*m.b627 <= 15) m.c358 = Constraint(expr= m.x313 + 15*m.b628 <=", "m.c131 = Constraint(expr= m.x263 + 3.34221486003388*m.b602 <= 3.34221486003388) m.c132 =", "m.c710 = Constraint(expr= m.x497 + 0.940066550763924*m.b662 <= 0.940066550763924) m.c711 =", "== 0) m.c674 = Constraint(expr= m.x143 - m.x476 - m.x479", "= Constraint(expr=(m.x525/(0.001 + 0.999*m.b666) - 1.5*log(1 + m.x501/(0.001 + 0.999*m.b666)))*(0.001", "+ m.b600 == 1) m.c1375 = Constraint(expr= m.b598 + m.b601", "<= 0) m.c1056 = Constraint(expr= m.b638 - m.b640 <= 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x527 = Var(within=Reals,bounds=(0,None),initialize=0) m.x528 = Var(within=Reals,bounds=(0,None),initialize=0) m.x529 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c390 = Constraint(expr= m.x318 - 1.83548069293539*m.b630 <= 0) m.c391", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x490 = Var(within=Reals,bounds=(0,None),initialize=0) m.x491 = Var(within=Reals,bounds=(0,None),initialize=0) m.x492 =", "m.b613 <= 0) m.c1031 = Constraint(expr= m.b614 - m.b615 <=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x593 = Var(within=Reals,bounds=(0,None),initialize=0) m.x594 = Var(within=Reals,bounds=(0,None),initialize=0) m.x595 =", "m.c505 = Constraint(expr= - m.x388 + m.x442 == 0) m.c506", "Var(within=Reals,bounds=(0,None),initialize=0) m.x385 = Var(within=Reals,bounds=(0,None),initialize=0) m.x386 = Var(within=Reals,bounds=(0,None),initialize=0) m.x387 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x566 = Var(within=Reals,bounds=(0,None),initialize=0) m.x567 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x568 =", "15*m.b622 <= 0) m.c302 = Constraint(expr= m.x299 + 15*m.b620 <=", "m.x3 - m.x6 - m.x9 == 0) m.c4 = Constraint(expr=", "0) m.c994 = Constraint(expr= 8*m.b757 + m.x847 == 0) m.c995", "m.c1375 = Constraint(expr= m.b598 + m.b601 == 1) m.c1376 =", "m.b627 <= 0) m.c1044 = Constraint(expr= m.b626 - m.b628 <=", "m.b650 - m.b651 + m.b652 - m.b742 <= 0) m.c1340", "<= 0) m.c391 = Constraint(expr= m.x319 - 1.83548069293539*m.b631 <= 0)", "Constraint(expr= m.x109 - m.x412 - m.x415 == 0) m.c623 =", "m.x562 - 15*m.b685 <= 0) m.c914 = Constraint(expr= m.x563 +", "<= 0) m.c104 = Constraint(expr= m.x233 + 4.45628648004517*m.b599 <= 4.45628648004517)", "- m.b634 <= 0) m.c1052 = Constraint(expr= m.b635 - m.b636", "m.c611 = Constraint(expr=(m.x464/(0.001 + 0.999*m.b653) - 0.85*log(1 + m.x410/(0.001 +", "3*m.b756 + m.x846 == 0) m.c994 = Constraint(expr= 8*m.b757 +", "Constraint(expr= 3*m.b718 + m.x808 == 0) m.c956 = Constraint(expr= 7*m.b719", "m.x221 + 40*m.b599 <= 40) m.c99 = Constraint(expr= m.x222 +", "m.c125 = Constraint(expr= m.x239 + 4.45628648004517*m.b602 <= 4.45628648004517) m.c126 =", "m.b606) <= 0) m.c136 = Constraint(expr=(m.x268/(0.001 + 0.999*m.b607) - 1.5*log(1", "m.b719 = Var(within=Binary,bounds=(0,1),initialize=0) m.b720 = Var(within=Binary,bounds=(0,1),initialize=0) m.b721 = Var(within=Binary,bounds=(0,1),initialize=0) m.b722", "== 0) m.c166 = Constraint(expr= - 0.5*m.x256 + m.x280 ==", "Constraint(expr= m.b681 - m.b682 <= 0) m.c1100 = Constraint(expr= m.b683", "<= 0) m.c390 = Constraint(expr= m.x318 - 1.83548069293539*m.b630 <= 0)", "== 0) m.c446 = Constraint(expr= m.x116 - m.x428 - m.x431", "== 0) m.c482 = Constraint(expr= m.x119 - m.x434 - m.x437", "m.c460 = Constraint(expr= m.x433 + 0.572481933717686*m.b637 <= 0.572481933717686) m.c461 =", "<= 0) m.c1081 = Constraint(expr= m.b663 - m.b664 <= 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x329 = Var(within=Reals,bounds=(0,None),initialize=0) m.x330 
= Var(within=Reals,bounds=(0,None),initialize=0) m.x331 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x458 - 1.11894339953103*m.b650 <= 0) m.c606 = Constraint(expr= m.x459", "<= 1) m.c1202 = Constraint(expr= m.b735 + m.b736 <= 1)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x453 = Var(within=Reals,bounds=(0,None),initialize=0) m.x454 = Var(within=Reals,bounds=(0,None),initialize=0) m.x455 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c458 = Constraint(expr= m.x431 + 0.572481933717686*m.b635 <= 0.572481933717686) m.c459 =", "Constraint(expr= m.x564 == 0) m.c901 = Constraint(expr= m.x565 == 0)", "- m.x26 - m.x29 - m.x32 == 0) m.c12 =", "= Constraint(expr= m.b710 + m.b711 <= 1) m.c1154 = Constraint(expr=", "<= 30) m.c780 = Constraint(expr= m.x516 + 30*m.b669 <= 30)", "1) m.c1252 = Constraint(expr= m.b759 + m.b760 <= 1) m.c1253", "0) m.c1387 = Constraint(expr= - m.b607 + m.b619 >= 0)", "m.b710 = Var(within=Binary,bounds=(0,1),initialize=0) m.b711 = Var(within=Binary,bounds=(0,1),initialize=0) m.b712 = Var(within=Binary,bounds=(0,1),initialize=0) m.b713", "0) m.c1455 = Constraint(expr= m.b627 - m.b654 >= 0) m.c1456", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x576 = Var(within=Reals,bounds=(0,None),initialize=0) m.x577 = Var(within=Reals,bounds=(0,None),initialize=0) m.x578 =", "Var(within=Reals,bounds=(None,None),initialize=0) m.x835 = Var(within=Reals,bounds=(None,None),initialize=0) m.x836 = Var(within=Reals,bounds=(None,None),initialize=0) m.x837 = Var(within=Reals,bounds=(None,None),initialize=0)", "m.c622 = Constraint(expr= m.x109 - m.x412 - m.x415 == 0)", "0) m.c147 = Constraint(expr= m.x42 - m.x267 - m.x273 ==", "= Constraint(expr= 5*m.b747 + m.x837 == 0) m.c985 = Constraint(expr=", "- 1.1*log(1 + m.x310/(0.001 + 0.999*m.b628)))*(0.001 + 0.999* m.b628) <=", "m.x795 = Var(within=Reals,bounds=(None,None),initialize=0) m.x796 = Var(within=Reals,bounds=(None,None),initialize=0) m.x797 = 
Var(within=Reals,bounds=(None,None),initialize=0) m.x798", "- m.b731 <= 0) m.c1329 = Constraint(expr= - m.b641 +", "0) m.c1362 = Constraint(expr= - m.b674 + m.b675 - m.b765", "0) m.c643 = Constraint(expr= m.x475 == 0) m.c644 = Constraint(expr=", "= Constraint(expr= m.b689 + m.b691 <= 1) m.c1111 = Constraint(expr=", "m.x123 - m.x441 - m.x444 == 0) m.c517 = Constraint(expr=", "Constraint(expr= m.x379 + 20*m.b631 <= 20) m.c401 = Constraint(expr= m.x416", "Constraint(expr= m.b603 - m.b604 <= 0) m.c1022 = Constraint(expr= m.b605", "m.x109 = Var(within=Reals,bounds=(0,None),initialize=0) m.x110 = Var(within=Reals,bounds=(0,None),initialize=0) m.x111 = Var(within=Reals,bounds=(0,None),initialize=0) m.x112", "Constraint(expr= m.x287 == 0) m.c207 = Constraint(expr= m.x288 == 0)", "= Constraint(expr= m.x129 - m.x453 - m.x456 == 0) m.c571", "== 0) m.c262 = Constraint(expr= m.x277 == 0) m.c263 =", "+ 400*m.x204 + 430*m.x205 + 290*m.x206 + 300*m.x207 + 240*m.x208", "m.x152 - m.x155 - m.x158 == 0) m.c42 = Constraint(expr=", "2*m.b706 - 5*m.b707 - 6*m.b708 - 7*m.b709 - 2*m.b710 -", "m.x178 - m.x529 - m.x535 == 0) m.c854 = Constraint(expr=", "m.x312 = Var(within=Reals,bounds=(0,None),initialize=0) m.x313 = Var(within=Reals,bounds=(0,None),initialize=0) m.x314 = Var(within=Reals,bounds=(0,None),initialize=0) m.x315", "- m.x142 - m.x145 == 0) m.c38 = Constraint(expr= -", "m.x77 = Var(within=Reals,bounds=(0,None),initialize=0) m.x78 = Var(within=Reals,bounds=(0,None),initialize=0) m.x79 = Var(within=Reals,bounds=(0,None),initialize=0) m.x80", "- m.x391 == 0) m.c515 = Constraint(expr= m.x122 - m.x440", "- m.b603 + m.b612 + m.b615 >= 0) m.c1378 =", "m.x61 - m.x310 - m.x313 == 0) m.c350 = Constraint(expr=", "m.x103 - m.x400 - m.x403 == 0) m.c569 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x178 = Var(within=Reals,bounds=(0,None),initialize=0) m.x179 = Var(within=Reals,bounds=(0,None),initialize=0) m.x180 =", "Constraint(expr= 5*m.b752 + m.x842 == 0) m.c990 = 
Constraint(expr= 8*m.b753", "0) m.c1298 = Constraint(expr= m.b611 - m.b701 <= 0) m.c1299", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b674 = Var(within=Binary,bounds=(0,1),initialize=0) m.b675 = Var(within=Binary,bounds=(0,1),initialize=0) m.b676 =", "0.999*m.b611)))*(0.001 + 0.999* m.b611) <= 0) m.c204 = Constraint(expr=(m.x315/(0.001 +", "m.x352 - m.x355 == 0) m.c326 = Constraint(expr= m.x302 -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x205 = Var(within=Reals,bounds=(0,None),initialize=0) m.x206 = Var(within=Reals,bounds=(0,None),initialize=0) m.x207 =", "= Constraint(expr= m.b613 - m.b631 >= 0) m.c1433 = Constraint(expr=", "0) m.c419 = Constraint(expr= m.x113 - m.x422 - m.x425 ==", "m.b717 + m.b718 <= 1) m.c1169 = Constraint(expr= m.b719 +", "== 0) m.c441 = Constraint(expr= m.x432 == 0) m.c442 =", "== 0) m.c701 = Constraint(expr= m.x161 - m.x494 - m.x497", "m.c840 = Constraint(expr= m.x576 + 0.480234946352917*m.b675 <= 0.480234946352917) m.c841 =", "Constraint(expr= m.b747 + m.b748 <= 1) m.c1229 = Constraint(expr= m.b749", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x112 = Var(within=Reals,bounds=(0,None),initialize=0) m.x113 = Var(within=Reals,bounds=(0,None),initialize=0) m.x114 =", "m.b641 = Var(within=Binary,bounds=(0,1),initialize=0) m.b642 = Var(within=Binary,bounds=(0,1),initialize=0) m.b643 = Var(within=Binary,bounds=(0,1),initialize=0) m.b644", "m.x810 == 0) m.c958 = Constraint(expr= 9*m.b721 + m.x811 ==", "m.x150 + m.x153 == 0) m.c40 = Constraint(expr= - m.x148", "- m.x11 - m.x14 + m.x17 == 0) m.c6 =", "= Constraint(expr= - 0.75*m.x494 + m.x518 == 0) m.c693 =", "m.b756 + m.b757 <= 1) m.c1247 = Constraint(expr= m.b758 +", "m.b751 <= 0) m.c1349 = Constraint(expr= m.b662 - m.b752 <=", "= Constraint(expr= m.x464 - 1.18887736200171*m.b653 <= 0) m.c633 = Constraint(expr=", "Constraint(expr= m.x532 == 0) m.c728 = Constraint(expr= m.x164 - m.x500", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x432 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x433 = Var(within=Reals,bounds=(0,None),initialize=0) m.x434 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x151 = Var(within=Reals,bounds=(0,None),initialize=0) m.x152 = Var(within=Reals,bounds=(0,None),initialize=0) m.x153 =", "- m.x28 - m.x31 - m.x34 == 0) m.c14 =", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b600 = Var(within=Binary,bounds=(0,1),initialize=0) m.b601 = Var(within=Binary,bounds=(0,1),initialize=0) m.b602 =", "<= 0) m.c1033 = Constraint(expr= m.b615 - m.b616 <= 0)", "1.26558121681553*m.b637 <= 0) m.c452 = Constraint(expr= m.x365 + 1.26558121681553*m.b635 <=", "Constraint(expr= m.x589 == 0) m.c878 = Constraint(expr= m.x188 - m.x554", "== 0) m.c707 = Constraint(expr= m.x494 - 0.940066550763924*m.b662 <= 0)", "0) m.c721 = Constraint(expr=(m.x526/(0.001 + 0.999*m.b667) - 1.5*log(1 + m.x502/(0.001", "= Constraint(expr= m.b702 + m.b703 <= 1) m.c1137 = Constraint(expr=", "- m.x139 == 0) m.c35 = Constraint(expr= m.x137 - m.x140", "1) m.c1240 = Constraint(expr= m.b753 + m.b754 <= 1) m.c1241", "m.c930 = Constraint(expr= 9*m.b693 + m.x783 == 0) m.c931 =", "0) m.c246 = Constraint(expr= m.x291 - 3.34221486003388*m.b615 <= 0) m.c247", "+ 1.18887736200171*m.b659 <= 1.18887736200171) m.c684 = Constraint(expr= m.x480 + 1.18887736200171*m.b660", "Constraint(expr= m.x506 - 0.940066550763924*m.b668 <= 0) m.c771 = Constraint(expr= m.x507", "m.x309 - m.x312 == 0) m.c349 = Constraint(expr= m.x61 -", "Var(within=Reals,bounds=(0,20),initialize=0) m.x87 = Var(within=Reals,bounds=(0,20),initialize=0) m.x88 = Var(within=Reals,bounds=(0,20),initialize=0) m.x89 = Var(within=Reals,bounds=(0,20),initialize=0)", "= Constraint(expr= m.b758 + m.b760 <= 1) m.c1249 = Constraint(expr=", "m.x316 = Var(within=Reals,bounds=(0,None),initialize=0) m.x317 = Var(within=Reals,bounds=(0,None),initialize=0) m.x318 = Var(within=Reals,bounds=(0,None),initialize=0) m.x319", "<= 0) m.c259 = Constraint(expr=(m.x340/(0.001 + 
0.999*m.b619) - log(1 +", "m.x334 == 0) m.c239 = Constraint(expr= m.x50 - m.x290 -", "- 15*m.b625 <= 0) m.c329 = Constraint(expr= m.x305 + 15*m.b623", "= Constraint(expr= m.b677 - m.b679 <= 0) m.c1096 = Constraint(expr=", "= Constraint(expr= m.b610 - m.b628 >= 0) m.c1430 = Constraint(expr=", "- m.x256 - m.x259 == 0) m.c182 = Constraint(expr= m.x44", "= Constraint(expr= m.b596 - m.b597 <= 0) m.c1014 = Constraint(expr=", "0) m.c537 = Constraint(expr= m.x450 == 0) m.c538 = Constraint(expr=", "= Constraint(expr= m.b768 + m.b769 <= 1) m.c1271 = Constraint(expr=", "m.c803 = Constraint(expr= m.x542 - 0.705049913072943*m.b671 <= 0) m.c804 =", "== 0) m.c620 = Constraint(expr= m.x107 - m.x410 - m.x413", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x545 = Var(within=Reals,bounds=(0,None),initialize=0) m.x546 = Var(within=Reals,bounds=(0,None),initialize=0) m.x547 =", "Var(within=Binary,bounds=(0,1),initialize=0) m.b764 = Var(within=Binary,bounds=(0,1),initialize=0) m.b765 = Var(within=Binary,bounds=(0,1),initialize=0) m.b766 = Var(within=Binary,bounds=(0,1),initialize=0)", "1) m.c1243 = Constraint(expr= m.b755 + m.b756 <= 1) m.c1244", "0.9*log(1 + m.x550/(0.001 + 0.999*m.b676)))*(0.001 + 0.999* m.b676) <= 0)", "= Constraint(expr= m.x206 - m.x584 - m.x587 == 0) m.c882", "m.x557 == 0) m.c873 = Constraint(expr= m.x558 == 0) m.c874", "+ m.x800 == 0) m.c948 = Constraint(expr= 5*m.b711 + m.x801", "m.c477 = Constraint(expr= m.x84 - m.x369 - m.x372 == 0)", "m.c989 = Constraint(expr= 5*m.b752 + m.x842 == 0) m.c990 =", "Constraint(expr= m.x379 == 0) m.c377 = Constraint(expr= m.x419 == 0)", "m.c1425 = Constraint(expr= m.b609 - m.b624 >= 0) m.c1426 =", "m.x862 = Var(within=Reals,bounds=(None,None),initialize=0) m.x863 = Var(within=Reals,bounds=(None,None),initialize=0) m.x864 = Var(within=Reals,bounds=(None,None),initialize=0) m.x865", "m.x528 - m.x534 == 0) m.c853 = Constraint(expr= m.x178 -", "0) m.c652 = Constraint(expr= m.x148 - m.x484 - m.x487 ==", "0) m.c944 = 
Constraint(expr= 5*m.b707 + m.x797 == 0) m.c945", "Constraint(expr= - m.x393 + m.x447 == 0) m.c532 = Constraint(expr=", "= Constraint(expr= 3*m.b722 + m.x812 == 0) m.c960 = Constraint(expr=", "Constraint(expr= m.x39 - m.x261 - m.x264 == 0) m.c121 =", "m.x263 + 3.34221486003388*m.b602 <= 3.34221486003388) m.c132 = Constraint(expr= m.x264 +", "m.c233 = Constraint(expr= m.x293 == 0) m.c234 = Constraint(expr= m.x294", "<= 0) m.c404 = Constraint(expr= m.x419 + 20*m.b629 <= 20)", "- m.x319 - m.x325 == 0) m.c383 = Constraint(expr= m.x86", "== 0) m.c315 = Constraint(expr= m.x306 == 0) m.c316 =", "Constraint(expr= 2*m.b725 + m.x815 == 0) m.c963 = Constraint(expr= 6*m.b726", "<= 0) m.c1099 = Constraint(expr= m.b681 - m.b682 <= 0)", "m.b760 <= 0) m.c1358 = Constraint(expr= m.b671 - m.b761 <=", "m.c1204 = Constraint(expr= m.b735 + m.b736 <= 1) m.c1205 =", "m.b644 - m.b734 <= 0) m.c1332 = Constraint(expr= - m.b644", "Constraint(expr= - 0.75*m.x237 + m.x261 == 0) m.c109 = Constraint(expr=", "m.c527 = Constraint(expr= m.x443 + 9*m.b641 <= 9) m.c528 =", "Constraint(expr= m.x41 - m.x269 - m.x275 == 0) m.c267 =", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b688 = Var(within=Binary,bounds=(0,1),initialize=0) m.b689 = Var(within=Binary,bounds=(0,1),initialize=0) m.b690 =", "== 0) m.c138 = Constraint(expr= m.x246 == 0) m.c139 =", "7*m.b709 - 2*m.b710 - 5*m.b711 - 2*m.b712 - 4*m.b713 -", "<= 13.5) m.c310 = Constraint(expr= m.x349 + 13.5*m.b622 <= 13.5)", "m.c1467 = Constraint(expr= m.b654 - m.b657 >= 0) m.c1468 =", "= Constraint(expr= 10*m.b695 + m.x785 == 0) m.c933 = Constraint(expr=", "== 0) m.c419 = Constraint(expr= m.x113 - m.x422 - m.x425", "Var(within=Reals,bounds=(0,None),initialize=0) m.x16 = Var(within=Reals,bounds=(0,None),initialize=0) m.x17 = Var(within=Reals,bounds=(0,None),initialize=0) m.x18 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b617 - m.b619 <= 0) m.c1036 = Constraint(expr= m.b618 -", "m.c58 = Constraint(expr= m.x217 == 0) m.c59 = Constraint(expr= 
m.x227", "Constraint(expr= m.b624 - m.b645 >= 0) m.c1447 = Constraint(expr= m.b625", "m.c13 = Constraint(expr= m.x25 - m.x28 - m.x31 - m.x34", "Var(within=Reals,bounds=(0,None),initialize=0) m.x355 = Var(within=Reals,bounds=(0,None),initialize=0) m.x356 = Var(within=Reals,bounds=(0,None),initialize=0) m.x357 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x585 - 13.5*m.b681 <= 0) m.c892 = Constraint(expr= m.x586 -", "m.x780 == 0) m.c928 = Constraint(expr= 6*m.b691 + m.x781 ==", "+ m.b691 <= 1) m.c1113 = Constraint(expr= m.b689 + m.b691", "<= 0) m.c192 = Constraint(expr= m.x255 - 30*m.b609 <= 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x378 = Var(within=Reals,bounds=(0,None),initialize=0) m.x379 = Var(within=Reals,bounds=(0,None),initialize=0) m.x380 =", "= Constraint(expr= m.x40 - m.x262 - m.x265 == 0) m.c122", "+ 15*m.b670 <= 15) m.c788 = Constraint(expr=(m.x566/(0.001 + 0.999*m.b671) -", "= Constraint(expr= 10*m.b699 + m.x789 == 0) m.c937 = Constraint(expr=", "Constraint(expr= m.b759 + m.b760 <= 1) m.c1251 = Constraint(expr= m.b758", "- log(1 + m.x529/(0.001 + 0.999*m.b679)))*(0.001 + 0.999*m.b679) <= 0)", "<= 0) m.c558 = Constraint(expr=(m.x453/(0.001 + 0.999*m.b648) - 0.75*log(1 +", "m.x531 = Var(within=Reals,bounds=(0,None),initialize=0) m.x532 = Var(within=Reals,bounds=(0,None),initialize=0) m.x533 = Var(within=Reals,bounds=(0,None),initialize=0) m.x534", "= Constraint(expr= m.x13 - m.x226 - m.x229 == 0) m.c68", "m.c1305 = Constraint(expr= - m.b617 + m.b618 - m.b708 <=", "<= 0) m.c1015 = Constraint(expr= m.b597 - m.b598 <= 0)", "= Constraint(expr= m.x594 == 0) m.c904 = Constraint(expr= m.x595 ==", "== 0) m.c568 = Constraint(expr= m.x103 - m.x400 - m.x403", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x362 = Var(within=Reals,bounds=(0,None),initialize=0) m.x363 = Var(within=Reals,bounds=(0,None),initialize=0) m.x364 =", "Constraint(expr= - m.b610 + m.b622 + m.b625 + m.b628 >=", "m.c105 = Constraint(expr= m.x234 + 
4.45628648004517*m.b600 <= 4.45628648004517) m.c106 =", "= Constraint(expr= m.x486 + 0.78338879230327*m.b657 <= 0.78338879230327) m.c664 = Constraint(expr=", ">= 0) m.c1379 = Constraint(expr= - m.b611 + m.b629 >=", "m.b625 + m.b628 >= 0) m.c1394 = Constraint(expr= - m.b620", "1.26558121681553*m.b617 <= 0) m.c279 = Constraint(expr= m.x339 - 1.26558121681553*m.b618 <=", "+ m.x381/(0.001 + 0.999*m.b639)))*(0.001 + 0.999* m.b639) <= 0) m.c466", "m.x8 = Var(within=Reals,bounds=(0,None),initialize=0) m.x9 = Var(within=Reals,bounds=(0,None),initialize=0) m.x10 = Var(within=Reals,bounds=(0,None),initialize=0) m.x11", "- m.x539 == 0) m.c768 = Constraint(expr= m.x180 - m.x537", "- m.b709 <= 0) m.c1307 = Constraint(expr= m.b620 - m.b710", "m.b700 <= 1) m.c1133 = Constraint(expr= m.b701 + m.b702 <=", "m.x219 = Var(within=Reals,bounds=(0,None),initialize=0) m.x220 = Var(within=Reals,bounds=(0,None),initialize=0) m.x221 = Var(within=Reals,bounds=(0,None),initialize=0) m.x222", "+ m.b717 <= 1) m.c1166 = Constraint(expr= m.b717 + m.b718", "m.c64 = Constraint(expr= m.x7 - m.x214 - m.x217 == 0)", "<= 1) m.c1180 = Constraint(expr= m.b723 + m.b724 <= 1)", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b732 = Var(within=Binary,bounds=(0,1),initialize=0) m.b733 = Var(within=Binary,bounds=(0,1),initialize=0) m.b734 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x208 = Var(within=Reals,bounds=(0,None),initialize=0) m.x209 = Var(within=Reals,bounds=(0,None),initialize=0) m.x210 =", "m.b601 - m.b691 <= 0) m.c1289 = Constraint(expr= m.b602 -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x298 = Var(within=Reals,bounds=(0,None),initialize=0) m.x299 = Var(within=Reals,bounds=(0,None),initialize=0) m.x300 =", "Var(within=Reals,bounds=(None,None),initialize=0) m.x834 = Var(within=Reals,bounds=(None,None),initialize=0) m.x835 = Var(within=Reals,bounds=(None,None),initialize=0) m.x836 = Var(within=Reals,bounds=(None,None),initialize=0)", "m.b724 <= 0) m.c1322 = Constraint(expr= 
m.b635 - m.b725 <=", "0.690184503917672*m.b679 <= 0) m.c866 = Constraint(expr= m.x581 + 0.690184503917672*m.b677 <=", "Constraint(expr= - m.b665 + m.b666 - m.b756 <= 0) m.c1354", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x25 = Var(within=Reals,bounds=(0,None),initialize=0) m.x26 = Var(within=Reals,bounds=(0,None),initialize=0) m.x27 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x85 = Var(within=Reals,bounds=(0,None),initialize=0) m.x86 = Var(within=Reals,bounds=(0,20),initialize=0) m.x87 =", "== 0) m.c382 = Constraint(expr= m.x64 - m.x319 - m.x325", "m.x582 + 0.690184503917672*m.b678 <= 0.690184503917672) m.c868 = Constraint(expr= m.x583 +", "+ 0.999*m.b638) - 0.65*log(1 + m.x380/(0.001 + 0.999*m.b638)))*(0.001 + 0.999*", "= Constraint(expr= m.x180 - m.x537 - m.x540 == 0) m.c769", "- m.x377 == 0) m.c384 = Constraint(expr= m.x87 - m.x375", "Constraint(expr= m.x463 + 1.11894339953103*m.b652 <= 1.11894339953103) m.c611 = Constraint(expr=(m.x464/(0.001 +", "- m.b676 <= 0) m.c1094 = Constraint(expr= m.b677 - m.b678", "- 2.54515263975353*m.b618 <= 0) m.c274 = Constraint(expr= m.x271 - 2.54515263975353*m.b619", "Var(within=Reals,bounds=(0,None),initialize=0) m.x272 = Var(within=Reals,bounds=(0,None),initialize=0) m.x273 = Var(within=Reals,bounds=(0,None),initialize=0) m.x274 = Var(within=Reals,bounds=(0,None),initialize=0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x455 = Var(within=Reals,bounds=(0,None),initialize=0) m.x456 = Var(within=Reals,bounds=(0,None),initialize=0) m.x457 = Var(within=Reals,bounds=(0,None),initialize=0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x23 = Var(within=Reals,bounds=(0,None),initialize=0) m.x24 = Var(within=Reals,bounds=(0,None),initialize=0) m.x25 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x22 = Var(within=Reals,bounds=(0,None),initialize=0) m.x23 = Var(within=Reals,bounds=(0,None),initialize=0) m.x24 = Var(within=Reals,bounds=(0,None),initialize=0) m.x25", 
"Var(within=Reals,bounds=(0,None),initialize=0) m.x470 = Var(within=Reals,bounds=(0,None),initialize=0) m.x471 = Var(within=Reals,bounds=(0,None),initialize=0) m.x472 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x520 - 0.705049913072943*m.b664 <= 0) m.c716 = Constraint(expr= m.x521 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x417 = Var(within=Reals,bounds=(0,None),initialize=0) m.x418 = Var(within=Reals,bounds=(0,None),initialize=0) m.x419 =", "= Constraint(expr= m.b677 - m.b678 <= 0) m.c1095 = Constraint(expr=", "Constraint(expr= m.x438 == 0) m.c475 = Constraint(expr= m.x439 == 0)", "- m.x293 == 0) m.c240 = Constraint(expr= m.x51 - m.x291", "- 3.34221486003388*m.b603 <= 0) m.c130 = Constraint(expr= m.x262 - 3.34221486003388*m.b604", "3.04984759446376*m.b648 <= 3.04984759446376) m.c577 = Constraint(expr= m.x403 + 3.04984759446376*m.b649 <=", "<= 1) m.c1155 = Constraint(expr= m.b710 + m.b712 <= 1)", "Var(within=Reals,bounds=(None,None),initialize=0) m.x809 = Var(within=Reals,bounds=(None,None),initialize=0) m.x810 = Var(within=Reals,bounds=(None,None),initialize=0) m.x811 = Var(within=Reals,bounds=(None,None),initialize=0)", "4*m.b694 - 10*m.b695 - 9*m.b696 - 5*m.b697 - 6*m.b698 -", "0) m.c949 = Constraint(expr= 2*m.b712 + m.x802 == 0) m.c950", "<= 1.32154609891348) m.c255 = Constraint(expr= m.x333 + 1.32154609891348*m.b615 <= 1.32154609891348)", "0) m.c570 = Constraint(expr= m.x129 - m.x453 - m.x456 ==", "9*m.b696 + m.x786 == 0) m.c934 = Constraint(expr= 5*m.b697 +", "== 0) m.c18 = Constraint(expr= m.x45 - m.x54 - m.x57", "+ m.x829 == 0) m.c977 = Constraint(expr= 2*m.b740 + m.x830", "m.b710 + m.b712 <= 1) m.c1153 = Constraint(expr= m.b710 +", "m.x138 - m.x141 - m.x144 == 0) m.c37 = Constraint(expr=", "m.c766 = Constraint(expr= m.x172 - m.x514 - m.x517 == 0)", "m.b709 <= 1) m.c1147 = Constraint(expr= m.b707 + m.b708 <=", "0) m.c585 = Constraint(expr=(m.x459/(0.001 + 0.999*m.b651) - 0.8*log(1 + m.x405/(0.001", "== 0) m.c820 = Constraint(expr= 
m.x553 == 0) m.c821 =", "Constraint(expr= m.x62 - m.x314 - m.x320 == 0) m.c216 =", "m.c104 = Constraint(expr= m.x233 + 4.45628648004517*m.b599 <= 4.45628648004517) m.c105 =", "Constraint(expr= m.x343 + 1.26558121681553*m.b619 <= 1.26558121681553) m.c284 = Constraint(expr= -", "m.x353 == 0) m.c324 = Constraint(expr= m.x75 - m.x351 -", "counts # x b i s1s s2s sc si #", "= Constraint(expr= m.x25 - m.x28 - m.x31 - m.x34 ==", "m.c1001 = Constraint(expr= 7*m.b764 + m.x854 == 0) m.c1002 =", "<= 0) m.c96 = Constraint(expr= m.x219 - 40*m.b600 <= 0)", "0) m.c789 = Constraint(expr=(m.x567/(0.001 + 0.999*m.b672) - 1.25*log(1 + m.x543/(0.001", "m.b611 >= 0) m.c1413 = Constraint(expr= m.b603 - m.b612 >=", "- 1.25*log(1 + m.x286/(0.001 + 0.999*m.b613)))*(0.001 + 0.999* m.b613) <=", "0) m.c1035 = Constraint(expr= m.b617 - m.b619 <= 0) m.c1036", "= Constraint(expr= 2*m.b750 + m.x840 == 0) m.c988 = Constraint(expr=", "+ m.x853 == 0) m.c1001 = Constraint(expr= 7*m.b764 + m.x854", "Constraint(expr= - m.b677 - m.b678 + m.b679 - m.b769 <=", "== 0) m.c88 = Constraint(expr= m.x235 == 0) m.c89 =", "Var(within=Binary,bounds=(0,1),initialize=0) m.b659 = Var(within=Binary,bounds=(0,1),initialize=0) m.b660 = Var(within=Binary,bounds=(0,1),initialize=0) m.b661 = Var(within=Binary,bounds=(0,1),initialize=0)", "0) m.c1310 = Constraint(expr= m.b623 - m.b713 <= 0) m.c1311", "m.b619 >= 0) m.c1421 = Constraint(expr= m.b608 - m.b620 >=", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x818 = Var(within=Reals,bounds=(None,None),initialize=0) m.x819 = Var(within=Reals,bounds=(None,None),initialize=0) m.x820 =", "1.18887736200171*m.b656 <= 0) m.c654 = Constraint(expr= m.x471 - 1.18887736200171*m.b657 <=", "Constraint(expr=(m.x434/(0.001 + 0.999*m.b638) - 0.65*log(1 + m.x380/(0.001 + 0.999*m.b638)))*(0.001 +", "m.b627 = Var(within=Binary,bounds=(0,1),initialize=0) m.b628 = Var(within=Binary,bounds=(0,1),initialize=0) m.b629 = Var(within=Binary,bounds=(0,1),initialize=0) m.b630", "Constraint(expr= 
m.x333 == 0) m.c238 = Constraint(expr= m.x334 == 0)", "m.c654 = Constraint(expr= m.x471 - 1.18887736200171*m.b657 <= 0) m.c655 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x564 = Var(within=Reals,bounds=(0,None),initialize=0) m.x565 = Var(within=Reals,bounds=(0,None),initialize=0) m.x566 =", "m.x528 - 0.994083415506506*m.b678 <= 0) m.c859 = Constraint(expr= m.x529 -", "0) m.c214 = Constraint(expr= m.x49 - m.x286 - m.x289 ==", "Constraint(expr= m.x417 - 20*m.b630 <= 0) m.c403 = Constraint(expr= m.x418", "m.b696 = Var(within=Binary,bounds=(0,1),initialize=0) m.b697 = Var(within=Binary,bounds=(0,1),initialize=0) m.b698 = Var(within=Binary,bounds=(0,1),initialize=0) m.b699", "Var(within=Reals,bounds=(0,None),initialize=0) m.x557 = Var(within=Reals,bounds=(0,None),initialize=0) m.x558 = Var(within=Reals,bounds=(0,None),initialize=0) m.x559 = Var(within=Reals,bounds=(0,None),initialize=0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x370 = Var(within=Reals,bounds=(0,None),initialize=0) m.x371 = Var(within=Reals,bounds=(0,None),initialize=0) m.x372 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= m.x220 - 40*m.b601 <= 0) m.c98 = Constraint(expr=", "+ m.b606 - m.b696 <= 0) m.c1294 = Constraint(expr= -", "<= 0) m.c301 = Constraint(expr= m.x298 - 15*m.b622 <= 0)", "+ 0.999*m.b678) - log(1 + m.x528/(0.001 + 0.999*m.b678)))*(0.001 + 0.999*m.b678)", "0) m.c177 = Constraint(expr= m.x33 - m.x249 - m.x252 ==", "m.c229 = Constraint(expr= m.x322 + 1.83548069293539*m.b613 <= 1.83548069293539) m.c230 =", "m.x63 - m.x318 - m.x324 == 0) m.c382 = Constraint(expr=", "= Constraint(expr= m.b762 + m.b763 <= 1) m.c1259 = Constraint(expr=", "m.x480 + 1.18887736200171*m.b660 <= 1.18887736200171) m.c685 = Constraint(expr= m.x481 +", "= Constraint(expr= m.x113 - m.x422 - m.x425 == 0) m.c420", "= Constraint(expr=(m.x458/(0.001 + 0.999*m.b650) - 0.8*log(1 + m.x404/(0.001 + 0.999*m.b650)))*(0.001", "Var(within=Reals,bounds=(0,None),initialize=0) m.x50 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x51 = Var(within=Reals,bounds=(0,None),initialize=0) m.x52 = Var(within=Reals,bounds=(0,None),initialize=0)", "== 0) m.c183 = Constraint(expr= m.x45 - m.x279 - m.x282", "Var(within=Reals,bounds=(0,None),initialize=0) m.x416 = Var(within=Reals,bounds=(0,None),initialize=0) m.x417 = Var(within=Reals,bounds=(0,None),initialize=0) m.x418 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x478 - 1.18887736200171*m.b661 <= 0) m.c683 = Constraint(expr= m.x479", "0) m.c239 = Constraint(expr= m.x50 - m.x290 - m.x293 ==", "m.b677 = Var(within=Binary,bounds=(0,1),initialize=0) m.b678 = Var(within=Binary,bounds=(0,1),initialize=0) m.b679 = Var(within=Binary,bounds=(0,1),initialize=0) m.b680", "m.c790 = Constraint(expr=(m.x568/(0.001 + 0.999*m.b673) - 1.25*log(1 + m.x544/(0.001 +", "m.x796 = Var(within=Reals,bounds=(None,None),initialize=0) m.x797 = Var(within=Reals,bounds=(None,None),initialize=0) m.x798 = Var(within=Reals,bounds=(None,None),initialize=0) m.x799", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x810 = Var(within=Reals,bounds=(None,None),initialize=0) m.x811 = Var(within=Reals,bounds=(None,None),initialize=0) m.x812 =", "m.x165 = Var(within=Reals,bounds=(0,None),initialize=0) m.x166 = Var(within=Reals,bounds=(0,None),initialize=0) m.x167 = Var(within=Reals,bounds=(0,None),initialize=0) m.x168", "Var(within=Reals,bounds=(0,None),initialize=0) m.x238 = Var(within=Reals,bounds=(0,None),initialize=0) m.x239 = Var(within=Reals,bounds=(0,None),initialize=0) m.x240 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= - 0.9*m.x555 + m.x585 == 0) m.c871 =", "Var(within=Binary,bounds=(0,1),initialize=0) m.b598 = Var(within=Binary,bounds=(0,1),initialize=0) m.b599 = Var(within=Binary,bounds=(0,1),initialize=0) m.b600 = Var(within=Binary,bounds=(0,1),initialize=0)", "<= 0) m.c435 = Constraint(expr=(m.x429/(0.001 + 0.999*m.b636) - 0.7*log(1 +", "m.b620 >= 0) m.c1422 = 
Constraint(expr= m.b609 - m.b621 >=", "m.b614 - m.b615 <= 0) m.c1032 = Constraint(expr= m.b614 -", "== 0) m.c1005 = Constraint(expr= 8*m.b768 + m.x858 == 0)", "+ m.b604 - m.b694 <= 0) m.c1292 = Constraint(expr= m.b605", "= Constraint(expr= m.x498 == 0) m.c697 = Constraint(expr= m.x499 ==", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x358 = Var(within=Reals,bounds=(0,None),initialize=0) m.x359 = Var(within=Reals,bounds=(0,None),initialize=0) m.x360 =", "- m.b606 <= 0) m.c1023 = Constraint(expr= m.b605 - m.b607", "15) m.c915 = Constraint(expr= m.x564 + 15*m.b684 <= 15) m.c916", "== 0) m.c910 = Constraint(expr= m.x211 - m.x592 - m.x595", "= Constraint(expr= m.x225 - 3.71357206670431*m.b597 <= 0) m.c76 = Constraint(expr=", "- log(1 + m.x331/(0.001 + 0.999*m.b634)))*(0.001 + 0.999*m.b634) <= 0)", "<= 1) m.c1143 = Constraint(expr= m.b704 + m.b706 <= 1)", "m.x295 == 0) m.c242 = Constraint(expr= m.x65 - m.x326 -", "Constraint(expr= - m.b620 + m.b638 >= 0) m.c1395 = Constraint(expr=", "0) m.c999 = Constraint(expr= 8*m.b762 + m.x852 == 0) m.c1000", "0.999*m.b634) <= 0) m.c410 = Constraint(expr= m.x335 == 0) m.c411", "Constraint(expr= - m.b659 + m.b660 - m.b750 <= 0) m.c1348", "- m.b626 >= 0) m.c1428 = Constraint(expr= m.b609 - m.b627", "m.c166 = Constraint(expr= - 0.5*m.x256 + m.x280 == 0) m.c167", "15*m.b623 <= 15) m.c330 = Constraint(expr= m.x306 + 15*m.b624 <=", "m.x33 = Var(within=Reals,bounds=(0,None),initialize=0) m.x34 = Var(within=Reals,bounds=(0,None),initialize=0) m.x35 = Var(within=Reals,bounds=(0,30),initialize=0) m.x36", "<= 0) m.c918 = Constraint(expr= m.x591 - 9*m.b684 <= 0)", "<= 1) m.c1201 = Constraint(expr= m.b734 + m.b735 <= 1)", "- 2.30162356062425*m.b640 <= 0) m.c500 = Constraint(expr= m.x437 + 2.30162356062425*m.b638", "Var(within=Reals,bounds=(None,None),initialize=0) m.x836 = Var(within=Reals,bounds=(None,None),initialize=0) m.x837 = Var(within=Reals,bounds=(None,None),initialize=0) m.x838 = Var(within=Reals,bounds=(None,None),initialize=0)", 
"Constraint(expr= - m.b671 - m.b672 + m.b673 - m.b763 <=", "= Constraint(expr= m.x510 == 0) m.c754 = Constraint(expr= m.x511 ==", "m.b614 = Var(within=Binary,bounds=(0,1),initialize=0) m.b615 = Var(within=Binary,bounds=(0,1),initialize=0) m.b616 = Var(within=Binary,bounds=(0,1),initialize=0) m.b617", "<= 0) m.c1063 = Constraint(expr= m.b645 - m.b646 <= 0)", "0) m.c383 = Constraint(expr= m.x86 - m.x374 - m.x377 ==", "m.x317 - 1.83548069293539*m.b629 <= 0) m.c390 = Constraint(expr= m.x318 -", "1.26558121681553*m.b636 <= 0) m.c451 = Constraint(expr= m.x364 - 1.26558121681553*m.b637 <=", "Var(within=Reals,bounds=(None,None),initialize=0) m.x857 = Var(within=Reals,bounds=(None,None),initialize=0) m.x858 = Var(within=Reals,bounds=(None,None),initialize=0) m.x859 = Var(within=Reals,bounds=(None,None),initialize=0)", "4.45628648004517*m.b602 <= 0) m.c123 = Constraint(expr= m.x237 - 4.45628648004517*m.b603 <=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x400 = Var(within=Reals,bounds=(0,None),initialize=0) m.x401 = Var(within=Reals,bounds=(0,None),initialize=0) m.x402 = Var(within=Reals,bounds=(0,None),initialize=0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x488 = Var(within=Reals,bounds=(0,None),initialize=0) m.x489 = Var(within=Reals,bounds=(0,None),initialize=0) m.x490 = Var(within=Reals,bounds=(0,None),initialize=0)", "0.999*m.b656) <= 0) m.c639 = Constraint(expr=(m.x483/(0.001 + 0.999*m.b657) - log(1", "m.b635 - m.b725 <= 0) m.c1323 = Constraint(expr= - m.b635", "m.c1156 = Constraint(expr= m.b711 + m.b712 <= 1) m.c1157 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x314 = Var(within=Reals,bounds=(0,None),initialize=0) m.x315 = Var(within=Reals,bounds=(0,None),initialize=0) m.x316 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x203 = Var(within=Reals,bounds=(0,None),initialize=0) m.x204 = Var(within=Reals,bounds=(0,None),initialize=0) m.x205 =", "m.x468 == 0) m.c619 = Constraint(expr= m.x469 == 0) m.c620", "- m.b668 + m.b669 - m.b759 
<= 0) m.c1357 =", "== 0) m.c595 = Constraint(expr= m.x106 - m.x406 - m.x409", "<= 0) m.c1319 = Constraint(expr= m.b632 - m.b722 <= 0)", "m.x177 - m.x525 - m.x531 == 0) m.c733 = Constraint(expr=", "m.b694 <= 0) m.c1292 = Constraint(expr= m.b605 - m.b695 <=", "Reformulation has removed 1 variable and 1 equation from pyomo.environ", "Constraint(expr= m.x5 - m.x212 - m.x215 == 0) m.c63 =", "== 0) m.c652 = Constraint(expr= m.x148 - m.x484 - m.x487", "Constraint(expr=(m.x566/(0.001 + 0.999*m.b671) - 1.25*log(1 + m.x542/(0.001 + 0.999*m.b671)))*(0.001 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x500 = Var(within=Reals,bounds=(0,None),initialize=0) m.x501 = Var(within=Reals,bounds=(0,None),initialize=0) m.x502 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x54 = Var(within=Reals,bounds=(0,None),initialize=0) m.x55 = Var(within=Reals,bounds=(0,None),initialize=0) m.x56 =", "m.x342 + 1.26558121681553*m.b618 <= 1.26558121681553) m.c283 = Constraint(expr= m.x343 +", "m.c299 = Constraint(expr= m.x296 - 15*m.b620 <= 0) m.c300 =", "- m.x370 - m.x373 == 0) m.c479 = Constraint(expr= m.x92", "m.c1441 = Constraint(expr= m.b619 - m.b640 >= 0) m.c1442 =", "4*m.b754 - 2*m.b755 - 3*m.b756 - 8*m.b757 - 10*m.b758 -", "<= 4.45628648004517) m.c191 = Constraint(expr= m.x254 - 30*m.b608 <= 0)", "Constraint(expr= m.b746 + m.b747 <= 1) m.c1224 = Constraint(expr= m.b746", "m.c518 = Constraint(expr= m.x386 - 9*m.b641 <= 0) m.c519 =", "Constraint(expr= m.b608 - m.b623 >= 0) m.c1425 = Constraint(expr= m.b609", "1.83548069293539*m.b611 <= 1.83548069293539) m.c228 = Constraint(expr= m.x321 + 1.83548069293539*m.b612 <=", "0.999* m.b626) <= 0) m.c339 = Constraint(expr=(m.x357/(0.001 + 0.999*m.b627) -", "m.b632 = Var(within=Binary,bounds=(0,1),initialize=0) m.b633 = Var(within=Binary,bounds=(0,1),initialize=0) m.b634 = Var(within=Binary,bounds=(0,1),initialize=0) m.b635", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x96 = Var(within=Reals,bounds=(0,None),initialize=0) m.x97 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x98 =", "Constraint(expr= m.x259 == 0) m.c173 = Constraint(expr= m.x281 == 0)", "m.x845 = Var(within=Reals,bounds=(None,None),initialize=0) m.x846 = Var(within=Reals,bounds=(None,None),initialize=0) m.x847 = Var(within=Reals,bounds=(None,None),initialize=0) m.x848", "0.994083415506506*m.b678 <= 0) m.c859 = Constraint(expr= m.x529 - 0.994083415506506*m.b679 <=", "0) m.c871 = Constraint(expr= - 0.9*m.x556 + m.x586 == 0)", "m.x374 = Var(within=Reals,bounds=(0,None),initialize=0) m.x375 = Var(within=Reals,bounds=(0,None),initialize=0) m.x376 = Var(within=Reals,bounds=(0,None),initialize=0) m.x377", "Constraint(expr= m.x413 == 0) m.c615 = Constraint(expr= m.x414 == 0)", "m.c810 = Constraint(expr= m.x567 - 0.666992981045719*m.b672 <= 0) m.c811 =", "= Constraint(expr= 4*m.b775 + m.x865 == 0) m.c1013 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x29 = Var(within=Reals,bounds=(0,None),initialize=0) m.x30 = Var(within=Reals,bounds=(0,None),initialize=0) m.x31 = Var(within=Reals,bounds=(0,None),initialize=0)", "3.34221486003388*m.b611 <= 3.34221486003388) m.c222 = Constraint(expr= m.x288 + 3.34221486003388*m.b612 <=", "m.x402 = Var(within=Reals,bounds=(0,None),initialize=0) m.x403 = Var(within=Reals,bounds=(0,None),initialize=0) m.x404 = Var(within=Reals,bounds=(0,None),initialize=0) m.x405", "m.b656 - m.b658 <= 0) m.c1075 = Constraint(expr= m.b657 -", "Constraint(expr= - m.b647 - m.b648 + m.b649 - m.b739 <=", "m.b651 - m.b652 <= 0) m.c1070 = Constraint(expr= m.b653 -", "m.c711 = Constraint(expr= m.x498 + 0.940066550763924*m.b663 <= 0.940066550763924) m.c712 =", "m.b715 <= 0) m.c1313 = Constraint(expr= m.b626 - m.b716 <=", "m.x70 - m.x340 - m.x343 == 0) m.c272 = Constraint(expr=", "<= 1.26558121681553) m.c453 = Constraint(expr= m.x366 + 1.26558121681553*m.b636 <= 1.26558121681553)", "+ m.b633 >= 0) m.c1384 = Constraint(expr= - m.b616 +", "= Constraint(expr= m.b767 + m.b769 <= 1) m.c1270 = 
Constraint(expr=", "+ 33.5*m.b640 <= 33.5) m.c497 = Constraint(expr= m.x434 - 2.30162356062425*m.b638", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x795 = Var(within=Reals,bounds=(None,None),initialize=0) m.x796 = Var(within=Reals,bounds=(None,None),initialize=0) m.x797 =", "<= 0) m.c914 = Constraint(expr= m.x563 + 15*m.b683 <= 15)", "m.c750 = Constraint(expr= - 0.5*m.x513 + m.x537 == 0) m.c751", "- 4*m.b735 - 3*m.b736 - 5*m.b737 - 7*m.b738 - 6*m.b739", "Constraint(expr= m.x216 == 0) m.c58 = Constraint(expr= m.x217 == 0)", "- m.b643 <= 0) m.c1060 = Constraint(expr= m.b642 - m.b643", "- 2*m.b770 - m.b771 - 3*m.b772 - 8*m.b773 - 3*m.b774", "- m.x405 - m.x408 == 0) m.c595 = Constraint(expr= m.x106", "m.c845 = Constraint(expr= m.x533 == 0) m.c846 = Constraint(expr= m.x534", "- m.x310 - m.x313 == 0) m.c350 = Constraint(expr= m.x77", "+ m.b678 >= 0) m.c1465 = Constraint(expr= - m.b667 +", "Constraint(expr= m.x122 - m.x440 - m.x443 == 0) m.c516 =", "4*m.b761 - 8*m.b762 - 7*m.b763 - 7*m.b764 - 3*m.b765 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x41 = Var(within=Reals,bounds=(0,None),initialize=0) m.x42 = Var(within=Reals,bounds=(0,None),initialize=0) m.x43 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b680 = Var(within=Binary,bounds=(0,1),initialize=0) m.b681 = Var(within=Binary,bounds=(0,1),initialize=0) m.b682 =", "m.c1144 = Constraint(expr= m.b705 + m.b706 <= 1) m.c1145 =", "= Constraint(expr= m.x7 - m.x214 - m.x217 == 0) m.c65", "m.x49 = Var(within=Reals,bounds=(0,None),initialize=0) m.x50 = Var(within=Reals,bounds=(0,None),initialize=0) m.x51 = Var(within=Reals,bounds=(0,None),initialize=0) m.x52", "m.x792 == 0) m.c940 = Constraint(expr= 4*m.b703 + m.x793 ==", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x851 = Var(within=Reals,bounds=(None,None),initialize=0) m.x852 = Var(within=Reals,bounds=(None,None),initialize=0) m.x853 =", "m.x340 - m.x343 == 0) m.c272 = Constraint(expr= 
m.x269 -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x258 = Var(within=Reals,bounds=(0,None),initialize=0) m.x259 = Var(within=Reals,bounds=(0,None),initialize=0) m.x260 =", "m.c261 = Constraint(expr= m.x276 == 0) m.c262 = Constraint(expr= m.x277", "- m.x565 == 0) m.c908 = Constraint(expr= m.x209 - m.x590", "Constraint(expr= - m.b612 + m.b630 >= 0) m.c1381 = Constraint(expr=", "<= 1.32154609891348) m.c428 = Constraint(expr= m.x422 - 0.842233385663186*m.b632 <= 0)", "== 0) m.c973 = Constraint(expr= 3*m.b736 + m.x826 == 0)", "- m.b645 + m.b646 - m.b736 <= 0) m.c1334 =", "m.x213 - 40*m.b597 <= 0) m.c70 = Constraint(expr= m.x214 -", "Constraint(expr= m.x502 - 0.940066550763924*m.b667 <= 0) m.c737 = Constraint(expr= m.x503", "== 0) m.c142 = Constraint(expr= m.x274 == 0) m.c143 =", "Constraint(expr= m.b761 + m.b763 <= 1) m.c1258 = Constraint(expr= m.b762", "- log(1 + m.x271/(0.001 + 0.999*m.b619)))*(0.001 + 0.999*m.b619) <= 0)", "m.c860 = Constraint(expr= m.x533 + 0.994083415506506*m.b677 <= 0.994083415506506) m.c861 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x475 = Var(within=Reals,bounds=(0,None),initialize=0) m.x476 = Var(within=Reals,bounds=(0,None),initialize=0) m.x477 =", ">= 0) m.c1417 = Constraint(expr= m.b604 - m.b616 >= 0)", "40*m.b601 <= 40) m.c101 = Constraint(expr= m.x230 - 4.45628648004517*m.b599 <=", "9*m.b683 <= 0) m.c918 = Constraint(expr= m.x591 - 9*m.b684 <=", "Constraint(expr= 8*m.b741 + m.x831 == 0) m.c979 = Constraint(expr= 4*m.b742", "= Constraint(expr= - m.b602 - m.b603 + m.b604 - m.b694", "Constraint(expr= 3*m.b734 + m.x824 == 0) m.c972 = Constraint(expr= 4*m.b735", "m.x445 == 0) m.c512 = Constraint(expr= m.x95 - m.x386 -", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b757 = Var(within=Binary,bounds=(0,1),initialize=0) m.b758 = Var(within=Binary,bounds=(0,1),initialize=0) m.b759 =", "Constraint(expr= - m.b617 + m.b635 + m.b638 >= 0) m.c1389", "Var(within=Reals,bounds=(0,None),initialize=0) m.x444 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x445 = Var(within=Reals,bounds=(0,None),initialize=0) m.x446 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c1133 = Constraint(expr= m.b701 + m.b702 <= 1) m.c1134 =", "+ 0.999*m.b656) - log(1 + m.x470/(0.001 + 0.999*m.b656)))*(0.001 + 0.999*m.b656)", "+ m.b696 <= 1) m.c1124 = Constraint(expr= m.b696 + m.b697", "- m.x346 - m.x349 == 0) m.c299 = Constraint(expr= m.x296", "m.c1308 = Constraint(expr= - m.b620 + m.b621 - m.b711 <=", "1.83548069293539*m.b630 <= 0) m.c391 = Constraint(expr= m.x319 - 1.83548069293539*m.b631 <=", "0) m.c910 = Constraint(expr= m.x211 - m.x592 - m.x595 ==", "Constraint(expr= 7*m.b709 + m.x799 == 0) m.c947 = Constraint(expr= 2*m.b710", "== 0) m.c179 = Constraint(expr= m.x35 - m.x254 - m.x257", "0.999*m.b598) - log(1 + m.x214/(0.001 + 0.999*m.b598)))*(0.001 + 0.999*m.b598) <=", "- 15*m.b622 <= 0) m.c302 = Constraint(expr= m.x299 + 15*m.b620", "+ 13.5*m.b680 <= 13.5) m.c894 = Constraint(expr= m.x588 + 13.5*m.b681", "integer sos1 sos2 scont sint # 865 685 180 0", "- m.x526 - m.x532 == 0) m.c734 = Constraint(expr= m.x500", "m.b598 <= 0) m.c1016 = Constraint(expr= m.b599 - m.b600 <=", "Constraint(expr= m.b638 - m.b640 <= 0) m.c1057 = Constraint(expr= m.b639", "m.x245 == 0) m.c138 = Constraint(expr= m.x246 == 0) m.c139", "Constraint(expr= m.x359 + 3.04984759446376*m.b626 <= 3.04984759446376) m.c363 = Constraint(expr= m.x360", "= Constraint(expr= m.b608 - m.b609 <= 0) m.c1026 = Constraint(expr=", "- m.b611 + m.b612 - m.b702 <= 0) m.c1300 =", "m.b619 + m.b637 + m.b640 >= 0) m.c1391 = Constraint(expr=", "m.c954 = Constraint(expr= 9*m.b717 + m.x807 == 0) m.c955 =", "m.b771 = Var(within=Binary,bounds=(0,1),initialize=0) m.b772 = Var(within=Binary,bounds=(0,1),initialize=0) m.b773 = Var(within=Binary,bounds=(0,1),initialize=0) m.b774", "= Constraint(expr= m.x590 - 9*m.b683 <= 0) m.c918 = Constraint(expr=", "m.x260 = Var(within=Reals,bounds=(0,None),initialize=0) m.x261 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x262 = Var(within=Reals,bounds=(0,None),initialize=0) m.x263", "Constraint(expr= m.x581 + 0.690184503917672*m.b677 <= 0.690184503917672) m.c867 = Constraint(expr= m.x582", "- m.b619 <= 0) m.c1037 = Constraint(expr= m.b620 - m.b621", "m.b633 <= 0) m.c1050 = Constraint(expr= m.b632 - m.b634 <=", "= Constraint(expr= m.x2 - m.x5 - m.x8 == 0) m.c3", "Constraint(expr= m.x509 == 0) m.c753 = Constraint(expr= m.x510 == 0)", "Var(within=Binary,bounds=(0,1),initialize=0) m.b769 = Var(within=Binary,bounds=(0,1),initialize=0) m.b770 = Var(within=Binary,bounds=(0,1),initialize=0) m.b771 = Var(within=Binary,bounds=(0,1),initialize=0)", "== 0) m.c751 = Constraint(expr= - 0.5*m.x514 + m.x538 ==", "m.x486 = Var(within=Reals,bounds=(0,None),initialize=0) m.x487 = Var(within=Reals,bounds=(0,None),initialize=0) m.x488 = Var(within=Reals,bounds=(0,None),initialize=0) m.x489", "m.x330/(0.001 + 0.999*m.b633)))*(0.001 + 0.999*m.b633) <= 0) m.c409 = Constraint(expr=(m.x424/(0.001", "0) m.c513 = Constraint(expr= m.x96 - m.x387 - m.x390 ==", "m.x325 = Var(within=Reals,bounds=(0,None),initialize=0) m.x326 = Var(within=Reals,bounds=(0,None),initialize=0) m.x327 = Var(within=Reals,bounds=(0,None),initialize=0) m.x328", "+ m.x821 == 0) m.c969 = Constraint(expr= 5*m.b732 + m.x822", "m.b702 <= 1) m.c1136 = Constraint(expr= m.b702 + m.b703 <=", "1) m.c1187 = Constraint(expr= m.b728 + m.b729 <= 1) m.c1188", "0) m.c204 = Constraint(expr=(m.x315/(0.001 + 0.999*m.b612) - 1.25*log(1 + m.x285/(0.001", "= Constraint(expr=(m.x356/(0.001 + 0.999*m.b626) - 1.1*log(1 + m.x308/(0.001 + 0.999*m.b626)))*(0.001", "Constraint(expr= m.x373 + 1.26558121681553*m.b640 <= 1.26558121681553) m.c491 = Constraint(expr= m.x380", "<= 0) m.c1014 = Constraint(expr= m.b596 - m.b598 <= 0)", "Var(within=Binary,bounds=(0,1),initialize=0) m.b721 = Var(within=Binary,bounds=(0,1),initialize=0) m.b722 = Var(within=Binary,bounds=(0,1),initialize=0) m.b723 = 
Var(within=Binary,bounds=(0,1),initialize=0)", "m.c657 = Constraint(expr= m.x474 + 1.18887736200171*m.b657 <= 1.18887736200171) m.c658 =", "m.x460 = Var(within=Reals,bounds=(0,None),initialize=0) m.x461 = Var(within=Reals,bounds=(0,None),initialize=0) m.x462 = Var(within=Reals,bounds=(0,None),initialize=0) m.x463", "m.x510 == 0) m.c763 = Constraint(expr= m.x169 - m.x508 -", "m.x323 + 1.83548069293539*m.b629 <= 1.83548069293539) m.c393 = Constraint(expr= m.x324 +", "Var(within=Reals,bounds=(0,None),initialize=0) m.x418 = Var(within=Reals,bounds=(0,None),initialize=0) m.x419 = Var(within=Reals,bounds=(0,None),initialize=0) m.x420 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b686 + m.b688 <= 1) m.c1105 = Constraint(expr= m.b686 +", "+ m.x93 == 0) m.c25 = Constraint(expr= - m.x73 -", "m.x783 = Var(within=Reals,bounds=(None,None),initialize=0) m.x784 = Var(within=Reals,bounds=(None,None),initialize=0) m.x785 = Var(within=Reals,bounds=(None,None),initialize=0) m.x786", "1) m.c1153 = Constraint(expr= m.b710 + m.b711 <= 1) m.c1154", "0) m.c238 = Constraint(expr= m.x334 == 0) m.c239 = Constraint(expr=", "== 0) m.c375 = Constraint(expr= m.x378 == 0) m.c376 =", "- m.b708 <= 0) m.c1306 = Constraint(expr= - m.b617 -", "m.b743 - 4*m.b744 - m.b745 - 2*m.b746 - 5*m.b747 -", "m.c1208 = Constraint(expr= m.b738 + m.b739 <= 1) m.c1209 =", "0) m.c1078 = Constraint(expr= m.b660 - m.b661 <= 0) m.c1079", "m.b705 + m.b706 <= 1) m.c1143 = Constraint(expr= m.b704 +", "m.x69 = Var(within=Reals,bounds=(0,None),initialize=0) m.x70 = Var(within=Reals,bounds=(0,None),initialize=0) m.x71 = Var(within=Reals,bounds=(0,None),initialize=0) m.x72", "m.c576 = Constraint(expr= m.x402 + 3.04984759446376*m.b648 <= 3.04984759446376) m.c577 =", "m.c881 = Constraint(expr= m.x206 - m.x584 - m.x587 == 0)", "0) m.c1305 = Constraint(expr= - m.b617 + m.b618 - m.b708", "0) m.c952 = Constraint(expr= 4*m.b715 + m.x805 == 0) m.c953", "0) m.c182 = Constraint(expr= m.x44 - m.x278 - m.x281 ==", "m.c1401 = 
Constraint(expr= - m.b627 + m.b648 + m.b651 +", "Var(within=Reals,bounds=(None,None),initialize=0) m.x828 = Var(within=Reals,bounds=(None,None),initialize=0) m.x829 = Var(within=Reals,bounds=(None,None),initialize=0) m.x830 = Var(within=Reals,bounds=(None,None),initialize=0)", "m.c758 = Constraint(expr= m.x539 == 0) m.c759 = Constraint(expr= m.x540", "Constraint(expr= m.b683 - m.b685 <= 0) m.c1102 = Constraint(expr= m.b684", "== 0) m.c764 = Constraint(expr= m.x170 - m.x512 - m.x515", "0.78338879230327*m.b657 <= 0) m.c661 = Constraint(expr= m.x484 - 0.78338879230327*m.b658 <=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x574 = Var(within=Reals,bounds=(0,None),initialize=0) m.x575 = Var(within=Reals,bounds=(0,None),initialize=0) m.x576 =", "1.32154609891348*m.b615 <= 0) m.c253 = Constraint(expr= m.x328 - 1.32154609891348*m.b616 <=", "15*m.b621 <= 0) m.c301 = Constraint(expr= m.x298 - 15*m.b622 <=", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x825 = Var(within=Reals,bounds=(None,None),initialize=0) m.x826 = Var(within=Reals,bounds=(None,None),initialize=0) m.x827 =", "m.b769 <= 0) m.c1367 = Constraint(expr= m.b680 - m.b770 <=", "<= 9) m.c530 = Constraint(expr= - m.x392 + m.x446 ==", "<= 0) m.c668 = Constraint(expr= m.x479 == 0) m.c669 =", "0) m.c280 = Constraint(expr= m.x340 - 1.26558121681553*m.b619 <= 0) m.c281", "0) m.c1025 = Constraint(expr= m.b608 - m.b609 <= 0) m.c1026", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x252 = Var(within=Reals,bounds=(0,None),initialize=0) m.x253 = Var(within=Reals,bounds=(0,None),initialize=0) m.x254 =", "Constraint(expr= m.x423 - 0.842233385663186*m.b633 <= 0) m.c430 = Constraint(expr= m.x424", "= Constraint(expr= m.x557 == 0) m.c873 = Constraint(expr= m.x558 ==", "Var(within=Reals,bounds=(0,None),initialize=0) m.x469 = Var(within=Reals,bounds=(0,None),initialize=0) m.x470 = Var(within=Reals,bounds=(0,None),initialize=0) m.x471 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x160 == 0) m.c44 = 
Constraint(expr= m.x158 - m.x161 -", "<= 0) m.c258 = Constraint(expr=(m.x339/(0.001 + 0.999*m.b618) - log(1 +", "m.c316 = Constraint(expr= m.x307 == 0) m.c317 = Constraint(expr= m.x353", "== 0) m.c439 = Constraint(expr= m.x367 == 0) m.c440 =", "+ m.b612 + m.b615 >= 0) m.c1378 = Constraint(expr= -", "+ 0.999*m.b667) - 1.5*log(1 + m.x502/(0.001 + 0.999*m.b667)))*(0.001 + 0.999*", "m.x791 == 0) m.c939 = Constraint(expr= 7*m.b702 + m.x792 ==", "= Constraint(expr= m.x4 - m.x7 - m.x10 == 0) m.c5", "- 1.32154609891348*m.b634 <= 0) m.c425 = Constraint(expr= m.x335 + 1.32154609891348*m.b632", "m.x814 == 0) m.c962 = Constraint(expr= 2*m.b725 + m.x815 ==", "+ m.b742 <= 1) m.c1217 = Constraint(expr= m.b743 + m.b744", "0) m.c792 = Constraint(expr= m.x546 == 0) m.c793 = Constraint(expr=", "0.842233385663186*m.b633 <= 0.842233385663186) m.c433 = Constraint(expr= m.x427 + 0.842233385663186*m.b634 <=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x408 = Var(within=Reals,bounds=(0,None),initialize=0) m.x409 = Var(within=Reals,bounds=(0,None),initialize=0) m.x410 =", "= Constraint(expr= m.x449 == 0) m.c537 = Constraint(expr= m.x450 ==", "= Constraint(expr= m.x343 == 0) m.c266 = Constraint(expr= m.x41 -", "== 0) m.c16 = Constraint(expr= m.x40 - m.x49 - m.x52", "m.c450 = Constraint(expr= m.x363 - 1.26558121681553*m.b636 <= 0) m.c451 =", "15*m.x112 + 15*m.x113 + 20*m.x114 + 25*m.x115 + 10*m.x116 +", "Var(within=Reals,bounds=(0,None),initialize=0) m.x249 = Var(within=Reals,bounds=(0,None),initialize=0) m.x250 = Var(within=Reals,bounds=(0,None),initialize=0) m.x251 = Var(within=Reals,bounds=(0,None),initialize=0)", "== 0) m.c1010 = Constraint(expr= 8*m.b773 + m.x863 == 0)", "= Constraint(expr= 2*m.b712 + m.x802 == 0) m.c950 = Constraint(expr=", "+ 1.04900943706034*m.b649 <= 1.04900943706034) m.c584 = Constraint(expr=(m.x458/(0.001 + 0.999*m.b650) -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x526 = Var(within=Reals,bounds=(0,None),initialize=0) m.x527 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x528 = Var(within=Reals,bounds=(0,None),initialize=0)", "- 40*m.b601 <= 0) m.c98 = Constraint(expr= m.x221 + 40*m.b599", "= Constraint(expr= m.b599 - m.b600 <= 0) m.c1017 = Constraint(expr=", "9*m.b643 <= 9) m.c530 = Constraint(expr= - m.x392 + m.x446", "0) m.c763 = Constraint(expr= m.x169 - m.x508 - m.x511 ==", "= Constraint(expr= m.x407 == 0) m.c588 = Constraint(expr= m.x408 ==", "8*m.b768 - 6*m.b769 - 2*m.b770 - m.b771 - 3*m.b772 -", "- m.b627 + m.b648 + m.b651 + m.b654 >= 0)", "3*m.b722 + m.x812 == 0) m.c960 = Constraint(expr= m.b723 +", "+ 0.999* m.b660) <= 0) m.c667 = Constraint(expr=(m.x490/(0.001 + 0.999*m.b661)", "+ 20*m.b629 <= 20) m.c399 = Constraint(expr= m.x378 + 20*m.b630", "- m.b661 <= 0) m.c1079 = Constraint(expr= m.b662 - m.b663", "Constraint(expr= - m.b614 + m.b632 >= 0) m.c1383 = Constraint(expr=", "m.c863 = Constraint(expr= m.x578 - 0.690184503917672*m.b677 <= 0) m.c864 =", "8*m.b762 - 7*m.b763 - 7*m.b764 - 3*m.b765 - 9*m.b766 -", "1) m.c1263 = Constraint(expr= m.b764 + m.b766 <= 1) m.c1264", "0) m.c43 = Constraint(expr= m.x154 - m.x157 - m.x160 ==", "+ 0.842233385663186*m.b633 <= 0.842233385663186) m.c433 = Constraint(expr= m.x427 + 0.842233385663186*m.b634", "<= 0) m.c1331 = Constraint(expr= m.b644 - m.b734 <= 0)", "= Constraint(expr= m.x294 == 0) m.c235 = Constraint(expr= m.x295 ==", "m.b775 <= 1) m.c1283 = Constraint(expr= m.b596 - m.b686 <=", "m.x135 = Var(within=Reals,bounds=(0,None),initialize=0) m.x136 = Var(within=Reals,bounds=(0,None),initialize=0) m.x137 = Var(within=Reals,bounds=(0,None),initialize=0) m.x138", "m.c88 = Constraint(expr= m.x235 == 0) m.c89 = Constraint(expr= m.x8", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x406 = Var(within=Reals,bounds=(0,None),initialize=0) m.x407 = Var(within=Reals,bounds=(0,None),initialize=0) m.x408 =", "Constraint(expr= m.x402 == 0) m.c562 = Constraint(expr= m.x403 == 0)", "= Constraint(expr= m.b656 - m.b657 <= 0) m.c1074 = 
Constraint(expr=", "0.999*m.b665)))*(0.001 + 0.999* m.b665) <= 0) m.c720 = Constraint(expr=(m.x525/(0.001 +", "Constraint(expr= m.b725 + m.b727 <= 1) m.c1186 = Constraint(expr= m.b726", "m.x230 = Var(within=Reals,bounds=(0,None),initialize=0) m.x231 = Var(within=Reals,bounds=(0,None),initialize=0) m.x232 = Var(within=Reals,bounds=(0,None),initialize=0) m.x233", "0) m.c856 = Constraint(expr= m.x205 - m.x580 - m.x583 ==", "Var(within=Reals,bounds=(0,None),initialize=0) m.x454 = Var(within=Reals,bounds=(0,None),initialize=0) m.x455 = Var(within=Reals,bounds=(0,None),initialize=0) m.x456 = Var(within=Reals,bounds=(0,None),initialize=0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x390 = Var(within=Reals,bounds=(0,None),initialize=0) m.x391 = Var(within=Reals,bounds=(0,None),initialize=0) m.x392 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c967 = Constraint(expr= m.b730 + m.x820 == 0) m.c968", "= Constraint(expr= m.b767 + m.b768 <= 1) m.c1266 = Constraint(expr=", "m.b614 - m.b616 <= 0) m.c1033 = Constraint(expr= m.b615 -", "m.c172 = Constraint(expr= m.x259 == 0) m.c173 = Constraint(expr= m.x281", "0) m.c138 = Constraint(expr= m.x246 == 0) m.c139 = Constraint(expr=", "m.c1004 = Constraint(expr= 4*m.b767 + m.x857 == 0) m.c1005 =", "Constraint(expr= m.x180 - m.x537 - m.x540 == 0) m.c769 =", "= Constraint(expr= m.x127 - m.x448 - m.x451 == 0) m.c545", "m.b656 - m.b657 <= 0) m.c1074 = Constraint(expr= m.b656 -", "m.c655 = Constraint(expr= m.x472 - 1.18887736200171*m.b658 <= 0) m.c656 =", "m.x300 = Var(within=Reals,bounds=(0,None),initialize=0) m.x301 = Var(within=Reals,bounds=(0,None),initialize=0) m.x302 = Var(within=Reals,bounds=(0,None),initialize=0) m.x303", "m.c1175 = Constraint(expr= m.b722 + m.b723 <= 1) m.c1176 =", "m.x106 - m.x109 == 0) m.c32 = Constraint(expr= m.x134 -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x17 = Var(within=Reals,bounds=(0,None),initialize=0) m.x18 = Var(within=Reals,bounds=(0,None),initialize=0) m.x19 =", "m.x29 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x30 = Var(within=Reals,bounds=(0,None),initialize=0) m.x31 = Var(within=Reals,bounds=(0,None),initialize=0) m.x32", "Var(within=Reals,bounds=(0,None),initialize=0) m.x212 = Var(within=Reals,bounds=(0,None),initialize=0) m.x213 = Var(within=Reals,bounds=(0,None),initialize=0) m.x214 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b633 = Var(within=Binary,bounds=(0,1),initialize=0) m.b634 = Var(within=Binary,bounds=(0,1),initialize=0) m.b635 = Var(within=Binary,bounds=(0,1),initialize=0) m.b636", "m.x12 = Var(within=Reals,bounds=(0,None),initialize=0) m.x13 = Var(within=Reals,bounds=(0,None),initialize=0) m.x14 = Var(within=Reals,bounds=(0,None),initialize=0) m.x15", "= Constraint(expr= m.x246 == 0) m.c139 = Constraint(expr= m.x247 ==", "= Constraint(expr= m.b773 + m.b775 <= 1) m.c1279 = Constraint(expr=", "m.c75 = Constraint(expr= m.x225 - 3.71357206670431*m.b597 <= 0) m.c76 =", "2*m.b770 + m.x860 == 0) m.c1008 = Constraint(expr= m.b771 +", "9*m.b766 + m.x856 == 0) m.c1004 = Constraint(expr= 4*m.b767 +", "m.b665 - m.b666 + m.b667 - m.b757 <= 0) m.c1355", "<= 0) m.c607 = Constraint(expr= m.x460 - 1.11894339953103*m.b652 <= 0)", "1.83548069293539) m.c394 = Constraint(expr= m.x325 + 1.83548069293539*m.b631 <= 1.83548069293539) m.c395", "Constraint(expr= m.b734 + m.b735 <= 1) m.c1200 = Constraint(expr= m.b734", "- 3.04984759446376*m.b628 <= 0) m.c362 = Constraint(expr= m.x359 + 3.04984759446376*m.b626", "m.c61 = Constraint(expr= m.x229 == 0) m.c62 = Constraint(expr= m.x5", "m.x504 == 0) m.c730 = Constraint(expr= m.x166 - m.x502 -", "m.b737 + m.b738 <= 1) m.c1208 = Constraint(expr= m.b738 +", "2*m.x35 - m.x36 - 2*m.x37 - 10*m.x86 - 5*m.x87 -", "Var(within=Binary,bounds=(0,1),initialize=0) m.b676 = Var(within=Binary,bounds=(0,1),initialize=0) m.b677 = Var(within=Binary,bounds=(0,1),initialize=0) m.b678 = Var(within=Binary,bounds=(0,1),initialize=0)", "Constraint(expr= m.x201 - m.x573 - m.x576 == 0) m.c829 =", "m.b645 
>= 0) m.c1447 = Constraint(expr= m.b625 - m.b646 >=", "+ m.b768 <= 1) m.c1268 = Constraint(expr= m.b768 + m.b769", "0.999* m.b613) <= 0) m.c206 = Constraint(expr= m.x287 == 0)", "0) m.c1346 = Constraint(expr= m.b659 - m.b749 <= 0) m.c1347", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x241 = Var(within=Reals,bounds=(0,None),initialize=0) m.x242 = Var(within=Reals,bounds=(0,None),initialize=0) m.x243 =", "- 0.75*log(1 + m.x399/(0.001 + 0.999*m.b648)))*(0.001 + 0.999* m.b648) <=", "- m.x448 - m.x451 == 0) m.c545 = Constraint(expr= m.x392", "0.999*m.b654)))*(0.001 + 0.999* m.b654) <= 0) m.c613 = Constraint(expr=(m.x466/(0.001 +", "0) m.c881 = Constraint(expr= m.x206 - m.x584 - m.x587 ==", "m.c1230 = Constraint(expr= m.b749 + m.b751 <= 1) m.c1231 =", "m.x94 == 0) m.c26 = Constraint(expr= m.x74 - m.x95 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x544 = Var(within=Reals,bounds=(0,None),initialize=0) m.x545 = Var(within=Reals,bounds=(0,None),initialize=0) m.x546 = Var(within=Reals,bounds=(0,None),initialize=0)", "3*m.b765 + m.x855 == 0) m.c1003 = Constraint(expr= 9*m.b766 +", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b700 = Var(within=Binary,bounds=(0,1),initialize=0) m.b701 = Var(within=Binary,bounds=(0,1),initialize=0) m.b702 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x562 = Var(within=Reals,bounds=(0,None),initialize=0) m.x563 = Var(within=Reals,bounds=(0,None),initialize=0) m.x564 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x518 = Var(within=Reals,bounds=(0,None),initialize=0) m.x519 = Var(within=Reals,bounds=(0,None),initialize=0) m.x520 =", "+ m.b654 - m.b744 <= 0) m.c1342 = Constraint(expr= -", "m.x20 - m.x23 == 0) m.c9 = Constraint(expr= m.x18 -", "m.x777 = Var(within=Reals,bounds=(None,None),initialize=0) m.x778 = Var(within=Reals,bounds=(None,None),initialize=0) m.x779 = Var(within=Reals,bounds=(None,None),initialize=0) m.x780", "<= 0) m.c843 = Constraint(expr=(m.x579/(0.001 + 0.999*m.b678) - log(1 +", 
"0.999*m.b617) - log(1 + m.x269/(0.001 + 0.999*m.b617)))*(0.001 + 0.999*m.b617) <=", "<= 15) m.c304 = Constraint(expr= m.x301 + 15*m.b622 <= 15)", "1) m.c1149 = Constraint(expr= m.b707 + m.b709 <= 1) m.c1150", "Constraint(expr= m.x317 - 1.83548069293539*m.b629 <= 0) m.c390 = Constraint(expr= m.x318", "<= 0) m.c450 = Constraint(expr= m.x363 - 1.26558121681553*m.b636 <= 0)", "m.c1322 = Constraint(expr= m.b635 - m.b725 <= 0) m.c1323 =", "Constraint(expr=(m.x580/(0.001 + 0.999*m.b679) - log(1 + m.x529/(0.001 + 0.999*m.b679)))*(0.001 +", "- 0.75*m.x495 + m.x519 == 0) m.c694 = Constraint(expr= -", "== 0) m.c1012 = Constraint(expr= 4*m.b775 + m.x865 == 0)", "== 0) m.c40 = Constraint(expr= - m.x148 - m.x151 +", "== 0) m.c11 = Constraint(expr= m.x23 - m.x26 - m.x29", "7*m.b701 - 7*m.b702 - 4*m.b703 - 4*m.b704 - 3*m.b705 -", "m.x390 + 9*m.b642 <= 9) m.c523 = Constraint(expr= m.x391 +", "<= 15) m.c203 = Constraint(expr=(m.x314/(0.001 + 0.999*m.b611) - 1.25*log(1 +", "<= 3.04984759446376) m.c577 = Constraint(expr= m.x403 + 3.04984759446376*m.b649 <= 3.04984759446376)", "= Constraint(expr= m.b729 + m.b730 <= 1) m.c1193 = Constraint(expr=", "<= 0) m.c689 = Constraint(expr= m.x491 + 0.940066550763924*m.b659 <= 0.940066550763924)", "m.c849 = Constraint(expr= m.x582 == 0) m.c850 = Constraint(expr= m.x583", "1) m.c1237 = Constraint(expr= m.b752 + m.b753 <= 1) m.c1238", "0) m.c361 = Constraint(expr= m.x358 - 3.04984759446376*m.b628 <= 0) m.c362", "= Constraint(expr= - m.b610 + m.b622 + m.b625 + m.b628", "+ m.b599 - m.b602 >= 0) m.c1404 = Constraint(expr= m.b597", "0.994083415506506) m.c745 = Constraint(expr= m.x532 + 0.994083415506506*m.b667 <= 0.994083415506506) m.c746", "m.b601 - m.b607 >= 0) m.c1409 = Constraint(expr= m.b596 +", "- m.b643 <= 0) m.c1061 = Constraint(expr= m.b644 - m.b645", "Var(within=Reals,bounds=(0,None),initialize=0) m.x64 = Var(within=Reals,bounds=(0,None),initialize=0) m.x65 = Var(within=Reals,bounds=(0,None),initialize=0) m.x66 = 
Var(within=Reals,bounds=(0,None),initialize=0)", "- m.x521 == 0) m.c705 = Constraint(expr= m.x174 - m.x519", "1.04900943706034*m.b648 <= 0) m.c580 = Constraint(expr= m.x454 - 1.04900943706034*m.b649 <=", "m.x415 == 0) m.c617 = Constraint(expr= m.x467 == 0) m.c618", "<= 0) m.c1064 = Constraint(expr= m.b647 - m.b648 <= 0)", "== 0) m.c876 = Constraint(expr= m.x588 == 0) m.c877 =", "Constraint(expr= m.x68 - m.x80 - m.x83 == 0) m.c21 =", "m.c551 = Constraint(expr= m.x446 - 9*m.b644 <= 0) m.c552 =", "m.b740 + m.b742 <= 1) m.c1213 = Constraint(expr= m.b740 +", "- 4.45628648004517*m.b601 <= 0) m.c104 = Constraint(expr= m.x233 + 4.45628648004517*m.b599", "+ m.b700 <= 1) m.c1131 = Constraint(expr= m.b698 + m.b700", "m.x541 == 0) m.c770 = Constraint(expr= m.x506 - 0.940066550763924*m.b668 <=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x34 = Var(within=Reals,bounds=(0,None),initialize=0) m.x35 = Var(within=Reals,bounds=(0,30),initialize=0) m.x36 =", "m.c422 = Constraint(expr= m.x329 - 1.32154609891348*m.b632 <= 0) m.c423 =", "Constraint(expr= m.x452 - 1.04900943706034*m.b647 <= 0) m.c579 = Constraint(expr= m.x453", "Constraint(expr= m.x124 - m.x442 - m.x445 == 0) m.c518 =", "= Constraint(expr= - m.b635 + m.b636 - m.b726 <= 0)", "m.x425 + 0.842233385663186*m.b632 <= 0.842233385663186) m.c432 = Constraint(expr= m.x426 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x384 = Var(within=Reals,bounds=(0,None),initialize=0) m.x385 = Var(within=Reals,bounds=(0,None),initialize=0) m.x386 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x587 = Var(within=Reals,bounds=(0,None),initialize=0) m.x588 = Var(within=Reals,bounds=(0,None),initialize=0) m.x589 =", "m.b642 = Var(within=Binary,bounds=(0,1),initialize=0) m.b643 = Var(within=Binary,bounds=(0,1),initialize=0) m.b644 = Var(within=Binary,bounds=(0,1),initialize=0) m.b645", "= Constraint(expr= m.x273 == 0) m.c142 = Constraint(expr= m.x274 ==", "- 13.5*m.b620 <= 0) m.c306 = Constraint(expr= m.x345 - 13.5*m.b621", 
"= Constraint(expr= m.x595 + 9*m.b685 <= 9) m.c923 = Constraint(expr=", "= Constraint(expr= m.b743 + m.b745 <= 1) m.c1219 = Constraint(expr=", "= Constraint(expr= 4*m.b767 + m.x857 == 0) m.c1005 = Constraint(expr=", "m.x455 = Var(within=Reals,bounds=(0,None),initialize=0) m.x456 = Var(within=Reals,bounds=(0,None),initialize=0) m.x457 = Var(within=Reals,bounds=(0,None),initialize=0) m.x458", "m.c289 = Constraint(expr= m.x301 == 0) m.c290 = Constraint(expr= m.x347", "m.c360 = Constraint(expr= m.x357 - 3.04984759446376*m.b627 <= 0) m.c361 =", "- m.x183 - m.x186 == 0) m.c49 = Constraint(expr= m.x175", "== 0) m.c537 = Constraint(expr= m.x450 == 0) m.c538 =", "0.842233385663186*m.b632 <= 0) m.c429 = Constraint(expr= m.x423 - 0.842233385663186*m.b633 <=", "- 4.45628648004517*m.b607 <= 0) m.c152 = Constraint(expr= m.x245 + 4.45628648004517*m.b605", "+ m.b597 - m.b687 <= 0) m.c1285 = Constraint(expr= -", "m.c612 = Constraint(expr=(m.x465/(0.001 + 0.999*m.b654) - 0.85*log(1 + m.x411/(0.001 +", "m.x117 - m.x429 - m.x432 == 0) m.c448 = Constraint(expr=", "Constraint(expr= m.x187 - m.x550 - m.x553 == 0) m.c827 =", "+ m.b697 <= 1) m.c1127 = Constraint(expr= m.b698 + m.b699", "Var(within=Binary,bounds=(0,1),initialize=0) m.b692 = Var(within=Binary,bounds=(0,1),initialize=0) m.b693 = Var(within=Binary,bounds=(0,1),initialize=0) m.b694 = Var(within=Binary,bounds=(0,1),initialize=0)", "<= 0) m.c1061 = Constraint(expr= m.b644 - m.b645 <= 0)", "= Constraint(expr= m.b665 - m.b667 <= 0) m.c1084 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x337 = Var(within=Reals,bounds=(0,None),initialize=0) m.x338 = Var(within=Reals,bounds=(0,None),initialize=0) m.x339 =", "Constraint(expr= 3*m.b716 + m.x806 == 0) m.c954 = Constraint(expr= 9*m.b717", "m.b684 <= 0) m.c1101 = Constraint(expr= m.b683 - m.b685 <=", "= Constraint(expr= m.x219 - 40*m.b600 <= 0) m.c97 = Constraint(expr=", "== 0) m.c757 = Constraint(expr= m.x517 == 0) m.c758 =", "m.c1099 = Constraint(expr= m.b681 - 
m.b682 <= 0) m.c1100 =", "m.c1003 = Constraint(expr= 9*m.b766 + m.x856 == 0) m.c1004 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x155 = Var(within=Reals,bounds=(0,None),initialize=0) m.x156 = Var(within=Reals,bounds=(0,None),initialize=0) m.x157 =", "2.30162356062425) m.c502 = Constraint(expr= m.x439 + 2.30162356062425*m.b640 <= 2.30162356062425) m.c503", "<= 0) m.c1335 = Constraint(expr= - m.b647 + m.b648 -", "m.b755 <= 0) m.c1353 = Constraint(expr= - m.b665 + m.b666", "== 0) m.c882 = Constraint(expr= m.x207 - m.x585 - m.x588", "= Constraint(expr=(m.x357/(0.001 + 0.999*m.b627) - 1.1*log(1 + m.x309/(0.001 + 0.999*m.b627)))*(0.001", "m.x4 - m.x7 - m.x10 == 0) m.c5 = Constraint(expr=", "<= 0) m.c81 = Constraint(expr=(m.x231/(0.001 + 0.999*m.b600) - 1.2*log(1 +", "m.x114 - m.x423 - m.x426 == 0) m.c421 = Constraint(expr=", "0.842233385663186) m.c434 = Constraint(expr=(m.x428/(0.001 + 0.999*m.b635) - 0.7*log(1 + m.x362/(0.001", "m.x33 == 0) m.c13 = Constraint(expr= m.x25 - m.x28 -", "== 0) m.c36 = Constraint(expr= m.x138 - m.x141 - m.x144", "<= 0) m.c452 = Constraint(expr= m.x365 + 1.26558121681553*m.b635 <= 1.26558121681553)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x76 = Var(within=Reals,bounds=(0,None),initialize=0) m.x77 = Var(within=Reals,bounds=(0,None),initialize=0) m.x78 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= - m.x506 + m.x536 == 0) m.c747 = Constraint(expr=", "== 0) m.c413 = Constraint(expr= m.x425 == 0) m.c414 =", "+ m.x832 == 0) m.c980 = Constraint(expr= m.b743 + m.x833", "Var(within=Reals,bounds=(0,None),initialize=0) m.x175 = Var(within=Reals,bounds=(0,None),initialize=0) m.x176 = Var(within=Reals,bounds=(0,None),initialize=0) m.x177 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x399 - 3.04984759446376*m.b648 <= 0) m.c574 = Constraint(expr= m.x400 -", "= Constraint(expr= m.b598 + m.b601 - m.b604 >= 0) m.c1406", "= Constraint(expr= m.x438 + 2.30162356062425*m.b639 <= 2.30162356062425) m.c502 = 
Constraint(expr=", "<= 3.34221486003388) m.c134 = Constraint(expr=(m.x266/(0.001 + 0.999*m.b605) - 1.5*log(1 +", "- 1.26558121681553*m.b617 <= 0) m.c279 = Constraint(expr= m.x339 - 1.26558121681553*m.b618", "Constraint(expr= - 0.9*m.x298 + m.x346 == 0) m.c287 = Constraint(expr=", "<= 3.04984759446376) m.c364 = Constraint(expr= m.x361 + 3.04984759446376*m.b628 <= 3.04984759446376)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x214 = Var(within=Reals,bounds=(0,None),initialize=0) m.x215 = Var(within=Reals,bounds=(0,None),initialize=0) m.x216 =", "- 1.26558121681553*m.b618 <= 0) m.c280 = Constraint(expr= m.x340 - 1.26558121681553*m.b619", "m.b749 <= 0) m.c1347 = Constraint(expr= - m.b659 + m.b660", "m.c897 = Constraint(expr= - 0.6*m.x561 + m.x591 == 0) m.c898", "m.b623 >= 0) m.c1425 = Constraint(expr= m.b609 - m.b624 >=", "Constraint(expr= 2*m.b750 + m.x840 == 0) m.c988 = Constraint(expr= 9*m.b751", "== 0) m.c677 = Constraint(expr= m.x149 - m.x488 - m.x491", "m.b664 + m.b673 + m.b676 >= 0) m.c1463 = Constraint(expr=", "= Constraint(expr= m.x524 - 0.994083415506506*m.b665 <= 0) m.c741 = Constraint(expr=", "m.x516 == 0) m.c757 = Constraint(expr= m.x517 == 0) m.c758", "- m.x322 == 0) m.c218 = Constraint(expr= m.x284 - 3.34221486003388*m.b611", "m.c1275 = Constraint(expr= m.b770 + m.b772 <= 1) m.c1276 =", "= Constraint(expr= m.x463 == 0) m.c593 = Constraint(expr= m.x104 -", "Constraint(expr= 8*m.b762 + m.x852 == 0) m.c1000 = Constraint(expr= 7*m.b763", "m.c760 = Constraint(expr= m.x541 == 0) m.c761 = Constraint(expr= m.x167", "= Constraint(expr= m.x289 == 0) m.c209 = Constraint(expr= m.x320 ==", "+ 1.04900943706034*m.b647 <= 1.04900943706034) m.c582 = Constraint(expr= m.x456 + 1.04900943706034*m.b648", "0) m.c756 = Constraint(expr= m.x516 == 0) m.c757 = Constraint(expr=", "m.c625 = Constraint(expr= m.x136 - m.x466 - m.x469 == 0)", "m.c1432 = Constraint(expr= m.b613 - m.b631 >= 0) m.c1433 =", "m.x575 + 0.480234946352917*m.b674 <= 0.480234946352917) m.c840 = 
Constraint(expr= m.x576 +", "m.x493 = Var(within=Reals,bounds=(0,None),initialize=0) m.x494 = Var(within=Reals,bounds=(0,None),initialize=0) m.x495 = Var(within=Reals,bounds=(0,None),initialize=0) m.x496", "m.x477 - m.x480 == 0) m.c676 = Constraint(expr= m.x145 -", "+ m.b735 <= 1) m.c1202 = Constraint(expr= m.b735 + m.b736", "m.x539 == 0) m.c759 = Constraint(expr= m.x540 == 0) m.c760", "0) m.c1032 = Constraint(expr= m.b614 - m.b616 <= 0) m.c1033", "Constraint(expr= m.b708 + m.b709 <= 1) m.c1151 = Constraint(expr= m.b710", "m.b607 - m.b619 >= 0) m.c1421 = Constraint(expr= m.b608 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x552 = Var(within=Reals,bounds=(0,None),initialize=0) m.x553 = Var(within=Reals,bounds=(0,None),initialize=0) m.x554 = Var(within=Reals,bounds=(0,None),initialize=0)", "- m.x57 - m.x60 == 0) m.c19 = Constraint(expr= m.x46", "m.x512 - m.x515 == 0) m.c765 = Constraint(expr= m.x171 -", "+ m.b691 <= 1) m.c1115 = Constraint(expr= m.b692 + m.b693", "Constraint(expr= m.x320 == 0) m.c210 = Constraint(expr= m.x321 == 0)", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b631 = Var(within=Binary,bounds=(0,1),initialize=0) m.b632 = Var(within=Binary,bounds=(0,1),initialize=0) m.b633 =", "m.c403 = Constraint(expr= m.x418 - 20*m.b631 <= 0) m.c404 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x223 = Var(within=Reals,bounds=(0,None),initialize=0) m.x224 = Var(within=Reals,bounds=(0,None),initialize=0) m.x225 = Var(within=Reals,bounds=(0,None),initialize=0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x298 = Var(within=Reals,bounds=(0,None),initialize=0) m.x299 = Var(within=Reals,bounds=(0,None),initialize=0) m.x300 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x444 = Var(within=Reals,bounds=(0,None),initialize=0) m.x445 = Var(within=Reals,bounds=(0,None),initialize=0) m.x446 =", "<= 1) m.c1140 = Constraint(expr= m.b704 + m.b706 <= 1)", "0) m.c194 = Constraint(expr= m.x257 + 
30*m.b608 <= 30) m.c195", "m.b633 - m.b634 <= 0) m.c1052 = Constraint(expr= m.b635 -", "m.b657 + m.b660 >= 0) m.c1459 = Constraint(expr= - m.b655", "m.x64 - m.x316 - m.x322 == 0) m.c218 = Constraint(expr=", "Constraint(expr= m.x450 == 0) m.c538 = Constraint(expr= m.x451 == 0)", "0) m.c728 = Constraint(expr= m.x164 - m.x500 - m.x503 ==", "+ 0.705049913072943*m.b675 <= 0.705049913072943) m.c835 = Constraint(expr= m.x553 + 0.705049913072943*m.b676", "m.x131 - m.x458 - m.x461 == 0) m.c597 = Constraint(expr=", "= Var(within=Reals,bounds=(0,20),initialize=0) m.x90 = Var(within=Reals,bounds=(0,20),initialize=0) m.x91 = Var(within=Reals,bounds=(0,20),initialize=0) m.x92 =", "<= 0.480234946352917) m.c841 = Constraint(expr= m.x577 + 0.480234946352917*m.b676 <= 0.480234946352917)", "m.x576 == 0) m.c823 = Constraint(expr= m.x577 == 0) m.c824", "4.45628648004517*m.b607 <= 0) m.c152 = Constraint(expr= m.x245 + 4.45628648004517*m.b605 <=", "m.x64 = Var(within=Reals,bounds=(0,None),initialize=0) m.x65 = Var(within=Reals,bounds=(0,None),initialize=0) m.x66 = Var(within=Reals,bounds=(0,None),initialize=0) m.x67", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x380 = Var(within=Reals,bounds=(0,None),initialize=0) m.x381 = Var(within=Reals,bounds=(0,None),initialize=0) m.x382 =", "+ 3.04984759446376*m.b653 <= 3.04984759446376) m.c630 = Constraint(expr= m.x414 + 3.04984759446376*m.b654", "= Constraint(expr= m.x234 == 0) m.c88 = Constraint(expr= m.x235 ==", "m.c878 = Constraint(expr= m.x188 - m.x554 - m.x557 == 0)", "m.c402 = Constraint(expr= m.x417 - 20*m.b630 <= 0) m.c403 =", "m.b660 = Var(within=Binary,bounds=(0,1),initialize=0) m.b661 = Var(within=Binary,bounds=(0,1),initialize=0) m.b662 = Var(within=Binary,bounds=(0,1),initialize=0) m.b663", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x395 = Var(within=Reals,bounds=(0,None),initialize=0) m.x396 = Var(within=Reals,bounds=(0,None),initialize=0) m.x397 =", "m.x306 == 0) m.c322 = Constraint(expr= m.x58 - m.x304 -", "0) 
m.c650 = Constraint(expr= m.x146 - m.x482 - m.x485 ==", "13.5*m.b681 <= 0) m.c892 = Constraint(expr= m.x586 - 13.5*m.b682 <=", "2*m.b750 - 9*m.b751 - 5*m.b752 - 8*m.b753 - 4*m.b754 -", "m.c196 = Constraint(expr= m.x259 + 30*m.b610 <= 30) m.c197 =", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b643 = Var(within=Binary,bounds=(0,1),initialize=0) m.b644 = Var(within=Binary,bounds=(0,1),initialize=0) m.b645 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x145 = Var(within=Reals,bounds=(0,None),initialize=0) m.x146 = Var(within=Reals,bounds=(0,None),initialize=0) m.x147 =", "- m.b635 >= 0) m.c1437 = Constraint(expr= m.b618 - m.b636", "m.b613 = Var(within=Binary,bounds=(0,1),initialize=0) m.b614 = Var(within=Binary,bounds=(0,1),initialize=0) m.b615 = Var(within=Binary,bounds=(0,1),initialize=0) m.b616", "m.c964 = Constraint(expr= 3*m.b727 + m.x817 == 0) m.c965 =", "m.x328 - m.x334 == 0) m.c245 = Constraint(expr= m.x290 -", "Constraint(expr= m.x507 - 0.940066550763924*m.b669 <= 0) m.c772 = Constraint(expr= m.x508", "0.940066550763924) m.c739 = Constraint(expr= m.x505 + 0.940066550763924*m.b667 <= 0.940066550763924) m.c740", "Constraint(expr= m.x288 + 3.34221486003388*m.b612 <= 3.34221486003388) m.c223 = Constraint(expr= m.x289", "= Constraint(expr= m.b774 + m.b775 <= 1) m.c1283 = Constraint(expr=", "m.c563 = Constraint(expr= m.x455 == 0) m.c564 = Constraint(expr= m.x456", "Constraint(expr= m.x430 - 0.572481933717686*m.b637 <= 0) m.c458 = Constraint(expr= m.x431", "m.c463 = Constraint(expr=(m.x436/(0.001 + 0.999*m.b640) - 0.65*log(1 + m.x370/(0.001 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x350 = Var(within=Reals,bounds=(0,None),initialize=0) m.x351 = Var(within=Reals,bounds=(0,None),initialize=0) m.x352 =", "m.b699 + m.b700 <= 1) m.c1133 = Constraint(expr= m.b701 +", "Var(within=Reals,bounds=(0,None),initialize=0) m.x462 = Var(within=Reals,bounds=(0,None),initialize=0) m.x463 = Var(within=Reals,bounds=(0,None),initialize=0) m.x464 = 
Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= 6*m.b739 + m.x829 == 0) m.c977 = Constraint(expr= 2*m.b740", "<= 1) m.c1152 = Constraint(expr= m.b710 + m.b712 <= 1)", "- m.b752 <= 0) m.c1350 = Constraint(expr= - m.b662 +", "Var(within=Reals,bounds=(0,None),initialize=0) m.x250 = Var(within=Reals,bounds=(0,None),initialize=0) m.x251 = Var(within=Reals,bounds=(0,None),initialize=0) m.x252 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c325 = Constraint(expr= m.x76 - m.x352 - m.x355 ==", "0.75*m.x238 + m.x262 == 0) m.c110 = Constraint(expr= m.x239 ==", "Constraint(expr= m.x530 == 0) m.c726 = Constraint(expr= m.x531 == 0)", "m.x30 = Var(within=Reals,bounds=(0,None),initialize=0) m.x31 = Var(within=Reals,bounds=(0,None),initialize=0) m.x32 = Var(within=Reals,bounds=(0,None),initialize=0) m.x33", "m.c264 = Constraint(expr= m.x342 == 0) m.c265 = Constraint(expr= m.x343", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b628 = Var(within=Binary,bounds=(0,1),initialize=0) m.b629 = Var(within=Binary,bounds=(0,1),initialize=0) m.b630 =", "m.x361 == 0) m.c353 = Constraint(expr= m.x308 - 15*m.b626 <=", "<= 0.705049913072943) m.c718 = Constraint(expr= m.x523 + 0.705049913072943*m.b664 <= 0.705049913072943)", "= Constraint(expr= 9*m.b696 + m.x786 == 0) m.c934 = Constraint(expr=", "m.b736 = Var(within=Binary,bounds=(0,1),initialize=0) m.b737 = Var(within=Binary,bounds=(0,1),initialize=0) m.b738 = Var(within=Binary,bounds=(0,1),initialize=0) m.b739", "m.c1086 = Constraint(expr= m.b668 - m.b670 <= 0) m.c1087 =", "<= 1) m.c1213 = Constraint(expr= m.b740 + m.b741 <= 1)", "Constraint(expr= m.x535 == 0) m.c848 = Constraint(expr= m.x581 == 0)", "+ 0.999*m.b606)))*(0.001 + 0.999* m.b606) <= 0) m.c136 = Constraint(expr=(m.x268/(0.001", "3.04984759446376*m.b649 <= 0) m.c575 = Constraint(expr= m.x401 + 3.04984759446376*m.b647 <=", "= Constraint(expr= m.x210 - m.x591 - m.x594 == 0) m.c910", "+ m.b715 <= 1) m.c1163 = Constraint(expr= m.b716 + m.b717", "1) m.c1241 
= Constraint(expr= m.b755 + m.b756 <= 1) m.c1242", "- m.x323 == 0) m.c381 = Constraint(expr= m.x63 - m.x318", "Constraint(expr= m.x337 == 0) m.c413 = Constraint(expr= m.x425 == 0)", "== 0) m.c291 = Constraint(expr= m.x348 == 0) m.c292 =", "== 0) m.c316 = Constraint(expr= m.x307 == 0) m.c317 =", "= Constraint(expr= m.x142 - m.x472 - m.x475 == 0) m.c650", "1.18887736200171*m.b661 <= 1.18887736200171) m.c686 = Constraint(expr= m.x488 - 0.940066550763924*m.b659 <=", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b677 = Var(within=Binary,bounds=(0,1),initialize=0) m.b678 = Var(within=Binary,bounds=(0,1),initialize=0) m.b679 =", "= Constraint(expr= - m.b602 + m.b611 + m.b614 >= 0)", "m.x794 = Var(within=Reals,bounds=(None,None),initialize=0) m.x795 = Var(within=Reals,bounds=(None,None),initialize=0) m.x796 = Var(within=Reals,bounds=(None,None),initialize=0) m.x797", "- m.x4 + 5*m.x20 + 10*m.x21 + 5*m.x22 - 2*m.x35", "+ 0.999* m.b636) <= 0) m.c436 = Constraint(expr=(m.x430/(0.001 + 0.999*m.b637)", "= Constraint(expr= m.x221 == 0) m.c84 = Constraint(expr= m.x222 ==", "+ 9*m.b685 <= 9) m.c923 = Constraint(expr= 5*m.b686 + m.x776", "m.b706 <= 1) m.c1141 = Constraint(expr= m.b704 + m.b705 <=", "m.c286 = Constraint(expr= - 0.9*m.x298 + m.x346 == 0) m.c287", "Var(within=Binary,bounds=(0,1),initialize=0) m.b727 = Var(within=Binary,bounds=(0,1),initialize=0) m.b728 = Var(within=Binary,bounds=(0,1),initialize=0) m.b729 = Var(within=Binary,bounds=(0,1),initialize=0)", "9*m.b625 <= 0) m.c335 = Constraint(expr= m.x353 + 9*m.b623 <=", "m.x330 - 1.32154609891348*m.b633 <= 0) m.c424 = Constraint(expr= m.x331 -", "m.x556 - 15*m.b682 <= 0) m.c887 = Constraint(expr= m.x557 +", "m.x413 = Var(within=Reals,bounds=(0,None),initialize=0) m.x414 = Var(within=Reals,bounds=(0,None),initialize=0) m.x415 = Var(within=Reals,bounds=(0,None),initialize=0) m.x416", "m.b661 >= 0) m.c1472 = Constraint(expr= m.b662 - m.b671 >=", "= Constraint(expr= 2*m.b755 + m.x845 == 0) m.c993 = Constraint(expr=", 
"Constraint(expr= m.x263 == 0) m.c114 = Constraint(expr= m.x264 == 0)", "<= 0) m.c1073 = Constraint(expr= m.b656 - m.b657 <= 0)", "<= 1) m.c1119 = Constraint(expr= m.b692 + m.b694 <= 1)", "1.83548069293539*m.b631 <= 0) m.c392 = Constraint(expr= m.x323 + 1.83548069293539*m.b629 <=", "<= 1) m.c1158 = Constraint(expr= m.b713 + m.b715 <= 1)", "m.x470 - 1.18887736200171*m.b656 <= 0) m.c654 = Constraint(expr= m.x471 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x439 = Var(within=Reals,bounds=(0,None),initialize=0) m.x440 = Var(within=Reals,bounds=(0,None),initialize=0) m.x441 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= 4*m.b694 + m.x784 == 0) m.c932 = Constraint(expr=", "m.x326 - m.x332 == 0) m.c243 = Constraint(expr= m.x66 -", "m.c1164 = Constraint(expr= m.b716 + m.b718 <= 1) m.c1165 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x466 = Var(within=Reals,bounds=(0,None),initialize=0) m.x467 = Var(within=Reals,bounds=(0,None),initialize=0) m.x468 =", "0) m.c673 = Constraint(expr= m.x493 == 0) m.c674 = Constraint(expr=", "3.34221486003388*m.b615 <= 0) m.c247 = Constraint(expr= m.x292 - 3.34221486003388*m.b616 <=", "== 0) m.c514 = Constraint(expr= m.x97 - m.x388 - m.x391", "3*m.b716 + m.x806 == 0) m.c954 = Constraint(expr= 9*m.b717 +", "<= 0.994083415506506) m.c745 = Constraint(expr= m.x532 + 0.994083415506506*m.b667 <= 0.994083415506506)", "- m.b605 + m.b606 - m.b696 <= 0) m.c1294 =", "Constraint(expr= m.x178 - m.x529 - m.x535 == 0) m.c854 =", "m.c1329 = Constraint(expr= - m.b641 + m.b642 - m.b732 <=", "m.c323 = Constraint(expr= m.x74 - m.x350 - m.x353 == 0)", "= Constraint(expr= - m.b596 - m.b597 + m.b598 - m.b688", "m.b720 <= 1) m.c1172 = Constraint(expr= m.b720 + m.b721 <=", "0) m.c920 = Constraint(expr= m.x593 + 9*m.b683 <= 9) m.c921", "Var(within=Reals,bounds=(None,None),initialize=0) m.x848 = Var(within=Reals,bounds=(None,None),initialize=0) m.x849 = Var(within=Reals,bounds=(None,None),initialize=0) m.x850 = 
Var(within=Reals,bounds=(None,None),initialize=0)", "= Constraint(expr= 2*m.b731 + m.x821 == 0) m.c969 = Constraint(expr=", "m.x831 == 0) m.c979 = Constraint(expr= 4*m.b742 + m.x832 ==", "Var(within=Binary,bounds=(0,1),initialize=0) m.b677 = Var(within=Binary,bounds=(0,1),initialize=0) m.b678 = Var(within=Binary,bounds=(0,1),initialize=0) m.b679 = Var(within=Binary,bounds=(0,1),initialize=0)", "Constraint(expr= m.x361 == 0) m.c347 = Constraint(expr= m.x59 - m.x308", "= Constraint(expr= - 0.5*m.x514 + m.x538 == 0) m.c752 =", "m.x534 == 0) m.c853 = Constraint(expr= m.x178 - m.x529 -", "m.c1109 = Constraint(expr= m.b689 + m.b690 <= 1) m.c1110 =", "<= 0) m.c811 = Constraint(expr= m.x568 - 0.666992981045719*m.b673 <= 0)", "0) m.c482 = Constraint(expr= m.x119 - m.x434 - m.x437 ==", "1.26558121681553) m.c454 = Constraint(expr= m.x367 + 1.26558121681553*m.b637 <= 1.26558121681553) m.c455", "Constraint(expr= m.x389 == 0) m.c507 = Constraint(expr= m.x390 == 0)", "= Constraint(expr= m.x537 - 15*m.b669 <= 0) m.c784 = Constraint(expr=", "m.b725 + m.b726 <= 1) m.c1182 = Constraint(expr= m.b725 +", "m.x378 = Var(within=Reals,bounds=(0,None),initialize=0) m.x379 = Var(within=Reals,bounds=(0,None),initialize=0) m.x380 = Var(within=Reals,bounds=(0,None),initialize=0) m.x381", "== 0) m.c700 = Constraint(expr= m.x523 == 0) m.c701 =", "<= 3.34221486003388) m.c250 = Constraint(expr= m.x295 + 3.34221486003388*m.b616 <= 3.34221486003388)", "<= 0) m.c335 = Constraint(expr= m.x353 + 9*m.b623 <= 9)", "+ m.x406/(0.001 + 0.999*m.b652)))*(0.001 + 0.999* m.b652) <= 0) m.c587", "m.b772 <= 1) m.c1276 = Constraint(expr= m.b771 + m.b772 <=", "- m.b671 + m.b672 - m.b762 <= 0) m.c1360 =", "Constraint(expr= 9*m.b751 + m.x841 == 0) m.c989 = Constraint(expr= 5*m.b752", "Constraint(expr= m.x360 + 3.04984759446376*m.b627 <= 3.04984759446376) m.c364 = Constraint(expr= m.x361", "m.x18 - m.x21 - m.x24 == 0) m.c10 = Constraint(expr=", "m.x249 = Var(within=Reals,bounds=(0,None),initialize=0) m.x250 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x251 = Var(within=Reals,bounds=(0,None),initialize=0) m.x252", "Constraint(expr= m.x161 - m.x494 - m.x497 == 0) m.c702 =", "0.940066550763924*m.b670 <= 0.940066550763924) m.c776 = Constraint(expr= m.x512 - 30*m.b668 <=", "m.x517 == 0) m.c758 = Constraint(expr= m.x539 == 0) m.c759", "Var(within=Reals,bounds=(0,None),initialize=0) m.x518 = Var(within=Reals,bounds=(0,None),initialize=0) m.x519 = Var(within=Reals,bounds=(0,None),initialize=0) m.x520 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x538 = Var(within=Reals,bounds=(0,None),initialize=0) m.x539 = Var(within=Reals,bounds=(0,None),initialize=0) m.x540 =", "= Constraint(expr= m.x535 == 0) m.c848 = Constraint(expr= m.x581 ==", "- m.b723 - 9*m.b724 - 2*m.b725 - 6*m.b726 - 3*m.b727", "+ m.b693 <= 1) m.c1118 = Constraint(expr= m.b693 + m.b694", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x459 = Var(within=Reals,bounds=(0,None),initialize=0) m.x460 = Var(within=Reals,bounds=(0,None),initialize=0) m.x461 =", "m.x282 == 0) m.c175 = Constraint(expr= m.x283 == 0) m.c176", "3.04984759446376*m.b647 <= 0) m.c573 = Constraint(expr= m.x399 - 3.04984759446376*m.b648 <=", "0) m.c581 = Constraint(expr= m.x455 + 1.04900943706034*m.b647 <= 1.04900943706034) m.c582", "m.c982 = Constraint(expr= m.b745 + m.x835 == 0) m.c983 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x64 = Var(within=Reals,bounds=(0,None),initialize=0) m.x65 = Var(within=Reals,bounds=(0,None),initialize=0) m.x66 =", "= Constraint(expr= m.b597 - m.b598 <= 0) m.c1016 = Constraint(expr=", "0.994083415506506) m.c861 = Constraint(expr= m.x534 + 0.994083415506506*m.b678 <= 0.994083415506506) m.c862", "= Constraint(expr= m.b671 - m.b672 <= 0) m.c1089 = Constraint(expr=", "m.b616 >= 0) m.c1418 = Constraint(expr= m.b605 - m.b617 >=", "= Constraint(expr= m.x503 == 0) m.c723 = Constraint(expr= m.x504 ==", "1.32154609891348*m.b616 <= 0) m.c254 = 
Constraint(expr= m.x332 + 1.32154609891348*m.b614 <=", "0.999*m.b598)))*(0.001 + 0.999*m.b598) <= 0) m.c56 = Constraint(expr= m.x215 ==", "m.x816 == 0) m.c964 = Constraint(expr= 3*m.b727 + m.x817 ==", "Var(within=Reals,bounds=(0,None),initialize=0) m.x363 = Var(within=Reals,bounds=(0,None),initialize=0) m.x364 = Var(within=Reals,bounds=(0,None),initialize=0) m.x365 = Var(within=Reals,bounds=(0,None),initialize=0)", "0.999*m.b657) <= 0) m.c640 = Constraint(expr=(m.x484/(0.001 + 0.999*m.b658) - log(1", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x287 = Var(within=Reals,bounds=(0,None),initialize=0) m.x288 = Var(within=Reals,bounds=(0,None),initialize=0) m.x289 =", "+ m.b721 <= 1) m.c1171 = Constraint(expr= m.b719 + m.b720", "- m.b605 + m.b617 >= 0) m.c1386 = Constraint(expr= -", "Constraint(expr= - m.x72 - m.x90 + m.x93 == 0) m.c25", "+ 0.999*m.b666) - 1.5*log(1 + m.x501/(0.001 + 0.999*m.b666)))*(0.001 + 0.999*", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x165 = Var(within=Reals,bounds=(0,None),initialize=0) m.x166 = Var(within=Reals,bounds=(0,None),initialize=0) m.x167 =", "m.x193 - m.x196 == 0) m.c53 = Constraint(expr=(m.x224/(0.001 + 0.999*m.b596)", "m.b613) <= 0) m.c206 = Constraint(expr= m.x287 == 0) m.c207", "- 15*m.b608 <= 0) m.c198 = Constraint(expr= m.x279 - 15*m.b609", "<= 0) m.c1354 = Constraint(expr= - m.b665 - m.b666 +", "m.c333 = Constraint(expr= m.x351 - 9*m.b624 <= 0) m.c334 =", "Constraint(expr= m.b687 + m.b688 <= 1) m.c1109 = Constraint(expr= m.b689", "Constraint(expr= - m.b619 + m.b637 + m.b640 >= 0) m.c1391", "- 1.32154609891348*m.b614 <= 0) m.c252 = Constraint(expr= m.x327 - 1.32154609891348*m.b615", "+ m.b754 <= 1) m.c1239 = Constraint(expr= m.b752 + m.b754", "m.x169 == 0) m.c47 = Constraint(expr= m.x173 - m.x182 -", "= Constraint(expr= m.x300 + 15*m.b621 <= 15) m.c304 = Constraint(expr=", "Constraint(expr= m.b719 + m.b720 <= 1) m.c1170 = Constraint(expr= m.b719", "m.x507 + m.x537 == 0) m.c748 = Constraint(expr= - m.x508", 
"m.x173 = Var(within=Reals,bounds=(0,None),initialize=0) m.x174 = Var(within=Reals,bounds=(0,None),initialize=0) m.x175 = Var(within=Reals,bounds=(0,None),initialize=0) m.x176", "m.x400 - m.x403 == 0) m.c569 = Constraint(expr= m.x128 -", "m.x98 == 0) m.c27 = Constraint(expr= m.x75 - m.x96 -", "33.5) m.c495 = Constraint(expr= m.x384 + 33.5*m.b639 <= 33.5) m.c496", "0) m.c508 = Constraint(expr= m.x391 == 0) m.c509 = Constraint(expr=", "Constraint(expr= m.x203 - m.x578 - m.x581 == 0) m.c855 =", "m.c1187 = Constraint(expr= m.b728 + m.b729 <= 1) m.c1188 =", "0.999*m.b613) - 1.25*log(1 + m.x286/(0.001 + 0.999*m.b613)))*(0.001 + 0.999* m.b613)", "m.c1058 = Constraint(expr= m.b641 - m.b642 <= 0) m.c1059 =", "= Constraint(expr= m.x235 + 4.45628648004517*m.b601 <= 4.45628648004517) m.c107 = Constraint(expr=", "m.c1318 = Constraint(expr= - m.b629 - m.b630 + m.b631 -", "0.6*m.x562 + m.x592 == 0) m.c899 = Constraint(expr= m.x563 ==", "m.c739 = Constraint(expr= m.x505 + 0.940066550763924*m.b667 <= 0.940066550763924) m.c740 =", "Constraint(expr= m.x60 - m.x309 - m.x312 == 0) m.c349 =", "- m.b659 + m.b660 - m.b750 <= 0) m.c1348 =", "<= 0) m.c606 = Constraint(expr= m.x459 - 1.11894339953103*m.b651 <= 0)", "= Constraint(expr= - m.x249 + m.x279 == 0) m.c163 =", "m.c1474 = Constraint(expr= m.b664 - m.b673 >= 0) m.c1475 =", "= Constraint(expr= m.x233 == 0) m.c87 = Constraint(expr= m.x234 ==", "m.b640 >= 0) m.c1391 = Constraint(expr= - m.b608 + m.b620", "m.b700 <= 1) m.c1131 = Constraint(expr= m.b698 + m.b700 <=", "m.c694 = Constraint(expr= - 0.75*m.x496 + m.x520 == 0) m.c695", "m.c1351 = Constraint(expr= - m.b662 - m.b663 + m.b664 -", "0) m.c235 = Constraint(expr= m.x295 == 0) m.c236 = Constraint(expr=", "0) m.c77 = Constraint(expr= m.x227 + 3.71357206670431*m.b596 <= 3.71357206670431) m.c78", "+ 1.18887736200171*m.b654 <= 1.18887736200171) m.c637 = Constraint(expr= m.x469 + 1.18887736200171*m.b655", "m.c41 = Constraint(expr= m.x152 - m.x155 - m.x158 == 0)", "+ 0.999*m.b596) <= 0) 
m.c54 = Constraint(expr=(m.x225/(0.001 + 0.999*m.b597) -", "= Constraint(expr= m.x407 + 3.04984759446376*m.b650 <= 3.04984759446376) m.c603 = Constraint(expr=", "0) m.c875 = Constraint(expr= m.x587 == 0) m.c876 = Constraint(expr=", "1) m.c1171 = Constraint(expr= m.b719 + m.b720 <= 1) m.c1172", "Var(within=Reals,bounds=(0,None),initialize=0) m.x108 = Var(within=Reals,bounds=(0,None),initialize=0) m.x109 = Var(within=Reals,bounds=(0,None),initialize=0) m.x110 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c375 = Constraint(expr= m.x378 == 0) m.c376 = Constraint(expr=", "0) m.c129 = Constraint(expr= m.x261 - 3.34221486003388*m.b603 <= 0) m.c130", "== 0) m.c937 = Constraint(expr= 6*m.b700 + m.x790 == 0)", "0) m.c797 = Constraint(expr= m.x182 - m.x542 - m.x545 ==", "Var(within=Reals,bounds=(0,None),initialize=0) m.x207 = Var(within=Reals,bounds=(0,None),initialize=0) m.x208 = Var(within=Reals,bounds=(0,None),initialize=0) m.x209 = Var(within=Reals,bounds=(0,None),initialize=0)", "- 0.65*log(1 + m.x368/(0.001 + 0.999*m.b638)))*(0.001 + 0.999* m.b638) <=", "m.b699 <= 0) m.c1297 = Constraint(expr= - m.b608 - m.b609", "- m.b599 - m.b600 + m.b601 - m.b691 <= 0)", "1) m.c1246 = Constraint(expr= m.b756 + m.b757 <= 1) m.c1247", "<= 2.30162356062425) m.c502 = Constraint(expr= m.x439 + 2.30162356062425*m.b640 <= 2.30162356062425)", "Constraint(expr= m.x41 - m.x266 - m.x272 == 0) m.c147 =", "0 # # Variable counts # x b i s1s", "m.x529/(0.001 + 0.999*m.b679)))*(0.001 + 0.999*m.b679) <= 0) m.c845 = Constraint(expr=", "m.c380 = Constraint(expr= m.x62 - m.x317 - m.x323 == 0)", "- m.b720 <= 0) m.c1318 = Constraint(expr= - m.b629 -", "m.c364 = Constraint(expr= m.x361 + 3.04984759446376*m.b628 <= 3.04984759446376) m.c365 =", "Var(within=Reals,bounds=(None,None),initialize=0) m.x852 = Var(within=Reals,bounds=(None,None),initialize=0) m.x853 = Var(within=Reals,bounds=(None,None),initialize=0) m.x854 = Var(within=Reals,bounds=(None,None),initialize=0)", 
"Var(within=Reals,bounds=(0,None),initialize=0) m.x595 = Var(within=Reals,bounds=(0,None),initialize=0) m.b596 = Var(within=Binary,bounds=(0,1),initialize=0) m.b597 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.c986 = Constraint(expr= 9*m.b749 + m.x839 == 0) m.c987 =", "- 1.5*log(1 + m.x501/(0.001 + 0.999*m.b666)))*(0.001 + 0.999* m.b666) <=", "m.x362 - m.x365 == 0) m.c444 = Constraint(expr= m.x81 -", "m.x311 = Var(within=Reals,bounds=(0,None),initialize=0) m.x312 = Var(within=Reals,bounds=(0,None),initialize=0) m.x313 = Var(within=Reals,bounds=(0,None),initialize=0) m.x314", "Constraint(expr= m.x484 - 0.78338879230327*m.b658 <= 0) m.c662 = Constraint(expr= m.x485", "Var(within=Reals,bounds=(0,None),initialize=0) m.x334 = Var(within=Reals,bounds=(0,None),initialize=0) m.x335 = Var(within=Reals,bounds=(0,None),initialize=0) m.x336 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= m.x362 - 1.26558121681553*m.b635 <= 0) m.c450 = Constraint(expr=", "0.999*m.b671) - 1.25*log(1 + m.x542/(0.001 + 0.999*m.b671)))*(0.001 + 0.999* m.b671)", "9*m.b724 - 2*m.b725 - 6*m.b726 - 3*m.b727 - 4*m.b728 -", "+ 1.83548069293539*m.b630 <= 1.83548069293539) m.c394 = Constraint(expr= m.x325 + 1.83548069293539*m.b631", "m.x584 - m.x587 == 0) m.c882 = Constraint(expr= m.x207 -", "0) m.c899 = Constraint(expr= m.x563 == 0) m.c900 = Constraint(expr=", "m.x278 == 0) m.c165 = Constraint(expr= - 0.5*m.x255 + m.x279", "Constraint(expr= m.x385 == 0) m.c473 = Constraint(expr= m.x437 == 0)", "0.999* m.b607) <= 0) m.c137 = Constraint(expr= m.x245 == 0)", "m.b628 >= 0) m.c1394 = Constraint(expr= - m.b620 + m.b638", "0) m.c123 = Constraint(expr= m.x237 - 4.45628648004517*m.b603 <= 0) m.c124", "+ m.b622 + m.b625 + m.b628 >= 0) m.c1394 =", "m.b673 - m.b763 <= 0) m.c1361 = Constraint(expr= m.b674 -", "- 7*m.b690 - 6*m.b691 - 6*m.b692 - 9*m.b693 - 4*m.b694", "Var(within=Reals,bounds=(0,None),initialize=0) m.x460 = Var(within=Reals,bounds=(0,None),initialize=0) m.x461 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x462 = Var(within=Reals,bounds=(0,None),initialize=0)", "1.18887736200171) m.c685 = Constraint(expr= m.x481 + 1.18887736200171*m.b661 <= 1.18887736200171) m.c686", "m.b730 <= 1) m.c1189 = Constraint(expr= m.b728 + m.b729 <=", "<= 0) m.c1055 = Constraint(expr= m.b638 - m.b639 <= 0)", "1.26558121681553) m.c490 = Constraint(expr= m.x373 + 1.26558121681553*m.b640 <= 1.26558121681553) m.c491", "Constraint(expr= m.x279 - 15*m.b609 <= 0) m.c199 = Constraint(expr= m.x280", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x306 = Var(within=Reals,bounds=(0,None),initialize=0) m.x307 = Var(within=Reals,bounds=(0,None),initialize=0) m.x308 =", "3193 180 0 # # Reformulation has removed 1 variable", "Constraint(expr= m.x222 + 40*m.b600 <= 40) m.c100 = Constraint(expr= m.x223", "0) m.c404 = Constraint(expr= m.x419 + 20*m.b629 <= 20) m.c405", "m.x389 + 9*m.b641 <= 9) m.c522 = Constraint(expr= m.x390 +", "Constraint(expr= m.x462 == 0) m.c592 = Constraint(expr= m.x463 == 0)", "m.x100 = Var(within=Reals,bounds=(0,None),initialize=0) m.x101 = Var(within=Reals,bounds=(0,None),initialize=0) m.x102 = Var(within=Reals,bounds=(0,None),initialize=0) m.x103", "Constraint(expr= m.b606 - m.b618 >= 0) m.c1420 = Constraint(expr= m.b607", "= Constraint(expr= m.x495 - 0.940066550763924*m.b663 <= 0) m.c709 = Constraint(expr=", "Constraint(expr= m.x69 - m.x81 - m.x84 == 0) m.c22 =", "Constraint(expr= m.b666 - m.b678 >= 0) m.c1480 = Constraint(expr= m.b667", "Var(within=Reals,bounds=(0,None),initialize=0) m.x332 = Var(within=Reals,bounds=(0,None),initialize=0) m.x333 = Var(within=Reals,bounds=(0,None),initialize=0) m.x334 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x40 - m.x49 - m.x52 == 0) m.c17 =", "+ m.b615 >= 0) m.c1378 = Constraint(expr= - m.b604 +", "m.b732 + m.b733 <= 1) m.c1197 = Constraint(expr= m.b731 +", "+ m.b676 - m.b766 <= 0) m.c1364 = Constraint(expr= m.b677", "= Constraint(expr= m.x87 - m.x375 - m.x378 == 
0) m.c385", "m.c407 = Constraint(expr=(m.x422/(0.001 + 0.999*m.b632) - log(1 + m.x329/(0.001 +", "Constraint(expr= m.x220 - 40*m.b601 <= 0) m.c98 = Constraint(expr= m.x221", "= Constraint(expr= m.x517 + 30*m.b670 <= 30) m.c782 = Constraint(expr=", "m.c824 = Constraint(expr= m.x185 - m.x548 - m.x551 == 0)", "m.c1071 = Constraint(expr= m.b653 - m.b655 <= 0) m.c1072 =", "+ m.b603 - m.b693 <= 0) m.c1291 = Constraint(expr= -", "+ 0.999*m.b618) <= 0) m.c259 = Constraint(expr=(m.x340/(0.001 + 0.999*m.b619) -", "= Constraint(expr= 7*m.b690 + m.x780 == 0) m.c928 = Constraint(expr=", "m.x147 - m.x150 + m.x153 == 0) m.c40 = Constraint(expr=", "== 0) m.c974 = Constraint(expr= 5*m.b737 + m.x827 == 0)", "0) m.c1454 = Constraint(expr= m.b626 - m.b653 >= 0) m.c1455", "0.9*m.x554 + m.x584 == 0) m.c870 = Constraint(expr= - 0.9*m.x555", "Constraint(expr= m.x547 == 0) m.c794 = Constraint(expr= m.x569 == 0)", "+ m.b677 >= 0) m.c1464 = Constraint(expr= - m.b666 +", "m.b655 >= 0) m.c1403 = Constraint(expr= m.b596 + m.b599 -", "+ 15*m.b626 <= 15) m.c357 = Constraint(expr= m.x312 + 15*m.b627", "m.c934 = Constraint(expr= 5*m.b697 + m.x787 == 0) m.c935 =", "Constraint(expr= m.b701 + m.b703 <= 1) m.c1138 = Constraint(expr= m.b702", "+ 0.999*m.b613) - 1.25*log(1 + m.x286/(0.001 + 0.999*m.b613)))*(0.001 + 0.999*", "Var(within=Reals,bounds=(0,None),initialize=0) m.x581 = Var(within=Reals,bounds=(0,None),initialize=0) m.x582 = Var(within=Reals,bounds=(0,None),initialize=0) m.x583 = Var(within=Reals,bounds=(0,None),initialize=0)", "== 0) m.c468 = Constraint(expr= m.x372 == 0) m.c469 =", "m.c1131 = Constraint(expr= m.b698 + m.b700 <= 1) m.c1132 =", "4*m.b713 - 7*m.b714 - 4*m.b715 - 3*m.b716 - 9*m.b717 -", "- m.x462 == 0) m.c598 = Constraint(expr= m.x133 - m.x460", "m.x561 - m.x564 == 0) m.c907 = Constraint(expr= m.x193 -", "Constraint(expr= m.b717 + m.b718 <= 1) m.c1167 = Constraint(expr= m.b716", "<= 1) m.c1198 = Constraint(expr= m.b732 + m.b733 <= 1)", "m.c1404 = Constraint(expr= m.b597 + m.b600 
- m.b603 >= 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x247 = Var(within=Reals,bounds=(0,None),initialize=0) m.x248 = Var(within=Reals,bounds=(0,None),initialize=0) m.x249 = Var(within=Reals,bounds=(0,None),initialize=0)", "<= 0) m.c1287 = Constraint(expr= - m.b599 + m.b600 -", "== 0) m.c905 = Constraint(expr= m.x191 - m.x560 - m.x563", "m.x580 - 0.690184503917672*m.b679 <= 0) m.c866 = Constraint(expr= m.x581 +", "<= 0) m.c125 = Constraint(expr= m.x239 + 4.45628648004517*m.b602 <= 4.45628648004517)", "+ 0.999*m.b654)))*(0.001 + 0.999* m.b654) <= 0) m.c613 = Constraint(expr=(m.x466/(0.001", "= Constraint(expr= - m.x375 + m.x417 == 0) m.c370 =", "m.b668 = Var(within=Binary,bounds=(0,1),initialize=0) m.b669 = Var(within=Binary,bounds=(0,1),initialize=0) m.b670 = Var(within=Binary,bounds=(0,1),initialize=0) m.b671", "Constraint(expr= m.x7 - m.x214 - m.x217 == 0) m.c65 =", "Constraint(expr= - m.b650 - m.b651 + m.b652 - m.b742 <=", "m.c1430 = Constraint(expr= m.b611 - m.b629 >= 0) m.c1431 =", "= Constraint(expr= m.x299 + 15*m.b620 <= 15) m.c303 = Constraint(expr=", "== 0) m.c878 = Constraint(expr= m.x188 - m.x554 - m.x557", "3*m.b722 - m.b723 - 9*m.b724 - 2*m.b725 - 6*m.b726 -", "0) m.c1314 = Constraint(expr= - m.b626 + m.b627 - m.b717", "Var(within=Reals,bounds=(0,None),initialize=0) m.x443 = Var(within=Reals,bounds=(0,None),initialize=0) m.x444 = Var(within=Reals,bounds=(0,None),initialize=0) m.x445 = Var(within=Reals,bounds=(0,None),initialize=0)", "1) m.c1141 = Constraint(expr= m.b704 + m.b705 <= 1) m.c1142", "m.b729 <= 1) m.c1188 = Constraint(expr= m.b728 + m.b730 <=", "0.705049913072943) m.c807 = Constraint(expr= m.x546 + 0.705049913072943*m.b672 <= 0.705049913072943) m.c808", "m.b627 - m.b654 >= 0) m.c1456 = Constraint(expr= m.b628 -", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b618 = Var(within=Binary,bounds=(0,1),initialize=0) m.b619 = Var(within=Binary,bounds=(0,1),initialize=0) m.b620 =", "- m.x214 - m.x217 == 0) m.c65 = Constraint(expr= 
m.x11", "Var(within=Binary,bounds=(0,1),initialize=0) m.b610 = Var(within=Binary,bounds=(0,1),initialize=0) m.b611 = Var(within=Binary,bounds=(0,1),initialize=0) m.b612 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.x265 = Var(within=Reals,bounds=(0,None),initialize=0) m.x266 = Var(within=Reals,bounds=(0,None),initialize=0) m.x267 = Var(within=Reals,bounds=(0,None),initialize=0) m.x268", "+ 0.999*m.b598) - log(1 + m.x214/(0.001 + 0.999*m.b598)))*(0.001 + 0.999*m.b598)", "Constraint(expr= m.x258 == 0) m.c172 = Constraint(expr= m.x259 == 0)", "<= 2.54515263975353) m.c277 = Constraint(expr= m.x277 + 2.54515263975353*m.b619 <= 2.54515263975353)", "m.x789 == 0) m.c937 = Constraint(expr= 6*m.b700 + m.x790 ==", "== 0) m.c164 = Constraint(expr= - 0.5*m.x254 + m.x278 ==", "m.c706 = Constraint(expr= m.x175 - m.x520 - m.x523 == 0)", "- m.x308 - m.x311 == 0) m.c348 = Constraint(expr= m.x60", "= Constraint(expr= m.x511 == 0) m.c755 = Constraint(expr= m.x515 ==", "Var(within=Binary,bounds=(0,1),initialize=0) m.b605 = Var(within=Binary,bounds=(0,1),initialize=0) m.b606 = Var(within=Binary,bounds=(0,1),initialize=0) m.b607 = Var(within=Binary,bounds=(0,1),initialize=0)", "Constraint(expr= m.x393 - 9*m.b645 <= 0) m.c547 = Constraint(expr= m.x394", "m.c1015 = Constraint(expr= m.b597 - m.b598 <= 0) m.c1016 =", "Constraint(expr= - 0.5*m.x254 + m.x278 == 0) m.c165 = Constraint(expr=", "m.x259 + 30*m.b610 <= 30) m.c197 = Constraint(expr= m.x278 -", "+ 0.999* m.b649) <= 0) m.c560 = Constraint(expr= m.x401 ==", "+ m.x440 == 0) m.c504 = Constraint(expr= - m.x387 +", "m.c1216 = Constraint(expr= m.b741 + m.b742 <= 1) m.c1217 =", "0) m.c1438 = Constraint(expr= m.b619 - m.b637 >= 0) m.c1439", "+ 280*m.x203 + 400*m.x204 + 430*m.x205 + 290*m.x206 + 300*m.x207", "+ m.b691 <= 1) m.c1111 = Constraint(expr= m.b689 + m.b690", "m.x236 - 4.45628648004517*m.b602 <= 0) m.c123 = Constraint(expr= m.x237 -", "1.32154609891348*m.b634 <= 0) m.c425 = Constraint(expr= m.x335 + 1.32154609891348*m.b632 <=", 
"m.x397 == 0) m.c536 = Constraint(expr= m.x449 == 0) m.c537", "= Constraint(expr= m.x108 - m.x411 - m.x414 == 0) m.c622", "m.b704 = Var(within=Binary,bounds=(0,1),initialize=0) m.b705 = Var(within=Binary,bounds=(0,1),initialize=0) m.b706 = Var(within=Binary,bounds=(0,1),initialize=0) m.b707", "m.x836 == 0) m.c984 = Constraint(expr= 5*m.b747 + m.x837 ==", "m.x226 - 3.71357206670431*m.b598 <= 0) m.c77 = Constraint(expr= m.x227 +", "0) m.c1393 = Constraint(expr= - m.b610 + m.b622 + m.b625", "m.x468 == 0) m.c625 = Constraint(expr= m.x136 - m.x466 -", "Constraint(expr= m.x337 + 1.32154609891348*m.b634 <= 1.32154609891348) m.c428 = Constraint(expr= m.x422", "m.x483 = Var(within=Reals,bounds=(0,None),initialize=0) m.x484 = Var(within=Reals,bounds=(0,None),initialize=0) m.x485 = Var(within=Reals,bounds=(0,None),initialize=0) m.x486", "- m.b625 <= 0) m.c1043 = Constraint(expr= m.b626 - m.b627", "571 111 804 0 0 0 0 # # Variable", "m.x58 - m.x61 == 0) m.c20 = Constraint(expr= m.x68 -", "3*m.b727 + m.x817 == 0) m.c965 = Constraint(expr= 4*m.b728 +", "Constraint(expr= m.b762 + m.b763 <= 1) m.c1257 = Constraint(expr= m.b761", "m.x92 = Var(within=Reals,bounds=(0,None),initialize=0) m.x93 = Var(within=Reals,bounds=(0,None),initialize=0) m.x94 = Var(within=Reals,bounds=(0,None),initialize=0) m.x95", "m.c1301 = Constraint(expr= m.b614 - m.b704 <= 0) m.c1302 =", "0) m.c1303 = Constraint(expr= - m.b614 - m.b615 + m.b616", "+ 0.999*m.b618)))*(0.001 + 0.999*m.b618) <= 0) m.c259 = Constraint(expr=(m.x340/(0.001 +", "m.x385 == 0) m.c473 = Constraint(expr= m.x437 == 0) m.c474", "- 3.04984759446376*m.b649 <= 0) m.c575 = Constraint(expr= m.x401 + 3.04984759446376*m.b647", "Var(within=Binary,bounds=(0,1),initialize=0) m.b621 = Var(within=Binary,bounds=(0,1),initialize=0) m.b622 = Var(within=Binary,bounds=(0,1),initialize=0) m.b623 = Var(within=Binary,bounds=(0,1),initialize=0)", "= Constraint(expr= m.x347 + 13.5*m.b620 <= 13.5) m.c309 = Constraint(expr=", "m.x322 == 0) m.c212 = 
Constraint(expr= m.x47 - m.x284 -", "m.c946 = Constraint(expr= 7*m.b709 + m.x799 == 0) m.c947 =", "m.c24 = Constraint(expr= - m.x72 - m.x90 + m.x93 ==", "Constraint(expr= m.x549 - 0.705049913072943*m.b675 <= 0) m.c832 = Constraint(expr= m.x550", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x334 = Var(within=Reals,bounds=(0,None),initialize=0) m.x335 = Var(within=Reals,bounds=(0,None),initialize=0) m.x336 =", "m.c592 = Constraint(expr= m.x463 == 0) m.c593 = Constraint(expr= m.x104", "= Constraint(expr= m.x197 - m.x566 - m.x569 == 0) m.c801", "m.b663 - m.b664 <= 0) m.c1082 = Constraint(expr= m.b665 -", "804 0 0 0 0 # # Variable counts #", "= Constraint(expr= 10*m.b758 + m.x848 == 0) m.c996 = Constraint(expr=", "1) m.c1260 = Constraint(expr= m.b764 + m.b766 <= 1) m.c1261", "m.x298 = Var(within=Reals,bounds=(0,None),initialize=0) m.x299 = Var(within=Reals,bounds=(0,None),initialize=0) m.x300 = Var(within=Reals,bounds=(0,None),initialize=0) m.x301", "+ m.b709 <= 1) m.c1149 = Constraint(expr= m.b707 + m.b709", "- m.x511 == 0) m.c764 = Constraint(expr= m.x170 - m.x512", "Constraint(expr= m.b734 + m.b736 <= 1) m.c1204 = Constraint(expr= m.b735", "290*m.x200 + 405*m.x201 + 190*m.x202 + 280*m.x203 + 400*m.x204 +", "<= 15) m.c917 = Constraint(expr= m.x590 - 9*m.b683 <= 0)", "<= 0) m.c340 = Constraint(expr=(m.x358/(0.001 + 0.999*m.b628) - 1.1*log(1 +", "0) m.c1459 = Constraint(expr= - m.b655 + m.b658 + m.b661", "m.x583 + 0.690184503917672*m.b679 <= 0.690184503917672) m.c869 = Constraint(expr= - 0.9*m.x554", "m.b742 <= 1) m.c1213 = Constraint(expr= m.b740 + m.b741 <=", "Constraint(expr= 4*m.b703 + m.x793 == 0) m.c941 = Constraint(expr= 4*m.b704", "Var(within=Reals,bounds=(0,None),initialize=0) m.x124 = Var(within=Reals,bounds=(0,None),initialize=0) m.x125 = Var(within=Reals,bounds=(0,None),initialize=0) m.x126 = Var(within=Reals,bounds=(0,None),initialize=0)", "4*m.b728 - 8*m.b729 - m.b730 - 2*m.b731 - 5*m.b732 -", "m.c246 = Constraint(expr= m.x291 - 
3.34221486003388*m.b615 <= 0) m.c247 =", "- 9*m.b693 - 4*m.b694 - 10*m.b695 - 9*m.b696 - 5*m.b697", "m.c99 = Constraint(expr= m.x222 + 40*m.b600 <= 40) m.c100 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x125 = Var(within=Reals,bounds=(0,None),initialize=0) m.x126 = Var(within=Reals,bounds=(0,None),initialize=0) m.x127 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x492 = Var(within=Reals,bounds=(0,None),initialize=0) m.x493 = Var(within=Reals,bounds=(0,None),initialize=0) m.x494 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= m.x324 + 1.83548069293539*m.b630 <= 1.83548069293539) m.c394 = Constraint(expr=", "m.b631 = Var(within=Binary,bounds=(0,1),initialize=0) m.b632 = Var(within=Binary,bounds=(0,1),initialize=0) m.b633 = Var(within=Binary,bounds=(0,1),initialize=0) m.b634", "+ 0.999*m.b657)))*(0.001 + 0.999*m.b657) <= 0) m.c640 = Constraint(expr=(m.x484/(0.001 +", "<= 0) m.c1051 = Constraint(expr= m.b633 - m.b634 <= 0)", "0) m.c38 = Constraint(expr= - m.x146 - m.x149 + m.x152", "0) m.c913 = Constraint(expr= m.x562 - 15*m.b685 <= 0) m.c914", "1) m.c1234 = Constraint(expr= m.b750 + m.b751 <= 1) m.c1235", "Constraint(expr= m.x280 - 15*m.b610 <= 0) m.c200 = Constraint(expr= m.x281", "m.x502 = Var(within=Reals,bounds=(0,None),initialize=0) m.x503 = Var(within=Reals,bounds=(0,None),initialize=0) m.x504 = Var(within=Reals,bounds=(0,None),initialize=0) m.x505", "+ m.x218/(0.001 + 0.999*m.b599)))*(0.001 + 0.999* m.b599) <= 0) m.c81", "Constraint(expr= m.x158 - m.x161 - m.x164 - m.x167 == 0)", "- m.x425 == 0) m.c420 = Constraint(expr= m.x114 - m.x423", "+ 80*m.x194 + 90*m.x195 + 120*m.x196 + 285*m.x197 + 390*m.x198", "Constraint(expr= m.b609 - m.b627 >= 0) m.c1429 = Constraint(expr= m.b610", "Constraint(expr= m.x145 - m.x478 - m.x481 == 0) m.c677 =", "m.x3 - m.x4 + 5*m.x20 + 10*m.x21 + 5*m.x22 -", ">= 0) m.c1422 = Constraint(expr= m.b609 - m.b621 >= 0)", "<= 0) m.c1322 = Constraint(expr= m.b635 - m.b725 <= 0)", "m.c77 = 
Constraint(expr= m.x227 + 3.71357206670431*m.b596 <= 3.71357206670431) m.c78 =", "0.480234946352917) m.c840 = Constraint(expr= m.x576 + 0.480234946352917*m.b675 <= 0.480234946352917) m.c841", "Constraint(expr= m.b632 - m.b722 <= 0) m.c1320 = Constraint(expr= -", "m.b622 - m.b712 <= 0) m.c1310 = Constraint(expr= m.b623 -", "<= 0) m.c392 = Constraint(expr= m.x323 + 1.83548069293539*m.b629 <= 1.83548069293539)", "+ 0.999* m.b673) <= 0) m.c791 = Constraint(expr= m.x545 ==", "+ m.x824 == 0) m.c972 = Constraint(expr= 4*m.b735 + m.x825", "0 0 0 0 # # Nonzero counts # Total", "= Constraint(expr= m.b699 + m.b700 <= 1) m.c1131 = Constraint(expr=", "= Constraint(expr= m.b597 + m.b600 - m.b609 >= 0) m.c1411", "0) m.c1293 = Constraint(expr= - m.b605 + m.b606 - m.b696", "<= 0) m.c1369 = Constraint(expr= - m.b680 - m.b681 +", "+ m.x833 == 0) m.c981 = Constraint(expr= 4*m.b744 + m.x834", "m.c1296 = Constraint(expr= - m.b608 + m.b609 - m.b699 <=", "B # 1486 571 111 804 0 0 0 0", "- m.x403 == 0) m.c569 = Constraint(expr= m.x128 - m.x452", ">= 0) m.c1421 = Constraint(expr= m.b608 - m.b620 >= 0)", "= Constraint(expr= m.b614 - m.b632 >= 0) m.c1434 = Constraint(expr=", "0.940066550763924) m.c692 = Constraint(expr= - 0.75*m.x494 + m.x518 == 0)", "m.x264 + 3.34221486003388*m.b603 <= 3.34221486003388) m.c133 = Constraint(expr= m.x265 +", "1) m.c1104 = Constraint(expr= m.b686 + m.b688 <= 1) m.c1105", "m.b757 <= 1) m.c1246 = Constraint(expr= m.b756 + m.b757 <=", "m.c174 = Constraint(expr= m.x282 == 0) m.c175 = Constraint(expr= m.x283", "= Constraint(expr= 4*m.b742 + m.x832 == 0) m.c980 = Constraint(expr=", "Constraint(expr= 2*m.b748 + m.x838 == 0) m.c986 = Constraint(expr= 9*m.b749", "0.940066550763924*m.b665 <= 0) m.c735 = Constraint(expr= m.x501 - 0.940066550763924*m.b666 <=", "m.c1205 = Constraint(expr= m.b737 + m.b738 <= 1) m.c1206 =", "0) m.c1468 = Constraint(expr= m.b655 - m.b658 >= 0) m.c1469", "Constraint(expr= m.x472 - 1.18887736200171*m.b658 <= 0) m.c656 = Constraint(expr= m.x473", 
"Constraint(expr= m.x353 == 0) m.c318 = Constraint(expr= m.x354 == 0)", "m.b690 = Var(within=Binary,bounds=(0,1),initialize=0) m.b691 = Var(within=Binary,bounds=(0,1),initialize=0) m.b692 = Var(within=Binary,bounds=(0,1),initialize=0) m.b693", "m.c329 = Constraint(expr= m.x305 + 15*m.b623 <= 15) m.c330 =", "m.b723 + m.b724 <= 1) m.c1181 = Constraint(expr= m.b725 +", "- m.b753 <= 0) m.c1351 = Constraint(expr= - m.b662 -", "= Constraint(expr=(m.x358/(0.001 + 0.999*m.b628) - 1.1*log(1 + m.x310/(0.001 + 0.999*m.b628)))*(0.001", "- 1.18887736200171*m.b659 <= 0) m.c681 = Constraint(expr= m.x477 - 1.18887736200171*m.b660", "+ m.x220/(0.001 + 0.999*m.b601)))*(0.001 + 0.999* m.b601) <= 0) m.c83", "7*m.b763 + m.x853 == 0) m.c1001 = Constraint(expr= 7*m.b764 +", "0) m.c211 = Constraint(expr= m.x322 == 0) m.c212 = Constraint(expr=", "= Constraint(expr= m.x451 == 0) m.c539 = Constraint(expr= m.x98 -", "- m.x522 == 0) m.c706 = Constraint(expr= m.x175 - m.x520", "m.b717 <= 1) m.c1164 = Constraint(expr= m.b716 + m.b718 <=", "m.x312 == 0) m.c343 = Constraint(expr= m.x313 == 0) m.c344", "m.c536 = Constraint(expr= m.x449 == 0) m.c537 = Constraint(expr= m.x450", "<= 1) m.c1116 = Constraint(expr= m.b692 + m.b694 <= 1)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x116 = Var(within=Reals,bounds=(0,None),initialize=0) m.x117 = Var(within=Reals,bounds=(0,None),initialize=0) m.x118 =", "m.c65 = Constraint(expr= m.x11 - m.x224 - m.x227 == 0)", "Constraint(expr= m.x416 - 20*m.b629 <= 0) m.c402 = Constraint(expr= m.x417", "+ 0.999*m.b638)))*(0.001 + 0.999* m.b638) <= 0) m.c465 = Constraint(expr=(m.x435/(0.001", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x521 = Var(within=Reals,bounds=(0,None),initialize=0) m.x522 = Var(within=Reals,bounds=(0,None),initialize=0) m.x523 =", "- m.x494 - m.x497 == 0) m.c702 = Constraint(expr= m.x162", "- m.b602 + m.b603 - m.b693 <= 0) m.c1291 =", "m.x431 = Var(within=Reals,bounds=(0,None),initialize=0) m.x432 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x433 = Var(within=Reals,bounds=(0,None),initialize=0) m.x434", "3*m.b716 - 9*m.b717 - 3*m.b718 - 7*m.b719 - 2*m.b720 -", "9*m.b641 <= 0) m.c525 = Constraint(expr= m.x441 - 9*m.b642 <=", "- m.x552 == 0) m.c826 = Constraint(expr= m.x187 - m.x550", "m.x240 == 0) m.c112 = Constraint(expr= m.x241 == 0) m.c113", "m.b725 = Var(within=Binary,bounds=(0,1),initialize=0) m.b726 = Var(within=Binary,bounds=(0,1),initialize=0) m.b727 = Var(within=Binary,bounds=(0,1),initialize=0) m.b728", "N X C B # 1486 571 111 804 0", "m.x399 = Var(within=Reals,bounds=(0,None),initialize=0) m.x400 = Var(within=Reals,bounds=(0,None),initialize=0) m.x401 = Var(within=Reals,bounds=(0,None),initialize=0) m.x402", "0.480234946352917*m.b676 <= 0) m.c839 = Constraint(expr= m.x575 + 0.480234946352917*m.b674 <=", "m.x590 == 0) m.c897 = Constraint(expr= - 0.6*m.x561 + m.x591", "Constraint(expr= m.b707 + m.b708 <= 1) m.c1148 = Constraint(expr= m.b708", "m.x520 == 0) m.c695 = Constraint(expr= m.x497 == 0) m.c696", "+ m.x782 == 0) m.c930 = Constraint(expr= 9*m.b693 + m.x783", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x335 = Var(within=Reals,bounds=(0,None),initialize=0) m.x336 = Var(within=Reals,bounds=(0,None),initialize=0) m.x337 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x405 = Var(within=Reals,bounds=(0,None),initialize=0) m.x406 = Var(within=Reals,bounds=(0,None),initialize=0) m.x407 = Var(within=Reals,bounds=(0,None),initialize=0)", "- m.x100 == 0) m.c29 = Constraint(expr= m.x77 - m.x101", "= Constraint(expr= m.x348 + 13.5*m.b621 <= 13.5) m.c310 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x474 = Var(within=Reals,bounds=(0,None),initialize=0) m.x475 = Var(within=Reals,bounds=(0,None),initialize=0) m.x476 =", "0) m.c143 = Constraint(expr= m.x29 - m.x242 - m.x245 ==", "m.x481 = Var(within=Reals,bounds=(0,None),initialize=0) m.x482 = Var(within=Reals,bounds=(0,None),initialize=0) m.x483 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x484", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x228 = Var(within=Reals,bounds=(0,None),initialize=0) m.x229 = Var(within=Reals,bounds=(0,None),initialize=0) m.x230 =", "- m.x27 - m.x30 - m.x33 == 0) m.c13 =", "m.c735 = Constraint(expr= m.x501 - 0.940066550763924*m.b666 <= 0) m.c736 =", "m.x496 - m.x499 == 0) m.c704 = Constraint(expr= m.x173 -", "1) m.c1191 = Constraint(expr= m.b728 + m.b730 <= 1) m.c1192", "- m.x171 - m.x172 + 80*m.x194 + 90*m.x195 + 120*m.x196", "Constraint(expr= - m.x11 - m.x14 + m.x17 == 0) m.c6", "- m.x361 == 0) m.c353 = Constraint(expr= m.x308 - 15*m.b626", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x110 = Var(within=Reals,bounds=(0,None),initialize=0) m.x111 = Var(within=Reals,bounds=(0,None),initialize=0) m.x112 =", "<= 1) m.c1172 = Constraint(expr= m.b720 + m.b721 <= 1)", "m.c1314 = Constraint(expr= - m.b626 + m.b627 - m.b717 <=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x42 = Var(within=Reals,bounds=(0,None),initialize=0) m.x43 = Var(within=Reals,bounds=(0,None),initialize=0) m.x44 = Var(within=Reals,bounds=(0,None),initialize=0)", "0.999*m.b666)))*(0.001 + 0.999* m.b666) <= 0) m.c721 = Constraint(expr=(m.x526/(0.001 +", "== 0) m.c767 = Constraint(expr= m.x179 - m.x536 - m.x539", "Constraint(expr= m.x427 + 0.842233385663186*m.b634 <= 0.842233385663186) m.c434 = Constraint(expr=(m.x428/(0.001 +", "0.705049913072943*m.b674 <= 0.705049913072943) m.c834 = Constraint(expr= m.x552 + 0.705049913072943*m.b675 <=", "2*m.b712 - 4*m.b713 - 7*m.b714 - 4*m.b715 - 3*m.b716 -", "+ 4.45628648004517*m.b601 <= 4.45628648004517) m.c107 = Constraint(expr= - 0.75*m.x236 +", "<= 0) m.c227 = Constraint(expr= m.x320 + 1.83548069293539*m.b611 <= 1.83548069293539)", "- m.x15 + m.x18 == 0) m.c7 = Constraint(expr= -", "0) m.c286 = Constraint(expr= - 0.9*m.x298 + m.x346 == 0)", ">= 0) m.c1380 = Constraint(expr= - m.b612 + m.b630 >=", "0) m.c113 = Constraint(expr= m.x263 == 0) m.c114 = 
Constraint(expr=", "0) m.c335 = Constraint(expr= m.x353 + 9*m.b623 <= 9) m.c336", "m.x101 = Var(within=Reals,bounds=(0,None),initialize=0) m.x102 = Var(within=Reals,bounds=(0,None),initialize=0) m.x103 = Var(within=Reals,bounds=(0,None),initialize=0) m.x104", "= Constraint(expr= m.x446 - 9*m.b644 <= 0) m.c552 = Constraint(expr=", "== 0) m.c216 = Constraint(expr= m.x63 - m.x315 - m.x321", "m.x287 == 0) m.c213 = Constraint(expr= m.x48 - m.x285 -", "m.b643 + m.b646 >= 0) m.c1400 = Constraint(expr= - m.b626", "= Constraint(expr= m.x11 - m.x224 - m.x227 == 0) m.c66", "- m.x292 - m.x295 == 0) m.c242 = Constraint(expr= m.x65", "m.x371 == 0) m.c468 = Constraint(expr= m.x372 == 0) m.c469", "m.x171 - m.x172 + 80*m.x194 + 90*m.x195 + 120*m.x196 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x206 = Var(within=Reals,bounds=(0,None),initialize=0) m.x207 = Var(within=Reals,bounds=(0,None),initialize=0) m.x208 =", "m.x441 - m.x444 == 0) m.c517 = Constraint(expr= m.x124 -", "m.x190 - m.x556 - m.x559 == 0) m.c881 = Constraint(expr=", "m.x253 == 0) m.c170 = Constraint(expr= m.x257 == 0) m.c171", "log(1 + m.x330/(0.001 + 0.999*m.b633)))*(0.001 + 0.999*m.b633) <= 0) m.c409", "= Constraint(expr= m.x83 - m.x368 - m.x371 == 0) m.c477", "= Constraint(expr= m.b618 - m.b639 >= 0) m.c1441 = Constraint(expr=", "<= 0) m.c225 = Constraint(expr= m.x315 - 1.83548069293539*m.b612 <= 0)", "m.x244 - 4.45628648004517*m.b607 <= 0) m.c152 = Constraint(expr= m.x245 +", "m.x291 - m.x294 == 0) m.c241 = Constraint(expr= m.x52 -", "Constraint(expr= m.x174 - m.x183 - m.x186 == 0) m.c49 =", "m.x353 + 9*m.b623 <= 9) m.c336 = Constraint(expr= m.x354 +", "+ 0.999* m.b615) <= 0) m.c232 = Constraint(expr=(m.x328/(0.001 + 0.999*m.b616)", "m.c992 = Constraint(expr= 2*m.b755 + m.x845 == 0) m.c993 =", "== 0) m.c1006 = Constraint(expr= 6*m.b769 + m.x859 == 0)", "m.b641 - m.b731 <= 0) m.c1329 = Constraint(expr= - m.b641", "<= 0) m.c429 = Constraint(expr= m.x423 - 0.842233385663186*m.b633 <= 0)", "0) m.c965 = 
Constraint(expr= 4*m.b728 + m.x818 == 0) m.c966", "<= 0) m.c838 = Constraint(expr= m.x574 - 0.480234946352917*m.b676 <= 0)", "m.x311 == 0) m.c348 = Constraint(expr= m.x60 - m.x309 -", "Constraint(expr= m.x531 + 0.994083415506506*m.b666 <= 0.994083415506506) m.c745 = Constraint(expr= m.x532", ">= 0) m.c1432 = Constraint(expr= m.b613 - m.b631 >= 0)", "m.c1163 = Constraint(expr= m.b716 + m.b717 <= 1) m.c1164 =", "0) m.c982 = Constraint(expr= m.b745 + m.x835 == 0) m.c983", "m.x24 - m.x27 - m.x30 - m.x33 == 0) m.c13", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b672 = Var(within=Binary,bounds=(0,1),initialize=0) m.b673 = Var(within=Binary,bounds=(0,1),initialize=0) m.b674 =", "3*m.b760 + m.x850 == 0) m.c998 = Constraint(expr= 4*m.b761 +", "m.c72 = Constraint(expr= m.x216 + 40*m.b597 <= 40) m.c73 =", "m.x218 - m.x221 == 0) m.c90 = Constraint(expr= m.x9 -", "<= 0) m.c151 = Constraint(expr= m.x244 - 4.45628648004517*m.b607 <= 0)", "<= 1) m.c1139 = Constraint(expr= m.b704 + m.b705 <= 1)", "m.x168 - m.x507 - m.x510 == 0) m.c763 = Constraint(expr=", "m.c1168 = Constraint(expr= m.b717 + m.b718 <= 1) m.c1169 =", "Constraint(expr= - m.b662 + m.b671 + m.b674 >= 0) m.c1461", "+ 1.26558121681553*m.b638 <= 1.26558121681553) m.c489 = Constraint(expr= m.x372 + 1.26558121681553*m.b639", "<= 0) m.c1368 = Constraint(expr= - m.b680 + m.b681 -", "- 0.8*log(1 + m.x404/(0.001 + 0.999*m.b650)))*(0.001 + 0.999* m.b650) <=", ">= 0) m.c1479 = Constraint(expr= m.b666 - m.b678 >= 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x243 = Var(within=Reals,bounds=(0,None),initialize=0) m.x244 = Var(within=Reals,bounds=(0,None),initialize=0) m.x245 =", "Constraint(expr= m.b608 - m.b610 <= 0) m.c1027 = Constraint(expr= m.b609", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b640 = Var(within=Binary,bounds=(0,1),initialize=0) m.b641 = Var(within=Binary,bounds=(0,1),initialize=0) m.b642 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x78 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x79 = Var(within=Reals,bounds=(0,None),initialize=0) m.x80 =", "- m.x447 - m.x450 == 0) m.c544 = Constraint(expr= m.x127", "1) m.c1124 = Constraint(expr= m.b696 + m.b697 <= 1) m.c1125", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x570 = Var(within=Reals,bounds=(0,None),initialize=0) m.x571 = Var(within=Reals,bounds=(0,None),initialize=0) m.x572 =", "<= 1.26558121681553) m.c282 = Constraint(expr= m.x342 + 1.26558121681553*m.b618 <= 1.26558121681553)", "= Constraint(expr= m.x439 + 2.30162356062425*m.b640 <= 2.30162356062425) m.c503 = Constraint(expr=", "- m.b662 - m.b663 + m.b664 - m.b754 <= 0)", "- m.x549 - m.x552 == 0) m.c826 = Constraint(expr= m.x187", "<= 1) m.c1142 = Constraint(expr= m.b705 + m.b706 <= 1)", "Constraint(expr= m.b628 - m.b649 >= 0) m.c1451 = Constraint(expr= m.b626", "= Constraint(expr= 3*m.b736 + m.x826 == 0) m.c974 = Constraint(expr=", "+ 20*m.x124 + 20*m.x125 + 30*m.x126 + 35*m.x127 + 25*m.x128", "+ 0.999* m.b671) <= 0) m.c789 = Constraint(expr=(m.x567/(0.001 + 0.999*m.b672)", "m.c839 = Constraint(expr= m.x575 + 0.480234946352917*m.b674 <= 0.480234946352917) m.c840 =", "<= 1) m.c1208 = Constraint(expr= m.b738 + m.b739 <= 1)", "<= 1.18887736200171) m.c684 = Constraint(expr= m.x480 + 1.18887736200171*m.b660 <= 1.18887736200171)", "m.c1343 = Constraint(expr= m.b656 - m.b746 <= 0) m.c1344 =", "m.c281 = Constraint(expr= m.x341 + 1.26558121681553*m.b617 <= 1.26558121681553) m.c282 =", "- m.b608 + m.b609 - m.b699 <= 0) m.c1297 =", "m.x515 + 30*m.b668 <= 30) m.c780 = Constraint(expr= m.x516 +", "m.b717 <= 0) m.c1315 = Constraint(expr= - m.b626 - m.b627", "<= 0) m.c410 = Constraint(expr= m.x335 == 0) m.c411 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x263 = Var(within=Reals,bounds=(0,None),initialize=0) m.x264 = Var(within=Reals,bounds=(0,None),initialize=0) m.x265 = Var(within=Reals,bounds=(0,None),initialize=0)", "+ m.b766 <= 1) m.c1263 = Constraint(expr= m.b764 + m.b766", "= 
Constraint(expr= m.x276 + 2.54515263975353*m.b618 <= 2.54515263975353) m.c277 = Constraint(expr=", "<= 0) m.c54 = Constraint(expr=(m.x225/(0.001 + 0.999*m.b597) - log(1 +", "3.04984759446376*m.b654 <= 0) m.c628 = Constraint(expr= m.x412 - 3.04984759446376*m.b655 <=", "- 0.7*log(1 + m.x364/(0.001 + 0.999*m.b637)))*(0.001 + 0.999* m.b637) <=", "m.x274 + 2.54515263975353*m.b607 <= 2.54515263975353) m.c161 = Constraint(expr= - m.x248", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b694 = Var(within=Binary,bounds=(0,1),initialize=0) m.b695 = Var(within=Binary,bounds=(0,1),initialize=0) m.b696 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x437 = Var(within=Reals,bounds=(0,None),initialize=0) m.x438 = Var(within=Reals,bounds=(0,None),initialize=0) m.x439 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b621 + m.b624 + m.b627 >= 0) m.c1393 = Constraint(expr=", "Constraint(expr= m.x299 + 15*m.b620 <= 15) m.c303 = Constraint(expr= m.x300", "0) m.c945 = Constraint(expr= 6*m.b708 + m.x798 == 0) m.c946", "m.b676 = Var(within=Binary,bounds=(0,1),initialize=0) m.b677 = Var(within=Binary,bounds=(0,1),initialize=0) m.b678 = Var(within=Binary,bounds=(0,1),initialize=0) m.b679", "<= 4.45628648004517) m.c107 = Constraint(expr= - 0.75*m.x236 + m.x260 ==", "Constraint(expr= m.x341 + 1.26558121681553*m.b617 <= 1.26558121681553) m.c282 = Constraint(expr= m.x342", "0) m.c1364 = Constraint(expr= m.b677 - m.b767 <= 0) m.c1365", "m.x426 + 0.842233385663186*m.b633 <= 0.842233385663186) m.c433 = Constraint(expr= m.x427 +", "= Constraint(expr= m.b615 - m.b616 <= 0) m.c1034 = Constraint(expr=", "3.04984759446376) m.c604 = Constraint(expr= m.x409 + 3.04984759446376*m.b652 <= 3.04984759446376) m.c605", "= Constraint(expr= - 0.5*m.x513 + m.x537 == 0) m.c751 =", "m.x829 = Var(within=Reals,bounds=(None,None),initialize=0) m.x830 = Var(within=Reals,bounds=(None,None),initialize=0) m.x831 = Var(within=Reals,bounds=(None,None),initialize=0) m.x832", "<= 0) m.c1041 = Constraint(expr= 
m.b623 - m.b625 <= 0)", "m.b760 <= 1) m.c1253 = Constraint(expr= m.b761 + m.b762 <=", "m.b758 + m.b759 <= 1) m.c1248 = Constraint(expr= m.b758 +", "0) m.c244 = Constraint(expr= m.x67 - m.x328 - m.x334 ==", "Constraint(expr= m.x3 - m.x6 - m.x9 == 0) m.c4 =", "<= 0) m.c1067 = Constraint(expr= m.b650 - m.b651 <= 0)", "= Constraint(expr= m.x62 - m.x314 - m.x320 == 0) m.c216", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x15 = Var(within=Reals,bounds=(0,None),initialize=0) m.x16 = Var(within=Reals,bounds=(0,None),initialize=0) m.x17 =", "m.c587 = Constraint(expr= m.x407 == 0) m.c588 = Constraint(expr= m.x408", "- m.x253 == 0) m.c179 = Constraint(expr= m.x35 - m.x254", "= Constraint(expr= m.x305 + 15*m.b623 <= 15) m.c330 = Constraint(expr=", "m.c1063 = Constraint(expr= m.b645 - m.b646 <= 0) m.c1064 =", "<= 0) m.c487 = Constraint(expr= m.x370 - 1.26558121681553*m.b640 <= 0)", "m.x456 == 0) m.c571 = Constraint(expr= m.x130 - m.x454 -", "+ 0.999*m.b600)))*(0.001 + 0.999* m.b600) <= 0) m.c82 = Constraint(expr=(m.x232/(0.001", "m.b740 + m.b741 <= 1) m.c1212 = Constraint(expr= m.b740 +", "- 3.34221486003388*m.b613 <= 0) m.c221 = Constraint(expr= m.x287 + 3.34221486003388*m.b611", "0.5*m.x512 + m.x536 == 0) m.c750 = Constraint(expr= - 0.5*m.x513", "0.999*m.b676)))*(0.001 + 0.999* m.b676) <= 0) m.c818 = Constraint(expr= m.x551", "m.c52 = Constraint(expr= m.x181 - m.x190 - m.x193 - m.x196", "m.x583 = Var(within=Reals,bounds=(0,None),initialize=0) m.x584 = Var(within=Reals,bounds=(0,None),initialize=0) m.x585 = Var(within=Reals,bounds=(0,None),initialize=0) m.x586", "Constraint(expr= m.x29 - m.x242 - m.x245 == 0) m.c144 =", "<= 1) m.c1234 = Constraint(expr= m.b750 + m.b751 <= 1)", "m.b670 <= 0) m.c1088 = Constraint(expr= m.b671 - m.b672 <=", "log(1 + m.x270/(0.001 + 0.999*m.b618)))*(0.001 + 0.999*m.b618) <= 0) m.c259", "m.x573 = Var(within=Reals,bounds=(0,None),initialize=0) m.x574 = Var(within=Reals,bounds=(0,None),initialize=0) m.x575 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x576", "== 0) m.c347 = Constraint(expr= m.x59 - m.x308 - m.x311", "m.x323 == 0) m.c372 = Constraint(expr= m.x324 == 0) m.c373", "0.65*log(1 + m.x381/(0.001 + 0.999*m.b639)))*(0.001 + 0.999* m.b639) <= 0)", "== 0) m.c705 = Constraint(expr= m.x174 - m.x519 - m.x522", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b615 = Var(within=Binary,bounds=(0,1),initialize=0) m.b616 = Var(within=Binary,bounds=(0,1),initialize=0) m.b617 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x157 = Var(within=Reals,bounds=(0,None),initialize=0) m.x158 = Var(within=Reals,bounds=(0,None),initialize=0) m.x159 =", "m.x546 = Var(within=Reals,bounds=(0,None),initialize=0) m.x547 = Var(within=Reals,bounds=(0,None),initialize=0) m.x548 = Var(within=Reals,bounds=(0,None),initialize=0) m.x549", "0) m.c37 = Constraint(expr= m.x139 - m.x142 - m.x145 ==", "= Constraint(expr= 4*m.b761 + m.x851 == 0) m.c999 = Constraint(expr=", "+ 9*m.b643 <= 9) m.c524 = Constraint(expr= m.x440 - 9*m.b641", "m.c765 = Constraint(expr= m.x171 - m.x513 - m.x516 == 0)", "0) m.c1463 = Constraint(expr= - m.b665 + m.b677 >= 0)", "Constraint(expr= m.x40 - m.x262 - m.x265 == 0) m.c122 =", "<= 1) m.c1219 = Constraint(expr= m.b743 + m.b744 <= 1)", "= Constraint(expr= - m.b611 + m.b629 >= 0) m.c1380 =", "+ m.b696 <= 1) m.c1122 = Constraint(expr= m.b695 + m.b697", "5*m.b711 - 2*m.b712 - 4*m.b713 - 7*m.b714 - 4*m.b715 -", "<= 0) m.c308 = Constraint(expr= m.x347 + 13.5*m.b620 <= 13.5)", "- m.x239 == 0) m.c117 = Constraint(expr= m.x27 - m.x237", "<= 1) m.c1215 = Constraint(expr= m.b740 + m.b742 <= 1)", "- m.b635 - m.b636 + m.b637 - m.b727 <= 0)", "m.c151 = Constraint(expr= m.x244 - 4.45628648004517*m.b607 <= 0) m.c152 =", "m.x450 == 0) m.c538 = Constraint(expr= m.x451 == 0) m.c539", "+ m.x154 == 0) m.c41 = Constraint(expr= m.x152 - m.x155", "m.x865 == 0) m.c1013 = Constraint(expr= m.b596 - m.b597 <=", "m.b634 - m.b724 <= 0) m.c1322 = Constraint(expr= m.b635 -", "1) m.c1256 = 
Constraint(expr= m.b762 + m.b763 <= 1) m.c1257", "m.b619 - m.b640 >= 0) m.c1442 = Constraint(expr= m.b623 -", "<= 0) m.c742 = Constraint(expr= m.x526 - 0.994083415506506*m.b667 <= 0)", "= Constraint(expr= m.x512 - 30*m.b668 <= 0) m.c777 = Constraint(expr=", "m.b635) <= 0) m.c435 = Constraint(expr=(m.x429/(0.001 + 0.999*m.b636) - 0.7*log(1", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x499 = Var(within=Reals,bounds=(0,None),initialize=0) m.x500 = Var(within=Reals,bounds=(0,None),initialize=0) m.x501 =", "0.999*m.b597) - log(1 + m.x213/(0.001 + 0.999*m.b597)))*(0.001 + 0.999*m.b597) <=", "<= 0) m.c462 = Constraint(expr=(m.x435/(0.001 + 0.999*m.b639) - 0.65*log(1 +", "+ 15*m.b628 <= 15) m.c359 = Constraint(expr= m.x356 - 3.04984759446376*m.b626", "m.b745 <= 1) m.c1221 = Constraint(expr= m.b743 + m.b745 <=", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b661 = Var(within=Binary,bounds=(0,1),initialize=0) m.b662 = Var(within=Binary,bounds=(0,1),initialize=0) m.b663 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x204 = Var(within=Reals,bounds=(0,None),initialize=0) m.x205 = Var(within=Reals,bounds=(0,None),initialize=0) m.x206 =", "<= 3.71357206670431) m.c79 = Constraint(expr= m.x229 + 3.71357206670431*m.b598 <= 3.71357206670431)", "Constraint(expr= m.b599 - m.b601 <= 0) m.c1018 = Constraint(expr= m.b600", "m.x144 - m.x477 - m.x480 == 0) m.c676 = Constraint(expr=", "m.c500 = Constraint(expr= m.x437 + 2.30162356062425*m.b638 <= 2.30162356062425) m.c501 =", "= Constraint(expr= m.x584 - 13.5*m.b680 <= 0) m.c891 = Constraint(expr=", "m.b618 + m.b636 + m.b639 >= 0) m.c1390 = Constraint(expr=", "0) m.c1385 = Constraint(expr= - m.b605 + m.b617 >= 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x22 = Var(within=Reals,bounds=(0,None),initialize=0) m.x23 = Var(within=Reals,bounds=(0,None),initialize=0) m.x24 =", "0) m.c715 = Constraint(expr= m.x520 - 0.705049913072943*m.b664 <= 0) m.c716", "+ m.b675 >= 0) m.c1462 = Constraint(expr= - m.b664 +", 
"0.9*m.x556 + m.x586 == 0) m.c872 = Constraint(expr= m.x557 ==", "= Constraint(expr= m.x498 + 0.940066550763924*m.b663 <= 0.940066550763924) m.c712 = Constraint(expr=", "0) m.c1096 = Constraint(expr= m.b678 - m.b679 <= 0) m.c1097", "- m.b680 - m.b681 + m.b682 - m.b772 <= 0)", "m.c245 = Constraint(expr= m.x290 - 3.34221486003388*m.b614 <= 0) m.c246 =", "<= 0.940066550763924) m.c738 = Constraint(expr= m.x504 + 0.940066550763924*m.b666 <= 0.940066550763924)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x348 = Var(within=Reals,bounds=(0,None),initialize=0) m.x349 = Var(within=Reals,bounds=(0,None),initialize=0) m.x350 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x216 = Var(within=Reals,bounds=(0,None),initialize=0) m.x217 = Var(within=Reals,bounds=(0,None),initialize=0) m.x218 =", "= Constraint(expr= m.x589 + 13.5*m.b682 <= 13.5) m.c896 = Constraint(expr=", "== 0) m.c591 = Constraint(expr= m.x462 == 0) m.c592 =", "+ 0.666992981045719*m.b671 <= 0.666992981045719) m.c813 = Constraint(expr= m.x570 + 0.666992981045719*m.b672", "m.x280 == 0) m.c167 = Constraint(expr= m.x251 == 0) m.c168", "m.c645 = Constraint(expr= m.x486 == 0) m.c646 = Constraint(expr= m.x487", "Constraint(expr= - m.b613 + m.b631 >= 0) m.c1382 = Constraint(expr=", "m.c120 = Constraint(expr= m.x39 - m.x261 - m.x264 == 0)", "1) m.c1133 = Constraint(expr= m.b701 + m.b702 <= 1) m.c1134", "Constraint(expr= m.x75 - m.x351 - m.x354 == 0) m.c325 =", "- m.x413 == 0) m.c621 = Constraint(expr= m.x108 - m.x411", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x118 = Var(within=Reals,bounds=(0,None),initialize=0) m.x119 = Var(within=Reals,bounds=(0,None),initialize=0) m.x120 =", "0) m.c48 = Constraint(expr= m.x174 - m.x183 - m.x186 ==", "m.x383 + 33.5*m.b638 <= 33.5) m.c495 = Constraint(expr= m.x384 +", "m.c791 = Constraint(expr= m.x545 == 0) m.c792 = Constraint(expr= m.x546", "m.b620 - m.b622 <= 0) m.c1039 = Constraint(expr= m.b621 -", 
"Constraint(expr= - m.b625 + m.b643 + m.b646 >= 0) m.c1400", "<= 3.04984759446376) m.c631 = Constraint(expr= m.x415 + 3.04984759446376*m.b655 <= 3.04984759446376)", "<= 0.994083415506506) m.c746 = Constraint(expr= - m.x506 + m.x536 ==", "Var(within=Binary,bounds=(0,1),initialize=0) m.b772 = Var(within=Binary,bounds=(0,1),initialize=0) m.b773 = Var(within=Binary,bounds=(0,1),initialize=0) m.b774 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.c665 = Constraint(expr=(m.x488/(0.001 + 0.999*m.b659) - 1.2*log(1 + m.x476/(0.001 +", "m.b649 <= 0) m.c1067 = Constraint(expr= m.b650 - m.b651 <=", "= Constraint(expr= m.x360 == 0) m.c346 = Constraint(expr= m.x361 ==", "m.c788 = Constraint(expr=(m.x566/(0.001 + 0.999*m.b671) - 1.25*log(1 + m.x542/(0.001 +", "== 0) m.c30 = Constraint(expr= m.x78 - m.x102 - m.x105", "- m.x355 == 0) m.c326 = Constraint(expr= m.x302 - 15*m.b623", "m.c843 = Constraint(expr=(m.x579/(0.001 + 0.999*m.b678) - log(1 + m.x528/(0.001 +", "m.x359 = Var(within=Reals,bounds=(0,None),initialize=0) m.x360 = Var(within=Reals,bounds=(0,None),initialize=0) m.x361 = Var(within=Reals,bounds=(0,None),initialize=0) m.x362", "Var(within=Reals,bounds=(0,None),initialize=0) m.x264 = Var(within=Reals,bounds=(0,None),initialize=0) m.x265 = Var(within=Reals,bounds=(0,None),initialize=0) m.x266 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= m.x353 == 0) m.c318 = Constraint(expr= m.x354 ==", "<= 1) m.c1182 = Constraint(expr= m.b725 + m.b727 <= 1)", "m.c427 = Constraint(expr= m.x337 + 1.32154609891348*m.b634 <= 1.32154609891348) m.c428 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x354 = Var(within=Reals,bounds=(0,None),initialize=0) m.x355 = Var(within=Reals,bounds=(0,None),initialize=0) m.x356 =", "m.x516 = Var(within=Reals,bounds=(0,None),initialize=0) m.x517 = Var(within=Reals,bounds=(0,None),initialize=0) m.x518 = Var(within=Reals,bounds=(0,None),initialize=0) m.x519", "== 0) m.c287 = Constraint(expr= m.x299 == 0) m.c288 =", "0) 
m.c328 = Constraint(expr= m.x304 - 15*m.b625 <= 0) m.c329", "Var(within=Reals,bounds=(0,None),initialize=0) m.x225 = Var(within=Reals,bounds=(0,None),initialize=0) m.x226 = Var(within=Reals,bounds=(0,None),initialize=0) m.x227 = Var(within=Reals,bounds=(0,None),initialize=0)", "<= 1) m.c1129 = Constraint(expr= m.b698 + m.b699 <= 1)", "m.x489 - m.x492 == 0) m.c679 = Constraint(expr= m.x151 -", "== 0) m.c976 = Constraint(expr= 6*m.b739 + m.x829 == 0)", "m.c69 = Constraint(expr= m.x213 - 40*m.b597 <= 0) m.c70 =", "0) m.c68 = Constraint(expr= m.x212 - 40*m.b596 <= 0) m.c69", "40*m.b596 <= 40) m.c72 = Constraint(expr= m.x216 + 40*m.b597 <=", "Constraint(expr= m.b765 + m.b766 <= 1) m.c1263 = Constraint(expr= m.b764", "Constraint(expr= - m.b635 + m.b636 - m.b726 <= 0) m.c1324", "m.c672 = Constraint(expr= m.x492 == 0) m.c673 = Constraint(expr= m.x493", "15*m.b625 <= 0) m.c329 = Constraint(expr= m.x305 + 15*m.b623 <=", "m.b604 - m.b694 <= 0) m.c1292 = Constraint(expr= m.b605 -", "m.x461 == 0) m.c597 = Constraint(expr= m.x132 - m.x459 -", "m.x210 - m.x591 - m.x594 == 0) m.c910 = Constraint(expr=", "+ m.b661 >= 0) m.c1460 = Constraint(expr= - m.b662 +", "<= 3.34221486003388) m.c249 = Constraint(expr= m.x294 + 3.34221486003388*m.b615 <= 3.34221486003388)", "m.x153 == 0) m.c40 = Constraint(expr= - m.x148 - m.x151", "= Constraint(expr= m.x360 + 3.04984759446376*m.b627 <= 3.04984759446376) m.c364 = Constraint(expr=", "1.26558121681553) m.c491 = Constraint(expr= m.x380 - 33.5*m.b638 <= 0) m.c492", "+ 20*m.x133 + 30*m.x155 + 40*m.x156 + 40*m.x157 - m.x170", "m.b645 >= 0) m.c1399 = Constraint(expr= - m.b625 + m.b643", "m.c853 = Constraint(expr= m.x178 - m.x529 - m.x535 == 0)", "= Constraint(expr= - m.b605 + m.b606 - m.b696 <= 0)", "0) m.c262 = Constraint(expr= m.x277 == 0) m.c263 = Constraint(expr=", "m.b706 <= 1) m.c1145 = Constraint(expr= m.b707 + m.b708 <=", "Var(within=Binary,bounds=(0,1),initialize=0) m.b774 = Var(within=Binary,bounds=(0,1),initialize=0) m.b775 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.x776 = Var(within=Reals,bounds=(None,None),initialize=0)", "Constraint(expr= m.x426 == 0) m.c415 = Constraint(expr= m.x427 == 0)", "m.c717 = Constraint(expr= m.x522 + 0.705049913072943*m.b663 <= 0.705049913072943) m.c718 =", "m.x249 - m.x252 == 0) m.c178 = Constraint(expr= m.x34 -", ">= 0) m.c1378 = Constraint(expr= - m.b604 + m.b613 +", "m.b610 + m.b622 + m.b625 + m.b628 >= 0) m.c1394", ">= 0) m.c1481 = Constraint(expr= m.b668 - m.b680 >= 0)", "= Var(within=Reals,bounds=(None,None),initialize=0) m.obj = Objective(expr= - m.x2 - m.x3 -", "m.x548 - 0.705049913072943*m.b674 <= 0) m.c831 = Constraint(expr= m.x549 -", "== 0) m.c954 = Constraint(expr= 9*m.b717 + m.x807 == 0)", "== 0) m.c7 = Constraint(expr= - m.x13 - m.x16 +", "1) m.c1105 = Constraint(expr= m.b686 + m.b687 <= 1) m.c1106", "= Constraint(expr= m.x174 - m.x183 - m.x186 == 0) m.c49", "m.x538 - 15*m.b670 <= 0) m.c785 = Constraint(expr= m.x539 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x420 = Var(within=Reals,bounds=(0,None),initialize=0) m.x421 = Var(within=Reals,bounds=(0,None),initialize=0) m.x422 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x303 = Var(within=Reals,bounds=(0,None),initialize=0) m.x304 = Var(within=Reals,bounds=(0,None),initialize=0) m.x305 =", "m.c1255 = Constraint(expr= m.b761 + m.b762 <= 1) m.c1256 =", "m.x143 - m.x476 - m.x479 == 0) m.c675 = Constraint(expr=", "m.x365 == 0) m.c444 = Constraint(expr= m.x81 - m.x363 -", "= Constraint(expr= - m.b656 + m.b657 - m.b747 <= 0)", "= Constraint(expr= m.b628 - m.b649 >= 0) m.c1451 = Constraint(expr=", "m.x351 - m.x354 == 0) m.c325 = Constraint(expr= m.x76 -", "m.x397 = Var(within=Reals,bounds=(0,None),initialize=0) m.x398 = Var(within=Reals,bounds=(0,None),initialize=0) m.x399 = Var(within=Reals,bounds=(0,None),initialize=0) m.x400", "m.b665 = Var(within=Binary,bounds=(0,1),initialize=0) m.b666 = Var(within=Binary,bounds=(0,1),initialize=0) m.b667 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b668", "Var(within=Reals,bounds=(0,None),initialize=0) m.x85 = Var(within=Reals,bounds=(0,None),initialize=0) m.x86 = Var(within=Reals,bounds=(0,20),initialize=0) m.x87 = Var(within=Reals,bounds=(0,20),initialize=0)", "m.b599) <= 0) m.c81 = Constraint(expr=(m.x231/(0.001 + 0.999*m.b600) - 1.2*log(1", "m.b606 + m.b607 - m.b697 <= 0) m.c1295 = Constraint(expr=", "0.940066550763924) m.c740 = Constraint(expr= m.x524 - 0.994083415506506*m.b665 <= 0) m.c741", "m.b676 - m.b766 <= 0) m.c1364 = Constraint(expr= m.b677 -", "m.x550/(0.001 + 0.999*m.b676)))*(0.001 + 0.999* m.b676) <= 0) m.c818 =", "m.b616 - m.b706 <= 0) m.c1304 = Constraint(expr= m.b617 -", "== 0) m.c290 = Constraint(expr= m.x347 == 0) m.c291 =", "<= 0) m.c1096 = Constraint(expr= m.b678 - m.b679 <= 0)", "m.c952 = Constraint(expr= 4*m.b715 + m.x805 == 0) m.c953 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x289 = Var(within=Reals,bounds=(0,None),initialize=0) m.x290 = Var(within=Reals,bounds=(0,None),initialize=0) m.x291 =", "m.x284 - m.x287 == 0) m.c213 = Constraint(expr= m.x48 -", "m.x499 == 0) m.c698 = Constraint(expr= m.x521 == 0) m.c699", "m.c1319 = Constraint(expr= m.b632 - m.b722 <= 0) m.c1320 =", "= Constraint(expr= - m.x392 + m.x446 == 0) m.c531 =", "= Constraint(expr=(m.x436/(0.001 + 0.999*m.b640) - 0.65*log(1 + m.x370/(0.001 + 0.999*m.b640)))*(0.001", "m.x241 + 4.45628648004517*m.b604 <= 4.45628648004517) m.c128 = Constraint(expr= m.x260 -", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x831 = Var(within=Reals,bounds=(None,None),initialize=0) m.x832 = Var(within=Reals,bounds=(None,None),initialize=0) m.x833 =", "== 0) m.c884 = Constraint(expr= m.x554 - 15*m.b680 <= 0)", "= Constraint(expr= m.x450 == 0) m.c538 = Constraint(expr= m.x451 ==", "+ m.b598 - m.b688 <= 0) m.c1286 = Constraint(expr= m.b599", "m.b619 = Var(within=Binary,bounds=(0,1),initialize=0) m.b620 = Var(within=Binary,bounds=(0,1),initialize=0) m.b621 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b622", "m.c393 = Constraint(expr= m.x324 + 1.83548069293539*m.b630 <= 1.83548069293539) m.c394 =", "<= 0) m.c778 = Constraint(expr= m.x514 - 30*m.b670 <= 0)", "0) m.c483 = Constraint(expr= m.x120 - m.x435 - m.x438 ==", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x802 = Var(within=Reals,bounds=(None,None),initialize=0) m.x803 = Var(within=Reals,bounds=(None,None),initialize=0) m.x804 =", "== 0) m.c672 = Constraint(expr= m.x492 == 0) m.c673 =", "<= 1) m.c1189 = Constraint(expr= m.b728 + m.b729 <= 1)", "== 0) m.c653 = Constraint(expr= m.x470 - 1.18887736200171*m.b656 <= 0)", "<= 1.18887736200171) m.c637 = Constraint(expr= m.x469 + 1.18887736200171*m.b655 <= 1.18887736200171)", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b691 = Var(within=Binary,bounds=(0,1),initialize=0) m.b692 = Var(within=Binary,bounds=(0,1),initialize=0) m.b693 =", "- m.x476 - m.x479 == 0) m.c675 = Constraint(expr= m.x144", "m.c686 = Constraint(expr= m.x488 - 0.940066550763924*m.b659 <= 0) m.c687 =", "Var(within=Binary,bounds=(0,1),initialize=0) m.b734 = Var(within=Binary,bounds=(0,1),initialize=0) m.b735 = Var(within=Binary,bounds=(0,1),initialize=0) m.b736 = Var(within=Binary,bounds=(0,1),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x471 = Var(within=Reals,bounds=(0,None),initialize=0) m.x472 = Var(within=Reals,bounds=(0,None),initialize=0) m.x473 =", "m.c1454 = Constraint(expr= m.b626 - m.b653 >= 0) m.c1455 =", "30) m.c197 = Constraint(expr= m.x278 - 15*m.b608 <= 0) m.c198", "m.x357 - 3.04984759446376*m.b627 <= 0) m.c361 = Constraint(expr= m.x358 -", "- m.x432 == 0) m.c448 = Constraint(expr= m.x118 - m.x430", "Constraint(expr= m.x301 + 15*m.b622 <= 15) m.c305 = Constraint(expr= m.x344", "m.c1024 = Constraint(expr= m.b606 - m.b607 <= 0) m.c1025 =", "= Constraint(expr= m.x358 - 3.04984759446376*m.b628 <= 0) m.c362 = Constraint(expr=", "<= 0) m.c1028 = Constraint(expr= m.b611 - m.b612 <= 0)", "+ 
3.04984759446376*m.b649 <= 3.04984759446376) m.c578 = Constraint(expr= m.x452 - 1.04900943706034*m.b647", "Var(within=Reals,bounds=(0,None),initialize=0) m.x27 = Var(within=Reals,bounds=(0,None),initialize=0) m.x28 = Var(within=Reals,bounds=(0,None),initialize=0) m.x29 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c128 = Constraint(expr= m.x260 - 3.34221486003388*m.b602 <= 0) m.c129 =", "m.b604 + m.b613 + m.b616 >= 0) m.c1379 = Constraint(expr=", ">= 0) m.c1468 = Constraint(expr= m.b655 - m.b658 >= 0)", "m.x61 == 0) m.c20 = Constraint(expr= m.x68 - m.x80 -", "m.x558 + 15*m.b681 <= 15) m.c889 = Constraint(expr= m.x559 +", "- 4.45628648004517*m.b610 <= 0) m.c188 = Constraint(expr= m.x251 + 4.45628648004517*m.b608", "Constraint(expr= m.x125 - m.x446 - m.x449 == 0) m.c543 =", "m.x470 - m.x473 == 0) m.c648 = Constraint(expr= m.x141 -", "+ 0.999*m.b653) - 0.85*log(1 + m.x410/(0.001 + 0.999*m.b653)))*(0.001 + 0.999*", "m.c156 = Constraint(expr= m.x267 - 2.54515263975353*m.b606 <= 0) m.c157 =", "0) m.c1448 = Constraint(expr= m.b626 - m.b647 >= 0) m.c1449", "m.b668 - m.b683 >= 0) m.c1485 = Constraint(expr= m.b669 -", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b753 = Var(within=Binary,bounds=(0,1),initialize=0) m.b754 = Var(within=Binary,bounds=(0,1),initialize=0) m.b755 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x478 = Var(within=Reals,bounds=(0,None),initialize=0) m.x479 = Var(within=Reals,bounds=(0,None),initialize=0) m.x480 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= - m.b608 + m.b620 + m.b623 + m.b626", "m.x242 = Var(within=Reals,bounds=(0,None),initialize=0) m.x243 = Var(within=Reals,bounds=(0,None),initialize=0) m.x244 = Var(within=Reals,bounds=(0,None),initialize=0) m.x245", "FX 0 0 0 0 0 0 0 0 #", "Constraint(expr= m.b758 + m.b760 <= 1) m.c1252 = Constraint(expr= m.b759", "m.c1048 = Constraint(expr= m.b630 - m.b631 <= 0) m.c1049 =", "m.b775 <= 0) m.c1373 = Constraint(expr= m.b596 + m.b599 ==", "m.c134 = 
Constraint(expr=(m.x266/(0.001 + 0.999*m.b605) - 1.5*log(1 + m.x242/(0.001 +", "0) m.c614 = Constraint(expr= m.x413 == 0) m.c615 = Constraint(expr=", "Constraint(expr= m.b641 - m.b643 <= 0) m.c1060 = Constraint(expr= m.b642", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b717 = Var(within=Binary,bounds=(0,1),initialize=0) m.b718 = Var(within=Binary,bounds=(0,1),initialize=0) m.b719 =", "m.x251 == 0) m.c168 = Constraint(expr= m.x252 == 0) m.c169", "m.c673 = Constraint(expr= m.x493 == 0) m.c674 = Constraint(expr= m.x143", "== 0) m.c121 = Constraint(expr= m.x40 - m.x262 - m.x265", "Var(within=Reals,bounds=(0,None),initialize=0) m.x58 = Var(within=Reals,bounds=(0,None),initialize=0) m.x59 = Var(within=Reals,bounds=(0,None),initialize=0) m.x60 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c260 = Constraint(expr= m.x275 == 0) m.c261 = Constraint(expr= m.x276", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x437 = Var(within=Reals,bounds=(0,None),initialize=0) m.x438 = Var(within=Reals,bounds=(0,None),initialize=0) m.x439 =", "Constraint(expr= - m.x388 + m.x442 == 0) m.c506 = Constraint(expr=", "- 40*m.b598 <= 0) m.c71 = Constraint(expr= m.x215 + 40*m.b596", "Var(within=Binary,bounds=(0,1),initialize=0) m.b623 = Var(within=Binary,bounds=(0,1),initialize=0) m.b624 = Var(within=Binary,bounds=(0,1),initialize=0) m.b625 = Var(within=Binary,bounds=(0,1),initialize=0)", "15) m.c332 = Constraint(expr= m.x350 - 9*m.b623 <= 0) m.c333", "- m.b630 <= 0) m.c1047 = Constraint(expr= m.b629 - m.b631", "m.x578 - 0.690184503917672*m.b677 <= 0) m.c864 = Constraint(expr= m.x579 -", "m.x776 == 0) m.c924 = Constraint(expr= 4*m.b687 + m.x777 ==", "m.c724 = Constraint(expr= m.x505 == 0) m.c725 = Constraint(expr= m.x530", "+ 40*m.b600 <= 40) m.c100 = Constraint(expr= m.x223 + 40*m.b601", "+ m.b645 - m.b735 <= 0) m.c1333 = Constraint(expr= -", "3.34221486003388*m.b602 <= 3.34221486003388) m.c132 = Constraint(expr= m.x264 + 3.34221486003388*m.b603 <=", "- 0.8*log(1 + m.x405/(0.001 
+ 0.999*m.b651)))*(0.001 + 0.999* m.b651) <=", "<= 0) m.c525 = Constraint(expr= m.x441 - 9*m.b642 <= 0)", "= Constraint(expr= m.b633 - m.b634 <= 0) m.c1052 = Constraint(expr=", "= Constraint(expr=(m.x328/(0.001 + 0.999*m.b616) - 0.9*log(1 + m.x292/(0.001 + 0.999*m.b616)))*(0.001", "m.x44 = Var(within=Reals,bounds=(0,None),initialize=0) m.x45 = Var(within=Reals,bounds=(0,None),initialize=0) m.x46 = Var(within=Reals,bounds=(0,None),initialize=0) m.x47", "+ 30*m.b609 <= 30) m.c196 = Constraint(expr= m.x259 + 30*m.b610", "m.c399 = Constraint(expr= m.x378 + 20*m.b630 <= 20) m.c400 =", "Constraint(expr= m.x437 == 0) m.c474 = Constraint(expr= m.x438 == 0)", "+ m.b754 <= 1) m.c1240 = Constraint(expr= m.b753 + m.b754", "m.b676) <= 0) m.c818 = Constraint(expr= m.x551 == 0) m.c819", "Constraint(expr= m.x420 == 0) m.c379 = Constraint(expr= m.x421 == 0)", "= Constraint(expr= - m.x388 + m.x442 == 0) m.c506 =", "Constraint(expr= m.x460 - 1.11894339953103*m.b652 <= 0) m.c608 = Constraint(expr= m.x461", "= Constraint(expr= m.x43 - m.x268 - m.x274 == 0) m.c149", "Constraint(expr= m.b704 + m.b706 <= 1) m.c1144 = Constraint(expr= m.b705", "m.x305 == 0) m.c321 = Constraint(expr= m.x57 - m.x303 -", "m.c733 = Constraint(expr= m.x178 - m.x526 - m.x532 == 0)", "m.b622 + m.b640 >= 0) m.c1397 = Constraint(expr= - m.b623", "- m.b616 + m.b634 >= 0) m.c1385 = Constraint(expr= -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x180 = Var(within=Reals,bounds=(0,None),initialize=0) m.x181 = Var(within=Reals,bounds=(0,None),initialize=0) m.x182 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x547 = Var(within=Reals,bounds=(0,None),initialize=0) m.x548 = Var(within=Reals,bounds=(0,None),initialize=0) m.x549 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x344 - 13.5*m.b620 <= 0) m.c306 = Constraint(expr= m.x345 -", "m.b623 - m.b625 <= 0) m.c1042 = Constraint(expr= m.b624 -", "15) m.c788 = Constraint(expr=(m.x566/(0.001 + 0.999*m.b671) - 1.25*log(1 + m.x542/(0.001", 
"2.30162356062425*m.b639 <= 2.30162356062425) m.c502 = Constraint(expr= m.x439 + 2.30162356062425*m.b640 <=", "m.b673 = Var(within=Binary,bounds=(0,1),initialize=0) m.b674 = Var(within=Binary,bounds=(0,1),initialize=0) m.b675 = Var(within=Binary,bounds=(0,1),initialize=0) m.b676", "- m.x383 == 0) m.c480 = Constraint(expr= m.x93 - m.x381", "Var(within=Binary,bounds=(0,1),initialize=0) m.b651 = Var(within=Binary,bounds=(0,1),initialize=0) m.b652 = Var(within=Binary,bounds=(0,1),initialize=0) m.b653 = Var(within=Binary,bounds=(0,1),initialize=0)", "= Constraint(expr= m.x128 - m.x452 - m.x455 == 0) m.c570", "Constraint(expr= - m.x73 - m.x91 + m.x94 == 0) m.c26", "= Constraint(expr= m.x344 - 13.5*m.b620 <= 0) m.c306 = Constraint(expr=", "0) m.c607 = Constraint(expr= m.x460 - 1.11894339953103*m.b652 <= 0) m.c608", "+ m.x796 == 0) m.c944 = Constraint(expr= 5*m.b707 + m.x797", "m.x210 = Var(within=Reals,bounds=(0,None),initialize=0) m.x211 = Var(within=Reals,bounds=(0,None),initialize=0) m.x212 = Var(within=Reals,bounds=(0,None),initialize=0) m.x213", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x861 = Var(within=Reals,bounds=(None,None),initialize=0) m.x862 = Var(within=Reals,bounds=(None,None),initialize=0) m.x863 =", "1.32154609891348) m.c428 = Constraint(expr= m.x422 - 0.842233385663186*m.b632 <= 0) m.c429", "= Constraint(expr=(m.x580/(0.001 + 0.999*m.b679) - log(1 + m.x529/(0.001 + 0.999*m.b679)))*(0.001", "- 9*m.b641 <= 0) m.c519 = Constraint(expr= m.x387 - 9*m.b642", "= Constraint(expr=(m.x315/(0.001 + 0.999*m.b612) - 1.25*log(1 + m.x285/(0.001 + 0.999*m.b612)))*(0.001", "Var(within=Reals,bounds=(None,None),initialize=0) m.x838 = Var(within=Reals,bounds=(None,None),initialize=0) m.x839 = Var(within=Reals,bounds=(None,None),initialize=0) m.x840 = Var(within=Reals,bounds=(None,None),initialize=0)", "m.c886 = Constraint(expr= m.x556 - 15*m.b682 <= 0) m.c887 =", "m.x211 = Var(within=Reals,bounds=(0,None),initialize=0) m.x212 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x213 = Var(within=Reals,bounds=(0,None),initialize=0) m.x214", "== 0) m.c941 = Constraint(expr= 4*m.b704 + m.x794 == 0)", "0) m.c758 = Constraint(expr= m.x539 == 0) m.c759 = Constraint(expr=", "= Constraint(expr= m.b695 + m.b696 <= 1) m.c1124 = Constraint(expr=", "== 0) m.c676 = Constraint(expr= m.x145 - m.x478 - m.x481", "m.b626 + m.b647 + m.b650 + m.b653 >= 0) m.c1401", "= Constraint(expr= - m.x248 + m.x278 == 0) m.c162 =", "+ m.b763 <= 1) m.c1259 = Constraint(expr= m.b764 + m.b765", "Constraint(expr= - 0.9*m.x319 + m.x418 == 0) m.c368 = Constraint(expr=", "0.705049913072943*m.b662 <= 0.705049913072943) m.c717 = Constraint(expr= m.x522 + 0.705049913072943*m.b663 <=", "= Constraint(expr= m.x553 + 0.705049913072943*m.b676 <= 0.705049913072943) m.c836 = Constraint(expr=", "m.b612 - m.b702 <= 0) m.c1300 = Constraint(expr= - m.b611", "= Constraint(expr= m.x402 == 0) m.c562 = Constraint(expr= m.x403 ==", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b639 = Var(within=Binary,bounds=(0,1),initialize=0) m.b640 = Var(within=Binary,bounds=(0,1),initialize=0) m.b641 =", "20*m.x125 + 30*m.x126 + 35*m.x127 + 25*m.x128 + 50*m.x129 +", "m.b705 <= 1) m.c1142 = Constraint(expr= m.b705 + m.b706 <=", "m.x532 = Var(within=Reals,bounds=(0,None),initialize=0) m.x533 = Var(within=Reals,bounds=(0,None),initialize=0) m.x534 = Var(within=Reals,bounds=(0,None),initialize=0) m.x535", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x132 = Var(within=Reals,bounds=(0,None),initialize=0) m.x133 = Var(within=Reals,bounds=(0,None),initialize=0) m.x134 =", "== 0) m.c215 = Constraint(expr= m.x62 - m.x314 - m.x320", "m.x89 + m.x92 == 0) m.c24 = Constraint(expr= - m.x72", "- m.x250 + m.x280 == 0) m.c164 = Constraint(expr= -", "m.c255 = Constraint(expr= m.x333 + 1.32154609891348*m.b615 <= 1.32154609891348) m.c256 =", "m.b623 + m.b626 >= 0) m.c1392 = Constraint(expr= - m.b609", "m.x419 == 0) m.c378 = Constraint(expr= m.x420 == 0) m.c379", "m.x818 
= Var(within=Reals,bounds=(None,None),initialize=0) m.x819 = Var(within=Reals,bounds=(None,None),initialize=0) m.x820 = Var(within=Reals,bounds=(None,None),initialize=0) m.x821", "- 3*m.b734 - 4*m.b735 - 3*m.b736 - 5*m.b737 - 7*m.b738", "Var(within=Binary,bounds=(0,1),initialize=0) m.b662 = Var(within=Binary,bounds=(0,1),initialize=0) m.b663 = Var(within=Binary,bounds=(0,1),initialize=0) m.b664 = Var(within=Binary,bounds=(0,1),initialize=0)", "Constraint(expr= - 0.9*m.x296 + m.x344 == 0) m.c285 = Constraint(expr=", "m.c92 = Constraint(expr= m.x14 - m.x230 - m.x233 == 0)", "15) m.c304 = Constraint(expr= m.x301 + 15*m.b622 <= 15) m.c305", "= Constraint(expr= - 0.6*m.x562 + m.x592 == 0) m.c899 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x585 = Var(within=Reals,bounds=(0,None),initialize=0) m.x586 = Var(within=Reals,bounds=(0,None),initialize=0) m.x587 =", "m.x457 == 0) m.c566 = Constraint(expr= m.x101 - m.x398 -", "= Constraint(expr= m.b773 + m.b774 <= 1) m.c1280 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x214 = Var(within=Reals,bounds=(0,None),initialize=0) m.x215 = Var(within=Reals,bounds=(0,None),initialize=0) m.x216 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x288 + 3.34221486003388*m.b612 <= 3.34221486003388) m.c223 = Constraint(expr= m.x289 +", "m.b663 - m.b675 >= 0) m.c1477 = Constraint(expr= m.b664 -", "Constraint(expr= m.x31 - m.x244 - m.x247 == 0) m.c146 =", "3.04984759446376*m.b649 <= 3.04984759446376) m.c578 = Constraint(expr= m.x452 - 1.04900943706034*m.b647 <=", "+ m.b733 <= 1) m.c1198 = Constraint(expr= m.b732 + m.b733", "m.x237 - m.x240 == 0) m.c118 = Constraint(expr= m.x28 -", "Constraint(expr= m.x95 - m.x386 - m.x389 == 0) m.c513 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x431 = Var(within=Reals,bounds=(0,None),initialize=0) m.x432 = Var(within=Reals,bounds=(0,None),initialize=0) m.x433 =", "Constraint(expr= m.b675 - m.b676 <= 0) m.c1094 = Constraint(expr= m.b677", "Constraint(expr= 
m.b704 + m.b706 <= 1) m.c1141 = Constraint(expr= m.b704", "= Constraint(expr= m.b768 + m.b769 <= 1) m.c1269 = Constraint(expr=", "= Constraint(expr= m.x588 == 0) m.c877 = Constraint(expr= m.x589 ==", "2.30162356062425) m.c501 = Constraint(expr= m.x438 + 2.30162356062425*m.b639 <= 2.30162356062425) m.c502", "m.x439 + 2.30162356062425*m.b640 <= 2.30162356062425) m.c503 = Constraint(expr= - m.x386", "0) m.c988 = Constraint(expr= 9*m.b751 + m.x841 == 0) m.c989", "+ m.b651 - m.b741 <= 0) m.c1339 = Constraint(expr= -", "m.x864 = Var(within=Reals,bounds=(None,None),initialize=0) m.x865 = Var(within=Reals,bounds=(None,None),initialize=0) m.obj = Objective(expr= -", "m.b612 - m.b630 >= 0) m.c1432 = Constraint(expr= m.b613 -", "= Constraint(expr= m.x247 + 4.45628648004517*m.b607 <= 4.45628648004517) m.c155 = Constraint(expr=", "2*m.b733 - 3*m.b734 - 4*m.b735 - 3*m.b736 - 5*m.b737 -", "<= 1) m.c1184 = Constraint(expr= m.b726 + m.b727 <= 1)", "0) m.c1422 = Constraint(expr= m.b609 - m.b621 >= 0) m.c1423", "+ 0.999* m.b639) <= 0) m.c463 = Constraint(expr=(m.x436/(0.001 + 0.999*m.b640)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x307 = Var(within=Reals,bounds=(0,None),initialize=0) m.x308 = Var(within=Reals,bounds=(0,None),initialize=0) m.x309 = Var(within=Reals,bounds=(0,None),initialize=0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x320 = Var(within=Reals,bounds=(0,None),initialize=0) m.x321 = Var(within=Reals,bounds=(0,None),initialize=0) m.x322 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= m.x135 - m.x138 == 0) m.c34 = Constraint(expr=", "= Constraint(expr= m.b719 + m.b721 <= 1) m.c1171 = Constraint(expr=", "m.x173 - m.x182 - m.x185 == 0) m.c48 = Constraint(expr=", "m.c778 = Constraint(expr= m.x514 - 30*m.b670 <= 0) m.c779 =", "<= 15) m.c202 = Constraint(expr= m.x283 + 15*m.b610 <= 15)", "Constraint(expr= m.b629 - m.b719 <= 0) m.c1317 = Constraint(expr= -", "m.x581 = Var(within=Reals,bounds=(0,None),initialize=0) m.x582 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x583 = Var(within=Reals,bounds=(0,None),initialize=0) m.x584", "m.b635 - m.b636 + m.b637 - m.b727 <= 0) m.c1325", "Constraint(expr= m.x221 + 40*m.b599 <= 40) m.c99 = Constraint(expr= m.x222", "0) m.c253 = Constraint(expr= m.x328 - 1.32154609891348*m.b616 <= 0) m.c254", "m.c825 = Constraint(expr= m.x186 - m.x549 - m.x552 == 0)", "m.c956 = Constraint(expr= 7*m.b719 + m.x809 == 0) m.c957 =", "== 0) m.c981 = Constraint(expr= 4*m.b744 + m.x834 == 0)", "2.54515263975353) m.c276 = Constraint(expr= m.x276 + 2.54515263975353*m.b618 <= 2.54515263975353) m.c277", "1.25*log(1 + m.x544/(0.001 + 0.999*m.b673)))*(0.001 + 0.999* m.b673) <= 0)", "+ 0.999*m.b626)))*(0.001 + 0.999* m.b626) <= 0) m.c339 = Constraint(expr=(m.x357/(0.001", "Var(within=Reals,bounds=(0,None),initialize=0) m.x290 = Var(within=Reals,bounds=(0,None),initialize=0) m.x291 = Var(within=Reals,bounds=(0,None),initialize=0) m.x292 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.b612 - m.b613 <= 0) m.c1031 = Constraint(expr= m.b614", "m.x269 - 2.54515263975353*m.b617 <= 0) m.c273 = Constraint(expr= m.x270 -", "0.572481933717686*m.b635 <= 0.572481933717686) m.c459 = Constraint(expr= m.x432 + 0.572481933717686*m.b636 <=", "0) m.c545 = Constraint(expr= m.x392 - 9*m.b644 <= 0) m.c546", "3.04984759446376) m.c576 = Constraint(expr= m.x402 + 3.04984759446376*m.b648 <= 3.04984759446376) m.c577", "m.x838 = Var(within=Reals,bounds=(None,None),initialize=0) m.x839 = Var(within=Reals,bounds=(None,None),initialize=0) m.x840 = Var(within=Reals,bounds=(None,None),initialize=0) m.x841", "m.x515 == 0) m.c756 = Constraint(expr= m.x516 == 0) m.c757", "- m.x527 - m.x533 == 0) m.c852 = Constraint(expr= m.x177", "m.b707 + m.b709 <= 1) m.c1147 = Constraint(expr= m.b707 +", "- m.b701 <= 0) m.c1299 = Constraint(expr= - m.b611 +", "- m.b638 - m.b639 + m.b640 - m.b730 <= 0)", "= Constraint(expr= m.b604 - m.b616 >= 0) m.c1418 = Constraint(expr=", "15*m.b680 <= 0) m.c885 = 
Constraint(expr= m.x555 - 15*m.b681 <=", "m.x28 - m.x238 - m.x241 == 0) m.c119 = Constraint(expr=", "+ m.b759 <= 1) m.c1248 = Constraint(expr= m.b758 + m.b760", "m.x779 == 0) m.c927 = Constraint(expr= 7*m.b690 + m.x780 ==", "= Constraint(expr= m.x257 == 0) m.c171 = Constraint(expr= m.x258 ==", "Var(within=Reals,bounds=(0,None),initialize=0) m.x356 = Var(within=Reals,bounds=(0,None),initialize=0) m.x357 = Var(within=Reals,bounds=(0,None),initialize=0) m.x358 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x16 + m.x19 == 0) m.c8 = Constraint(expr= m.x17 -", "= Constraint(expr=(m.x436/(0.001 + 0.999*m.b640) - 0.65*log(1 + m.x382/(0.001 + 0.999*m.b640)))*(0.001", "Var(within=Binary,bounds=(0,1),initialize=0) m.b763 = Var(within=Binary,bounds=(0,1),initialize=0) m.b764 = Var(within=Binary,bounds=(0,1),initialize=0) m.b765 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.b628 + m.b649 + m.b652 + m.b655 >= 0) m.c1403", "Constraint(expr= m.b645 - m.b646 <= 0) m.c1064 = Constraint(expr= m.b647", "m.b729 <= 1) m.c1190 = Constraint(expr= m.b729 + m.b730 <=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x166 = Var(within=Reals,bounds=(0,None),initialize=0) m.x167 = Var(within=Reals,bounds=(0,None),initialize=0) m.x168 =", "<= 0) m.c205 = Constraint(expr=(m.x316/(0.001 + 0.999*m.b613) - 1.25*log(1 +", "+ 9*m.b646 <= 9) m.c557 = Constraint(expr=(m.x452/(0.001 + 0.999*m.b647) -", "m.c230 = Constraint(expr=(m.x326/(0.001 + 0.999*m.b614) - 0.9*log(1 + m.x290/(0.001 +", "0) m.c802 = Constraint(expr= m.x199 - m.x568 - m.x571 ==", "0) m.c892 = Constraint(expr= m.x586 - 13.5*m.b682 <= 0) m.c893", "m.x506 + m.x536 == 0) m.c747 = Constraint(expr= - m.x507", "m.b707 + m.b708 <= 1) m.c1148 = Constraint(expr= m.b708 +", "m.x181 = Var(within=Reals,bounds=(0,None),initialize=0) m.x182 = Var(within=Reals,bounds=(0,None),initialize=0) m.x183 = Var(within=Reals,bounds=(0,None),initialize=0) m.x184", "m.x300 == 0) m.c295 = Constraint(expr= m.x55 - m.x298 -", "= Constraint(expr= 
m.b722 + m.b724 <= 1) m.c1180 = Constraint(expr=", "0) m.c1082 = Constraint(expr= m.b665 - m.b666 <= 0) m.c1083", ">= 0) m.c1418 = Constraint(expr= m.b605 - m.b617 >= 0)", "m.b626 - m.b716 <= 0) m.c1314 = Constraint(expr= - m.b626", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x480 = Var(within=Reals,bounds=(0,None),initialize=0) m.x481 = Var(within=Reals,bounds=(0,None),initialize=0) m.x482 =", "m.c284 = Constraint(expr= - 0.9*m.x296 + m.x344 == 0) m.c285", "= Constraint(expr= - m.x506 + m.x536 == 0) m.c747 =", "m.x439 == 0) m.c485 = Constraint(expr= m.x368 - 1.26558121681553*m.b638 <=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x494 = Var(within=Reals,bounds=(0,None),initialize=0) m.x495 = Var(within=Reals,bounds=(0,None),initialize=0) m.x496 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c1356 = Constraint(expr= - m.b668 + m.b669 - m.b759", "- 3*m.b718 - 7*m.b719 - 2*m.b720 - 9*m.b721 - 3*m.b722", "= Constraint(expr= m.x285 - 3.34221486003388*m.b612 <= 0) m.c220 = Constraint(expr=", "m.x427 == 0) m.c422 = Constraint(expr= m.x329 - 1.32154609891348*m.b632 <=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x254 = Var(within=Reals,bounds=(0,None),initialize=0) m.x255 = Var(within=Reals,bounds=(0,None),initialize=0) m.x256 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x331 - 1.32154609891348*m.b634 <= 0) m.c425 = Constraint(expr= m.x335 +", "= Constraint(expr= m.x272 + 2.54515263975353*m.b605 <= 2.54515263975353) m.c159 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x95 = Var(within=Reals,bounds=(0,None),initialize=0) m.x96 = Var(within=Reals,bounds=(0,None),initialize=0) m.x97 =", "3.04984759446376*m.b652 <= 0) m.c602 = Constraint(expr= m.x407 + 3.04984759446376*m.b650 <=", "m.x172 = Var(within=Reals,bounds=(0,30),initialize=0) m.x173 = Var(within=Reals,bounds=(0,None),initialize=0) m.x174 = Var(within=Reals,bounds=(0,None),initialize=0) m.x175", "+ m.b739 <= 1) m.c1209 = Constraint(expr= m.b737 + 
m.b739", "+ m.x825 == 0) m.c973 = Constraint(expr= 3*m.b736 + m.x826", "0) m.c561 = Constraint(expr= m.x402 == 0) m.c562 = Constraint(expr=", "+ 0.999* m.b628) <= 0) m.c341 = Constraint(expr= m.x311 ==", "1) m.c1238 = Constraint(expr= m.b753 + m.b754 <= 1) m.c1239", "m.b645 - m.b735 <= 0) m.c1333 = Constraint(expr= - m.b644", "0) m.c698 = Constraint(expr= m.x521 == 0) m.c699 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x555 = Var(within=Reals,bounds=(0,None),initialize=0) m.x556 = Var(within=Reals,bounds=(0,None),initialize=0) m.x557 =", "m.b611 - m.b701 <= 0) m.c1299 = Constraint(expr= - m.b611", "3.71357206670431*m.b598 <= 3.71357206670431) m.c80 = Constraint(expr=(m.x230/(0.001 + 0.999*m.b599) - 1.2*log(1", "= Constraint(expr= m.x389 == 0) m.c507 = Constraint(expr= m.x390 ==", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x139 = Var(within=Reals,bounds=(0,None),initialize=0) m.x140 = Var(within=Reals,bounds=(0,None),initialize=0) m.x141 =", "- m.x498 == 0) m.c703 = Constraint(expr= m.x163 - m.x496", "m.b749 + m.b751 <= 1) m.c1231 = Constraint(expr= m.b749 +", "= Constraint(expr= m.x334 == 0) m.c239 = Constraint(expr= m.x50 -", "m.c984 = Constraint(expr= 5*m.b747 + m.x837 == 0) m.c985 =", "m.x804 == 0) m.c952 = Constraint(expr= 4*m.b715 + m.x805 ==", "- 10*m.b758 - 6*m.b759 - 3*m.b760 - 4*m.b761 - 8*m.b762", "+ m.x350 == 0) m.c312 = Constraint(expr= - 0.6*m.x303 +", "Constraint(expr= m.x383 + 33.5*m.b638 <= 33.5) m.c495 = Constraint(expr= m.x384", "m.b688 <= 1) m.c1107 = Constraint(expr= m.b686 + m.b688 <=", "+ m.b745 <= 1) m.c1222 = Constraint(expr= m.b744 + m.b745", "m.x588 == 0) m.c877 = Constraint(expr= m.x589 == 0) m.c878", "m.x782 == 0) m.c930 = Constraint(expr= 9*m.b693 + m.x783 ==", "m.c1381 = Constraint(expr= - m.b613 + m.b631 >= 0) m.c1382", "1) m.c1268 = Constraint(expr= m.b768 + m.b769 <= 1) m.c1269", "Var(within=Reals,bounds=(0,None),initialize=0) m.x342 = Var(within=Reals,bounds=(0,None),initialize=0) m.x343 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x344 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c381 = Constraint(expr= m.x63 - m.x318 - m.x324 == 0)", "m.x856 = Var(within=Reals,bounds=(None,None),initialize=0) m.x857 = Var(within=Reals,bounds=(None,None),initialize=0) m.x858 = Var(within=Reals,bounds=(None,None),initialize=0) m.x859", "m.x375 - 20*m.b630 <= 0) m.c397 = Constraint(expr= m.x376 -", "<= 20) m.c406 = Constraint(expr= m.x421 + 20*m.b631 <= 20)", "m.x158 == 0) m.c42 = Constraint(expr= m.x153 - m.x156 -", "0 0 # # Variable counts # x b i", "1) m.c1154 = Constraint(expr= m.b711 + m.b712 <= 1) m.c1155", "<= 0) m.c1333 = Constraint(expr= - m.b644 - m.b645 +", "m.b665 - m.b755 <= 0) m.c1353 = Constraint(expr= - m.b665", "m.c774 = Constraint(expr= m.x510 + 0.940066550763924*m.b669 <= 0.940066550763924) m.c775 =", "m.x535 == 0) m.c848 = Constraint(expr= m.x581 == 0) m.c849", "0) m.c450 = Constraint(expr= m.x363 - 1.26558121681553*m.b636 <= 0) m.c451", "= Constraint(expr= m.b693 + m.b694 <= 1) m.c1121 = Constraint(expr=", "= Constraint(expr= m.b725 + m.b726 <= 1) m.c1182 = Constraint(expr=", "0) m.c1479 = Constraint(expr= m.b666 - m.b678 >= 0) m.c1480", "0.999*m.b673)))*(0.001 + 0.999* m.b673) <= 0) m.c791 = Constraint(expr= m.x545", "= Constraint(expr= m.b716 + m.b718 <= 1) m.c1168 = Constraint(expr=", "m.x537 - 15*m.b669 <= 0) m.c784 = Constraint(expr= m.x538 -", "m.x125 = Var(within=Reals,bounds=(0,None),initialize=0) m.x126 = Var(within=Reals,bounds=(0,None),initialize=0) m.x127 = Var(within=Reals,bounds=(0,None),initialize=0) m.x128", "1.18887736200171) m.c686 = Constraint(expr= m.x488 - 0.940066550763924*m.b659 <= 0) m.c687", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b659 = Var(within=Binary,bounds=(0,1),initialize=0) m.b660 = Var(within=Binary,bounds=(0,1),initialize=0) m.b661 =", "0) m.c108 = Constraint(expr= - 0.75*m.x237 + m.x261 == 0)", "= Constraint(expr= m.b651 - m.b652 <= 0) m.c1070 = Constraint(expr=", "m.c1247 = 
Constraint(expr= m.b758 + m.b759 <= 1) m.c1248 =", "m.b603 - m.b612 >= 0) m.c1414 = Constraint(expr= m.b604 -", "Var(within=Binary,bounds=(0,1),initialize=0) m.b627 = Var(within=Binary,bounds=(0,1),initialize=0) m.b628 = Var(within=Binary,bounds=(0,1),initialize=0) m.b629 = Var(within=Binary,bounds=(0,1),initialize=0)", "<= 1) m.c1160 = Constraint(expr= m.b714 + m.b715 <= 1)", "- m.x36 - 2*m.x37 - 10*m.x86 - 5*m.x87 - 5*m.x88", "m.x446 = Var(within=Reals,bounds=(0,None),initialize=0) m.x447 = Var(within=Reals,bounds=(0,None),initialize=0) m.x448 = Var(within=Reals,bounds=(0,None),initialize=0) m.x449", "== 0) m.c384 = Constraint(expr= m.x87 - m.x375 - m.x378", "== 0) m.c902 = Constraint(expr= m.x593 == 0) m.c903 =", "Constraint(expr= - m.x147 - m.x150 + m.x153 == 0) m.c40", "m.x473 == 0) m.c642 = Constraint(expr= m.x474 == 0) m.c643", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b715 = Var(within=Binary,bounds=(0,1),initialize=0) m.b716 = Var(within=Binary,bounds=(0,1),initialize=0) m.b717 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x164 = Var(within=Reals,bounds=(0,None),initialize=0) m.x165 = Var(within=Reals,bounds=(0,None),initialize=0) m.x166 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x353 = Var(within=Reals,bounds=(0,None),initialize=0) m.x354 = Var(within=Reals,bounds=(0,None),initialize=0) m.x355 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= m.x593 == 0) m.c903 = Constraint(expr= m.x594 ==", "Var(within=Reals,bounds=(0,None),initialize=0) m.x371 = Var(within=Reals,bounds=(0,None),initialize=0) m.x372 = Var(within=Reals,bounds=(0,None),initialize=0) m.x373 = Var(within=Reals,bounds=(0,None),initialize=0)", "3.34221486003388*m.b612 <= 3.34221486003388) m.c223 = Constraint(expr= m.x289 + 3.34221486003388*m.b613 <=", "= Constraint(expr= m.b675 - m.b676 <= 0) m.c1094 = Constraint(expr=", "m.c44 = Constraint(expr= m.x158 - m.x161 - m.x164 - m.x167", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b658 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b659 = Var(within=Binary,bounds=(0,1),initialize=0) m.b660 =", "== 0) m.c485 = Constraint(expr= m.x368 - 1.26558121681553*m.b638 <= 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x6 = Var(within=Reals,bounds=(0,None),initialize=0) m.x7 = Var(within=Reals,bounds=(0,None),initialize=0) m.x8 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x325 == 0) m.c383 = Constraint(expr= m.x86 - m.x374 -", "1.32154609891348*m.b633 <= 1.32154609891348) m.c427 = Constraint(expr= m.x337 + 1.32154609891348*m.b634 <=", "5*m.x20 + 10*m.x21 + 5*m.x22 - 2*m.x35 - m.x36 -", "m.c928 = Constraint(expr= 6*m.b691 + m.x781 == 0) m.c929 =", "3.04984759446376*m.b651 <= 0) m.c601 = Constraint(expr= m.x406 - 3.04984759446376*m.b652 <=", "m.c377 = Constraint(expr= m.x419 == 0) m.c378 = Constraint(expr= m.x420", "Var(within=Reals,bounds=(0,None),initialize=0) m.x131 = Var(within=Reals,bounds=(0,None),initialize=0) m.x132 = Var(within=Reals,bounds=(0,None),initialize=0) m.x133 = Var(within=Reals,bounds=(0,None),initialize=0)", "3*m.b772 + m.x862 == 0) m.c1010 = Constraint(expr= 8*m.b773 +", "Var(within=Reals,bounds=(0,None),initialize=0) m.x305 = Var(within=Reals,bounds=(0,None),initialize=0) m.x306 = Var(within=Reals,bounds=(0,None),initialize=0) m.x307 = Var(within=Reals,bounds=(0,None),initialize=0)", "== 0) m.c977 = Constraint(expr= 2*m.b740 + m.x830 == 0)", "15*m.b609 <= 15) m.c202 = Constraint(expr= m.x283 + 15*m.b610 <=", "m.b754 <= 1) m.c1240 = Constraint(expr= m.b753 + m.b754 <=", "0.705049913072943*m.b675 <= 0) m.c832 = Constraint(expr= m.x550 - 0.705049913072943*m.b676 <=", "= Constraint(expr= m.b752 + m.b754 <= 1) m.c1240 = Constraint(expr=", "m.x803 = Var(within=Reals,bounds=(None,None),initialize=0) m.x804 = Var(within=Reals,bounds=(None,None),initialize=0) m.x805 = Var(within=Reals,bounds=(None,None),initialize=0) m.x806", "15*m.b620 <= 0) m.c300 = Constraint(expr= m.x297 - 15*m.b621 <=", "m.x43 - m.x268 - m.x274 == 0) m.c149 
= Constraint(expr=", "0) m.c833 = Constraint(expr= m.x551 + 0.705049913072943*m.b674 <= 0.705049913072943) m.c834", "== 0) m.c111 = Constraint(expr= m.x240 == 0) m.c112 =", "<= 4.45628648004517) m.c190 = Constraint(expr= m.x253 + 4.45628648004517*m.b610 <= 4.45628648004517)", "+ m.x331/(0.001 + 0.999*m.b634)))*(0.001 + 0.999*m.b634) <= 0) m.c410 =", "+ 0.999*m.b628)))*(0.001 + 0.999* m.b628) <= 0) m.c341 = Constraint(expr=", "- m.b771 <= 0) m.c1369 = Constraint(expr= - m.b680 -", "= Constraint(expr= m.x253 == 0) m.c170 = Constraint(expr= m.x257 ==", "Constraint(expr= m.b713 + m.b715 <= 1) m.c1159 = Constraint(expr= m.b713", "m.x223 == 0) m.c86 = Constraint(expr= m.x233 == 0) m.c87", "- m.x190 - m.x193 - m.x196 == 0) m.c53 =", "Constraint(expr= m.b722 + m.b723 <= 1) m.c1178 = Constraint(expr= m.b723", "+ 0.999*m.b639)))*(0.001 + 0.999* m.b639) <= 0) m.c463 = Constraint(expr=(m.x436/(0.001", "= Constraint(expr=(m.x326/(0.001 + 0.999*m.b614) - 0.9*log(1 + m.x290/(0.001 + 0.999*m.b614)))*(0.001", "= Constraint(expr= m.b761 + m.b762 <= 1) m.c1254 = Constraint(expr=", "+ m.b654 >= 0) m.c1402 = Constraint(expr= - m.b628 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x35 = Var(within=Reals,bounds=(0,30),initialize=0) m.x36 = Var(within=Reals,bounds=(0,30),initialize=0) m.x37 =", "= Constraint(expr= m.b604 - m.b613 >= 0) m.c1415 = Constraint(expr=", "Var(within=Binary,bounds=(0,1),initialize=0) m.b699 = Var(within=Binary,bounds=(0,1),initialize=0) m.b700 = Var(within=Binary,bounds=(0,1),initialize=0) m.b701 = Var(within=Binary,bounds=(0,1),initialize=0)", "= Constraint(expr= m.x507 - 0.940066550763924*m.b669 <= 0) m.c772 = Constraint(expr=", "30*m.x111 + 15*m.x112 + 15*m.x113 + 20*m.x114 + 25*m.x115 +", "5*m.x88 - 5*m.x89 - 5*m.x90 - 5*m.x91 + 40*m.x110 +", "30) m.c196 = Constraint(expr= m.x259 + 30*m.b610 <= 30) m.c197", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x777 = Var(within=Reals,bounds=(None,None),initialize=0) m.x778 = 
Var(within=Reals,bounds=(None,None),initialize=0) m.x779 =", "m.x11 - m.x224 - m.x227 == 0) m.c66 = Constraint(expr=", "m.x395 == 0) m.c534 = Constraint(expr= m.x396 == 0) m.c535", "m.b596 - m.b597 <= 0) m.c1014 = Constraint(expr= m.b596 -", "m.c1352 = Constraint(expr= m.b665 - m.b755 <= 0) m.c1353 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x411 = Var(within=Reals,bounds=(0,None),initialize=0) m.x412 = Var(within=Reals,bounds=(0,None),initialize=0) m.x413 =", "Var(within=Reals,bounds=(None,None),initialize=0) m.x861 = Var(within=Reals,bounds=(None,None),initialize=0) m.x862 = Var(within=Reals,bounds=(None,None),initialize=0) m.x863 = Var(within=Reals,bounds=(None,None),initialize=0)", "+ 20*m.b631 <= 20) m.c401 = Constraint(expr= m.x416 - 20*m.b629", "m.c688 = Constraint(expr= m.x490 - 0.940066550763924*m.b661 <= 0) m.c689 =", "m.c918 = Constraint(expr= m.x591 - 9*m.b684 <= 0) m.c919 =", "<= 0) m.c743 = Constraint(expr= m.x530 + 0.994083415506506*m.b665 <= 0.994083415506506)", "- m.b679 >= 0) m.c1481 = Constraint(expr= m.b668 - m.b680", "m.b730 <= 1) m.c1191 = Constraint(expr= m.b728 + m.b730 <=", "m.x550 = Var(within=Reals,bounds=(0,None),initialize=0) m.x551 = Var(within=Reals,bounds=(0,None),initialize=0) m.x552 = Var(within=Reals,bounds=(0,None),initialize=0) m.x553", "m.x139 - m.x142 - m.x145 == 0) m.c38 = Constraint(expr=", "0) m.c1386 = Constraint(expr= - m.b606 + m.b618 >= 0)", "== 0) m.c726 = Constraint(expr= m.x531 == 0) m.c727 =", "= Constraint(expr= m.x175 - m.x184 - m.x187 == 0) m.c50", "+ m.x405/(0.001 + 0.999*m.b651)))*(0.001 + 0.999* m.b651) <= 0) m.c586", "Constraint(expr=(m.x315/(0.001 + 0.999*m.b612) - 1.25*log(1 + m.x285/(0.001 + 0.999*m.b612)))*(0.001 +", "- m.x320 == 0) m.c216 = Constraint(expr= m.x63 - m.x315", "m.c885 = Constraint(expr= m.x555 - 15*m.b681 <= 0) m.c886 =", "= Constraint(expr= m.x331 - 1.32154609891348*m.b634 <= 0) m.c425 = Constraint(expr=", "m.b608 - m.b609 <= 0) m.c1026 = Constraint(expr= m.b608 -", "3*m.b756 - 
8*m.b757 - 10*m.b758 - 6*m.b759 - 3*m.b760 -", "m.b610 - m.b628 >= 0) m.c1430 = Constraint(expr= m.b611 -", "+ 0.999* m.b637) <= 0) m.c437 = Constraint(expr= m.x365 ==", "written by GAMS Convert at 01/15/21 11:37:33 # # Equation", "= Constraint(expr= m.x354 + 9*m.b624 <= 9) m.c337 = Constraint(expr=", "<= 1) m.c1113 = Constraint(expr= m.b689 + m.b691 <= 1)", "15*m.b625 <= 15) m.c332 = Constraint(expr= m.x350 - 9*m.b623 <=", "m.x354 + 9*m.b624 <= 9) m.c337 = Constraint(expr= m.x355 +", "1) m.c1214 = Constraint(expr= m.b741 + m.b742 <= 1) m.c1215", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x181 = Var(within=Reals,bounds=(0,None),initialize=0) m.x182 = Var(within=Reals,bounds=(0,None),initialize=0) m.x183 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x456 = Var(within=Reals,bounds=(0,None),initialize=0) m.x457 = Var(within=Reals,bounds=(0,None),initialize=0) m.x458 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c261 = Constraint(expr= m.x276 == 0) m.c262 = Constraint(expr=", "Total const NL DLL # 3373 3193 180 0 #", "m.b608 - m.b609 + m.b610 - m.b700 <= 0) m.c1298", "m.c188 = Constraint(expr= m.x251 + 4.45628648004517*m.b608 <= 4.45628648004517) m.c189 =", "0) m.c475 = Constraint(expr= m.x439 == 0) m.c476 = Constraint(expr=", "Constraint(expr= m.x44 - m.x278 - m.x281 == 0) m.c183 =", "0) m.c1475 = Constraint(expr= m.b662 - m.b674 >= 0) m.c1476", "Var(within=Reals,bounds=(None,None),initialize=0) m.x829 = Var(within=Reals,bounds=(None,None),initialize=0) m.x830 = Var(within=Reals,bounds=(None,None),initialize=0) m.x831 = Var(within=Reals,bounds=(None,None),initialize=0)", "= Constraint(expr= m.b707 + m.b708 <= 1) m.c1148 = Constraint(expr=", "+ m.b599 - m.b608 >= 0) m.c1410 = Constraint(expr= m.b597", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x260 = Var(within=Reals,bounds=(0,None),initialize=0) m.x261 = Var(within=Reals,bounds=(0,None),initialize=0) m.x262 =", "0.480234946352917*m.b675 <= 0) m.c838 = Constraint(expr= m.x574 - 
0.480234946352917*m.b676 <=", "= Constraint(expr= m.b687 + m.b688 <= 1) m.c1107 = Constraint(expr=", "== 0) m.c369 = Constraint(expr= - m.x375 + m.x417 ==", "m.x107 = Var(within=Reals,bounds=(0,None),initialize=0) m.x108 = Var(within=Reals,bounds=(0,None),initialize=0) m.x109 = Var(within=Reals,bounds=(0,None),initialize=0) m.x110", "+ 30*m.x155 + 40*m.x156 + 40*m.x157 - m.x170 - m.x171", "m.b737 <= 0) m.c1335 = Constraint(expr= - m.b647 + m.b648", "m.c993 = Constraint(expr= 3*m.b756 + m.x846 == 0) m.c994 =", "<= 1) m.c1151 = Constraint(expr= m.b710 + m.b711 <= 1)", "0.999*m.b677)))*(0.001 + 0.999*m.b677) <= 0) m.c843 = Constraint(expr=(m.x579/(0.001 + 0.999*m.b678)", "<= 1) m.c1146 = Constraint(expr= m.b707 + m.b709 <= 1)", "m.b723 - 9*m.b724 - 2*m.b725 - 6*m.b726 - 3*m.b727 -", "= Constraint(expr= m.x532 == 0) m.c728 = Constraint(expr= m.x164 -", "9) m.c522 = Constraint(expr= m.x390 + 9*m.b642 <= 9) m.c523", "Var(within=Binary,bounds=(0,1),initialize=0) m.b700 = Var(within=Binary,bounds=(0,1),initialize=0) m.b701 = Var(within=Binary,bounds=(0,1),initialize=0) m.b702 = Var(within=Binary,bounds=(0,1),initialize=0)", "= Constraint(expr= m.b635 - m.b725 <= 0) m.c1323 = Constraint(expr=", "Constraint(expr= m.x537 - 15*m.b669 <= 0) m.c784 = Constraint(expr= m.x538", "Constraint(expr= m.x389 + 9*m.b641 <= 9) m.c522 = Constraint(expr= m.x390", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x515 = Var(within=Reals,bounds=(0,None),initialize=0) m.x516 = Var(within=Reals,bounds=(0,None),initialize=0) m.x517 =", "Var(within=Reals,bounds=(None,None),initialize=0) m.x816 = Var(within=Reals,bounds=(None,None),initialize=0) m.x817 = Var(within=Reals,bounds=(None,None),initialize=0) m.x818 = Var(within=Reals,bounds=(None,None),initialize=0)", "m.c1030 = Constraint(expr= m.b612 - m.b613 <= 0) m.c1031 =", "Constraint(expr= m.b656 - m.b658 <= 0) m.c1075 = Constraint(expr= m.b657", "+ m.b664 - m.b754 <= 0) m.c1352 = Constraint(expr= m.b665", "== 0) m.c119 = Constraint(expr= 
m.x38 - m.x260 - m.x263", "Constraint(expr= m.x304 - 15*m.b625 <= 0) m.c329 = Constraint(expr= m.x305", "0) m.c1326 = Constraint(expr= - m.b638 + m.b639 - m.b729", "Constraint(expr= m.x572 - 0.480234946352917*m.b674 <= 0) m.c837 = Constraint(expr= m.x573", "== 0) m.c170 = Constraint(expr= m.x257 == 0) m.c171 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x349 = Var(within=Reals,bounds=(0,None),initialize=0) m.x350 = Var(within=Reals,bounds=(0,None),initialize=0) m.x351 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c32 = Constraint(expr= m.x134 - m.x137 == 0) m.c33 =", "+ m.x244/(0.001 + 0.999*m.b607)))*(0.001 + 0.999* m.b607) <= 0) m.c137", "m.x265 == 0) m.c116 = Constraint(expr= m.x26 - m.x236 -", "Constraint(expr= m.x444 + 9*m.b642 <= 9) m.c529 = Constraint(expr= m.x445", "== 0) m.c856 = Constraint(expr= m.x205 - m.x580 - m.x583", "= Constraint(expr= 5*m.b737 + m.x827 == 0) m.c975 = Constraint(expr=", "Var(within=Reals,bounds=(None,None),initialize=0) m.x845 = Var(within=Reals,bounds=(None,None),initialize=0) m.x846 = Var(within=Reals,bounds=(None,None),initialize=0) m.x847 = Var(within=Reals,bounds=(None,None),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x154 = Var(within=Reals,bounds=(0,None),initialize=0) m.x155 = Var(within=Reals,bounds=(0,None),initialize=0) m.x156 =", "0) m.c468 = Constraint(expr= m.x372 == 0) m.c469 = Constraint(expr=", "m.x411 - 3.04984759446376*m.b654 <= 0) m.c628 = Constraint(expr= m.x412 -", "m.b617 = Var(within=Binary,bounds=(0,1),initialize=0) m.b618 = Var(within=Binary,bounds=(0,1),initialize=0) m.b619 = Var(within=Binary,bounds=(0,1),initialize=0) m.b620", "= Constraint(expr= m.x262 - 3.34221486003388*m.b604 <= 0) m.c131 = Constraint(expr=", "m.c813 = Constraint(expr= m.x570 + 0.666992981045719*m.b672 <= 0.666992981045719) m.c814 =", "m.c171 = Constraint(expr= m.x258 == 0) m.c172 = Constraint(expr= m.x259", "= Constraint(expr= - 0.6*m.x561 + m.x591 == 0) m.c898 =", "0.940066550763924*m.b667 <= 
0) m.c737 = Constraint(expr= m.x503 + 0.940066550763924*m.b665 <=", "m.b753 <= 0) m.c1351 = Constraint(expr= - m.b662 - m.b663", "m.c208 = Constraint(expr= m.x289 == 0) m.c209 = Constraint(expr= m.x320", "- m.x255 - m.x258 == 0) m.c181 = Constraint(expr= m.x37", "0.999* m.b628) <= 0) m.c341 = Constraint(expr= m.x311 == 0)", "m.c1236 = Constraint(expr= m.b752 + m.b754 <= 1) m.c1237 =", "+ m.b641 + m.b644 >= 0) m.c1398 = Constraint(expr= -", "m.b689 + m.b690 <= 1) m.c1110 = Constraint(expr= m.b689 +", "- m.x90 + m.x93 == 0) m.c25 = Constraint(expr= -", "0) m.c301 = Constraint(expr= m.x298 - 15*m.b622 <= 0) m.c302", "= Constraint(expr= m.x522 + 0.705049913072943*m.b663 <= 0.705049913072943) m.c718 = Constraint(expr=", "= Constraint(expr= m.x75 - m.x96 - m.x99 == 0) m.c28", "m.b610 <= 0) m.c1027 = Constraint(expr= m.b609 - m.b610 <=", "m.b689 = Var(within=Binary,bounds=(0,1),initialize=0) m.b690 = Var(within=Binary,bounds=(0,1),initialize=0) m.b691 = Var(within=Binary,bounds=(0,1),initialize=0) m.b692", "Var(within=Reals,bounds=(None,None),initialize=0) m.x864 = Var(within=Reals,bounds=(None,None),initialize=0) m.x865 = Var(within=Reals,bounds=(None,None),initialize=0) m.obj = Objective(expr=", "m.x530 + 0.994083415506506*m.b665 <= 0.994083415506506) m.c744 = Constraint(expr= m.x531 +", "m.x486 + 0.78338879230327*m.b657 <= 0.78338879230327) m.c664 = Constraint(expr= m.x487 +", "Constraint(expr= m.b617 - m.b635 >= 0) m.c1437 = Constraint(expr= m.b618", "Var(within=Reals,bounds=(0,None),initialize=0) m.x206 = Var(within=Reals,bounds=(0,None),initialize=0) m.x207 = Var(within=Reals,bounds=(0,None),initialize=0) m.x208 = Var(within=Reals,bounds=(0,None),initialize=0)", "1) m.c1199 = Constraint(expr= m.b734 + m.b735 <= 1) m.c1200", "Var(within=Reals,bounds=(0,None),initialize=0) m.x28 = Var(within=Reals,bounds=(0,None),initialize=0) m.x29 = Var(within=Reals,bounds=(0,None),initialize=0) m.x30 = Var(within=Reals,bounds=(0,None),initialize=0)", 
"Var(within=Reals,bounds=(None,None),initialize=0) m.x794 = Var(within=Reals,bounds=(None,None),initialize=0) m.x795 = Var(within=Reals,bounds=(None,None),initialize=0) m.x796 = Var(within=Reals,bounds=(None,None),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x207 = Var(within=Reals,bounds=(0,None),initialize=0) m.x208 = Var(within=Reals,bounds=(0,None),initialize=0) m.x209 =", "== 0) m.c569 = Constraint(expr= m.x128 - m.x452 - m.x455", "= Constraint(expr= m.x103 - m.x400 - m.x403 == 0) m.c569", "= Constraint(expr= - m.b623 + m.b641 + m.b644 >= 0)", "== 0) m.c727 = Constraint(expr= m.x532 == 0) m.c728 =", "m.x503 == 0) m.c729 = Constraint(expr= m.x165 - m.x501 -", "0) m.c1429 = Constraint(expr= m.b610 - m.b628 >= 0) m.c1430", "- log(1 + m.x528/(0.001 + 0.999*m.b678)))*(0.001 + 0.999*m.b678) <= 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x48 = Var(within=Reals,bounds=(0,None),initialize=0) m.x49 = Var(within=Reals,bounds=(0,None),initialize=0) m.x50 =", "+ 300*m.x211 - 5*m.b686 - 4*m.b687 - 6*m.b688 - 8*m.b689", "- m.x593 == 0) m.c909 = Constraint(expr= m.x210 - m.x591", "= Constraint(expr= m.x494 - 0.940066550763924*m.b662 <= 0) m.c708 = Constraint(expr=", "= Constraint(expr= - m.b596 + m.b597 - m.b687 <= 0)", "m.b754 <= 1) m.c1239 = Constraint(expr= m.b752 + m.b754 <=", "- m.x424 - m.x427 == 0) m.c422 = Constraint(expr= m.x329", "== 0) m.c725 = Constraint(expr= m.x530 == 0) m.c726 =", "8*m.b773 + m.x863 == 0) m.c1011 = Constraint(expr= 3*m.b774 +", "0) m.c231 = Constraint(expr=(m.x327/(0.001 + 0.999*m.b615) - 0.9*log(1 + m.x291/(0.001", "== 0) m.c900 = Constraint(expr= m.x564 == 0) m.c901 =", "m.b768 + m.b769 <= 1) m.c1271 = Constraint(expr= m.b770 +", "m.c1194 = Constraint(expr= m.b731 + m.b733 <= 1) m.c1195 =", "0.690184503917672*m.b679 <= 0.690184503917672) m.c869 = Constraint(expr= - 0.9*m.x554 + m.x584", "0) m.c1477 = Constraint(expr= m.b664 - m.b676 >= 0) m.c1478", "- m.b676 >= 0) m.c1478 = Constraint(expr= m.b665 - 
m.b677", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b614 = Var(within=Binary,bounds=(0,1),initialize=0) m.b615 = Var(within=Binary,bounds=(0,1),initialize=0) m.b616 =", "<= 0) m.c306 = Constraint(expr= m.x345 - 13.5*m.b621 <= 0)", "m.c342 = Constraint(expr= m.x312 == 0) m.c343 = Constraint(expr= m.x313", "m.x469 + 1.18887736200171*m.b655 <= 1.18887736200171) m.c638 = Constraint(expr=(m.x482/(0.001 + 0.999*m.b656)", "1) m.c1270 = Constraint(expr= m.b768 + m.b769 <= 1) m.c1271", "m.x227 = Var(within=Reals,bounds=(0,None),initialize=0) m.x228 = Var(within=Reals,bounds=(0,None),initialize=0) m.x229 = Var(within=Reals,bounds=(0,None),initialize=0) m.x230", ">= 0) m.c1407 = Constraint(expr= m.b597 + m.b600 - m.b606", "m.b666 = Var(within=Binary,bounds=(0,1),initialize=0) m.b667 = Var(within=Binary,bounds=(0,1),initialize=0) m.b668 = Var(within=Binary,bounds=(0,1),initialize=0) m.b669", "- m.x341 == 0) m.c270 = Constraint(expr= m.x69 - m.x339", "m.c251 = Constraint(expr= m.x326 - 1.32154609891348*m.b614 <= 0) m.c252 =", "m.x224 = Var(within=Reals,bounds=(0,None),initialize=0) m.x225 = Var(within=Reals,bounds=(0,None),initialize=0) m.x226 = Var(within=Reals,bounds=(0,None),initialize=0) m.x227", "= Constraint(expr=(m.x464/(0.001 + 0.999*m.b653) - 0.85*log(1 + m.x410/(0.001 + 0.999*m.b653)))*(0.001", "m.c350 = Constraint(expr= m.x77 - m.x356 - m.x359 == 0)", "<= 1) m.c1260 = Constraint(expr= m.b764 + m.b766 <= 1)", "+ 0.999* m.b601) <= 0) m.c83 = Constraint(expr= m.x221 ==", "+ 0.999*m.b633)))*(0.001 + 0.999*m.b633) <= 0) m.c409 = Constraint(expr=(m.x424/(0.001 +", "= Constraint(expr= m.x526 - 0.994083415506506*m.b667 <= 0) m.c743 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x352 = Var(within=Reals,bounds=(0,None),initialize=0) m.x353 = Var(within=Reals,bounds=(0,None),initialize=0) m.x354 = Var(within=Reals,bounds=(0,None),initialize=0)", "<= 1) m.c1233 = Constraint(expr= m.b749 + m.b751 <= 1)", ">= 0) m.c1412 = Constraint(expr= m.b602 - 
m.b611 >= 0)", "25*m.x128 + 50*m.x129 + 10*m.x130 + 15*m.x131 + 20*m.x132 +", "0.480234946352917) m.c841 = Constraint(expr= m.x577 + 0.480234946352917*m.b676 <= 0.480234946352917) m.c842", "m.b748 <= 1) m.c1229 = Constraint(expr= m.b749 + m.b750 <=", "<= 0) m.c612 = Constraint(expr=(m.x465/(0.001 + 0.999*m.b654) - 0.85*log(1 +", "m.x526 - 0.994083415506506*m.b667 <= 0) m.c743 = Constraint(expr= m.x530 +", "+ 0.999*m.b626) - 1.1*log(1 + m.x308/(0.001 + 0.999*m.b626)))*(0.001 + 0.999*", "- log(1 + m.x471/(0.001 + 0.999*m.b657)))*(0.001 + 0.999*m.b657) <= 0)", "= Constraint(expr= m.x542 - 0.705049913072943*m.b671 <= 0) m.c804 = Constraint(expr=", "== 0) m.c647 = Constraint(expr= m.x140 - m.x470 - m.x473", "+ m.x382/(0.001 + 0.999*m.b640)))*(0.001 + 0.999* m.b640) <= 0) m.c467", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x462 = Var(within=Reals,bounds=(0,None),initialize=0) m.x463 = Var(within=Reals,bounds=(0,None),initialize=0) m.x464 =", "+ 1.11894339953103*m.b651 <= 1.11894339953103) m.c610 = Constraint(expr= m.x463 + 1.11894339953103*m.b652", "30*m.b608 <= 30) m.c195 = Constraint(expr= m.x258 + 30*m.b609 <=", "0.999* m.b612) <= 0) m.c205 = Constraint(expr=(m.x316/(0.001 + 0.999*m.b613) -", "<= 0) m.c833 = Constraint(expr= m.x551 + 0.705049913072943*m.b674 <= 0.705049913072943)", "- m.x534 == 0) m.c853 = Constraint(expr= m.x178 - m.x529", "- m.b699 <= 0) m.c1297 = Constraint(expr= - m.b608 -", "= Constraint(expr= m.b609 - m.b621 >= 0) m.c1423 = Constraint(expr=", "m.x455 + 1.04900943706034*m.b647 <= 1.04900943706034) m.c582 = Constraint(expr= m.x456 +", "m.b730 = Var(within=Binary,bounds=(0,1),initialize=0) m.b731 = Var(within=Binary,bounds=(0,1),initialize=0) m.b732 = Var(within=Binary,bounds=(0,1),initialize=0) m.b733", "- m.b623 + m.b624 - m.b714 <= 0) m.c1312 =", "m.b650 >= 0) m.c1452 = Constraint(expr= m.b627 - m.b651 >=", "+ m.b732 <= 1) m.c1196 = Constraint(expr= m.b732 + m.b733", "+ 0.999*m.b618) - log(1 + m.x270/(0.001 + 0.999*m.b618)))*(0.001 + 
0.999*m.b618)", "0) m.c533 = Constraint(expr= m.x395 == 0) m.c534 = Constraint(expr=", "m.x396 == 0) m.c535 = Constraint(expr= m.x397 == 0) m.c536", "m.x399 - m.x402 == 0) m.c568 = Constraint(expr= m.x103 -", "m.x521 == 0) m.c699 = Constraint(expr= m.x522 == 0) m.c700", "m.b638 - m.b640 <= 0) m.c1057 = Constraint(expr= m.b639 -", "80*m.x194 + 90*m.x195 + 120*m.x196 + 285*m.x197 + 390*m.x198 +", "m.c959 = Constraint(expr= 3*m.b722 + m.x812 == 0) m.c960 =", ">= 0) m.c1420 = Constraint(expr= m.b607 - m.b619 >= 0)", "m.b673 >= 0) m.c1475 = Constraint(expr= m.b662 - m.b674 >=", "0) m.c747 = Constraint(expr= - m.x507 + m.x537 == 0)", "m.x77 - m.x101 - m.x104 - m.x107 == 0) m.c30", "<= 3.34221486003388) m.c251 = Constraint(expr= m.x326 - 1.32154609891348*m.b614 <= 0)", "- m.x306 == 0) m.c322 = Constraint(expr= m.x58 - m.x304", "= Constraint(expr= m.x174 - m.x519 - m.x522 == 0) m.c706", "0.999*m.b607)))*(0.001 + 0.999* m.b607) <= 0) m.c137 = Constraint(expr= m.x245", "2.54515263975353*m.b617 <= 0) m.c273 = Constraint(expr= m.x270 - 2.54515263975353*m.b618 <=", "13.5*m.b621 <= 0) m.c307 = Constraint(expr= m.x346 - 13.5*m.b622 <=", "= Constraint(expr= m.x65 - m.x329 - m.x335 == 0) m.c417", "= Constraint(expr= m.x429 - 0.572481933717686*m.b636 <= 0) m.c457 = Constraint(expr=", "= Constraint(expr= m.x178 - m.x526 - m.x532 == 0) m.c734", "m.b680 - m.b681 + m.b682 - m.b772 <= 0) m.c1370", "Var(within=Reals,bounds=(0,None),initialize=0) m.x323 = Var(within=Reals,bounds=(0,None),initialize=0) m.x324 = Var(within=Reals,bounds=(0,None),initialize=0) m.x325 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x394 - 9*m.b646 <= 0) m.c548 = Constraint(expr= m.x395", "- 1.25*log(1 + m.x285/(0.001 + 0.999*m.b612)))*(0.001 + 0.999* m.b612) <=", "= Constraint(expr= m.b617 - m.b638 >= 0) m.c1440 = Constraint(expr=", "= Constraint(expr= m.x367 + 1.26558121681553*m.b637 <= 1.26558121681553) m.c455 = Constraint(expr=", "Constraint(expr= m.b608 - m.b698 <= 0) m.c1296 = 
Constraint(expr= -", "m.c516 = Constraint(expr= m.x123 - m.x441 - m.x444 == 0)", "m.x259 == 0) m.c173 = Constraint(expr= m.x281 == 0) m.c174", "= Constraint(expr= m.x398 - 3.04984759446376*m.b647 <= 0) m.c573 = Constraint(expr=", "+ 5*m.x20 + 10*m.x21 + 5*m.x22 - 2*m.x35 - m.x36", "m.x389 == 0) m.c507 = Constraint(expr= m.x390 == 0) m.c508", "- m.b626 + m.b627 - m.b717 <= 0) m.c1315 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x86 = Var(within=Reals,bounds=(0,20),initialize=0) m.x87 = Var(within=Reals,bounds=(0,20),initialize=0) m.x88 = Var(within=Reals,bounds=(0,20),initialize=0)", "Constraint(expr=(m.x430/(0.001 + 0.999*m.b637) - 0.7*log(1 + m.x364/(0.001 + 0.999*m.b637)))*(0.001 +", "15) m.c916 = Constraint(expr= m.x565 + 15*m.b685 <= 15) m.c917", "0) m.c1071 = Constraint(expr= m.b653 - m.b655 <= 0) m.c1072", "<= 0) m.c341 = Constraint(expr= m.x311 == 0) m.c342 =", "0) m.c725 = Constraint(expr= m.x530 == 0) m.c726 = Constraint(expr=", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b634 = Var(within=Binary,bounds=(0,1),initialize=0) m.b635 = Var(within=Binary,bounds=(0,1),initialize=0) m.b636 =", "Var(within=Binary,bounds=(0,1),initialize=0) m.b737 = Var(within=Binary,bounds=(0,1),initialize=0) m.b738 = Var(within=Binary,bounds=(0,1),initialize=0) m.b739 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.x260 - 3.34221486003388*m.b602 <= 0) m.c129 = Constraint(expr= m.x261 -", "m.b703 <= 1) m.c1137 = Constraint(expr= m.b701 + m.b703 <=", "m.x383 == 0) m.c471 = Constraint(expr= m.x384 == 0) m.c472", "<= 0) m.c123 = Constraint(expr= m.x237 - 4.45628648004517*m.b603 <= 0)", "0) m.c961 = Constraint(expr= 9*m.b724 + m.x814 == 0) m.c962", "= Constraint(expr= m.b653 - m.b743 <= 0) m.c1341 = Constraint(expr=", "8*m.b757 + m.x847 == 0) m.c995 = Constraint(expr= 10*m.b758 +", "Var(within=Reals,bounds=(0,20),initialize=0) m.x88 = Var(within=Reals,bounds=(0,20),initialize=0) m.x89 = Var(within=Reals,bounds=(0,20),initialize=0) m.x90 = 
Var(within=Reals,bounds=(0,20),initialize=0)", "m.x471/(0.001 + 0.999*m.b657)))*(0.001 + 0.999*m.b657) <= 0) m.c640 = Constraint(expr=(m.x484/(0.001", "<= 0) m.c1329 = Constraint(expr= - m.b641 + m.b642 -", "m.b626 - m.b627 + m.b628 - m.b718 <= 0) m.c1316", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x394 = Var(within=Reals,bounds=(0,None),initialize=0) m.x395 = Var(within=Reals,bounds=(0,None),initialize=0) m.x396 =", "m.b628 - m.b652 >= 0) m.c1454 = Constraint(expr= m.b626 -", "+ 3.04984759446376*m.b628 <= 3.04984759446376) m.c365 = Constraint(expr= - 0.9*m.x317 +", "Var(within=Binary,bounds=(0,1),initialize=0) m.x776 = Var(within=Reals,bounds=(None,None),initialize=0) m.x777 = Var(within=Reals,bounds=(None,None),initialize=0) m.x778 = Var(within=Reals,bounds=(None,None),initialize=0)", "Constraint(expr= m.x251 == 0) m.c168 = Constraint(expr= m.x252 == 0)", "Constraint(expr= m.b750 + m.b751 <= 1) m.c1235 = Constraint(expr= m.b752", "m.b642 - m.b732 <= 0) m.c1330 = Constraint(expr= - m.b641", "m.x513 = Var(within=Reals,bounds=(0,None),initialize=0) m.x514 = Var(within=Reals,bounds=(0,None),initialize=0) m.x515 = Var(within=Reals,bounds=(0,None),initialize=0) m.x516", "m.c1100 = Constraint(expr= m.b683 - m.b684 <= 0) m.c1101 =", "2*m.b748 + m.x838 == 0) m.c986 = Constraint(expr= 9*m.b749 +", "Var(within=Reals,bounds=(0,None),initialize=0) m.x316 = Var(within=Reals,bounds=(0,None),initialize=0) m.x317 = Var(within=Reals,bounds=(0,None),initialize=0) m.x318 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x284/(0.001 + 0.999*m.b611)))*(0.001 + 0.999* m.b611) <= 0) m.c204 =", "m.c53 = Constraint(expr=(m.x224/(0.001 + 0.999*m.b596) - log(1 + m.x212/(0.001 +", "== 0) m.c848 = Constraint(expr= m.x581 == 0) m.c849 =", "<= 0) m.c1058 = Constraint(expr= m.b641 - m.b642 <= 0)", "m.b752 <= 0) m.c1350 = Constraint(expr= - m.b662 + m.b663", "0) m.c573 = Constraint(expr= m.x399 - 3.04984759446376*m.b648 <= 0) m.c574", "<= 1) m.c1178 = Constraint(expr= m.b723 + 
m.b724 <= 1)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x339 = Var(within=Reals,bounds=(0,None),initialize=0) m.x340 = Var(within=Reals,bounds=(0,None),initialize=0) m.x341 = Var(within=Reals,bounds=(0,None),initialize=0)", "9*m.b717 - 3*m.b718 - 7*m.b719 - 2*m.b720 - 9*m.b721 -", "m.c480 = Constraint(expr= m.x93 - m.x381 - m.x384 == 0)", "Var(within=Reals,bounds=(0,30),initialize=0) m.x38 = Var(within=Reals,bounds=(0,None),initialize=0) m.x39 = Var(within=Reals,bounds=(0,None),initialize=0) m.x40 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x259 + 30*m.b610 <= 30) m.c197 = Constraint(expr= m.x278", "m.b600 == 1) m.c1375 = Constraint(expr= m.b598 + m.b601 ==", "Constraint(expr= m.x171 - m.x513 - m.x516 == 0) m.c766 =", "<= 1) m.c1232 = Constraint(expr= m.b750 + m.b751 <= 1)", "+ 1.83548069293539*m.b629 <= 1.83548069293539) m.c393 = Constraint(expr= m.x324 + 1.83548069293539*m.b630", "- 3*m.b716 - 9*m.b717 - 3*m.b718 - 7*m.b719 - 2*m.b720", "m.c9 = Constraint(expr= m.x18 - m.x21 - m.x24 == 0)", "0) m.c870 = Constraint(expr= - 0.9*m.x555 + m.x585 == 0)", "9*m.b643 <= 9) m.c524 = Constraint(expr= m.x440 - 9*m.b641 <=", "m.x555 - 15*m.b681 <= 0) m.c886 = Constraint(expr= m.x556 -", "Constraint(expr= m.x283 + 15*m.b610 <= 15) m.c203 = Constraint(expr=(m.x314/(0.001 +", "+ 2.54515263975353*m.b605 <= 2.54515263975353) m.c159 = Constraint(expr= m.x273 + 2.54515263975353*m.b606", "= Constraint(expr= m.x426 == 0) m.c415 = Constraint(expr= m.x427 ==", "= Constraint(expr= m.b665 - m.b666 <= 0) m.c1083 = Constraint(expr=", "0.999* m.b648) <= 0) m.c559 = Constraint(expr=(m.x454/(0.001 + 0.999*m.b649) -", "m.b598 + m.b601 - m.b610 >= 0) m.c1412 = Constraint(expr=", "m.x418 = Var(within=Reals,bounds=(0,None),initialize=0) m.x419 = Var(within=Reals,bounds=(0,None),initialize=0) m.x420 = Var(within=Reals,bounds=(0,None),initialize=0) m.x421", "- m.b613 + m.b631 >= 0) m.c1382 = Constraint(expr= -", "m.x42 - m.x267 - m.x273 == 0) m.c148 = 
Constraint(expr=", "m.x377 + 20*m.b629 <= 20) m.c399 = Constraint(expr= m.x378 +", "== 0) m.c989 = Constraint(expr= 5*m.b752 + m.x842 == 0)", "+ m.b721 <= 1) m.c1173 = Constraint(expr= m.b719 + m.b721", "+ m.b715 <= 1) m.c1161 = Constraint(expr= m.b713 + m.b715", "m.x409 == 0) m.c590 = Constraint(expr= m.x461 == 0) m.c591", "- m.x508 - m.x511 == 0) m.c764 = Constraint(expr= m.x170", "+ m.b736 <= 1) m.c1205 = Constraint(expr= m.b737 + m.b738", "m.x252 == 0) m.c178 = Constraint(expr= m.x34 - m.x250 -", "2.54515263975353*m.b619 <= 0) m.c275 = Constraint(expr= m.x275 + 2.54515263975353*m.b617 <=", "<= 1) m.c1236 = Constraint(expr= m.b752 + m.b754 <= 1)", "0) m.c1319 = Constraint(expr= m.b632 - m.b722 <= 0) m.c1320", "m.b618 + m.b619 - m.b709 <= 0) m.c1307 = Constraint(expr=", "- m.b772 <= 0) m.c1370 = Constraint(expr= m.b683 - m.b773", "m.c396 = Constraint(expr= m.x375 - 20*m.b630 <= 0) m.c397 =", "0) m.c877 = Constraint(expr= m.x589 == 0) m.c878 = Constraint(expr=", "m.c150 = Constraint(expr= m.x243 - 4.45628648004517*m.b606 <= 0) m.c151 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x333 = Var(within=Reals,bounds=(0,None),initialize=0) m.x334 = Var(within=Reals,bounds=(0,None),initialize=0) m.x335 =", "Constraint(expr= m.b597 + m.b600 - m.b609 >= 0) m.c1411 =", "== 0) m.c615 = Constraint(expr= m.x414 == 0) m.c616 =", "m.c1134 = Constraint(expr= m.b701 + m.b703 <= 1) m.c1135 =", "m.x790 == 0) m.c938 = Constraint(expr= 7*m.b701 + m.x791 ==", "m.c1303 = Constraint(expr= - m.b614 - m.b615 + m.b616 -", "Constraint(expr= m.x511 == 0) m.c755 = Constraint(expr= m.x515 == 0)", "0) m.c973 = Constraint(expr= 3*m.b736 + m.x826 == 0) m.c974", "- m.b659 >= 0) m.c1470 = Constraint(expr= m.b654 - m.b660", "m.b606 - m.b618 >= 0) m.c1420 = Constraint(expr= m.b607 -", "Constraint(expr= m.x320 + 1.83548069293539*m.b611 <= 1.83548069293539) m.c228 = Constraint(expr= m.x321", "1) m.c1127 = Constraint(expr= m.b698 + m.b699 <= 1) m.c1128", "<= 0) m.c682 = Constraint(expr= m.x478 - 
1.18887736200171*m.b661 <= 0)", "m.c723 = Constraint(expr= m.x504 == 0) m.c724 = Constraint(expr= m.x505", "m.x272 + 2.54515263975353*m.b605 <= 2.54515263975353) m.c159 = Constraint(expr= m.x273 +", "+ m.x447 == 0) m.c532 = Constraint(expr= - m.x394 +", "Constraint(expr= m.x390 + 9*m.b642 <= 9) m.c523 = Constraint(expr= m.x391", "= Constraint(expr= m.x422 - 0.842233385663186*m.b632 <= 0) m.c429 = Constraint(expr=", "== 0) m.c899 = Constraint(expr= m.x563 == 0) m.c900 =", "m.b629 - m.b719 <= 0) m.c1317 = Constraint(expr= - m.b629", "<= 0) m.c302 = Constraint(expr= m.x299 + 15*m.b620 <= 15)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x386 = Var(within=Reals,bounds=(0,None),initialize=0) m.x387 = Var(within=Reals,bounds=(0,None),initialize=0) m.x388 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x4 - m.x7 - m.x10 == 0) m.c5 =", "m.x350 = Var(within=Reals,bounds=(0,None),initialize=0) m.x351 = Var(within=Reals,bounds=(0,None),initialize=0) m.x352 = Var(within=Reals,bounds=(0,None),initialize=0) m.x353", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b771 = Var(within=Binary,bounds=(0,1),initialize=0) m.b772 = Var(within=Binary,bounds=(0,1),initialize=0) m.b773 =", "m.x440 - 9*m.b641 <= 0) m.c525 = Constraint(expr= m.x441 -", "7*m.b738 + m.x828 == 0) m.c976 = Constraint(expr= 6*m.b739 +", "Constraint(expr= m.x224 - 3.71357206670431*m.b596 <= 0) m.c75 = Constraint(expr= m.x225", "m.b723 <= 0) m.c1321 = Constraint(expr= - m.b632 - m.b633", "7*m.b719 - 2*m.b720 - 9*m.b721 - 3*m.b722 - m.b723 -", "== 0) m.c17 = Constraint(expr= m.x44 - m.x53 - m.x56", "m.b775 <= 1) m.c1279 = Constraint(expr= m.b773 + m.b774 <=", "0) m.c86 = Constraint(expr= m.x233 == 0) m.c87 = Constraint(expr=", "0) m.c639 = Constraint(expr=(m.x483/(0.001 + 0.999*m.b657) - log(1 + m.x471/(0.001", "<= 1) m.c1194 = Constraint(expr= m.b731 + m.b733 <= 1)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x247 = Var(within=Reals,bounds=(0,None),initialize=0) m.x248 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x249 =", "0) m.c641 = Constraint(expr= m.x473 == 0) m.c642 = Constraint(expr=", "== 0) m.c792 = Constraint(expr= m.x546 == 0) m.c793 =", "Constraint(expr= 3*m.b774 + m.x864 == 0) m.c1012 = Constraint(expr= 4*m.b775", "= Constraint(expr= m.b623 - m.b624 <= 0) m.c1041 = Constraint(expr=", "m.x508 + m.x538 == 0) m.c749 = Constraint(expr= - 0.5*m.x512", "m.x506 = Var(within=Reals,bounds=(0,None),initialize=0) m.x507 = Var(within=Reals,bounds=(0,None),initialize=0) m.x508 = Var(within=Reals,bounds=(0,None),initialize=0) m.x509", "+ 40*m.b598 <= 40) m.c74 = Constraint(expr= m.x224 - 3.71357206670431*m.b596", "9) m.c551 = Constraint(expr= m.x446 - 9*m.b644 <= 0) m.c552", "0) m.c897 = Constraint(expr= - 0.6*m.x561 + m.x591 == 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x13 = Var(within=Reals,bounds=(0,None),initialize=0) m.x14 = Var(within=Reals,bounds=(0,None),initialize=0) m.x15 =", "Var(within=Reals,bounds=(None,None),initialize=0) m.x801 = Var(within=Reals,bounds=(None,None),initialize=0) m.x802 = Var(within=Reals,bounds=(None,None),initialize=0) m.x803 = Var(within=Reals,bounds=(None,None),initialize=0)", "+ m.x280 == 0) m.c167 = Constraint(expr= m.x251 == 0)", "0.999*m.b619) <= 0) m.c260 = Constraint(expr= m.x275 == 0) m.c261", "m.b770 + m.b771 <= 1) m.c1272 = Constraint(expr= m.b770 +", "<= 0.994083415506506) m.c861 = Constraint(expr= m.x534 + 0.994083415506506*m.b678 <= 0.994083415506506)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x160 = Var(within=Reals,bounds=(0,None),initialize=0) m.x161 = Var(within=Reals,bounds=(0,None),initialize=0) m.x162 =", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b719 = Var(within=Binary,bounds=(0,1),initialize=0) m.b720 = Var(within=Binary,bounds=(0,1),initialize=0) m.b721 =", "Constraint(expr= m.b705 + m.b706 <= 1) m.c1143 = Constraint(expr= m.b704", "- m.x564 == 0) m.c907 = Constraint(expr= m.x193 - m.x562", "Constraint(expr= m.x177 - m.x525 - m.x531 == 
0) m.c733 =", "Constraint(expr= - m.b624 + m.b642 + m.b645 >= 0) m.c1399", "Constraint(expr= m.x531 == 0) m.c727 = Constraint(expr= m.x532 == 0)", "m.c875 = Constraint(expr= m.x587 == 0) m.c876 = Constraint(expr= m.x588", "m.x815 = Var(within=Reals,bounds=(None,None),initialize=0) m.x816 = Var(within=Reals,bounds=(None,None),initialize=0) m.x817 = Var(within=Reals,bounds=(None,None),initialize=0) m.x818", "+ m.b697 <= 1) m.c1123 = Constraint(expr= m.b695 + m.b696", "= Constraint(expr=(m.x465/(0.001 + 0.999*m.b654) - 0.85*log(1 + m.x411/(0.001 + 0.999*m.b654)))*(0.001", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b637 = Var(within=Binary,bounds=(0,1),initialize=0) m.b638 = Var(within=Binary,bounds=(0,1),initialize=0) m.b639 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x390 = Var(within=Reals,bounds=(0,None),initialize=0) m.x391 = Var(within=Reals,bounds=(0,None),initialize=0) m.x392 =", "<= 1) m.c1165 = Constraint(expr= m.b716 + m.b717 <= 1)", "const NL DLL # 3373 3193 180 0 # #", "m.x790 = Var(within=Reals,bounds=(None,None),initialize=0) m.x791 = Var(within=Reals,bounds=(None,None),initialize=0) m.x792 = Var(within=Reals,bounds=(None,None),initialize=0) m.x793", "= Constraint(expr= m.x361 + 3.04984759446376*m.b628 <= 3.04984759446376) m.c365 = Constraint(expr=", "0) m.c1060 = Constraint(expr= m.b642 - m.b643 <= 0) m.c1061", "0) m.c146 = Constraint(expr= m.x41 - m.x266 - m.x272 ==", "15) m.c331 = Constraint(expr= m.x307 + 15*m.b625 <= 15) m.c332", "= Constraint(expr= m.b603 - m.b604 <= 0) m.c1022 = Constraint(expr=", "- m.b617 - m.b618 + m.b619 - m.b709 <= 0)", "m.c680 = Constraint(expr= m.x476 - 1.18887736200171*m.b659 <= 0) m.c681 =", "m.x386 = Var(within=Reals,bounds=(0,None),initialize=0) m.x387 = Var(within=Reals,bounds=(0,None),initialize=0) m.x388 = Var(within=Reals,bounds=(0,None),initialize=0) m.x389", "13.5*m.b682 <= 0) m.c893 = Constraint(expr= m.x587 + 13.5*m.b680 <=", "m.x435 - m.x438 == 0) m.c484 = Constraint(expr= m.x121 -", "Convert 
at 01/15/21 11:37:33 # # Equation counts # Total", "= Constraint(expr= m.b626 - m.b627 <= 0) m.c1044 = Constraint(expr=", "m.x350 == 0) m.c312 = Constraint(expr= - 0.6*m.x303 + m.x351", "= Constraint(expr= m.x441 - 9*m.b642 <= 0) m.c526 = Constraint(expr=", "= Constraint(expr= m.b648 - m.b649 <= 0) m.c1067 = Constraint(expr=", "- m.x185 == 0) m.c48 = Constraint(expr= m.x174 - m.x183", "Var(within=Reals,bounds=(0,None),initialize=0) m.x204 = Var(within=Reals,bounds=(0,None),initialize=0) m.x205 = Var(within=Reals,bounds=(0,None),initialize=0) m.x206 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c1360 = Constraint(expr= - m.b671 - m.b672 + m.b673", "0.999*m.b618) - log(1 + m.x270/(0.001 + 0.999*m.b618)))*(0.001 + 0.999*m.b618) <=", "- 5*m.x89 - 5*m.x90 - 5*m.x91 + 40*m.x110 + 30*m.x111", "0.999* m.b638) <= 0) m.c462 = Constraint(expr=(m.x435/(0.001 + 0.999*m.b639) -", "= Constraint(expr= m.b626 - m.b653 >= 0) m.c1455 = Constraint(expr=", "m.x9 - m.x219 - m.x222 == 0) m.c91 = Constraint(expr=", "1) m.c1258 = Constraint(expr= m.b762 + m.b763 <= 1) m.c1259", "- m.b653 + m.b654 - m.b744 <= 0) m.c1342 =", "m.b678 = Var(within=Binary,bounds=(0,1),initialize=0) m.b679 = Var(within=Binary,bounds=(0,1),initialize=0) m.b680 = Var(within=Binary,bounds=(0,1),initialize=0) m.b681", "== 0) m.c376 = Constraint(expr= m.x379 == 0) m.c377 =", "0) m.c476 = Constraint(expr= m.x83 - m.x368 - m.x371 ==", "0) m.c564 = Constraint(expr= m.x456 == 0) m.c565 = Constraint(expr=", "= Constraint(expr= m.x471 - 1.18887736200171*m.b657 <= 0) m.c655 = Constraint(expr=", "0.999*m.b601) - 1.2*log(1 + m.x220/(0.001 + 0.999*m.b601)))*(0.001 + 0.999* m.b601)", "Constraint(expr= m.b756 + m.b757 <= 1) m.c1247 = Constraint(expr= m.b758", "= Constraint(expr= m.b689 + m.b691 <= 1) m.c1114 = Constraint(expr=", "Var(within=Reals,bounds=(None,None),initialize=0) m.x858 = Var(within=Reals,bounds=(None,None),initialize=0) m.x859 = Var(within=Reals,bounds=(None,None),initialize=0) m.x860 = 
Var(within=Reals,bounds=(None,None),initialize=0)", "m.b742 = Var(within=Binary,bounds=(0,1),initialize=0) m.b743 = Var(within=Binary,bounds=(0,1),initialize=0) m.b744 = Var(within=Binary,bounds=(0,1),initialize=0) m.b745", "Constraint(expr= m.x27 - m.x237 - m.x240 == 0) m.c118 =", "m.c648 = Constraint(expr= m.x141 - m.x471 - m.x474 == 0)", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b730 = Var(within=Binary,bounds=(0,1),initialize=0) m.b731 = Var(within=Binary,bounds=(0,1),initialize=0) m.b732 =", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b764 = Var(within=Binary,bounds=(0,1),initialize=0) m.b765 = Var(within=Binary,bounds=(0,1),initialize=0) m.b766 =", "Constraint(expr= m.x59 - m.x308 - m.x311 == 0) m.c348 =", "0) m.c373 = Constraint(expr= m.x325 == 0) m.c374 = Constraint(expr=", "<= 0) m.c1092 = Constraint(expr= m.b674 - m.b676 <= 0)", "== 0) m.c272 = Constraint(expr= m.x269 - 2.54515263975353*m.b617 <= 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x497 = Var(within=Reals,bounds=(0,None),initialize=0) m.x498 = Var(within=Reals,bounds=(0,None),initialize=0) m.x499 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b604 = Var(within=Binary,bounds=(0,1),initialize=0) m.b605 = Var(within=Binary,bounds=(0,1),initialize=0) m.b606 = Var(within=Binary,bounds=(0,1),initialize=0) m.b607", "<= 1) m.c1162 = Constraint(expr= m.b714 + m.b715 <= 1)", "<= 20) m.c407 = Constraint(expr=(m.x422/(0.001 + 0.999*m.b632) - log(1 +", "m.c858 = Constraint(expr= m.x528 - 0.994083415506506*m.b678 <= 0) m.c859 =", "- m.b719 <= 0) m.c1317 = Constraint(expr= - m.b629 +", "+ m.b736 <= 1) m.c1201 = Constraint(expr= m.b734 + m.b735", "+ 0.999* m.b640) <= 0) m.c464 = Constraint(expr=(m.x434/(0.001 + 0.999*m.b638)", "# 3373 3193 180 0 # # Reformulation has removed", "0) m.c554 = Constraint(expr= m.x449 + 9*m.b644 <= 9) m.c555", "0) m.c1332 = Constraint(expr= - m.b644 + m.b645 - m.b735", "0.842233385663186) m.c432 = Constraint(expr= m.x426 + 0.842233385663186*m.b633 <= 
0.842233385663186) m.c433", "Constraint(expr= m.x408 + 3.04984759446376*m.b651 <= 3.04984759446376) m.c604 = Constraint(expr= m.x409", "m.x296 = Var(within=Reals,bounds=(0,None),initialize=0) m.x297 = Var(within=Reals,bounds=(0,None),initialize=0) m.x298 = Var(within=Reals,bounds=(0,None),initialize=0) m.x299", "m.x412/(0.001 + 0.999*m.b655)))*(0.001 + 0.999* m.b655) <= 0) m.c614 =", "Constraint(expr= m.b743 + m.x833 == 0) m.c981 = Constraint(expr= 4*m.b744", ">= 0) m.c1384 = Constraint(expr= - m.b616 + m.b634 >=", "+ 0.999*m.b676) - 0.9*log(1 + m.x550/(0.001 + 0.999*m.b676)))*(0.001 + 0.999*", "= Constraint(expr= m.x160 - m.x163 - m.x166 - m.x169 ==", "m.c517 = Constraint(expr= m.x124 - m.x442 - m.x445 == 0)", "0.999* m.b640) <= 0) m.c467 = Constraint(expr= m.x371 == 0)", "m.b656 + m.b657 - m.b747 <= 0) m.c1345 = Constraint(expr=", "m.c1429 = Constraint(expr= m.b610 - m.b628 >= 0) m.c1430 =", "- m.x149 + m.x152 == 0) m.c39 = Constraint(expr= -", "= Constraint(expr= m.x329 - 1.32154609891348*m.b632 <= 0) m.c423 = Constraint(expr=", "m.c1344 = Constraint(expr= - m.b656 + m.b657 - m.b747 <=", "m.x354 == 0) m.c325 = Constraint(expr= m.x76 - m.x352 -", "= Constraint(expr= m.x340 - 1.26558121681553*m.b619 <= 0) m.c281 = Constraint(expr=", "m.c699 = Constraint(expr= m.x522 == 0) m.c700 = Constraint(expr= m.x523", "Constraint(expr= m.x164 - m.x500 - m.x503 == 0) m.c729 =", "+ m.b609 - m.b699 <= 0) m.c1297 = Constraint(expr= -", "Constraint(expr= m.x135 - m.x465 - m.x468 == 0) m.c625 =", "m.c855 = Constraint(expr= m.x204 - m.x579 - m.x582 == 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x425 = Var(within=Reals,bounds=(0,None),initialize=0) m.x426 = Var(within=Reals,bounds=(0,None),initialize=0) m.x427 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x865 = Var(within=Reals,bounds=(None,None),initialize=0) m.obj = Objective(expr= - m.x2", "m.c685 = Constraint(expr= m.x481 + 1.18887736200171*m.b661 <= 
1.18887736200171) m.c686 =", "m.c525 = Constraint(expr= m.x441 - 9*m.b642 <= 0) m.c526 =", "== 0) m.c112 = Constraint(expr= m.x241 == 0) m.c113 =", "m.c533 = Constraint(expr= m.x395 == 0) m.c534 = Constraint(expr= m.x396", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x567 = Var(within=Reals,bounds=(0,None),initialize=0) m.x568 = Var(within=Reals,bounds=(0,None),initialize=0) m.x569 =", "Constraint(expr= m.x432 == 0) m.c442 = Constraint(expr= m.x433 == 0)", "= Constraint(expr= m.x410 - 3.04984759446376*m.b653 <= 0) m.c627 = Constraint(expr=", "m.x432 = Var(within=Reals,bounds=(0,None),initialize=0) m.x433 = Var(within=Reals,bounds=(0,None),initialize=0) m.x434 = Var(within=Reals,bounds=(0,None),initialize=0) m.x435", "m.c349 = Constraint(expr= m.x61 - m.x310 - m.x313 == 0)", "+ 0.705049913072943*m.b674 <= 0.705049913072943) m.c834 = Constraint(expr= m.x552 + 0.705049913072943*m.b675", "m.x34 == 0) m.c14 = Constraint(expr= m.x38 - m.x47 -", "m.x238 - 4.45628648004517*m.b604 <= 0) m.c125 = Constraint(expr= m.x239 +", "Constraint(expr= m.x48 - m.x285 - m.x288 == 0) m.c214 =", "1486 571 111 804 0 0 0 0 # #", "0) m.c516 = Constraint(expr= m.x123 - m.x441 - m.x444 ==", "= Constraint(expr= m.x551 + 0.705049913072943*m.b674 <= 0.705049913072943) m.c834 = Constraint(expr=", "m.b747 + m.b748 <= 1) m.c1229 = Constraint(expr= m.b749 +", "m.x234 + 4.45628648004517*m.b600 <= 4.45628648004517) m.c106 = Constraint(expr= m.x235 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x402 = Var(within=Reals,bounds=(0,None),initialize=0) m.x403 = Var(within=Reals,bounds=(0,None),initialize=0) m.x404 =", "Constraint(expr= m.b710 + m.b711 <= 1) m.c1152 = Constraint(expr= m.b710", "m.b730 <= 1) m.c1192 = Constraint(expr= m.b729 + m.b730 <=", "m.b613 + m.b631 >= 0) m.c1382 = Constraint(expr= - m.b614", "+ m.b655 >= 0) m.c1403 = Constraint(expr= m.b596 + m.b599", "m.x410/(0.001 + 0.999*m.b653)))*(0.001 + 0.999* m.b653) <= 0) m.c612 =", "m.b742 <= 1) m.c1217 = Constraint(expr= m.b743 
+ m.b744 <=", "m.b746 <= 0) m.c1344 = Constraint(expr= - m.b656 + m.b657", "+ 3.04984759446376*m.b651 <= 3.04984759446376) m.c604 = Constraint(expr= m.x409 + 3.04984759446376*m.b652", "1) m.c1229 = Constraint(expr= m.b749 + m.b750 <= 1) m.c1230", "Constraint(expr= m.b692 + m.b694 <= 1) m.c1117 = Constraint(expr= m.b692", "- 13.5*m.b622 <= 0) m.c308 = Constraint(expr= m.x347 + 13.5*m.b620", "m.b613 - m.b631 >= 0) m.c1433 = Constraint(expr= m.b614 -", "0) m.c414 = Constraint(expr= m.x426 == 0) m.c415 = Constraint(expr=", "= Constraint(expr= m.x145 - m.x478 - m.x481 == 0) m.c677", "= Constraint(expr= m.b738 + m.b739 <= 1) m.c1211 = Constraint(expr=", "- m.x352 - m.x355 == 0) m.c326 = Constraint(expr= m.x302", "m.c493 = Constraint(expr= m.x382 - 33.5*m.b640 <= 0) m.c494 =", "== 0) m.c929 = Constraint(expr= 6*m.b692 + m.x782 == 0)", "m.x266 - 2.54515263975353*m.b605 <= 0) m.c156 = Constraint(expr= m.x267 -", "+ 0.999*m.b617) <= 0) m.c258 = Constraint(expr=(m.x339/(0.001 + 0.999*m.b618) -", "m.x109 - m.x412 - m.x415 == 0) m.c623 = Constraint(expr=", "Var(within=Reals,bounds=(None,None),initialize=0) m.x800 = Var(within=Reals,bounds=(None,None),initialize=0) m.x801 = Var(within=Reals,bounds=(None,None),initialize=0) m.x802 = Var(within=Reals,bounds=(None,None),initialize=0)", "0) m.c1292 = Constraint(expr= m.b605 - m.b695 <= 0) m.c1293", "m.x433 == 0) m.c443 = Constraint(expr= m.x80 - m.x362 -", "+ m.x471/(0.001 + 0.999*m.b657)))*(0.001 + 0.999*m.b657) <= 0) m.c640 =", "15*m.b626 <= 0) m.c354 = Constraint(expr= m.x309 - 15*m.b627 <=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x68 = Var(within=Reals,bounds=(0,None),initialize=0) m.x69 = Var(within=Reals,bounds=(0,None),initialize=0) m.x70 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= m.b702 + m.b703 <= 1) m.c1139 = Constraint(expr=", "m.b603 + m.b604 - m.b694 <= 0) m.c1292 = Constraint(expr=", "+ m.b669 - m.b759 <= 0) m.c1357 = Constraint(expr= -", "m.c1453 = Constraint(expr= m.b628 - 
m.b652 >= 0) m.c1454 =", "0) m.c1031 = Constraint(expr= m.b614 - m.b615 <= 0) m.c1032", "m.c1464 = Constraint(expr= - m.b666 + m.b678 >= 0) m.c1465", "1) m.c1145 = Constraint(expr= m.b707 + m.b708 <= 1) m.c1146", "+ 0.999*m.b637) - 0.7*log(1 + m.x364/(0.001 + 0.999*m.b637)))*(0.001 + 0.999*", "m.b649 + m.b652 + m.b655 >= 0) m.c1403 = Constraint(expr=", "Constraint(expr= m.x362 - 1.26558121681553*m.b635 <= 0) m.c450 = Constraint(expr= m.x363", "m.x267 = Var(within=Reals,bounds=(0,None),initialize=0) m.x268 = Var(within=Reals,bounds=(0,None),initialize=0) m.x269 = Var(within=Reals,bounds=(0,None),initialize=0) m.x270", "m.x511 = Var(within=Reals,bounds=(0,None),initialize=0) m.x512 = Var(within=Reals,bounds=(0,None),initialize=0) m.x513 = Var(within=Reals,bounds=(0,None),initialize=0) m.x514", "m.x865 = Var(within=Reals,bounds=(None,None),initialize=0) m.obj = Objective(expr= - m.x2 - m.x3", "5*m.x91 + 40*m.x110 + 30*m.x111 + 15*m.x112 + 15*m.x113 +", "<= 0) m.c1371 = Constraint(expr= - m.b683 + m.b684 -", "Constraint(expr= m.b626 - m.b647 >= 0) m.c1449 = Constraint(expr= m.b627", "= Constraint(expr= - 0.5*m.x512 + m.x536 == 0) m.c750 =", "= Constraint(expr= m.b717 + m.b718 <= 1) m.c1169 = Constraint(expr=", "+ m.b730 <= 1) m.c1191 = Constraint(expr= m.b728 + m.b730", "m.x191 = Var(within=Reals,bounds=(0,None),initialize=0) m.x192 = Var(within=Reals,bounds=(0,None),initialize=0) m.x193 = Var(within=Reals,bounds=(0,None),initialize=0) m.x194", "m.b734 = Var(within=Binary,bounds=(0,1),initialize=0) m.b735 = Var(within=Binary,bounds=(0,1),initialize=0) m.b736 = Var(within=Binary,bounds=(0,1),initialize=0) m.b737", "m.c514 = Constraint(expr= m.x97 - m.x388 - m.x391 == 0)", "m.b695 + m.b696 <= 1) m.c1122 = Constraint(expr= m.b695 +", "m.x465 = Var(within=Reals,bounds=(0,None),initialize=0) m.x466 = Var(within=Reals,bounds=(0,None),initialize=0) m.x467 = Var(within=Reals,bounds=(0,None),initialize=0) m.x468", "<= 0) m.c527 = Constraint(expr= m.x443 + 9*m.b641 <= 9)", 
"1.18887736200171) m.c659 = Constraint(expr= m.x482 - 0.78338879230327*m.b656 <= 0) m.c660", "= Constraint(expr= m.x580 - 0.690184503917672*m.b679 <= 0) m.c866 = Constraint(expr=", "m.x199 - m.x568 - m.x571 == 0) m.c803 = Constraint(expr=", "m.x404/(0.001 + 0.999*m.b650)))*(0.001 + 0.999* m.b650) <= 0) m.c585 =", "<= 0) m.c831 = Constraint(expr= m.x549 - 0.705049913072943*m.b675 <= 0)", "= Constraint(expr= m.x432 == 0) m.c442 = Constraint(expr= m.x433 ==", "m.x811 = Var(within=Reals,bounds=(None,None),initialize=0) m.x812 = Var(within=Reals,bounds=(None,None),initialize=0) m.x813 = Var(within=Reals,bounds=(None,None),initialize=0) m.x814", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x192 = Var(within=Reals,bounds=(0,None),initialize=0) m.x193 = Var(within=Reals,bounds=(0,None),initialize=0) m.x194 =", "m.c1257 = Constraint(expr= m.b761 + m.b763 <= 1) m.c1258 =", "m.x472 = Var(within=Reals,bounds=(0,None),initialize=0) m.x473 = Var(within=Reals,bounds=(0,None),initialize=0) m.x474 = Var(within=Reals,bounds=(0,None),initialize=0) m.x475", "<= 0) m.c858 = Constraint(expr= m.x528 - 0.994083415506506*m.b678 <= 0)", "m.c949 = Constraint(expr= 2*m.b712 + m.x802 == 0) m.c950 =", "m.b768 <= 0) m.c1366 = Constraint(expr= - m.b677 - m.b678", "m.x544 - m.x547 == 0) m.c800 = Constraint(expr= m.x197 -", "- 1.1*log(1 + m.x309/(0.001 + 0.999*m.b627)))*(0.001 + 0.999* m.b627) <=", "= Constraint(expr= m.b596 - m.b686 <= 0) m.c1284 = Constraint(expr=", "m.x501/(0.001 + 0.999*m.b666)))*(0.001 + 0.999* m.b666) <= 0) m.c721 =", "Constraint(expr= - m.b629 + m.b630 - m.b720 <= 0) m.c1318", "= Constraint(expr= m.b722 + m.b723 <= 1) m.c1178 = Constraint(expr=", "m.c628 = Constraint(expr= m.x412 - 3.04984759446376*m.b655 <= 0) m.c629 =", "m.x315 = Var(within=Reals,bounds=(0,None),initialize=0) m.x316 = Var(within=Reals,bounds=(0,None),initialize=0) m.x317 = Var(within=Reals,bounds=(0,None),initialize=0) m.x318", "m.c194 = Constraint(expr= m.x257 + 30*m.b608 <= 30) m.c195 =", 
"Constraint(expr= m.b734 + m.b735 <= 1) m.c1202 = Constraint(expr= m.b735", "= Constraint(expr= m.b755 + m.b757 <= 1) m.c1246 = Constraint(expr=", "- m.b732 <= 0) m.c1330 = Constraint(expr= - m.b641 -", "0) m.c1444 = Constraint(expr= m.b625 - m.b643 >= 0) m.c1445", "m.x824 = Var(within=Reals,bounds=(None,None),initialize=0) m.x825 = Var(within=Reals,bounds=(None,None),initialize=0) m.x826 = Var(within=Reals,bounds=(None,None),initialize=0) m.x827", "m.c764 = Constraint(expr= m.x170 - m.x512 - m.x515 == 0)", "m.c43 = Constraint(expr= m.x154 - m.x157 - m.x160 == 0)", "m.c538 = Constraint(expr= m.x451 == 0) m.c539 = Constraint(expr= m.x98", "m.c490 = Constraint(expr= m.x373 + 1.26558121681553*m.b640 <= 1.26558121681553) m.c491 =", "m.c1042 = Constraint(expr= m.b624 - m.b625 <= 0) m.c1043 =", "Var(within=Binary,bounds=(0,1),initialize=0) m.b770 = Var(within=Binary,bounds=(0,1),initialize=0) m.b771 = Var(within=Binary,bounds=(0,1),initialize=0) m.b772 = Var(within=Binary,bounds=(0,1),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x144 = Var(within=Reals,bounds=(0,None),initialize=0) m.x145 = Var(within=Reals,bounds=(0,None),initialize=0) m.x146 =", "== 0) m.c12 = Constraint(expr= m.x24 - m.x27 - m.x30", "35*m.x127 + 25*m.x128 + 50*m.x129 + 10*m.x130 + 15*m.x131 +", "m.c669 = Constraint(expr= m.x480 == 0) m.c670 = Constraint(expr= m.x481", "0) m.c1358 = Constraint(expr= m.b671 - m.b761 <= 0) m.c1359", "- m.b644 - m.b645 + m.b646 - m.b736 <= 0)", "Constraint(expr= - m.b626 + m.b647 + m.b650 + m.b653 >=", "= Constraint(expr= - m.b611 - m.b612 + m.b613 - m.b703", "m.c31 = Constraint(expr= m.x79 - m.x103 - m.x106 - m.x109", "m.x233 == 0) m.c87 = Constraint(expr= m.x234 == 0) m.c88", "Constraint(expr= m.x461 + 1.11894339953103*m.b650 <= 1.11894339953103) m.c609 = Constraint(expr= m.x462", "0) m.c1289 = Constraint(expr= m.b602 - m.b692 <= 0) m.c1290", "0) m.c227 = Constraint(expr= m.x320 + 1.83548069293539*m.b611 <= 1.83548069293539) m.c228", "+ 
0.999*m.b634) - log(1 + m.x331/(0.001 + 0.999*m.b634)))*(0.001 + 0.999*m.b634)", "m.c763 = Constraint(expr= m.x169 - m.x508 - m.x511 == 0)", "m.c247 = Constraint(expr= m.x292 - 3.34221486003388*m.b616 <= 0) m.c248 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x353 = Var(within=Reals,bounds=(0,None),initialize=0) m.x354 = Var(within=Reals,bounds=(0,None),initialize=0) m.x355 =", "m.c1101 = Constraint(expr= m.b683 - m.b685 <= 0) m.c1102 =", "m.x79 - m.x358 - m.x361 == 0) m.c353 = Constraint(expr=", "m.x353 = Var(within=Reals,bounds=(0,None),initialize=0) m.x354 = Var(within=Reals,bounds=(0,None),initialize=0) m.x355 = Var(within=Reals,bounds=(0,None),initialize=0) m.x356", "<= 1) m.c1257 = Constraint(expr= m.b761 + m.b763 <= 1)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x122 = Var(within=Reals,bounds=(0,None),initialize=0) m.x123 = Var(within=Reals,bounds=(0,None),initialize=0) m.x124 = Var(within=Reals,bounds=(0,None),initialize=0)", "8*m.b741 + m.x831 == 0) m.c979 = Constraint(expr= 4*m.b742 +", "m.c1005 = Constraint(expr= 8*m.b768 + m.x858 == 0) m.c1006 =", "m.c606 = Constraint(expr= m.x459 - 1.11894339953103*m.b651 <= 0) m.c607 =", "== 0) m.c65 = Constraint(expr= m.x11 - m.x224 - m.x227", "m.b615 + m.b633 >= 0) m.c1384 = Constraint(expr= - m.b616", "0.994083415506506*m.b665 <= 0.994083415506506) m.c744 = Constraint(expr= m.x531 + 0.994083415506506*m.b666 <=", "= Constraint(expr= m.x320 == 0) m.c210 = Constraint(expr= m.x321 ==", "= Constraint(expr= m.x383 == 0) m.c471 = Constraint(expr= m.x384 ==", "0) m.c852 = Constraint(expr= m.x177 - m.x528 - m.x534 ==", "m.b647 - m.b649 <= 0) m.c1066 = Constraint(expr= m.b648 -", "- m.x428 - m.x431 == 0) m.c447 = Constraint(expr= m.x117", "Var(within=Reals,bounds=(None,None),initialize=0) m.x841 = Var(within=Reals,bounds=(None,None),initialize=0) m.x842 = Var(within=Reals,bounds=(None,None),initialize=0) m.x843 = Var(within=Reals,bounds=(None,None),initialize=0)", "= 
Var(within=Binary,bounds=(0,1),initialize=0) m.b652 = Var(within=Binary,bounds=(0,1),initialize=0) m.b653 = Var(within=Binary,bounds=(0,1),initialize=0) m.b654 =", "= Constraint(expr= m.x465 - 1.18887736200171*m.b654 <= 0) m.c634 = Constraint(expr=", "m.x52 == 0) m.c17 = Constraint(expr= m.x44 - m.x53 -", "0) m.c1399 = Constraint(expr= - m.b625 + m.b643 + m.b646", "Constraint(expr= m.x355 + 9*m.b625 <= 9) m.c338 = Constraint(expr=(m.x356/(0.001 +", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b668 = Var(within=Binary,bounds=(0,1),initialize=0) m.b669 = Var(within=Binary,bounds=(0,1),initialize=0) m.b670 =", "Constraint(expr= m.x223 + 40*m.b601 <= 40) m.c101 = Constraint(expr= m.x230", "m.x335 == 0) m.c417 = Constraint(expr= m.x66 - m.x330 -", "= Constraint(expr= m.b608 - m.b620 >= 0) m.c1422 = Constraint(expr=", "= Constraint(expr= m.x136 - m.x139 == 0) m.c35 = Constraint(expr=", "- m.x263 == 0) m.c120 = Constraint(expr= m.x39 - m.x261", "= Constraint(expr= m.x73 - m.x346 - m.x349 == 0) m.c299", "0) m.c925 = Constraint(expr= 6*m.b688 + m.x778 == 0) m.c926", "Var(within=Reals,bounds=(0,None),initialize=0) m.x542 = Var(within=Reals,bounds=(0,None),initialize=0) m.x543 = Var(within=Reals,bounds=(0,None),initialize=0) m.x544 = Var(within=Reals,bounds=(0,None),initialize=0)", "+ 0.999*m.b639) - 0.65*log(1 + m.x381/(0.001 + 0.999*m.b639)))*(0.001 + 0.999*", "m.c1473 = Constraint(expr= m.b663 - m.b672 >= 0) m.c1474 =", "m.b761 + m.b763 <= 1) m.c1258 = Constraint(expr= m.b762 +", "m.x4 + 5*m.x20 + 10*m.x21 + 5*m.x22 - 2*m.x35 -", "+ 40*m.x157 - m.x170 - m.x171 - m.x172 + 80*m.x194", "Var(within=Reals,bounds=(0,None),initialize=0) m.x586 = Var(within=Reals,bounds=(0,None),initialize=0) m.x587 = Var(within=Reals,bounds=(0,None),initialize=0) m.x588 = Var(within=Reals,bounds=(0,None),initialize=0)", "+ m.b640 >= 0) m.c1397 = Constraint(expr= - m.b623 +", "Constraint(expr= - m.b628 + m.b649 + m.b652 + m.b655 >=", "1.83548069293539*m.b629 <= 0) m.c390 = Constraint(expr= 
m.x318 - 1.83548069293539*m.b630 <=", "- m.x34 == 0) m.c14 = Constraint(expr= m.x38 - m.x47", "Var(within=Reals,bounds=(0,None),initialize=0) m.x62 = Var(within=Reals,bounds=(0,None),initialize=0) m.x63 = Var(within=Reals,bounds=(0,None),initialize=0) m.x64 = Var(within=Reals,bounds=(0,None),initialize=0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x166 = Var(within=Reals,bounds=(0,None),initialize=0) m.x167 = Var(within=Reals,bounds=(0,None),initialize=0) m.x168 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x456 == 0) m.c565 = Constraint(expr= m.x457 == 0) m.c566", "Constraint(expr= - m.b668 - m.b669 + m.b670 - m.b760 <=", "m.x75 = Var(within=Reals,bounds=(0,None),initialize=0) m.x76 = Var(within=Reals,bounds=(0,None),initialize=0) m.x77 = Var(within=Reals,bounds=(0,None),initialize=0) m.x78", "190*m.x202 + 280*m.x203 + 400*m.x204 + 430*m.x205 + 290*m.x206 +", "0) m.c679 = Constraint(expr= m.x151 - m.x490 - m.x493 ==", "m.x185 = Var(within=Reals,bounds=(0,None),initialize=0) m.x186 = Var(within=Reals,bounds=(0,None),initialize=0) m.x187 = Var(within=Reals,bounds=(0,None),initialize=0) m.x188", "<= 0) m.c69 = Constraint(expr= m.x213 - 40*m.b597 <= 0)", "Constraint(expr= - m.x392 + m.x446 == 0) m.c531 = Constraint(expr=", "0.999*m.b647) - 0.75*log(1 + m.x398/(0.001 + 0.999*m.b647)))*(0.001 + 0.999* m.b647)", "<= 1) m.c1259 = Constraint(expr= m.b764 + m.b765 <= 1)", "m.x804 = Var(within=Reals,bounds=(None,None),initialize=0) m.x805 = Var(within=Reals,bounds=(None,None),initialize=0) m.x806 = Var(within=Reals,bounds=(None,None),initialize=0) m.x807", "0) m.c82 = Constraint(expr=(m.x232/(0.001 + 0.999*m.b601) - 1.2*log(1 + m.x220/(0.001", "0.999*m.b638) - 0.65*log(1 + m.x380/(0.001 + 0.999*m.b638)))*(0.001 + 0.999* m.b638)", "m.x295 + 3.34221486003388*m.b616 <= 3.34221486003388) m.c251 = Constraint(expr= m.x326 -", "Constraint(expr= m.x18 - m.x21 - m.x24 == 0) m.c10 =", "= Constraint(expr= m.b755 + m.b757 <= 1) m.c1243 = Constraint(expr=", 
"Constraint(expr= m.b647 - m.b737 <= 0) m.c1335 = Constraint(expr= -", "+ 0.999* m.b675) <= 0) m.c817 = Constraint(expr=(m.x574/(0.001 + 0.999*m.b676)", ">= 0) m.c1460 = Constraint(expr= - m.b662 + m.b671 +", "- 0.9*m.x296 + m.x344 == 0) m.c285 = Constraint(expr= -", "0.999*m.b661)))*(0.001 + 0.999* m.b661) <= 0) m.c668 = Constraint(expr= m.x479", "Constraint(expr= m.b741 + m.b742 <= 1) m.c1215 = Constraint(expr= m.b740", "0) m.c726 = Constraint(expr= m.x531 == 0) m.c727 = Constraint(expr=", "m.x449 = Var(within=Reals,bounds=(0,None),initialize=0) m.x450 = Var(within=Reals,bounds=(0,None),initialize=0) m.x451 = Var(within=Reals,bounds=(0,None),initialize=0) m.x452", "0.9*log(1 + m.x290/(0.001 + 0.999*m.b614)))*(0.001 + 0.999* m.b614) <= 0)", "m.c933 = Constraint(expr= 9*m.b696 + m.x786 == 0) m.c934 =", "m.x433 = Var(within=Reals,bounds=(0,None),initialize=0) m.x434 = Var(within=Reals,bounds=(0,None),initialize=0) m.x435 = Var(within=Reals,bounds=(0,None),initialize=0) m.x436", "m.c497 = Constraint(expr= m.x434 - 2.30162356062425*m.b638 <= 0) m.c498 =", "= Constraint(expr= m.b612 - m.b613 <= 0) m.c1031 = Constraint(expr=", "Constraint(expr= 6*m.b769 + m.x859 == 0) m.c1007 = Constraint(expr= 2*m.b770", "m.x134 - m.x137 == 0) m.c33 = Constraint(expr= m.x135 -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x416 = Var(within=Reals,bounds=(0,None),initialize=0) m.x417 = Var(within=Reals,bounds=(0,None),initialize=0) m.x418 =", "m.c388 = Constraint(expr= m.x112 - m.x418 - m.x421 == 0)", "m.x65 - m.x329 - m.x335 == 0) m.c417 = Constraint(expr=", "== 0) m.c212 = Constraint(expr= m.x47 - m.x284 - m.x287", "+ m.x787 == 0) m.c935 = Constraint(expr= 6*m.b698 + m.x788", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x279 = Var(within=Reals,bounds=(0,None),initialize=0) m.x280 = Var(within=Reals,bounds=(0,None),initialize=0) m.x281 =", "m.x515 = Var(within=Reals,bounds=(0,None),initialize=0) m.x516 = Var(within=Reals,bounds=(0,None),initialize=0) m.x517 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x518", "m.x32 = Var(within=Reals,bounds=(0,None),initialize=0) m.x33 = Var(within=Reals,bounds=(0,None),initialize=0) m.x34 = Var(within=Reals,bounds=(0,None),initialize=0) m.x35", "m.x349 == 0) m.c299 = Constraint(expr= m.x296 - 15*m.b620 <=", "Constraint(expr= m.x369 - 1.26558121681553*m.b639 <= 0) m.c487 = Constraint(expr= m.x370", "- m.x388 + m.x442 == 0) m.c506 = Constraint(expr= m.x389", "0) m.c655 = Constraint(expr= m.x472 - 1.18887736200171*m.b658 <= 0) m.c656", "0) m.c245 = Constraint(expr= m.x290 - 3.34221486003388*m.b614 <= 0) m.c246", "= Constraint(expr= m.b638 - m.b640 <= 0) m.c1057 = Constraint(expr=", "+ 0.999* m.b665) <= 0) m.c720 = Constraint(expr=(m.x525/(0.001 + 0.999*m.b666)", "0.999*m.b656)))*(0.001 + 0.999*m.b656) <= 0) m.c639 = Constraint(expr=(m.x483/(0.001 + 0.999*m.b657)", "m.c47 = Constraint(expr= m.x173 - m.x182 - m.x185 == 0)", "m.x293 == 0) m.c240 = Constraint(expr= m.x51 - m.x291 -", "m.c154 = Constraint(expr= m.x247 + 4.45628648004517*m.b607 <= 4.45628648004517) m.c155 =", "- m.x109 == 0) m.c32 = Constraint(expr= m.x134 - m.x137", "m.c1353 = Constraint(expr= - m.b665 + m.b666 - m.b756 <=", "m.x96 = Var(within=Reals,bounds=(0,None),initialize=0) m.x97 = Var(within=Reals,bounds=(0,None),initialize=0) m.x98 = Var(within=Reals,bounds=(0,None),initialize=0) m.x99", "6*m.b698 - 10*m.b699 - 6*m.b700 - 7*m.b701 - 7*m.b702 -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x571 = Var(within=Reals,bounds=(0,None),initialize=0) m.x572 = Var(within=Reals,bounds=(0,None),initialize=0) m.x573 =", "Constraint(expr= m.x315 - 1.83548069293539*m.b612 <= 0) m.c226 = Constraint(expr= m.x316", "= Constraint(expr= m.x311 + 15*m.b626 <= 15) m.c357 = Constraint(expr=", "<= 1) m.c1207 = Constraint(expr= m.b737 + m.b738 <= 1)", "= Constraint(expr= m.x487 + 0.78338879230327*m.b658 <= 0.78338879230327) m.c665 = Constraint(expr=(m.x488/(0.001", "- m.x336 == 0) m.c418 = Constraint(expr= m.x67 - m.x331", 
"0.6*m.x303 + m.x351 == 0) m.c313 = Constraint(expr= - 0.6*m.x304", "= Constraint(expr= m.x522 == 0) m.c700 = Constraint(expr= m.x523 ==", "- m.x543 - m.x546 == 0) m.c799 = Constraint(expr= m.x184", "= Constraint(expr= m.b624 - m.b645 >= 0) m.c1447 = Constraint(expr=", "= Constraint(expr= m.x402 + 3.04984759446376*m.b648 <= 3.04984759446376) m.c577 = Constraint(expr=", "m.x193 - m.x562 - m.x565 == 0) m.c908 = Constraint(expr=", "- 6*m.b759 - 3*m.b760 - 4*m.b761 - 8*m.b762 - 7*m.b763", "Constraint(expr= m.x395 + 9*m.b644 <= 9) m.c549 = Constraint(expr= m.x396", "+ 0.999*m.b679) <= 0) m.c845 = Constraint(expr= m.x533 == 0)", "- m.b615 >= 0) m.c1417 = Constraint(expr= m.b604 - m.b616", "0) m.c424 = Constraint(expr= m.x331 - 1.32154609891348*m.b634 <= 0) m.c425", "- m.x415 == 0) m.c623 = Constraint(expr= m.x134 - m.x464", "m.x564 == 0) m.c907 = Constraint(expr= m.x193 - m.x562 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x465 = Var(within=Reals,bounds=(0,None),initialize=0) m.x466 = Var(within=Reals,bounds=(0,None),initialize=0) m.x467 = Var(within=Reals,bounds=(0,None),initialize=0)", "3.34221486003388*m.b604 <= 0) m.c131 = Constraint(expr= m.x263 + 3.34221486003388*m.b602 <=", "- m.x146 - m.x149 + m.x152 == 0) m.c39 =", "= Constraint(expr= - 0.9*m.x318 + m.x417 == 0) m.c367 =", "m.x781 == 0) m.c929 = Constraint(expr= 6*m.b692 + m.x782 ==", "Constraint(expr= m.x332 == 0) m.c237 = Constraint(expr= m.x333 == 0)", "m.x821 = Var(within=Reals,bounds=(None,None),initialize=0) m.x822 = Var(within=Reals,bounds=(None,None),initialize=0) m.x823 = Var(within=Reals,bounds=(None,None),initialize=0) m.x824", "= Constraint(expr= m.x371 == 0) m.c468 = Constraint(expr= m.x372 ==", "Constraint(expr= m.x306 + 15*m.b624 <= 15) m.c331 = Constraint(expr= m.x307", "m.c830 = Constraint(expr= m.x548 - 0.705049913072943*m.b674 <= 0) m.c831 =", "m.c1130 = Constraint(expr= m.b699 + m.b700 <= 1) m.c1131 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x593 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x594 = Var(within=Reals,bounds=(0,None),initialize=0) m.x595 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c689 = Constraint(expr= m.x491 + 0.940066550763924*m.b659 <= 0.940066550763924) m.c690", "m.c384 = Constraint(expr= m.x87 - m.x375 - m.x378 == 0)", "1.18887736200171*m.b654 <= 0) m.c634 = Constraint(expr= m.x466 - 1.18887736200171*m.b655 <=", "m.c1136 = Constraint(expr= m.b702 + m.b703 <= 1) m.c1137 =", "m.x272 == 0) m.c147 = Constraint(expr= m.x42 - m.x267 -", "- m.x394 + m.x448 == 0) m.c533 = Constraint(expr= m.x395", "m.x65 = Var(within=Reals,bounds=(0,None),initialize=0) m.x66 = Var(within=Reals,bounds=(0,None),initialize=0) m.x67 = Var(within=Reals,bounds=(0,None),initialize=0) m.x68", "- m.x473 == 0) m.c648 = Constraint(expr= m.x141 - m.x471", "= Constraint(expr= m.x521 + 0.705049913072943*m.b662 <= 0.705049913072943) m.c717 = Constraint(expr=", "= Constraint(expr= m.x228 == 0) m.c61 = Constraint(expr= m.x229 ==", "0.999*m.b607) - 1.5*log(1 + m.x244/(0.001 + 0.999*m.b607)))*(0.001 + 0.999* m.b607)", "Constraint(expr= m.x429 - 0.572481933717686*m.b636 <= 0) m.c457 = Constraint(expr= m.x430", "m.c228 = Constraint(expr= m.x321 + 1.83548069293539*m.b612 <= 1.83548069293539) m.c229 =", "m.x523 + 0.705049913072943*m.b664 <= 0.705049913072943) m.c719 = Constraint(expr=(m.x524/(0.001 + 0.999*m.b665)", "405*m.x201 + 190*m.x202 + 280*m.x203 + 400*m.x204 + 430*m.x205 +", "== 0) m.c819 = Constraint(expr= m.x552 == 0) m.c820 =", "m.x183 - m.x186 == 0) m.c49 = Constraint(expr= m.x175 -", "= Constraint(expr= m.x64 - m.x316 - m.x322 == 0) m.c218", "Var(within=Reals,bounds=(0,None),initialize=0) m.x187 = Var(within=Reals,bounds=(0,None),initialize=0) m.x188 = Var(within=Reals,bounds=(0,None),initialize=0) m.x189 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x361 == 0) m.c347 = Constraint(expr= m.x59 - m.x308 -", "+ m.b659 >= 0) m.c1458 = Constraint(expr= - m.b654 +", "m.b704 + m.b705 <= 1) m.c1142 = 
Constraint(expr= m.b705 +", "m.x110 = Var(within=Reals,bounds=(0,None),initialize=0) m.x111 = Var(within=Reals,bounds=(0,None),initialize=0) m.x112 = Var(within=Reals,bounds=(0,None),initialize=0) m.x113", "m.b602 - m.b611 >= 0) m.c1413 = Constraint(expr= m.b603 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x125 = Var(within=Reals,bounds=(0,None),initialize=0) m.x126 = Var(within=Reals,bounds=(0,None),initialize=0) m.x127 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c663 = Constraint(expr= m.x486 + 0.78338879230327*m.b657 <= 0.78338879230327) m.c664 =", "<= 30) m.c782 = Constraint(expr= m.x536 - 15*m.b668 <= 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x176 = Var(within=Reals,bounds=(0,None),initialize=0) m.x177 = Var(within=Reals,bounds=(0,None),initialize=0) m.x178 =", "- m.b609 + m.b621 + m.b624 + m.b627 >= 0)", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b768 = Var(within=Binary,bounds=(0,1),initialize=0) m.b769 = Var(within=Binary,bounds=(0,1),initialize=0) m.b770 =", "Constraint(expr= m.x395 == 0) m.c534 = Constraint(expr= m.x396 == 0)", "+ m.x779 == 0) m.c927 = Constraint(expr= 7*m.b690 + m.x780", "Var(within=Reals,bounds=(0,None),initialize=0) m.x40 = Var(within=Reals,bounds=(0,None),initialize=0) m.x41 = Var(within=Reals,bounds=(0,None),initialize=0) m.x42 = Var(within=Reals,bounds=(0,None),initialize=0)", "== 0) m.c351 = Constraint(expr= m.x78 - m.x357 - m.x360", "Var(within=Reals,bounds=(0,None),initialize=0) m.x569 = Var(within=Reals,bounds=(0,None),initialize=0) m.x570 = Var(within=Reals,bounds=(0,None),initialize=0) m.x571 = Var(within=Reals,bounds=(0,None),initialize=0)", "<= 1) m.c1130 = Constraint(expr= m.b699 + m.b700 <= 1)", "3.34221486003388*m.b612 <= 0) m.c220 = Constraint(expr= m.x286 - 3.34221486003388*m.b613 <=", "== 0) m.c321 = Constraint(expr= m.x57 - m.x303 - m.x306", "Constraint(expr= m.b728 + m.b730 <= 1) m.c1192 = Constraint(expr= m.b729", "m.x851 = Var(within=Reals,bounds=(None,None),initialize=0) 
m.x852 = Var(within=Reals,bounds=(None,None),initialize=0) m.x853 = Var(within=Reals,bounds=(None,None),initialize=0) m.x854", "m.b623 = Var(within=Binary,bounds=(0,1),initialize=0) m.b624 = Var(within=Binary,bounds=(0,1),initialize=0) m.b625 = Var(within=Binary,bounds=(0,1),initialize=0) m.b626", "= Constraint(expr=(m.x339/(0.001 + 0.999*m.b618) - log(1 + m.x270/(0.001 + 0.999*m.b618)))*(0.001", "Var(within=Reals,bounds=(0,None),initialize=0) m.x440 = Var(within=Reals,bounds=(0,None),initialize=0) m.x441 = Var(within=Reals,bounds=(0,None),initialize=0) m.x442 = Var(within=Reals,bounds=(0,None),initialize=0)", "- m.b596 + m.b597 - m.b687 <= 0) m.c1285 =", "== 0) m.c171 = Constraint(expr= m.x258 == 0) m.c172 =", "Constraint(expr= m.b728 + m.b730 <= 1) m.c1189 = Constraint(expr= m.b728", "- 0.9*log(1 + m.x549/(0.001 + 0.999*m.b675)))*(0.001 + 0.999* m.b675) <=", "m.c1406 = Constraint(expr= m.b596 + m.b599 - m.b605 >= 0)", "== 0) m.c975 = Constraint(expr= 7*m.b738 + m.x828 == 0)", "m.x205 = Var(within=Reals,bounds=(0,None),initialize=0) m.x206 = Var(within=Reals,bounds=(0,None),initialize=0) m.x207 = Var(within=Reals,bounds=(0,None),initialize=0) m.x208", "- m.x396 == 0) m.c541 = Constraint(expr= m.x100 - m.x394", "m.b614 + m.b632 >= 0) m.c1383 = Constraint(expr= - m.b615", "m.x448 - m.x451 == 0) m.c545 = Constraint(expr= m.x392 -", "1) m.c1132 = Constraint(expr= m.b699 + m.b700 <= 1) m.c1133", "m.c1331 = Constraint(expr= m.b644 - m.b734 <= 0) m.c1332 =", "+ m.b727 <= 1) m.c1187 = Constraint(expr= m.b728 + m.b729", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x443 = Var(within=Reals,bounds=(0,None),initialize=0) m.x444 = Var(within=Reals,bounds=(0,None),initialize=0) m.x445 =", "Var(within=Reals,bounds=(0,40),initialize=0) m.x3 = Var(within=Reals,bounds=(0,40),initialize=0) m.x4 = Var(within=Reals,bounds=(0,40),initialize=0) m.x5 = Var(within=Reals,bounds=(0,None),initialize=0)", "+ m.x831 == 0) m.c979 = Constraint(expr= 4*m.b742 + m.x832", 
"Var(within=Binary,bounds=(0,1),initialize=0) m.b631 = Var(within=Binary,bounds=(0,1),initialize=0) m.b632 = Var(within=Binary,bounds=(0,1),initialize=0) m.b633 = Var(within=Binary,bounds=(0,1),initialize=0)", "Constraint(expr= m.x82 - m.x364 - m.x367 == 0) m.c446 =", "m.c1466 = Constraint(expr= m.b653 - m.b656 >= 0) m.c1467 =", "m.c1225 = Constraint(expr= m.b746 + m.b747 <= 1) m.c1226 =", "0) m.c264 = Constraint(expr= m.x342 == 0) m.c265 = Constraint(expr=", "Constraint(expr=(m.x268/(0.001 + 0.999*m.b607) - 1.5*log(1 + m.x244/(0.001 + 0.999*m.b607)))*(0.001 +", "0) m.c645 = Constraint(expr= m.x486 == 0) m.c646 = Constraint(expr=", "1) m.c1117 = Constraint(expr= m.b692 + m.b693 <= 1) m.c1118", "0) m.c1395 = Constraint(expr= - m.b621 + m.b639 >= 0)", "0) m.c1379 = Constraint(expr= - m.b611 + m.b629 >= 0)", "1.32154609891348*m.b614 <= 0) m.c252 = Constraint(expr= m.x327 - 1.32154609891348*m.b615 <=", "0) m.c620 = Constraint(expr= m.x107 - m.x410 - m.x413 ==", "30*m.b670 <= 0) m.c779 = Constraint(expr= m.x515 + 30*m.b668 <=", "<= 1) m.c1123 = Constraint(expr= m.b695 + m.b696 <= 1)", "= Constraint(expr= m.x514 - 30*m.b670 <= 0) m.c779 = Constraint(expr=", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b693 = Var(within=Binary,bounds=(0,1),initialize=0) m.b694 = Var(within=Binary,bounds=(0,1),initialize=0) m.b695 =", "m.x163 - m.x166 - m.x169 == 0) m.c47 = Constraint(expr=", "= Constraint(expr= m.b654 - m.b657 >= 0) m.c1468 = Constraint(expr=", "+ 0.999*m.b599)))*(0.001 + 0.999* m.b599) <= 0) m.c81 = Constraint(expr=(m.x231/(0.001", "- m.b746 <= 0) m.c1344 = Constraint(expr= - m.b656 +", "<= 1) m.c1270 = Constraint(expr= m.b768 + m.b769 <= 1)", "= Constraint(expr= - m.b662 + m.b671 + m.b674 >= 0)", "== 0) m.c590 = Constraint(expr= m.x461 == 0) m.c591 =", "0) m.c799 = Constraint(expr= m.x184 - m.x544 - m.x547 ==", "Var(within=Reals,bounds=(0,None),initialize=0) m.x245 = Var(within=Reals,bounds=(0,None),initialize=0) m.x246 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x247 = Var(within=Reals,bounds=(0,None),initialize=0)", "- m.x14 + m.x17 == 0) m.c6 = Constraint(expr= -", "m.b728 + m.b730 <= 1) m.c1189 = Constraint(expr= m.b728 +", "Constraint(expr= m.x253 + 4.45628648004517*m.b610 <= 4.45628648004517) m.c191 = Constraint(expr= m.x254", "m.x491 + 0.940066550763924*m.b659 <= 0.940066550763924) m.c690 = Constraint(expr= m.x492 +", ">= 0) m.c1382 = Constraint(expr= - m.b614 + m.b632 >=", "Constraint(expr= - m.b626 + m.b627 - m.b717 <= 0) m.c1315", "<= 0) m.c1026 = Constraint(expr= m.b608 - m.b610 <= 0)", "<= 0) m.c492 = Constraint(expr= m.x381 - 33.5*m.b639 <= 0)", "+ m.x776 == 0) m.c924 = Constraint(expr= 4*m.b687 + m.x777", "m.c1032 = Constraint(expr= m.b614 - m.b616 <= 0) m.c1033 =", "Constraint(expr= m.b755 + m.b757 <= 1) m.c1246 = Constraint(expr= m.b756", "0) m.c3 = Constraint(expr= m.x3 - m.x6 - m.x9 ==", "Constraint(expr= m.b606 - m.b607 <= 0) m.c1025 = Constraint(expr= m.b608", "- m.x148 - m.x151 + m.x154 == 0) m.c41 =", "9*m.b685 <= 0) m.c920 = Constraint(expr= m.x593 + 9*m.b683 <=", "Constraint(expr= m.b745 + m.x835 == 0) m.c983 = Constraint(expr= 2*m.b746", "Var(within=Reals,bounds=(None,None),initialize=0) m.x847 = Var(within=Reals,bounds=(None,None),initialize=0) m.x848 = Var(within=Reals,bounds=(None,None),initialize=0) m.x849 = Var(within=Reals,bounds=(None,None),initialize=0)", "<= 1) m.c1121 = Constraint(expr= m.b695 + m.b696 <= 1)", "m.b621 >= 0) m.c1423 = Constraint(expr= m.b610 - m.b622 >=", "1.18887736200171*m.b660 <= 1.18887736200171) m.c685 = Constraint(expr= m.x481 + 1.18887736200171*m.b661 <=", "= Constraint(expr= m.x339 - 1.26558121681553*m.b618 <= 0) m.c280 = Constraint(expr=", "Constraint(expr= m.x364 - 1.26558121681553*m.b637 <= 0) m.c452 = Constraint(expr= m.x365", "+ m.x261 == 0) m.c109 = Constraint(expr= - 0.75*m.x238 +", "m.x413 + 3.04984759446376*m.b653 <= 3.04984759446376) m.c630 = Constraint(expr= m.x414 +", "- m.b677 - m.b678 + m.b679 - m.b769 
<= 0)", "m.b677 - m.b678 + m.b679 - m.b769 <= 0) m.c1367", "Constraint(expr= m.b767 + m.b768 <= 1) m.c1266 = Constraint(expr= m.b767", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x364 = Var(within=Reals,bounds=(0,None),initialize=0) m.x365 = Var(within=Reals,bounds=(0,None),initialize=0) m.x366 =", "0) m.c260 = Constraint(expr= m.x275 == 0) m.c261 = Constraint(expr=", "m.x423 - m.x426 == 0) m.c421 = Constraint(expr= m.x115 -", "+ m.x841 == 0) m.c989 = Constraint(expr= 5*m.b752 + m.x842", "1) m.c1121 = Constraint(expr= m.b695 + m.b696 <= 1) m.c1122", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x439 = Var(within=Reals,bounds=(0,None),initialize=0) m.x440 = Var(within=Reals,bounds=(0,None),initialize=0) m.x441 =", "- m.x342 == 0) m.c271 = Constraint(expr= m.x70 - m.x340", "m.x332 + 1.32154609891348*m.b614 <= 1.32154609891348) m.c255 = Constraint(expr= m.x333 +", "= Constraint(expr= m.x469 == 0) m.c620 = Constraint(expr= m.x107 -", "= Constraint(expr= m.x366 == 0) m.c439 = Constraint(expr= m.x367 ==", "- m.b729 <= 0) m.c1327 = Constraint(expr= - m.b638 -", "- 0.8*log(1 + m.x406/(0.001 + 0.999*m.b652)))*(0.001 + 0.999* m.b652) <=", "0) m.c853 = Constraint(expr= m.x178 - m.x529 - m.x535 ==", "m.x594 == 0) m.c904 = Constraint(expr= m.x595 == 0) m.c905", "Constraint(expr= m.x354 + 9*m.b624 <= 9) m.c337 = Constraint(expr= m.x355", "Constraint(expr= m.x437 + 2.30162356062425*m.b638 <= 2.30162356062425) m.c501 = Constraint(expr= m.x438", "m.c1273 = Constraint(expr= m.b770 + m.b771 <= 1) m.c1274 =", "== 0) m.c374 = Constraint(expr= m.x377 == 0) m.c375 =", "0) m.c590 = Constraint(expr= m.x461 == 0) m.c591 = Constraint(expr=", "= Constraint(expr=(m.x466/(0.001 + 0.999*m.b655) - 0.85*log(1 + m.x412/(0.001 + 0.999*m.b655)))*(0.001", "m.c341 = Constraint(expr= m.x311 == 0) m.c342 = Constraint(expr= m.x312", "<= 0) m.c634 = Constraint(expr= m.x466 - 1.18887736200171*m.b655 <= 0)", "<= 1) m.c1277 = Constraint(expr= m.b773 + m.b774 <= 1)", "+ m.x441 == 0) m.c505 = 
Constraint(expr= - m.x388 +", "= Constraint(expr= - m.b603 + m.b612 + m.b615 >= 0)", "0) m.c737 = Constraint(expr= m.x503 + 0.940066550763924*m.b665 <= 0.940066550763924) m.c738", "Var(within=Reals,bounds=(0,None),initialize=0) m.x501 = Var(within=Reals,bounds=(0,None),initialize=0) m.x502 = Var(within=Reals,bounds=(0,None),initialize=0) m.x503 = Var(within=Reals,bounds=(0,None),initialize=0)", "- m.b655 <= 0) m.c1072 = Constraint(expr= m.b654 - m.b655", "- m.x298 - m.x301 == 0) m.c296 = Constraint(expr= m.x71", "m.b711 + m.b712 <= 1) m.c1155 = Constraint(expr= m.b710 +", "+ 15*m.b669 <= 15) m.c787 = Constraint(expr= m.x541 + 15*m.b670", "5*m.b711 + m.x801 == 0) m.c949 = Constraint(expr= 2*m.b712 +", "m.b685 = Var(within=Binary,bounds=(0,1),initialize=0) m.b686 = Var(within=Binary,bounds=(0,1),initialize=0) m.b687 = Var(within=Binary,bounds=(0,1),initialize=0) m.b688", "Constraint(expr= m.b768 + m.b769 <= 1) m.c1271 = Constraint(expr= m.b770", "= Constraint(expr= m.x391 + 9*m.b643 <= 9) m.c524 = Constraint(expr=", "= Constraint(expr= m.x504 == 0) m.c724 = Constraint(expr= m.x505 ==", "<= 0) m.c1303 = Constraint(expr= - m.b614 - m.b615 +", "+ m.b626 >= 0) m.c1392 = Constraint(expr= - m.b609 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x450 = Var(within=Reals,bounds=(0,None),initialize=0) m.x451 = Var(within=Reals,bounds=(0,None),initialize=0) m.x452 =", "+ 0.999*m.b606) - 1.5*log(1 + m.x243/(0.001 + 0.999*m.b606)))*(0.001 + 0.999*", "- m.x364 - m.x367 == 0) m.c446 = Constraint(expr= m.x116", "0) m.c1316 = Constraint(expr= m.b629 - m.b719 <= 0) m.c1317", "m.x485 == 0) m.c651 = Constraint(expr= m.x147 - m.x483 -", "9) m.c550 = Constraint(expr= m.x397 + 9*m.b646 <= 9) m.c551", ">= 0) m.c1463 = Constraint(expr= - m.b665 + m.b677 >=", "30) m.c195 = Constraint(expr= m.x258 + 30*m.b609 <= 30) m.c196", "Var(within=Reals,bounds=(0,None),initialize=0) m.x432 = Var(within=Reals,bounds=(0,None),initialize=0) m.x433 = Var(within=Reals,bounds=(0,None),initialize=0) 
m.x434 = Var(within=Reals,bounds=(0,None),initialize=0)", "== 0) m.c898 = Constraint(expr= - 0.6*m.x562 + m.x592 ==", "+ 0.999*m.b627) - 1.1*log(1 + m.x309/(0.001 + 0.999*m.b627)))*(0.001 + 0.999*", "m.x592 = Var(within=Reals,bounds=(0,None),initialize=0) m.x593 = Var(within=Reals,bounds=(0,None),initialize=0) m.x594 = Var(within=Reals,bounds=(0,None),initialize=0) m.x595", "Constraint(expr= m.b597 - m.b598 <= 0) m.c1016 = Constraint(expr= m.b599", "m.x277 + 2.54515263975353*m.b619 <= 2.54515263975353) m.c278 = Constraint(expr= m.x338 -", "9*m.b641 <= 9) m.c522 = Constraint(expr= m.x390 + 9*m.b642 <=", "0) m.c1408 = Constraint(expr= m.b598 + m.b601 - m.b607 >=", "Constraint(expr=(m.x574/(0.001 + 0.999*m.b676) - 0.9*log(1 + m.x550/(0.001 + 0.999*m.b676)))*(0.001 +", "<= 0) m.c493 = Constraint(expr= m.x382 - 33.5*m.b640 <= 0)", "= Constraint(expr= m.x58 - m.x304 - m.x307 == 0) m.c323", "Var(within=Reals,bounds=(0,None),initialize=0) m.x46 = Var(within=Reals,bounds=(0,None),initialize=0) m.x47 = Var(within=Reals,bounds=(0,None),initialize=0) m.x48 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x45 - m.x279 - m.x282 == 0) m.c184 = Constraint(expr=", "m.b670 - m.b760 <= 0) m.c1358 = Constraint(expr= m.b671 -", "m.x474 == 0) m.c649 = Constraint(expr= m.x142 - m.x472 -", "+ m.b742 <= 1) m.c1215 = Constraint(expr= m.b740 + m.b742", "<= 0) m.c1042 = Constraint(expr= m.b624 - m.b625 <= 0)", "- m.b698 <= 0) m.c1296 = Constraint(expr= - m.b608 +", "+ m.b684 - m.b774 <= 0) m.c1372 = Constraint(expr= -", "Var(within=Binary,bounds=(0,1),initialize=0) m.b663 = Var(within=Binary,bounds=(0,1),initialize=0) m.b664 = Var(within=Binary,bounds=(0,1),initialize=0) m.b665 = Var(within=Binary,bounds=(0,1),initialize=0)", "Constraint(expr= m.x17 - m.x20 - m.x23 == 0) m.c9 =", "- m.b656 + m.b657 - m.b747 <= 0) m.c1345 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x193 = Var(within=Reals,bounds=(0,None),initialize=0) m.x194 = Var(within=Reals,bounds=(0,None),initialize=0) 
m.x195 =", "Constraint(expr= m.x322 + 1.83548069293539*m.b613 <= 1.83548069293539) m.c230 = Constraint(expr=(m.x326/(0.001 +", "m.x274 = Var(within=Reals,bounds=(0,None),initialize=0) m.x275 = Var(within=Reals,bounds=(0,None),initialize=0) m.x276 = Var(within=Reals,bounds=(0,None),initialize=0) m.x277", "4*m.b761 + m.x851 == 0) m.c999 = Constraint(expr= 8*m.b762 +", "0) m.c1480 = Constraint(expr= m.b667 - m.b679 >= 0) m.c1481", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b731 = Var(within=Binary,bounds=(0,1),initialize=0) m.b732 = Var(within=Binary,bounds=(0,1),initialize=0) m.b733 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x483 = Var(within=Reals,bounds=(0,None),initialize=0) m.x484 = Var(within=Reals,bounds=(0,None),initialize=0) m.x485 =", "= Constraint(expr= - m.b628 + m.b649 + m.b652 + m.b655", "0.705049913072943*m.b675 <= 0.705049913072943) m.c835 = Constraint(expr= m.x553 + 0.705049913072943*m.b676 <=", "m.c869 = Constraint(expr= - 0.9*m.x554 + m.x584 == 0) m.c870", "Var(within=Reals,bounds=(0,None),initialize=0) m.x96 = Var(within=Reals,bounds=(0,None),initialize=0) m.x97 = Var(within=Reals,bounds=(0,None),initialize=0) m.x98 = Var(within=Reals,bounds=(0,None),initialize=0)", "== 0) m.c925 = Constraint(expr= 6*m.b688 + m.x778 == 0)", "1.04900943706034) m.c582 = Constraint(expr= m.x456 + 1.04900943706034*m.b648 <= 1.04900943706034) m.c583", "0.842233385663186*m.b632 <= 0.842233385663186) m.c432 = Constraint(expr= m.x426 + 0.842233385663186*m.b633 <=", "m.x323 == 0) m.c381 = Constraint(expr= m.x63 - m.x318 -", "m.x823 == 0) m.c971 = Constraint(expr= 3*m.b734 + m.x824 ==", "m.b686 <= 0) m.c1284 = Constraint(expr= - m.b596 + m.b597", "- m.x225 - m.x228 == 0) m.c67 = Constraint(expr= m.x13", "Var(within=Binary,bounds=(0,1),initialize=0) m.b755 = Var(within=Binary,bounds=(0,1),initialize=0) m.b756 = Var(within=Binary,bounds=(0,1),initialize=0) m.b757 = Var(within=Binary,bounds=(0,1),initialize=0)", "- m.b628 <= 0) m.c1045 = Constraint(expr= 
m.b627 - m.b628", "m.c1243 = Constraint(expr= m.b755 + m.b756 <= 1) m.c1244 =", "m.c614 = Constraint(expr= m.x413 == 0) m.c615 = Constraint(expr= m.x414", "Var(within=Reals,bounds=(0,None),initialize=0) m.x335 = Var(within=Reals,bounds=(0,None),initialize=0) m.x336 = Var(within=Reals,bounds=(0,None),initialize=0) m.x337 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x588 = Var(within=Reals,bounds=(0,None),initialize=0) m.x589 = Var(within=Reals,bounds=(0,None),initialize=0) m.x590 =", "= Constraint(expr= m.x161 - m.x494 - m.x497 == 0) m.c702", "0) m.c22 = Constraint(expr= m.x70 - m.x82 - m.x85 ==", "Constraint(expr= m.x85 - m.x370 - m.x373 == 0) m.c479 =", "+ 3.34221486003388*m.b612 <= 3.34221486003388) m.c223 = Constraint(expr= m.x289 + 3.34221486003388*m.b613", "Var(within=Reals,bounds=(0,None),initialize=0) m.x147 = Var(within=Reals,bounds=(0,None),initialize=0) m.x148 = Var(within=Reals,bounds=(0,None),initialize=0) m.x149 = Var(within=Reals,bounds=(0,None),initialize=0)", "8*m.b773 - 3*m.b774 - 4*m.b775, sense=maximize) m.c2 = Constraint(expr= m.x2", "Var(within=Reals,bounds=(0,None),initialize=0) m.x303 = Var(within=Reals,bounds=(0,None),initialize=0) m.x304 = Var(within=Reals,bounds=(0,None),initialize=0) m.x305 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x499 == 0) m.c704 = Constraint(expr= m.x173 - m.x518 -", "m.b651 <= 0) m.c1068 = Constraint(expr= m.b650 - m.b652 <=", "0.999*m.b633) <= 0) m.c409 = Constraint(expr=(m.x424/(0.001 + 0.999*m.b634) - log(1", "= Constraint(expr= m.x77 - m.x356 - m.x359 == 0) m.c351", "9*m.b683 <= 9) m.c921 = Constraint(expr= m.x594 + 9*m.b684 <=", "= Constraint(expr= m.x24 - m.x27 - m.x30 - m.x33 ==", "Var(within=Reals,bounds=(0,None),initialize=0) m.x230 = Var(within=Reals,bounds=(0,None),initialize=0) m.x231 = Var(within=Reals,bounds=(0,None),initialize=0) m.x232 = Var(within=Reals,bounds=(0,None),initialize=0)", "Var(within=Reals,bounds=(0,None),initialize=0) 
m.x369 = Var(within=Reals,bounds=(0,None),initialize=0) m.x370 = Var(within=Reals,bounds=(0,None),initialize=0) m.x371 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x551 + 0.705049913072943*m.b674 <= 0.705049913072943) m.c834 = Constraint(expr= m.x552", "1) m.c1217 = Constraint(expr= m.b743 + m.b744 <= 1) m.c1218", "m.x254 = Var(within=Reals,bounds=(0,None),initialize=0) m.x255 = Var(within=Reals,bounds=(0,None),initialize=0) m.x256 = Var(within=Reals,bounds=(0,None),initialize=0) m.x257", "m.x433 == 0) m.c449 = Constraint(expr= m.x362 - 1.26558121681553*m.b635 <=", "m.x463 + 1.11894339953103*m.b652 <= 1.11894339953103) m.c611 = Constraint(expr=(m.x464/(0.001 + 0.999*m.b653)", "- m.b646 <= 0) m.c1064 = Constraint(expr= m.b647 - m.b648", "m.c254 = Constraint(expr= m.x332 + 1.32154609891348*m.b614 <= 1.32154609891348) m.c255 =", "m.x419 == 0) m.c387 = Constraint(expr= m.x111 - m.x417 -", "m.b751 <= 1) m.c1234 = Constraint(expr= m.b750 + m.b751 <=", "Var(within=Reals,bounds=(None,None),initialize=0) m.x802 = Var(within=Reals,bounds=(None,None),initialize=0) m.x803 = Var(within=Reals,bounds=(None,None),initialize=0) m.x804 = Var(within=Reals,bounds=(None,None),initialize=0)", "m.b706 <= 1) m.c1143 = Constraint(expr= m.b704 + m.b706 <=", "15) m.c303 = Constraint(expr= m.x300 + 15*m.b621 <= 15) m.c304", "Var(within=Reals,bounds=(0,None),initialize=0) m.x558 = Var(within=Reals,bounds=(0,None),initialize=0) m.x559 = Var(within=Reals,bounds=(0,None),initialize=0) m.x560 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c1472 = Constraint(expr= m.b662 - m.b671 >= 0) m.c1473 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x165 = Var(within=Reals,bounds=(0,None),initialize=0) m.x166 = Var(within=Reals,bounds=(0,None),initialize=0) m.x167 = Var(within=Reals,bounds=(0,None),initialize=0)", "- m.x25 == 0) m.c11 = Constraint(expr= m.x23 - m.x26", "Var(within=Reals,bounds=(0,None),initialize=0) m.x457 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x458 = Var(within=Reals,bounds=(0,None),initialize=0) m.x459 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b745 <= 1) m.c1222 = Constraint(expr= m.b744 + m.b745 <=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x111 = Var(within=Reals,bounds=(0,None),initialize=0) m.x112 = Var(within=Reals,bounds=(0,None),initialize=0) m.x113 = Var(within=Reals,bounds=(0,None),initialize=0)", "+ m.x519 == 0) m.c694 = Constraint(expr= - 0.75*m.x496 +", "Constraint(expr= m.x167 - m.x506 - m.x509 == 0) m.c762 =", "m.x370 - m.x373 == 0) m.c479 = Constraint(expr= m.x92 -", "m.x86 = Var(within=Reals,bounds=(0,20),initialize=0) m.x87 = Var(within=Reals,bounds=(0,20),initialize=0) m.x88 = Var(within=Reals,bounds=(0,20),initialize=0) m.x89", "Var(within=Reals,bounds=(0,None),initialize=0) m.x224 = Var(within=Reals,bounds=(0,None),initialize=0) m.x225 = Var(within=Reals,bounds=(0,None),initialize=0) m.x226 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x496 = Var(within=Reals,bounds=(0,None),initialize=0) m.x497 = Var(within=Reals,bounds=(0,None),initialize=0) m.x498 =", "m.x279 - m.x282 == 0) m.c184 = Constraint(expr= m.x46 -", "m.b674 - m.b675 + m.b676 - m.b766 <= 0) m.c1364", "== 0) m.c53 = Constraint(expr=(m.x224/(0.001 + 0.999*m.b596) - log(1 +", "<= 0) m.c97 = Constraint(expr= m.x220 - 40*m.b601 <= 0)", "m.x11 - m.x14 + m.x17 == 0) m.c6 = Constraint(expr=", "= Constraint(expr= m.x270 - 2.54515263975353*m.b618 <= 0) m.c274 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x504 = Var(within=Reals,bounds=(0,None),initialize=0) m.x505 = Var(within=Reals,bounds=(0,None),initialize=0) m.x506 =", "== 0) m.c762 = Constraint(expr= m.x168 - m.x507 - m.x510", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x790 = Var(within=Reals,bounds=(None,None),initialize=0) m.x791 = Var(within=Reals,bounds=(None,None),initialize=0) m.x792 =", "Constraint(expr= 
m.b690 + m.b691 <= 1) m.c1115 = Constraint(expr= m.b692", "= Constraint(expr= - m.b614 + m.b615 - m.b705 <= 0)", "1) m.c1208 = Constraint(expr= m.b738 + m.b739 <= 1) m.c1209", "m.b623 - m.b624 <= 0) m.c1041 = Constraint(expr= m.b623 -", "m.c283 = Constraint(expr= m.x343 + 1.26558121681553*m.b619 <= 1.26558121681553) m.c284 =", "== 0) m.c945 = Constraint(expr= 6*m.b708 + m.x798 == 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x304 = Var(within=Reals,bounds=(0,None),initialize=0) m.x305 = Var(within=Reals,bounds=(0,None),initialize=0) m.x306 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x784 = Var(within=Reals,bounds=(None,None),initialize=0) m.x785 = Var(within=Reals,bounds=(None,None),initialize=0) m.x786 =", "m.c392 = Constraint(expr= m.x323 + 1.83548069293539*m.b629 <= 1.83548069293539) m.c393 =", "9) m.c922 = Constraint(expr= m.x595 + 9*m.b685 <= 9) m.c923", "0 0 0 0 0 0 0 # # Nonzero", "m.c1386 = Constraint(expr= - m.b606 + m.b618 >= 0) m.c1387", "m.x183 - m.x543 - m.x546 == 0) m.c799 = Constraint(expr=", "= Constraint(expr= m.x452 - 1.04900943706034*m.b647 <= 0) m.c579 = Constraint(expr=", "Constraint(expr= m.x591 - 9*m.b684 <= 0) m.c919 = Constraint(expr= m.x592", "Constraint(expr=(m.x459/(0.001 + 0.999*m.b651) - 0.8*log(1 + m.x405/(0.001 + 0.999*m.b651)))*(0.001 +", "= Constraint(expr= m.x409 + 3.04984759446376*m.b652 <= 3.04984759446376) m.c605 = Constraint(expr=", "<= 0.940066550763924) m.c691 = Constraint(expr= m.x493 + 0.940066550763924*m.b661 <= 0.940066550763924)", "m.x534 = Var(within=Reals,bounds=(0,None),initialize=0) m.x535 = Var(within=Reals,bounds=(0,None),initialize=0) m.x536 = Var(within=Reals,bounds=(0,None),initialize=0) m.x537", "== 0) m.c169 = Constraint(expr= m.x253 == 0) m.c170 =", "+ m.x786 == 0) m.c934 = Constraint(expr= 5*m.b697 + m.x787", "m.c1117 = Constraint(expr= m.b692 + m.b693 <= 1) m.c1118 =", "Constraint(expr= m.b704 + m.b705 <= 1) m.c1140 = Constraint(expr= 
m.b704", "= Var(within=Reals,bounds=(0,20),initialize=0) m.x91 = Var(within=Reals,bounds=(0,20),initialize=0) m.x92 = Var(within=Reals,bounds=(0,None),initialize=0) m.x93 =", "m.x355 == 0) m.c326 = Constraint(expr= m.x302 - 15*m.b623 <=", "= Constraint(expr= m.b599 - m.b601 <= 0) m.c1018 = Constraint(expr=", "= Constraint(expr=(m.x574/(0.001 + 0.999*m.b676) - 0.9*log(1 + m.x550/(0.001 + 0.999*m.b676)))*(0.001", "+ m.b621 + m.b624 + m.b627 >= 0) m.c1393 =", "== 0) m.c515 = Constraint(expr= m.x122 - m.x440 - m.x443", "Constraint(expr= m.x141 - m.x471 - m.x474 == 0) m.c649 =", "+ 0.999*m.b660)))*(0.001 + 0.999* m.b660) <= 0) m.c667 = Constraint(expr=(m.x490/(0.001", "6*m.b759 + m.x849 == 0) m.c997 = Constraint(expr= 3*m.b760 +", "m.c1139 = Constraint(expr= m.b704 + m.b705 <= 1) m.c1140 =", "<= 0.994083415506506) m.c744 = Constraint(expr= m.x531 + 0.994083415506506*m.b666 <= 0.994083415506506)", "<= 3.34221486003388) m.c222 = Constraint(expr= m.x288 + 3.34221486003388*m.b612 <= 3.34221486003388)", "Constraint(expr= m.b716 + m.b717 <= 1) m.c1166 = Constraint(expr= m.b717", "= Constraint(expr= 8*m.b762 + m.x852 == 0) m.c1000 = Constraint(expr=", "+ m.x309/(0.001 + 0.999*m.b627)))*(0.001 + 0.999* m.b627) <= 0) m.c340", "== 0) m.c803 = Constraint(expr= m.x542 - 0.705049913072943*m.b671 <= 0)", "= Constraint(expr= m.x535 + 0.994083415506506*m.b679 <= 0.994083415506506) m.c863 = Constraint(expr=", "Constraint(expr= m.x587 == 0) m.c876 = Constraint(expr= m.x588 == 0)", "m.b724 <= 1) m.c1177 = Constraint(expr= m.b722 + m.b723 <=", "m.c1456 = Constraint(expr= m.b628 - m.b655 >= 0) m.c1457 =", "= Constraint(expr=(m.x524/(0.001 + 0.999*m.b665) - 1.5*log(1 + m.x500/(0.001 + 0.999*m.b665)))*(0.001", "0.999*m.b616) - 0.9*log(1 + m.x292/(0.001 + 0.999*m.b616)))*(0.001 + 0.999* m.b616)", "3.71357206670431*m.b596 <= 0) m.c75 = Constraint(expr= m.x225 - 3.71357206670431*m.b597 <=", "m.c1261 = Constraint(expr= m.b764 + m.b765 <= 1) m.c1262 =", "Constraint(expr=(m.x460/(0.001 + 
0.999*m.b652) - 0.8*log(1 + m.x406/(0.001 + 0.999*m.b652)))*(0.001 +", "m.c627 = Constraint(expr= m.x411 - 3.04984759446376*m.b654 <= 0) m.c628 =", "+ 0.999* m.b627) <= 0) m.c340 = Constraint(expr=(m.x358/(0.001 + 0.999*m.b628)", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x839 = Var(within=Reals,bounds=(None,None),initialize=0) m.x840 = Var(within=Reals,bounds=(None,None),initialize=0) m.x841 =", "- m.x453 - m.x456 == 0) m.c571 = Constraint(expr= m.x130", "Constraint(expr=(m.x435/(0.001 + 0.999*m.b639) - 0.65*log(1 + m.x381/(0.001 + 0.999*m.b639)))*(0.001 +", "40) m.c99 = Constraint(expr= m.x222 + 40*m.b600 <= 40) m.c100", "+ m.b678 - m.b768 <= 0) m.c1366 = Constraint(expr= -", "Var(within=Binary,bounds=(0,1),initialize=0) m.b597 = Var(within=Binary,bounds=(0,1),initialize=0) m.b598 = Var(within=Binary,bounds=(0,1),initialize=0) m.b599 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.x299 = Var(within=Reals,bounds=(0,None),initialize=0) m.x300 = Var(within=Reals,bounds=(0,None),initialize=0) m.x301 = Var(within=Reals,bounds=(0,None),initialize=0) m.x302", "+ 3.04984759446376*m.b654 <= 3.04984759446376) m.c631 = Constraint(expr= m.x415 + 3.04984759446376*m.b655", "m.x107 - m.x410 - m.x413 == 0) m.c621 = Constraint(expr=", "0) m.c553 = Constraint(expr= m.x448 - 9*m.b646 <= 0) m.c554", "== 0) m.c371 = Constraint(expr= m.x323 == 0) m.c372 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x512 = Var(within=Reals,bounds=(0,None),initialize=0) m.x513 = Var(within=Reals,bounds=(0,None),initialize=0) m.x514 =", "1) m.c1156 = Constraint(expr= m.b711 + m.b712 <= 1) m.c1157", "Var(within=Reals,bounds=(0,None),initialize=0) m.x504 = Var(within=Reals,bounds=(0,None),initialize=0) m.x505 = Var(within=Reals,bounds=(0,None),initialize=0) m.x506 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x186 - m.x549 - m.x552 == 0) m.c826 =", "<= 0) m.c206 = Constraint(expr= m.x287 == 0) m.c207 =", "m.x301 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x302 = Var(within=Reals,bounds=(0,None),initialize=0) m.x303 = Var(within=Reals,bounds=(0,None),initialize=0) m.x304", "Var(within=Reals,bounds=(0,None),initialize=0) m.x373 = Var(within=Reals,bounds=(0,None),initialize=0) m.x374 = Var(within=Reals,bounds=(0,None),initialize=0) m.x375 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b612 + m.b630 >= 0) m.c1381 = Constraint(expr= - m.b613", "m.x40 - m.x49 - m.x52 == 0) m.c17 = Constraint(expr=", "0) m.c558 = Constraint(expr=(m.x453/(0.001 + 0.999*m.b648) - 0.75*log(1 + m.x399/(0.001", "m.x863 == 0) m.c1011 = Constraint(expr= 3*m.b774 + m.x864 ==", "m.b739 <= 1) m.c1209 = Constraint(expr= m.b737 + m.b739 <=", "m.c1398 = Constraint(expr= - m.b624 + m.b642 + m.b645 >=", "m.c1220 = Constraint(expr= m.b744 + m.b745 <= 1) m.c1221 =", "- m.b642 >= 0) m.c1444 = Constraint(expr= m.b625 - m.b643", "Constraint(expr= m.x420 + 20*m.b630 <= 20) m.c406 = Constraint(expr= m.x421", "Constraint(expr= m.x464 - 1.18887736200171*m.b653 <= 0) m.c633 = Constraint(expr= m.x465", "- m.x530 == 0) m.c732 = Constraint(expr= m.x177 - m.x525", "Constraint(expr= m.b773 + m.b774 <= 1) m.c1278 = Constraint(expr= m.b773", "Constraint(expr= m.x493 == 0) m.c674 = Constraint(expr= m.x143 - m.x476", "Constraint(expr= m.x566 - 0.666992981045719*m.b671 <= 0) m.c810 = Constraint(expr= m.x567", "<= 0) m.c327 = Constraint(expr= m.x303 - 15*m.b624 <= 0)", "= Constraint(expr= m.b735 + m.b736 <= 1) m.c1203 = Constraint(expr=", "m.b759 + m.b760 <= 1) m.c1253 = Constraint(expr= m.b761 +", "13.5*m.b682 <= 13.5) m.c896 = Constraint(expr= - 0.6*m.x560 + m.x590", "= Constraint(expr= m.b761 + m.b762 <= 1) m.c1256 = Constraint(expr=", "Constraint(expr= m.x255 - 30*m.b609 <= 0) m.c193 = Constraint(expr= m.x256", "= Constraint(expr= m.x36 - m.x255 - m.x258 == 0) m.c181", "1.18887736200171) m.c638 = Constraint(expr=(m.x482/(0.001 + 0.999*m.b656) - log(1 + m.x470/(0.001", "Var(within=Reals,bounds=(0,None),initialize=0) 
m.x426 = Var(within=Reals,bounds=(0,None),initialize=0) m.x427 = Var(within=Reals,bounds=(0,None),initialize=0) m.x428 = Var(within=Reals,bounds=(0,None),initialize=0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x407 = Var(within=Reals,bounds=(0,None),initialize=0) m.x408 = Var(within=Reals,bounds=(0,None),initialize=0) m.x409 = Var(within=Reals,bounds=(0,None),initialize=0)", "- m.x138 == 0) m.c34 = Constraint(expr= m.x136 - m.x139", "= Constraint(expr=(m.x231/(0.001 + 0.999*m.b600) - 1.2*log(1 + m.x219/(0.001 + 0.999*m.b600)))*(0.001", "m.c425 = Constraint(expr= m.x335 + 1.32154609891348*m.b632 <= 1.32154609891348) m.c426 =", "m.b667 - m.b757 <= 0) m.c1355 = Constraint(expr= m.b668 -", "m.c259 = Constraint(expr=(m.x340/(0.001 + 0.999*m.b619) - log(1 + m.x271/(0.001 +", "0) m.c538 = Constraint(expr= m.x451 == 0) m.c539 = Constraint(expr=", "m.x232 - 4.45628648004517*m.b601 <= 0) m.c104 = Constraint(expr= m.x233 +", "m.x522 == 0) m.c700 = Constraint(expr= m.x523 == 0) m.c701", "m.b599 - m.b601 <= 0) m.c1018 = Constraint(expr= m.b600 -", "<= 0) m.c231 = Constraint(expr=(m.x327/(0.001 + 0.999*m.b615) - 0.9*log(1 +", "m.x566 - 0.666992981045719*m.b671 <= 0) m.c810 = Constraint(expr= m.x567 -", "= Constraint(expr= m.b683 - m.b684 <= 0) m.c1101 = Constraint(expr=", "m.x228 = Var(within=Reals,bounds=(0,None),initialize=0) m.x229 = Var(within=Reals,bounds=(0,None),initialize=0) m.x230 = Var(within=Reals,bounds=(0,None),initialize=0) m.x231", "Constraint(expr= m.b686 + m.b687 <= 1) m.c1104 = Constraint(expr= m.b686", "1) m.c1175 = Constraint(expr= m.b722 + m.b723 <= 1) m.c1176", "m.x225 - 3.71357206670431*m.b597 <= 0) m.c76 = Constraint(expr= m.x226 -", "3.71357206670431) m.c80 = Constraint(expr=(m.x230/(0.001 + 0.999*m.b599) - 1.2*log(1 + m.x218/(0.001", "Constraint(expr= - m.b656 - m.b657 + m.b658 - m.b748 <=", "<= 0.572481933717686) m.c460 = Constraint(expr= m.x433 + 0.572481933717686*m.b637 <= 0.572481933717686)", "+ 0.999*m.b660) - 1.2*log(1 + m.x477/(0.001 + 
0.999*m.b660)))*(0.001 + 0.999*", "1.32154609891348*m.b634 <= 1.32154609891348) m.c428 = Constraint(expr= m.x422 - 0.842233385663186*m.b632 <=", "= Constraint(expr= 3*m.b705 + m.x795 == 0) m.c943 = Constraint(expr=", "m.b756 + m.b757 <= 1) m.c1245 = Constraint(expr= m.b755 +", "Constraint(expr= m.x226 - 3.71357206670431*m.b598 <= 0) m.c77 = Constraint(expr= m.x227", "m.b620 - m.b621 <= 0) m.c1038 = Constraint(expr= m.b620 -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x311 = Var(within=Reals,bounds=(0,None),initialize=0) m.x312 = Var(within=Reals,bounds=(0,None),initialize=0) m.x313 =", "m.c532 = Constraint(expr= - m.x394 + m.x448 == 0) m.c533", "0) m.c617 = Constraint(expr= m.x467 == 0) m.c618 = Constraint(expr=", "m.c609 = Constraint(expr= m.x462 + 1.11894339953103*m.b651 <= 1.11894339953103) m.c610 =", "Constraint(expr= m.x397 == 0) m.c536 = Constraint(expr= m.x449 == 0)", "m.c896 = Constraint(expr= - 0.6*m.x560 + m.x590 == 0) m.c897", "<= 1) m.c1229 = Constraint(expr= m.b749 + m.b750 <= 1)", "+ m.b673 + m.b676 >= 0) m.c1463 = Constraint(expr= -", "Var(within=Reals,bounds=(0,20),initialize=0) m.x90 = Var(within=Reals,bounds=(0,20),initialize=0) m.x91 = Var(within=Reals,bounds=(0,20),initialize=0) m.x92 = Var(within=Reals,bounds=(0,None),initialize=0)", "120*m.x196 + 285*m.x197 + 390*m.x198 + 350*m.x199 + 290*m.x200 +", "- m.x501 - m.x504 == 0) m.c730 = Constraint(expr= m.x166", "m.b609 - m.b627 >= 0) m.c1429 = Constraint(expr= m.b610 -", "m.b602 - m.b603 + m.b604 - m.b694 <= 0) m.c1292", "- 2*m.b748 - 9*m.b749 - 2*m.b750 - 9*m.b751 - 5*m.b752", "Constraint(expr= m.x53 - m.x296 - m.x299 == 0) m.c294 =", "<= 1) m.c1283 = Constraint(expr= m.b596 - m.b686 <= 0)", "+ 50*m.x129 + 10*m.x130 + 15*m.x131 + 20*m.x132 + 20*m.x133", "Var(within=Reals,bounds=(0,None),initialize=0) m.x48 = Var(within=Reals,bounds=(0,None),initialize=0) m.x49 = Var(within=Reals,bounds=(0,None),initialize=0) m.x50 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b639 <= 0) m.c1056 
= Constraint(expr= m.b638 - m.b640 <=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x174 = Var(within=Reals,bounds=(0,None),initialize=0) m.x175 = Var(within=Reals,bounds=(0,None),initialize=0) m.x176 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c432 = Constraint(expr= m.x426 + 0.842233385663186*m.b633 <= 0.842233385663186) m.c433 =", "m.x287 = Var(within=Reals,bounds=(0,None),initialize=0) m.x288 = Var(within=Reals,bounds=(0,None),initialize=0) m.x289 = Var(within=Reals,bounds=(0,None),initialize=0) m.x290", "Constraint(expr= m.b684 - m.b685 <= 0) m.c1103 = Constraint(expr= m.b686", "m.x69 - m.x81 - m.x84 == 0) m.c22 = Constraint(expr=", "= Constraint(expr= m.x335 == 0) m.c411 = Constraint(expr= m.x336 ==", "= Constraint(expr= m.b644 - m.b646 <= 0) m.c1063 = Constraint(expr=", "Var(within=Reals,bounds=(None,None),initialize=0) m.x831 = Var(within=Reals,bounds=(None,None),initialize=0) m.x832 = Var(within=Reals,bounds=(None,None),initialize=0) m.x833 = Var(within=Reals,bounds=(None,None),initialize=0)", "= Constraint(expr= m.x123 - m.x441 - m.x444 == 0) m.c517", "- m.x588 == 0) m.c883 = Constraint(expr= m.x208 - m.x586", "- m.x301 == 0) m.c296 = Constraint(expr= m.x71 - m.x344", "<= 0) m.c1079 = Constraint(expr= m.b662 - m.b663 <= 0)", "m.x346 = Var(within=Reals,bounds=(0,None),initialize=0) m.x347 = Var(within=Reals,bounds=(0,None),initialize=0) m.x348 = Var(within=Reals,bounds=(0,None),initialize=0) m.x349", "+ m.b747 <= 1) m.c1226 = Constraint(expr= m.b747 + m.b748", "= Constraint(expr= m.b765 + m.b766 <= 1) m.c1263 = Constraint(expr=", "0) m.c531 = Constraint(expr= - m.x393 + m.x447 == 0)", "<= 0) m.c1284 = Constraint(expr= - m.b596 + m.b597 -", "+ m.x279 == 0) m.c163 = Constraint(expr= - m.x250 +", "<= 1) m.c1240 = Constraint(expr= m.b753 + m.b754 <= 1)", "Var(within=Binary,bounds=(0,1),initialize=0) m.b693 = Var(within=Binary,bounds=(0,1),initialize=0) m.b694 = Var(within=Binary,bounds=(0,1),initialize=0) m.b695 = 
Var(within=Binary,bounds=(0,1),initialize=0)", "4.45628648004517*m.b610 <= 0) m.c188 = Constraint(expr= m.x251 + 4.45628648004517*m.b608 <=", "= Constraint(expr= m.x295 + 3.34221486003388*m.b616 <= 3.34221486003388) m.c251 = Constraint(expr=", "5*m.b707 - 6*m.b708 - 7*m.b709 - 2*m.b710 - 5*m.b711 -", "<= 1) m.c1154 = Constraint(expr= m.b711 + m.b712 <= 1)", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b749 = Var(within=Binary,bounds=(0,1),initialize=0) m.b750 = Var(within=Binary,bounds=(0,1),initialize=0) m.b751 =", "Constraint(expr= m.b599 - m.b600 <= 0) m.c1017 = Constraint(expr= m.b599", "m.c220 = Constraint(expr= m.x286 - 3.34221486003388*m.b613 <= 0) m.c221 =", "Constraint(expr= - m.x375 + m.x417 == 0) m.c370 = Constraint(expr=", "<= 1) m.c1186 = Constraint(expr= m.b726 + m.b727 <= 1)", "Constraint(expr= - m.b627 + m.b648 + m.b651 + m.b654 >=", "4.45628648004517) m.c107 = Constraint(expr= - 0.75*m.x236 + m.x260 == 0)", "9*m.b641 <= 0) m.c519 = Constraint(expr= m.x387 - 9*m.b642 <=", "0) m.c480 = Constraint(expr= m.x93 - m.x381 - m.x384 ==", "= Constraint(expr= m.b653 - m.b655 <= 0) m.c1072 = Constraint(expr=", "0) m.c423 = Constraint(expr= m.x330 - 1.32154609891348*m.b633 <= 0) m.c424", "<= 1) m.c1226 = Constraint(expr= m.b747 + m.b748 <= 1)", "m.x48 = Var(within=Reals,bounds=(0,None),initialize=0) m.x49 = Var(within=Reals,bounds=(0,None),initialize=0) m.x50 = Var(within=Reals,bounds=(0,None),initialize=0) m.x51", "<= 1) m.c1221 = Constraint(expr= m.b743 + m.b745 <= 1)", "m.x801 = Var(within=Reals,bounds=(None,None),initialize=0) m.x802 = Var(within=Reals,bounds=(None,None),initialize=0) m.x803 = Var(within=Reals,bounds=(None,None),initialize=0) m.x804", "== 0) m.c994 = Constraint(expr= 8*m.b757 + m.x847 == 0)", "+ m.b772 <= 1) m.c1275 = Constraint(expr= m.b770 + m.b772", "+ 0.999*m.b652) - 0.8*log(1 + m.x406/(0.001 + 0.999*m.b652)))*(0.001 + 0.999*", "<= 0.78338879230327) m.c665 = Constraint(expr=(m.x488/(0.001 + 0.999*m.b659) - 1.2*log(1 +", "13.5) 
m.c896 = Constraint(expr= - 0.6*m.x560 + m.x590 == 0)", "+ m.x857 == 0) m.c1005 = Constraint(expr= 8*m.b768 + m.x858", "0) m.c183 = Constraint(expr= m.x45 - m.x279 - m.x282 ==", "= Constraint(expr= m.x151 - m.x490 - m.x493 == 0) m.c680", "- 4*m.b703 - 4*m.b704 - 3*m.b705 - 2*m.b706 - 5*m.b707", "m.x324 + 1.83548069293539*m.b630 <= 1.83548069293539) m.c394 = Constraint(expr= m.x325 +", "m.c941 = Constraint(expr= 4*m.b704 + m.x794 == 0) m.c942 =", "m.x290/(0.001 + 0.999*m.b614)))*(0.001 + 0.999* m.b614) <= 0) m.c231 =", "m.x594 + 9*m.b684 <= 9) m.c922 = Constraint(expr= m.x595 +", "+ 0.999* m.b599) <= 0) m.c81 = Constraint(expr=(m.x231/(0.001 + 0.999*m.b600)", "= Constraint(expr= m.b714 + m.b715 <= 1) m.c1161 = Constraint(expr=", "m.x506 - 0.940066550763924*m.b668 <= 0) m.c771 = Constraint(expr= m.x507 -", "5*m.x87 - 5*m.x88 - 5*m.x89 - 5*m.x90 - 5*m.x91 +", "30*m.b670 <= 30) m.c782 = Constraint(expr= m.x536 - 15*m.b668 <=", "m.x559 == 0) m.c881 = Constraint(expr= m.x206 - m.x584 -", "13.5*m.b681 <= 13.5) m.c895 = Constraint(expr= m.x589 + 13.5*m.b682 <=", "= Constraint(expr= m.x586 - 13.5*m.b682 <= 0) m.c893 = Constraint(expr=", "0) m.c1419 = Constraint(expr= m.b606 - m.b618 >= 0) m.c1420", "m.x47 - m.x284 - m.x287 == 0) m.c213 = Constraint(expr=", "= Constraint(expr= m.x341 == 0) m.c264 = Constraint(expr= m.x342 ==", "m.x423 = Var(within=Reals,bounds=(0,None),initialize=0) m.x424 = Var(within=Reals,bounds=(0,None),initialize=0) m.x425 = Var(within=Reals,bounds=(0,None),initialize=0) m.x426", "350*m.x199 + 290*m.x200 + 405*m.x201 + 190*m.x202 + 280*m.x203 +", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x841 = Var(within=Reals,bounds=(None,None),initialize=0) m.x842 = Var(within=Reals,bounds=(None,None),initialize=0) m.x843 =", "== 0) m.c15 = Constraint(expr= m.x39 - m.x48 - m.x51", "Var(within=Reals,bounds=(0,None),initialize=0) m.x306 = Var(within=Reals,bounds=(0,None),initialize=0) m.x307 = Var(within=Reals,bounds=(0,None),initialize=0) m.x308 = 
Var(within=Reals,bounds=(0,None),initialize=0)", "- 40*m.b597 <= 0) m.c70 = Constraint(expr= m.x214 - 40*m.b598", "0) m.c1076 = Constraint(expr= m.b659 - m.b660 <= 0) m.c1077", "m.x71 = Var(within=Reals,bounds=(0,None),initialize=0) m.x72 = Var(within=Reals,bounds=(0,None),initialize=0) m.x73 = Var(within=Reals,bounds=(0,None),initialize=0) m.x74", "m.c1332 = Constraint(expr= - m.b644 + m.b645 - m.b735 <=", "= Var(within=Reals,bounds=(0,20),initialize=0) m.x88 = Var(within=Reals,bounds=(0,20),initialize=0) m.x89 = Var(within=Reals,bounds=(0,20),initialize=0) m.x90 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x387 = Var(within=Reals,bounds=(0,None),initialize=0) m.x388 = Var(within=Reals,bounds=(0,None),initialize=0) m.x389 =", "- m.x230 - m.x233 == 0) m.c93 = Constraint(expr= m.x15", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x117 = Var(within=Reals,bounds=(0,None),initialize=0) m.x118 = Var(within=Reals,bounds=(0,None),initialize=0) m.x119 =", "0) m.c976 = Constraint(expr= 6*m.b739 + m.x829 == 0) m.c977", "m.x465 - m.x468 == 0) m.c625 = Constraint(expr= m.x136 -", "= Constraint(expr= m.b663 - m.b672 >= 0) m.c1474 = Constraint(expr=", "m.b621 + m.b622 - m.b712 <= 0) m.c1310 = Constraint(expr=", "m.c874 = Constraint(expr= m.x559 == 0) m.c875 = Constraint(expr= m.x587", "m.c487 = Constraint(expr= m.x370 - 1.26558121681553*m.b640 <= 0) m.c488 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x67 = Var(within=Reals,bounds=(0,None),initialize=0) m.x68 = Var(within=Reals,bounds=(0,None),initialize=0) m.x69 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x208 = Var(within=Reals,bounds=(0,None),initialize=0) m.x209 = Var(within=Reals,bounds=(0,None),initialize=0) m.x210 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c89 = Constraint(expr= m.x8 - m.x218 - m.x221 ==", "m.x570 == 0) m.c802 = Constraint(expr= m.x199 - m.x568 -", "m.x395 == 0) m.c540 = Constraint(expr= m.x99 - m.x393 -", "<= 0) m.c1054 = Constraint(expr= m.b636 - m.b637 
<= 0)", "<= 0) m.c629 = Constraint(expr= m.x413 + 3.04984759446376*m.b653 <= 3.04984759446376)", "+ m.b775 <= 1) m.c1279 = Constraint(expr= m.b773 + m.b774", "<= 0) m.c1350 = Constraint(expr= - m.b662 + m.b663 -", "m.b614 - m.b615 + m.b616 - m.b706 <= 0) m.c1304", "== 0) m.c245 = Constraint(expr= m.x290 - 3.34221486003388*m.b614 <= 0)", "- m.x334 == 0) m.c245 = Constraint(expr= m.x290 - 3.34221486003388*m.b614", "- 0.940066550763924*m.b662 <= 0) m.c708 = Constraint(expr= m.x495 - 0.940066550763924*m.b663", "+ 0.999*m.b674) - 0.9*log(1 + m.x548/(0.001 + 0.999*m.b674)))*(0.001 + 0.999*", "= Constraint(expr= - m.b607 + m.b619 >= 0) m.c1388 =", "m.x850 == 0) m.c998 = Constraint(expr= 4*m.b761 + m.x851 ==", "Constraint(expr= m.x247 == 0) m.c140 = Constraint(expr= m.x272 == 0)", "- m.b742 <= 0) m.c1340 = Constraint(expr= m.b653 - m.b743", "m.b660 >= 0) m.c1459 = Constraint(expr= - m.b655 + m.b658", "- m.b603 >= 0) m.c1405 = Constraint(expr= m.b598 + m.b601", "== 0) m.c20 = Constraint(expr= m.x68 - m.x80 - m.x83", "m.b625 - m.b643 >= 0) m.c1445 = Constraint(expr= m.b623 -", "15*m.b684 <= 0) m.c913 = Constraint(expr= m.x562 - 15*m.b685 <=", "m.c508 = Constraint(expr= m.x391 == 0) m.c509 = Constraint(expr= m.x443", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b597 = Var(within=Binary,bounds=(0,1),initialize=0) m.b598 = Var(within=Binary,bounds=(0,1),initialize=0) m.b599 =", "== 0) m.c750 = Constraint(expr= - 0.5*m.x513 + m.x537 ==", ">= 0) m.c1419 = Constraint(expr= m.b606 - m.b618 >= 0)", "1.26558121681553) m.c284 = Constraint(expr= - 0.9*m.x296 + m.x344 == 0)", "m.b667 - m.b679 >= 0) m.c1481 = Constraint(expr= m.b668 -", "m.b659 - m.b660 + m.b661 - m.b751 <= 0) m.c1349", "<= 0) m.c1097 = Constraint(expr= m.b680 - m.b681 <= 0)", "= Constraint(expr= m.b692 + m.b693 <= 1) m.c1116 = Constraint(expr=", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b726 = Var(within=Binary,bounds=(0,1),initialize=0) m.b727 = Var(within=Binary,bounds=(0,1),initialize=0) m.b728 =", "== 
0) m.c734 = Constraint(expr= m.x500 - 0.940066550763924*m.b665 <= 0)", "Constraint(expr= m.x358 - 3.04984759446376*m.b628 <= 0) m.c362 = Constraint(expr= m.x359", "0) m.c1046 = Constraint(expr= m.b629 - m.b630 <= 0) m.c1047", "- 15*m.b624 <= 0) m.c328 = Constraint(expr= m.x304 - 15*m.b625", "= Constraint(expr= m.x264 + 3.34221486003388*m.b603 <= 3.34221486003388) m.c133 = Constraint(expr=", "- m.x461 == 0) m.c597 = Constraint(expr= m.x132 - m.x459", "1.2*log(1 + m.x478/(0.001 + 0.999*m.b661)))*(0.001 + 0.999* m.b661) <= 0)", "Constraint(expr= m.x176 - m.x524 - m.x530 == 0) m.c732 =", "0) m.c931 = Constraint(expr= 4*m.b694 + m.x784 == 0) m.c932", "+ 0.999*m.b636)))*(0.001 + 0.999* m.b636) <= 0) m.c436 = Constraint(expr=(m.x430/(0.001", "- m.b744 <= 0) m.c1342 = Constraint(expr= - m.b653 -", "m.x335 + 1.32154609891348*m.b632 <= 1.32154609891348) m.c426 = Constraint(expr= m.x336 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x568 = Var(within=Reals,bounds=(0,None),initialize=0) m.x569 = Var(within=Reals,bounds=(0,None),initialize=0) m.x570 =", "m.x360 + 3.04984759446376*m.b627 <= 3.04984759446376) m.c364 = Constraint(expr= m.x361 +", "m.x446 - 9*m.b644 <= 0) m.c552 = Constraint(expr= m.x447 -", "0.940066550763924*m.b663 <= 0.940066550763924) m.c712 = Constraint(expr= m.x499 + 0.940066550763924*m.b664 <=", "- m.x573 - m.x576 == 0) m.c829 = Constraint(expr= m.x202", "m.c1108 = Constraint(expr= m.b687 + m.b688 <= 1) m.c1109 =", "Constraint(expr= m.b716 + m.b718 <= 1) m.c1165 = Constraint(expr= m.b716", "m.b622 + m.b625 + m.b628 >= 0) m.c1394 = Constraint(expr=", "<= 1) m.c1166 = Constraint(expr= m.b717 + m.b718 <= 1)", "m.b631 >= 0) m.c1433 = Constraint(expr= m.b614 - m.b632 >=", "m.x108 - m.x411 - m.x414 == 0) m.c622 = Constraint(expr=", "Constraint(expr= m.x492 + 0.940066550763924*m.b660 <= 0.940066550763924) m.c691 = Constraint(expr= m.x493", "Constraint(expr= m.x245 == 0) m.c138 = Constraint(expr= m.x246 == 0)", "m.b648 <= 0) m.c1065 = Constraint(expr= 
m.b647 - m.b649 <=", "m.b657 >= 0) m.c1468 = Constraint(expr= m.b655 - m.b658 >=", "<= 0.705049913072943) m.c719 = Constraint(expr=(m.x524/(0.001 + 0.999*m.b665) - 1.5*log(1 +", "- m.x157 - m.x160 == 0) m.c44 = Constraint(expr= m.x158", "m.x504 = Var(within=Reals,bounds=(0,None),initialize=0) m.x505 = Var(within=Reals,bounds=(0,None),initialize=0) m.x506 = Var(within=Reals,bounds=(0,None),initialize=0) m.x507", "= Constraint(expr= m.x130 - m.x454 - m.x457 == 0) m.c572", "0) m.c819 = Constraint(expr= m.x552 == 0) m.c820 = Constraint(expr=", "Constraint(expr= m.b655 - m.b658 >= 0) m.c1469 = Constraint(expr= m.b653", "1) m.c1189 = Constraint(expr= m.b728 + m.b729 <= 1) m.c1190", "- m.b620 - m.b621 + m.b622 - m.b712 <= 0)", "15*m.b610 <= 0) m.c200 = Constraint(expr= m.x281 + 15*m.b608 <=", "m.c177 = Constraint(expr= m.x33 - m.x249 - m.x252 == 0)", "0) m.c271 = Constraint(expr= m.x70 - m.x340 - m.x343 ==", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x26 = Var(within=Reals,bounds=(0,None),initialize=0) m.x27 = Var(within=Reals,bounds=(0,None),initialize=0) m.x28 =", "m.x40 - m.x262 - m.x265 == 0) m.c122 = Constraint(expr=", "Constraint(expr= m.b664 - m.b676 >= 0) m.c1478 = Constraint(expr= m.b665", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b744 = Var(within=Binary,bounds=(0,1),initialize=0) m.b745 = Var(within=Binary,bounds=(0,1),initialize=0) m.b746 =", "= Constraint(expr= m.x547 == 0) m.c794 = Constraint(expr= m.x569 ==", "= Constraint(expr= m.b608 - m.b626 >= 0) m.c1428 = Constraint(expr=", "- m.x6 - m.x9 == 0) m.c4 = Constraint(expr= m.x4", "m.b695 + m.b696 <= 1) m.c1124 = Constraint(expr= m.b696 +", "m.c1118 = Constraint(expr= m.b693 + m.b694 <= 1) m.c1119 =", "m.b767 + m.b768 <= 1) m.c1268 = Constraint(expr= m.b768 +", "== 0) m.c342 = Constraint(expr= m.x312 == 0) m.c343 =", "m.b754 <= 1) m.c1241 = Constraint(expr= m.b755 + m.b756 <=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x317 = Var(within=Reals,bounds=(0,None),initialize=0) m.x318 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x319 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x21 = Var(within=Reals,bounds=(0,None),initialize=0) m.x22 = Var(within=Reals,bounds=(0,None),initialize=0) m.x23 =", "+ m.b753 <= 1) m.c1236 = Constraint(expr= m.b752 + m.b754", "Constraint(expr= 8*m.b753 + m.x843 == 0) m.c991 = Constraint(expr= 4*m.b754", "Var(within=Reals,bounds=(0,None),initialize=0) m.x119 = Var(within=Reals,bounds=(0,None),initialize=0) m.x120 = Var(within=Reals,bounds=(0,None),initialize=0) m.x121 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x256 - 30*m.b610 <= 0) m.c194 = Constraint(expr= m.x257", "<= 0) m.c130 = Constraint(expr= m.x262 - 3.34221486003388*m.b604 <= 0)", "m.b611 - m.b613 <= 0) m.c1030 = Constraint(expr= m.b612 -", "m.b619 <= 0) m.c1037 = Constraint(expr= m.b620 - m.b621 <=", "+ m.b717 <= 1) m.c1164 = Constraint(expr= m.b716 + m.b718", "Constraint(expr= m.x339 - 1.26558121681553*m.b618 <= 0) m.c280 = Constraint(expr= m.x340", "m.x444 == 0) m.c517 = Constraint(expr= m.x124 - m.x442 -", "== 0) m.c214 = Constraint(expr= m.x49 - m.x286 - m.x289", "<= 0) m.c1038 = Constraint(expr= m.b620 - m.b622 <= 0)", "- 3*m.b722 - m.b723 - 9*m.b724 - 2*m.b725 - 6*m.b726", "- m.b636 >= 0) m.c1438 = Constraint(expr= m.b619 - m.b637", "m.c756 = Constraint(expr= m.x516 == 0) m.c757 = Constraint(expr= m.x517", "15*m.b681 <= 0) m.c886 = Constraint(expr= m.x556 - 15*m.b682 <=", "m.x856 == 0) m.c1004 = Constraint(expr= 4*m.b767 + m.x857 ==", "m.b623 - m.b624 + m.b625 - m.b715 <= 0) m.c1313", "m.b705 = Var(within=Binary,bounds=(0,1),initialize=0) m.b706 = Var(within=Binary,bounds=(0,1),initialize=0) m.b707 = Var(within=Binary,bounds=(0,1),initialize=0) m.b708", "m.x343 + 1.26558121681553*m.b619 <= 1.26558121681553) m.c284 = Constraint(expr= - 0.9*m.x296", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x864 = Var(within=Reals,bounds=(None,None),initialize=0) m.x865 
= Var(within=Reals,bounds=(None,None),initialize=0) m.obj =", "m.x426 == 0) m.c421 = Constraint(expr= m.x115 - m.x424 -", "Constraint(expr= m.x275 == 0) m.c261 = Constraint(expr= m.x276 == 0)", "m.x405 - m.x408 == 0) m.c595 = Constraint(expr= m.x106 -", "m.x275 = Var(within=Reals,bounds=(0,None),initialize=0) m.x276 = Var(within=Reals,bounds=(0,None),initialize=0) m.x277 = Var(within=Reals,bounds=(0,None),initialize=0) m.x278", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b649 = Var(within=Binary,bounds=(0,1),initialize=0) m.b650 = Var(within=Binary,bounds=(0,1),initialize=0) m.b651 =", "m.c827 = Constraint(expr= m.x200 - m.x572 - m.x575 == 0)", "<= 4.45628648004517) m.c155 = Constraint(expr= m.x266 - 2.54515263975353*m.b605 <= 0)", "m.x214/(0.001 + 0.999*m.b598)))*(0.001 + 0.999*m.b598) <= 0) m.c56 = Constraint(expr=", "+ m.b709 <= 1) m.c1147 = Constraint(expr= m.b707 + m.b708", "= Constraint(expr= m.b625 - m.b646 >= 0) m.c1448 = Constraint(expr=", "m.x315 - 1.83548069293539*m.b612 <= 0) m.c226 = Constraint(expr= m.x316 -", "Constraint(expr= - 0.75*m.x236 + m.x260 == 0) m.c108 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x429 = Var(within=Reals,bounds=(0,None),initialize=0) m.x430 = Var(within=Reals,bounds=(0,None),initialize=0) m.x431 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x242 - 4.45628648004517*m.b605 <= 0) m.c150 = Constraint(expr= m.x243", "m.x345 == 0) m.c286 = Constraint(expr= - 0.9*m.x298 + m.x346", "== 0) m.c967 = Constraint(expr= m.b730 + m.x820 == 0)", "= Constraint(expr= - m.b668 - m.b669 + m.b670 - m.b760", "- m.x162 - m.x165 - m.x168 == 0) m.c46 =", "Constraint(expr= m.x63 - m.x318 - m.x324 == 0) m.c382 =", "= Constraint(expr=(m.x429/(0.001 + 0.999*m.b636) - 0.7*log(1 + m.x363/(0.001 + 0.999*m.b636)))*(0.001", "- m.b768 <= 0) m.c1366 = Constraint(expr= - m.b677 -", "<= 0.842233385663186) m.c432 = Constraint(expr= m.x426 + 0.842233385663186*m.b633 <= 0.842233385663186)", "m.c1411 = 
Constraint(expr= m.b598 + m.b601 - m.b610 >= 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x529 = Var(within=Reals,bounds=(0,None),initialize=0) m.x530 = Var(within=Reals,bounds=(0,None),initialize=0) m.x531 =", "m.x101 - m.x104 - m.x107 == 0) m.c30 = Constraint(expr=", "Constraint(expr= m.x275 + 2.54515263975353*m.b617 <= 2.54515263975353) m.c276 = Constraint(expr= m.x276", "0) m.c574 = Constraint(expr= m.x400 - 3.04984759446376*m.b649 <= 0) m.c575", "m.c931 = Constraint(expr= 4*m.b694 + m.x784 == 0) m.c932 =", "= Constraint(expr= m.b723 + m.x813 == 0) m.c961 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x73 = Var(within=Reals,bounds=(0,None),initialize=0) m.x74 = Var(within=Reals,bounds=(0,None),initialize=0) m.x75 =", "m.x92 - m.x380 - m.x383 == 0) m.c480 = Constraint(expr=", "m.b766 <= 1) m.c1263 = Constraint(expr= m.b764 + m.b766 <=", ">= 0) m.c1459 = Constraint(expr= - m.b655 + m.b658 +", "0) m.c350 = Constraint(expr= m.x77 - m.x356 - m.x359 ==", "== 0) m.c344 = Constraint(expr= m.x359 == 0) m.c345 =", "+ m.x863 == 0) m.c1011 = Constraint(expr= 3*m.b774 + m.x864", "Constraint(expr= m.x348 + 13.5*m.b621 <= 13.5) m.c310 = Constraint(expr= m.x349", "m.x218 = Var(within=Reals,bounds=(0,None),initialize=0) m.x219 = Var(within=Reals,bounds=(0,None),initialize=0) m.x220 = Var(within=Reals,bounds=(0,None),initialize=0) m.x221", "m.x584 = Var(within=Reals,bounds=(0,None),initialize=0) m.x585 = Var(within=Reals,bounds=(0,None),initialize=0) m.x586 = Var(within=Reals,bounds=(0,None),initialize=0) m.x587", "Var(within=Reals,bounds=(0,None),initialize=0) m.x591 = Var(within=Reals,bounds=(0,None),initialize=0) m.x592 = Var(within=Reals,bounds=(0,None),initialize=0) m.x593 = Var(within=Reals,bounds=(0,None),initialize=0)", "4.45628648004517*m.b604 <= 4.45628648004517) m.c128 = Constraint(expr= m.x260 - 3.34221486003388*m.b602 <=", "+ m.x380/(0.001 + 0.999*m.b638)))*(0.001 + 0.999* m.b638) <= 0) m.c465", "= 
Var(within=Binary,bounds=(0,1),initialize=0) m.b696 = Var(within=Binary,bounds=(0,1),initialize=0) m.b697 = Var(within=Binary,bounds=(0,1),initialize=0) m.b698 =", "log(1 + m.x471/(0.001 + 0.999*m.b657)))*(0.001 + 0.999*m.b657) <= 0) m.c640", "+ m.b705 <= 1) m.c1142 = Constraint(expr= m.b705 + m.b706", "0) m.c317 = Constraint(expr= m.x353 == 0) m.c318 = Constraint(expr=", "<= 0) m.c362 = Constraint(expr= m.x359 + 3.04984759446376*m.b626 <= 3.04984759446376)", "== 0) m.c545 = Constraint(expr= m.x392 - 9*m.b644 <= 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x292 = Var(within=Reals,bounds=(0,None),initialize=0) m.x293 = Var(within=Reals,bounds=(0,None),initialize=0) m.x294 =", "== 0) m.c703 = Constraint(expr= m.x163 - m.x496 - m.x499", "<= 1) m.c1181 = Constraint(expr= m.b725 + m.b726 <= 1)", "m.x23 == 0) m.c9 = Constraint(expr= m.x18 - m.x21 -", "<= 1) m.c1245 = Constraint(expr= m.b755 + m.b757 <= 1)", "m.b657 = Var(within=Binary,bounds=(0,1),initialize=0) m.b658 = Var(within=Binary,bounds=(0,1),initialize=0) m.b659 = Var(within=Binary,bounds=(0,1),initialize=0) m.b660", "= Constraint(expr= m.x215 + 40*m.b596 <= 40) m.c72 = Constraint(expr=", "Constraint(expr=(m.x525/(0.001 + 0.999*m.b666) - 1.5*log(1 + m.x501/(0.001 + 0.999*m.b666)))*(0.001 +", "0) m.c1426 = Constraint(expr= m.b610 - m.b625 >= 0) m.c1427", "+ 0.572481933717686*m.b636 <= 0.572481933717686) m.c460 = Constraint(expr= m.x433 + 0.572481933717686*m.b637", "- m.b604 <= 0) m.c1021 = Constraint(expr= m.b603 - m.b604", "0) m.c1093 = Constraint(expr= m.b675 - m.b676 <= 0) m.c1094", "<= 0) m.c500 = Constraint(expr= m.x437 + 2.30162356062425*m.b638 <= 2.30162356062425)", "= Constraint(expr= m.b668 - m.b680 >= 0) m.c1482 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x573 = Var(within=Reals,bounds=(0,None),initialize=0) m.x574 = Var(within=Reals,bounds=(0,None),initialize=0) m.x575 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= m.x571 == 0) m.c797 = 
Constraint(expr= m.x182 -", "Constraint(expr= m.x114 - m.x423 - m.x426 == 0) m.c421 =", "m.b762 <= 1) m.c1254 = Constraint(expr= m.b761 + m.b763 <=", "m.x408 + 3.04984759446376*m.b651 <= 3.04984759446376) m.c604 = Constraint(expr= m.x409 +", "0) m.c989 = Constraint(expr= 5*m.b752 + m.x842 == 0) m.c990", "m.b600 <= 0) m.c1017 = Constraint(expr= m.b599 - m.b601 <=", "m.c1285 = Constraint(expr= - m.b596 - m.b597 + m.b598 -", "Constraint(expr= m.x281 + 15*m.b608 <= 15) m.c201 = Constraint(expr= m.x282", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x167 = Var(within=Reals,bounds=(0,None),initialize=0) m.x168 = Var(within=Reals,bounds=(0,None),initialize=0) m.x169 =", "0) m.c199 = Constraint(expr= m.x280 - 15*m.b610 <= 0) m.c200", "m.c1475 = Constraint(expr= m.b662 - m.b674 >= 0) m.c1476 =", "- 20*m.b630 <= 0) m.c403 = Constraint(expr= m.x418 - 20*m.b631", "m.x366 == 0) m.c445 = Constraint(expr= m.x82 - m.x364 -", "9*m.b646 <= 0) m.c548 = Constraint(expr= m.x395 + 9*m.b644 <=", "0) m.c1290 = Constraint(expr= - m.b602 + m.b603 - m.b693", "- m.b676 <= 0) m.c1093 = Constraint(expr= m.b675 - m.b676", "- m.b756 <= 0) m.c1354 = Constraint(expr= - m.b665 -", "m.x491 == 0) m.c678 = Constraint(expr= m.x150 - m.x489 -", "m.b733 <= 1) m.c1198 = Constraint(expr= m.b732 + m.b733 <=", "+ m.b652 - m.b742 <= 0) m.c1340 = Constraint(expr= m.b653", "Constraint(expr= - m.b626 - m.b627 + m.b628 - m.b718 <=", "= Constraint(expr= m.x117 - m.x429 - m.x432 == 0) m.c448", "Constraint(expr= m.x527 - 0.994083415506506*m.b677 <= 0) m.c858 = Constraint(expr= m.x528", "m.b726 + m.b727 <= 1) m.c1187 = Constraint(expr= m.b728 +", "Constraint(expr= m.x354 == 0) m.c319 = Constraint(expr= m.x355 == 0)", "= Constraint(expr= m.x458 - 1.11894339953103*m.b650 <= 0) m.c606 = Constraint(expr=", "m.c1434 = Constraint(expr= m.b615 - m.b633 >= 0) m.c1435 =", "# 865 685 180 0 0 0 0 0 #", "m.c66 = Constraint(expr= m.x12 - m.x225 - m.x228 == 0)", "m.b647 >= 0) m.c1449 = Constraint(expr= m.b627 - m.b648 >=", 
"1) m.c1155 = Constraint(expr= m.b710 + m.b712 <= 1) m.c1156", "20*m.b630 <= 20) m.c400 = Constraint(expr= m.x379 + 20*m.b631 <=", "m.c1166 = Constraint(expr= m.b717 + m.b718 <= 1) m.c1167 =", "0) m.c205 = Constraint(expr=(m.x316/(0.001 + 0.999*m.b613) - 1.25*log(1 + m.x286/(0.001", "0) m.c1084 = Constraint(expr= m.b666 - m.b667 <= 0) m.c1085", "Var(within=Reals,bounds=(0,None),initialize=0) m.x448 = Var(within=Reals,bounds=(0,None),initialize=0) m.x449 = Var(within=Reals,bounds=(0,None),initialize=0) m.x450 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c965 = Constraint(expr= 4*m.b728 + m.x818 == 0) m.c966 =", "m.x541 == 0) m.c761 = Constraint(expr= m.x167 - m.x506 -", "m.b645 <= 0) m.c1062 = Constraint(expr= m.b644 - m.b646 <=", "m.c1249 = Constraint(expr= m.b758 + m.b759 <= 1) m.c1250 =", "0) m.c1433 = Constraint(expr= m.b614 - m.b632 >= 0) m.c1434", "0) m.c1472 = Constraint(expr= m.b662 - m.b671 >= 0) m.c1473", "m.x344 == 0) m.c285 = Constraint(expr= - 0.9*m.x297 + m.x345", "6*m.b698 + m.x788 == 0) m.c936 = Constraint(expr= 10*m.b699 +", "+ m.b694 <= 1) m.c1119 = Constraint(expr= m.b692 + m.b694", "- m.b663 + m.b664 - m.b754 <= 0) m.c1352 =", "1) m.c1276 = Constraint(expr= m.b771 + m.b772 <= 1) m.c1277", "m.b596 - m.b598 <= 0) m.c1015 = Constraint(expr= m.b597 -", "0) m.c19 = Constraint(expr= m.x46 - m.x55 - m.x58 -", "- m.x252 == 0) m.c178 = Constraint(expr= m.x34 - m.x250", "= Constraint(expr= m.x172 - m.x514 - m.x517 == 0) m.c767", "m.b731 + m.b732 <= 1) m.c1196 = Constraint(expr= m.b732 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x458 = Var(within=Reals,bounds=(0,None),initialize=0) m.x459 = Var(within=Reals,bounds=(0,None),initialize=0) m.x460 =", "1) m.c1280 = Constraint(expr= m.b774 + m.b775 <= 1) m.c1281", "0) m.c678 = Constraint(expr= m.x150 - m.x489 - m.x492 ==", "0) m.c409 = Constraint(expr=(m.x424/(0.001 + 0.999*m.b634) - log(1 + m.x331/(0.001", "0.999* m.b616) <= 0) m.c233 = Constraint(expr= m.x293 == 0)", "0) m.c408 = 
Constraint(expr=(m.x423/(0.001 + 0.999*m.b633) - log(1 + m.x330/(0.001", "m.b629 - m.b630 <= 0) m.c1047 = Constraint(expr= m.b629 -", "m.x380 = Var(within=Reals,bounds=(0,None),initialize=0) m.x381 = Var(within=Reals,bounds=(0,None),initialize=0) m.x382 = Var(within=Reals,bounds=(0,None),initialize=0) m.x383", ">= 0) m.c1469 = Constraint(expr= m.b653 - m.b659 >= 0)", "= Constraint(expr= m.x3 - m.x6 - m.x9 == 0) m.c4", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x792 = Var(within=Reals,bounds=(None,None),initialize=0) m.x793 = Var(within=Reals,bounds=(None,None),initialize=0) m.x794 =", "0) m.c1058 = Constraint(expr= m.b641 - m.b642 <= 0) m.c1059", "Constraint(expr=(m.x267/(0.001 + 0.999*m.b606) - 1.5*log(1 + m.x243/(0.001 + 0.999*m.b606)))*(0.001 +", "<= 2.54515263975353) m.c160 = Constraint(expr= m.x274 + 2.54515263975353*m.b607 <= 2.54515263975353)", "m.b767 + m.b769 <= 1) m.c1270 = Constraint(expr= m.b768 +", "Constraint(expr= m.b662 - m.b674 >= 0) m.c1476 = Constraint(expr= m.b663", "- m.x268 - m.x274 == 0) m.c149 = Constraint(expr= m.x242", "+ 4.45628648004517*m.b602 <= 4.45628648004517) m.c126 = Constraint(expr= m.x240 + 4.45628648004517*m.b603", "= Constraint(expr= - m.x148 - m.x151 + m.x154 == 0)", "- m.x167 == 0) m.c45 = Constraint(expr= m.x159 - m.x162", "- m.x382 - m.x385 == 0) m.c482 = Constraint(expr= m.x119", "Constraint(expr= m.x313 + 15*m.b628 <= 15) m.c359 = Constraint(expr= m.x356", "m.x136 = Var(within=Reals,bounds=(0,None),initialize=0) m.x137 = Var(within=Reals,bounds=(0,None),initialize=0) m.x138 = Var(within=Reals,bounds=(0,None),initialize=0) m.x139", "- m.b664 + m.b673 + m.b676 >= 0) m.c1463 =", "0) m.c1311 = Constraint(expr= - m.b623 + m.b624 - m.b714", "Var(within=Reals,bounds=(0,None),initialize=0) m.x183 = Var(within=Reals,bounds=(0,None),initialize=0) m.x184 = Var(within=Reals,bounds=(0,None),initialize=0) m.x185 = Var(within=Reals,bounds=(0,None),initialize=0)", "<= 1.04900943706034) m.c583 = Constraint(expr= m.x457 + 
1.04900943706034*m.b649 <= 1.04900943706034)", "m.x483 - m.x486 == 0) m.c652 = Constraint(expr= m.x148 -", "+ m.x412/(0.001 + 0.999*m.b655)))*(0.001 + 0.999* m.b655) <= 0) m.c614", "0.705049913072943*m.b672 <= 0.705049913072943) m.c808 = Constraint(expr= m.x547 + 0.705049913072943*m.b673 <=", "m.b734 + m.b735 <= 1) m.c1200 = Constraint(expr= m.b734 +", "- m.b604 >= 0) m.c1406 = Constraint(expr= m.b596 + m.b599", "0) m.c443 = Constraint(expr= m.x80 - m.x362 - m.x365 ==", "= Constraint(expr= m.b698 + m.b699 <= 1) m.c1130 = Constraint(expr=", "= Constraint(expr= m.b756 + m.b757 <= 1) m.c1247 = Constraint(expr=", "13.5*m.b622 <= 0) m.c308 = Constraint(expr= m.x347 + 13.5*m.b620 <=", "m.x319 - 1.83548069293539*m.b631 <= 0) m.c392 = Constraint(expr= m.x323 +", "m.x507 - m.x510 == 0) m.c763 = Constraint(expr= m.x169 -", "0) m.c1053 = Constraint(expr= m.b635 - m.b637 <= 0) m.c1054", "+ m.b763 <= 1) m.c1258 = Constraint(expr= m.b762 + m.b763", "= Constraint(expr= m.x221 + 40*m.b599 <= 40) m.c99 = Constraint(expr=", "0) m.c1313 = Constraint(expr= m.b626 - m.b716 <= 0) m.c1314", "m.b739 <= 0) m.c1337 = Constraint(expr= m.b650 - m.b740 <=", "Constraint(expr=(m.x578/(0.001 + 0.999*m.b677) - log(1 + m.x527/(0.001 + 0.999*m.b677)))*(0.001 +", "<= 0) m.c1344 = Constraint(expr= - m.b656 + m.b657 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x107 = Var(within=Reals,bounds=(0,None),initialize=0) m.x108 = Var(within=Reals,bounds=(0,None),initialize=0) m.x109 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c977 = Constraint(expr= 2*m.b740 + m.x830 == 0) m.c978 =", "- m.x525 - m.x531 == 0) m.c733 = Constraint(expr= m.x178", "Constraint(expr= m.x292 - 3.34221486003388*m.b616 <= 0) m.c248 = Constraint(expr= m.x293", "Constraint(expr= m.x273 == 0) m.c142 = Constraint(expr= m.x274 == 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x539 = Var(within=Reals,bounds=(0,None),initialize=0) m.x540 = Var(within=Reals,bounds=(0,None),initialize=0) m.x541 =", "0) m.c315 = 
Constraint(expr= m.x306 == 0) m.c316 = Constraint(expr=", "m.c664 = Constraint(expr= m.x487 + 0.78338879230327*m.b658 <= 0.78338879230327) m.c665 =", "= Constraint(expr= m.x523 == 0) m.c701 = Constraint(expr= m.x161 -", "0) m.c34 = Constraint(expr= m.x136 - m.x139 == 0) m.c35", "m.c1419 = Constraint(expr= m.b606 - m.b618 >= 0) m.c1420 =", "Constraint(expr= m.x204 - m.x579 - m.x582 == 0) m.c856 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x461 = Var(within=Reals,bounds=(0,None),initialize=0) m.x462 = Var(within=Reals,bounds=(0,None),initialize=0) m.x463 =", "0) m.c736 = Constraint(expr= m.x502 - 0.940066550763924*m.b667 <= 0) m.c737", "== 0) m.c113 = Constraint(expr= m.x263 == 0) m.c114 =", "4*m.b767 + m.x857 == 0) m.c1005 = Constraint(expr= 8*m.b768 +", "= Constraint(expr= m.x548 - 0.705049913072943*m.b674 <= 0) m.c831 = Constraint(expr=", "m.x461 = Var(within=Reals,bounds=(0,None),initialize=0) m.x462 = Var(within=Reals,bounds=(0,None),initialize=0) m.x463 = Var(within=Reals,bounds=(0,None),initialize=0) m.x464", "1) m.c1266 = Constraint(expr= m.b767 + m.b769 <= 1) m.c1267", "m.b714 + m.b715 <= 1) m.c1161 = Constraint(expr= m.b713 +", "m.x316 - m.x322 == 0) m.c218 = Constraint(expr= m.x284 -", "Constraint(expr= m.x546 + 0.705049913072943*m.b672 <= 0.705049913072943) m.c808 = Constraint(expr= m.x547", "m.b650 - m.b652 <= 0) m.c1069 = Constraint(expr= m.b651 -", "m.c1229 = Constraint(expr= m.b749 + m.b750 <= 1) m.c1230 =", "m.x47 - m.x50 == 0) m.c15 = Constraint(expr= m.x39 -", "m.c541 = Constraint(expr= m.x100 - m.x394 - m.x397 == 0)", "- m.x523 == 0) m.c707 = Constraint(expr= m.x494 - 0.940066550763924*m.b662", "m.b740 + m.b741 <= 1) m.c1214 = Constraint(expr= m.b741 +", "m.c1222 = Constraint(expr= m.b744 + m.b745 <= 1) m.c1223 =", "m.b654 - m.b660 >= 0) m.c1471 = Constraint(expr= m.b655 -", "0) m.c24 = Constraint(expr= - m.x72 - m.x90 + m.x93", "- 0.75*m.x237 + m.x261 == 0) m.c109 = Constraint(expr= -", "<= 0) m.c560 = Constraint(expr= m.x401 == 0) 
m.c561 =", "= Constraint(expr= m.x481 == 0) m.c671 = Constraint(expr= m.x491 ==", "m.x377 == 0) m.c384 = Constraint(expr= m.x87 - m.x375 -", "m.b636) <= 0) m.c436 = Constraint(expr=(m.x430/(0.001 + 0.999*m.b637) - 0.7*log(1", "== 0) m.c481 = Constraint(expr= m.x94 - m.x382 - m.x385", "= Constraint(expr= - m.b647 + m.b648 - m.b738 <= 0)", "m.b605 - m.b617 >= 0) m.c1419 = Constraint(expr= m.b606 -", "0) m.c1457 = Constraint(expr= - m.b653 + m.b656 + m.b659", "m.x366 + 1.26558121681553*m.b636 <= 1.26558121681553) m.c454 = Constraint(expr= m.x367 +", "m.x244 = Var(within=Reals,bounds=(0,None),initialize=0) m.x245 = Var(within=Reals,bounds=(0,None),initialize=0) m.x246 = Var(within=Reals,bounds=(0,None),initialize=0) m.x247", "+ 0.999* m.b635) <= 0) m.c435 = Constraint(expr=(m.x429/(0.001 + 0.999*m.b636)", "= Constraint(expr= m.x98 - m.x392 - m.x395 == 0) m.c540", "Constraint(expr= m.x563 + 15*m.b683 <= 15) m.c915 = Constraint(expr= m.x564", "+ m.b763 <= 1) m.c1257 = Constraint(expr= m.b761 + m.b763", "+ m.b706 <= 1) m.c1141 = Constraint(expr= m.b704 + m.b705", "== 0) m.c444 = Constraint(expr= m.x81 - m.x363 - m.x366", "Constraint(expr= m.b746 + m.b748 <= 1) m.c1225 = Constraint(expr= m.b746", "m.x563 == 0) m.c906 = Constraint(expr= m.x192 - m.x561 -", "m.x809 == 0) m.c957 = Constraint(expr= 2*m.b720 + m.x810 ==", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b670 = Var(within=Binary,bounds=(0,1),initialize=0) m.b671 = Var(within=Binary,bounds=(0,1),initialize=0) m.b672 =", "0) m.c693 = Constraint(expr= - 0.75*m.x495 + m.x519 == 0)", "m.x283 == 0) m.c185 = Constraint(expr= m.x248 - 4.45628648004517*m.b608 <=", "m.x368/(0.001 + 0.999*m.b638)))*(0.001 + 0.999* m.b638) <= 0) m.c462 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x336 = Var(within=Reals,bounds=(0,None),initialize=0) m.x337 = Var(within=Reals,bounds=(0,None),initialize=0) m.x338 = Var(within=Reals,bounds=(0,None),initialize=0)", "+ m.b730 <= 1) m.c1193 = Constraint(expr= m.b731 + m.b732", "m.x239 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x240 = Var(within=Reals,bounds=(0,None),initialize=0) m.x241 = Var(within=Reals,bounds=(0,None),initialize=0) m.x242", "Constraint(expr= m.x554 - 15*m.b680 <= 0) m.c885 = Constraint(expr= m.x555", "+ m.x823 == 0) m.c971 = Constraint(expr= 3*m.b734 + m.x824", "= Constraint(expr= m.x30 - m.x243 - m.x246 == 0) m.c145", "<= 0.705049913072943) m.c834 = Constraint(expr= m.x552 + 0.705049913072943*m.b675 <= 0.705049913072943)", "= Constraint(expr= 7*m.b714 + m.x804 == 0) m.c952 = Constraint(expr=", "m.b716 + m.b717 <= 1) m.c1166 = Constraint(expr= m.b717 +", "- m.b609 + m.b610 - m.b700 <= 0) m.c1298 =", "m.x36 = Var(within=Reals,bounds=(0,30),initialize=0) m.x37 = Var(within=Reals,bounds=(0,30),initialize=0) m.x38 = Var(within=Reals,bounds=(0,None),initialize=0) m.x39", "m.c389 = Constraint(expr= m.x317 - 1.83548069293539*m.b629 <= 0) m.c390 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x101 = Var(within=Reals,bounds=(0,None),initialize=0) m.x102 = Var(within=Reals,bounds=(0,None),initialize=0) m.x103 =", "m.b769 <= 1) m.c1270 = Constraint(expr= m.b768 + m.b769 <=", "m.x385 == 0) m.c482 = Constraint(expr= m.x119 - m.x434 -", "m.c728 = Constraint(expr= m.x164 - m.x500 - m.x503 == 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x426 = Var(within=Reals,bounds=(0,None),initialize=0) m.x427 = Var(within=Reals,bounds=(0,None),initialize=0) m.x428 =", "3.04984759446376) m.c577 = Constraint(expr= m.x403 + 3.04984759446376*m.b649 <= 3.04984759446376) m.c578", "4.45628648004517*m.b599 <= 0) m.c102 = Constraint(expr= m.x231 - 4.45628648004517*m.b600 <=", "1) m.c1247 = Constraint(expr= m.b758 + m.b759 <= 1) m.c1248", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x472 = Var(within=Reals,bounds=(0,None),initialize=0) m.x473 = Var(within=Reals,bounds=(0,None),initialize=0) m.x474 =", "+ 1.83548069293539*m.b611 <= 1.83548069293539) m.c228 = Constraint(expr= m.x321 + 1.83548069293539*m.b612", 
"Constraint(expr=(m.x327/(0.001 + 0.999*m.b615) - 0.9*log(1 + m.x291/(0.001 + 0.999*m.b615)))*(0.001 +", "m.x347 == 0) m.c291 = Constraint(expr= m.x348 == 0) m.c292", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x348 = Var(within=Reals,bounds=(0,None),initialize=0) m.x349 = Var(within=Reals,bounds=(0,None),initialize=0) m.x350 =", "m.c1155 = Constraint(expr= m.b710 + m.b712 <= 1) m.c1156 =", "m.x540 = Var(within=Reals,bounds=(0,None),initialize=0) m.x541 = Var(within=Reals,bounds=(0,None),initialize=0) m.x542 = Var(within=Reals,bounds=(0,None),initialize=0) m.x543", "= Constraint(expr= m.x352 - 9*m.b625 <= 0) m.c335 = Constraint(expr=", "m.c1312 = Constraint(expr= - m.b623 - m.b624 + m.b625 -", "== 0) m.c897 = Constraint(expr= - 0.6*m.x561 + m.x591 ==", "- 2.30162356062425*m.b639 <= 0) m.c499 = Constraint(expr= m.x436 - 2.30162356062425*m.b640", "Var(within=Reals,bounds=(0,None),initialize=0) m.x388 = Var(within=Reals,bounds=(0,None),initialize=0) m.x389 = Var(within=Reals,bounds=(0,None),initialize=0) m.x390 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b632 - m.b633 <= 0) m.c1050 = Constraint(expr= m.b632 -", "m.x143 == 0) m.c36 = Constraint(expr= m.x138 - m.x141 -", "= Constraint(expr= m.b668 - m.b669 <= 0) m.c1086 = Constraint(expr=", "Constraint(expr= m.x107 - m.x410 - m.x413 == 0) m.c621 =", "- m.x417 - m.x420 == 0) m.c388 = Constraint(expr= m.x112", "== 0) m.c380 = Constraint(expr= m.x62 - m.x317 - m.x323", "m.x385 + 33.5*m.b640 <= 33.5) m.c497 = Constraint(expr= m.x434 -", "m.b639 >= 0) m.c1441 = Constraint(expr= m.b619 - m.b640 >=", "= Constraint(expr= m.x72 - m.x345 - m.x348 == 0) m.c298", "- m.x271 - m.x277 == 0) m.c269 = Constraint(expr= m.x68", "Var(within=Reals,bounds=(0,None),initialize=0) m.x365 = Var(within=Reals,bounds=(0,None),initialize=0) m.x366 = Var(within=Reals,bounds=(0,None),initialize=0) m.x367 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x552 == 0) m.c826 = Constraint(expr= m.x187 - m.x550 -", 
"Var(within=Reals,bounds=(None,None),initialize=0) m.x786 = Var(within=Reals,bounds=(None,None),initialize=0) m.x787 = Var(within=Reals,bounds=(None,None),initialize=0) m.x788 = Var(within=Reals,bounds=(None,None),initialize=0)", "0.999*m.b652)))*(0.001 + 0.999* m.b652) <= 0) m.c587 = Constraint(expr= m.x407", "0) m.c939 = Constraint(expr= 7*m.b702 + m.x792 == 0) m.c940", "- 33.5*m.b639 <= 0) m.c493 = Constraint(expr= m.x382 - 33.5*m.b640", "m.b624 - m.b714 <= 0) m.c1312 = Constraint(expr= - m.b623", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b737 = Var(within=Binary,bounds=(0,1),initialize=0) m.b738 = Var(within=Binary,bounds=(0,1),initialize=0) m.b739 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x411 = Var(within=Reals,bounds=(0,None),initialize=0) m.x412 = Var(within=Reals,bounds=(0,None),initialize=0) m.x413 = Var(within=Reals,bounds=(0,None),initialize=0)", "<= 0.940066550763924) m.c711 = Constraint(expr= m.x498 + 0.940066550763924*m.b663 <= 0.940066550763924)", "m.c1324 = Constraint(expr= - m.b635 - m.b636 + m.b637 -", "+ m.x418 == 0) m.c371 = Constraint(expr= m.x323 == 0)", "+ 9*m.b644 <= 9) m.c549 = Constraint(expr= m.x396 + 9*m.b645", "= Constraint(expr= m.x177 - m.x525 - m.x531 == 0) m.c733", "- 15*m.b682 <= 0) m.c887 = Constraint(expr= m.x557 + 15*m.b680", "0.999*m.b597) <= 0) m.c55 = Constraint(expr=(m.x226/(0.001 + 0.999*m.b598) - log(1", "Constraint(expr= m.x42 - m.x267 - m.x273 == 0) m.c148 =", "m.x518 - 0.705049913072943*m.b662 <= 0) m.c714 = Constraint(expr= m.x519 -", "Constraint(expr= m.b644 - m.b734 <= 0) m.c1332 = Constraint(expr= -", "= Constraint(expr= m.x133 - m.x460 - m.x463 == 0) m.c599", "Constraint(expr= m.x152 - m.x155 - m.x158 == 0) m.c42 =", "m.x394 - m.x397 == 0) m.c542 = Constraint(expr= m.x125 -", "Var(within=Binary,bounds=(0,1),initialize=0) m.b671 = Var(within=Binary,bounds=(0,1),initialize=0) m.b672 = Var(within=Binary,bounds=(0,1),initialize=0) m.b673 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.x254 - 
30*m.b608 <= 0) m.c192 = Constraint(expr= m.x255 -", "- m.x561 - m.x564 == 0) m.c907 = Constraint(expr= m.x193", "m.b637 >= 0) m.c1439 = Constraint(expr= m.b617 - m.b638 >=", "m.x591 = Var(within=Reals,bounds=(0,None),initialize=0) m.x592 = Var(within=Reals,bounds=(0,None),initialize=0) m.x593 = Var(within=Reals,bounds=(0,None),initialize=0) m.x594", "= Constraint(expr= m.b616 - m.b634 >= 0) m.c1436 = Constraint(expr=", "- m.x327 - m.x333 == 0) m.c244 = Constraint(expr= m.x67", "m.x275 == 0) m.c267 = Constraint(expr= m.x42 - m.x270 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x387 = Var(within=Reals,bounds=(0,None),initialize=0) m.x388 = Var(within=Reals,bounds=(0,None),initialize=0) m.x389 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x377 == 0) m.c375 = Constraint(expr= m.x378 == 0) m.c376", "- 8*m.b757 - 10*m.b758 - 6*m.b759 - 3*m.b760 - 4*m.b761", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x238 = Var(within=Reals,bounds=(0,None),initialize=0) m.x239 = Var(within=Reals,bounds=(0,None),initialize=0) m.x240 =", "0) m.c519 = Constraint(expr= m.x387 - 9*m.b642 <= 0) m.c520", "<= 0) m.c1292 = Constraint(expr= m.b605 - m.b695 <= 0)", "== 0) m.c285 = Constraint(expr= - 0.9*m.x297 + m.x345 ==", "m.c337 = Constraint(expr= m.x355 + 9*m.b625 <= 9) m.c338 =", "= Constraint(expr= m.b687 + m.b688 <= 1) m.c1109 = Constraint(expr=", "0) m.c926 = Constraint(expr= 8*m.b689 + m.x779 == 0) m.c927", "m.x284 = Var(within=Reals,bounds=(0,None),initialize=0) m.x285 = Var(within=Reals,bounds=(0,None),initialize=0) m.x286 = Var(within=Reals,bounds=(0,None),initialize=0) m.x287", "m.x509 == 0) m.c753 = Constraint(expr= m.x510 == 0) m.c754", "0) m.c498 = Constraint(expr= m.x435 - 2.30162356062425*m.b639 <= 0) m.c499", "m.x595 = Var(within=Reals,bounds=(0,None),initialize=0) m.b596 = Var(within=Binary,bounds=(0,1),initialize=0) m.b597 = Var(within=Binary,bounds=(0,1),initialize=0) m.b598", "Constraint(expr= m.x153 - m.x156 - m.x159 == 0) m.c43 =", "- 
0.78338879230327*m.b658 <= 0) m.c662 = Constraint(expr= m.x485 + 0.78338879230327*m.b656", "- m.x508 + m.x538 == 0) m.c749 = Constraint(expr= -", "m.x137 == 0) m.c33 = Constraint(expr= m.x135 - m.x138 ==", "<= 0) m.c398 = Constraint(expr= m.x377 + 20*m.b629 <= 20)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x101 = Var(within=Reals,bounds=(0,None),initialize=0) m.x102 = Var(within=Reals,bounds=(0,None),initialize=0) m.x103 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b728 + m.b730 <= 1) m.c1192 = Constraint(expr= m.b729 +", "1) m.c1131 = Constraint(expr= m.b698 + m.b700 <= 1) m.c1132", "Constraint(expr= - m.b638 + m.b639 - m.b729 <= 0) m.c1327", "Var(within=Reals,bounds=(0,None),initialize=0) m.x545 = Var(within=Reals,bounds=(0,None),initialize=0) m.x546 = Var(within=Reals,bounds=(0,None),initialize=0) m.x547 = Var(within=Reals,bounds=(0,None),initialize=0)", "- m.b774 <= 0) m.c1372 = Constraint(expr= - m.b683 -", "- 3.34221486003388*m.b604 <= 0) m.c131 = Constraint(expr= m.x263 + 3.34221486003388*m.b602", "= Constraint(expr= m.x493 == 0) m.c674 = Constraint(expr= m.x143 -", "+ 350*m.x209 + 250*m.x210 + 300*m.x211 - 5*m.b686 - 4*m.b687", "== 0) m.c532 = Constraint(expr= - m.x394 + m.x448 ==", "Var(within=Reals,bounds=(None,None),initialize=0) m.x784 = Var(within=Reals,bounds=(None,None),initialize=0) m.x785 = Var(within=Reals,bounds=(None,None),initialize=0) m.x786 = Var(within=Reals,bounds=(None,None),initialize=0)", "+ m.x345 == 0) m.c286 = Constraint(expr= - 0.9*m.x298 +", "+ 290*m.x206 + 300*m.x207 + 240*m.x208 + 350*m.x209 + 250*m.x210", "m.b624 + m.b625 - m.b715 <= 0) m.c1313 = Constraint(expr=", "- m.x166 - m.x169 == 0) m.c47 = Constraint(expr= m.x173", "0) m.c671 = Constraint(expr= m.x491 == 0) m.c672 = Constraint(expr=", "m.x125 - m.x446 - m.x449 == 0) m.c543 = Constraint(expr=", "+ m.b708 <= 1) m.c1148 = Constraint(expr= m.b708 + m.b709", "0) m.c1070 = Constraint(expr= m.b653 - m.b654 <= 0) m.c1071", "1) m.c1125 = Constraint(expr= m.b695 + 
m.b697 <= 1) m.c1126", "== 0) m.c799 = Constraint(expr= m.x184 - m.x544 - m.x547", "m.x158 = Var(within=Reals,bounds=(0,None),initialize=0) m.x159 = Var(within=Reals,bounds=(0,None),initialize=0) m.x160 = Var(within=Reals,bounds=(0,None),initialize=0) m.x161", "m.x786 == 0) m.c934 = Constraint(expr= 5*m.b697 + m.x787 ==", "5*m.x90 - 5*m.x91 + 40*m.x110 + 30*m.x111 + 15*m.x112 +", "<= 15) m.c331 = Constraint(expr= m.x307 + 15*m.b625 <= 15)", "= Constraint(expr=(m.x224/(0.001 + 0.999*m.b596) - log(1 + m.x212/(0.001 + 0.999*m.b596)))*(0.001", "m.b612 - m.b613 <= 0) m.c1031 = Constraint(expr= m.b614 -", "<= 1) m.c1171 = Constraint(expr= m.b719 + m.b720 <= 1)", "m.c988 = Constraint(expr= 9*m.b751 + m.x841 == 0) m.c989 =", "m.x828 = Var(within=Reals,bounds=(None,None),initialize=0) m.x829 = Var(within=Reals,bounds=(None,None),initialize=0) m.x830 = Var(within=Reals,bounds=(None,None),initialize=0) m.x831", "Var(within=Reals,bounds=(0,None),initialize=0) m.x408 = Var(within=Reals,bounds=(0,None),initialize=0) m.x409 = Var(within=Reals,bounds=(0,None),initialize=0) m.x410 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x428 - 0.572481933717686*m.b635 <= 0) m.c456 = Constraint(expr= m.x429", "m.b748 = Var(within=Binary,bounds=(0,1),initialize=0) m.b749 = Var(within=Binary,bounds=(0,1),initialize=0) m.b750 = Var(within=Binary,bounds=(0,1),initialize=0) m.b751", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x551 = Var(within=Reals,bounds=(0,None),initialize=0) m.x552 = Var(within=Reals,bounds=(0,None),initialize=0) m.x553 =", "= Constraint(expr= m.x306 == 0) m.c316 = Constraint(expr= m.x307 ==", "- m.x145 == 0) m.c38 = Constraint(expr= - m.x146 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x129 = Var(within=Reals,bounds=(0,None),initialize=0) m.x130 = Var(within=Reals,bounds=(0,None),initialize=0) m.x131 = Var(within=Reals,bounds=(0,None),initialize=0)", "== 0) m.c680 = Constraint(expr= m.x476 - 1.18887736200171*m.b659 <= 0)", 
"Var(within=Reals,bounds=(None,None),initialize=0) m.x862 = Var(within=Reals,bounds=(None,None),initialize=0) m.x863 = Var(within=Reals,bounds=(None,None),initialize=0) m.x864 = Var(within=Reals,bounds=(None,None),initialize=0)", "0) m.c1453 = Constraint(expr= m.b628 - m.b652 >= 0) m.c1454", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x130 = Var(within=Reals,bounds=(0,None),initialize=0) m.x131 = Var(within=Reals,bounds=(0,None),initialize=0) m.x132 =", "m.c67 = Constraint(expr= m.x13 - m.x226 - m.x229 == 0)", "m.c567 = Constraint(expr= m.x102 - m.x399 - m.x402 == 0)", "= Constraint(expr= m.b602 - m.b692 <= 0) m.c1290 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x327 = Var(within=Reals,bounds=(0,None),initialize=0) m.x328 = Var(within=Reals,bounds=(0,None),initialize=0) m.x329 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x478 = Var(within=Reals,bounds=(0,None),initialize=0) m.x479 = Var(within=Reals,bounds=(0,None),initialize=0) m.x480 =", "1.11894339953103*m.b652 <= 0) m.c608 = Constraint(expr= m.x461 + 1.11894339953103*m.b650 <=", "+ m.b733 <= 1) m.c1195 = Constraint(expr= m.b731 + m.b732", "<= 0) m.c1338 = Constraint(expr= - m.b650 + m.b651 -", "0) m.c1445 = Constraint(expr= m.b623 - m.b644 >= 0) m.c1446", "m.b602 + m.b611 + m.b614 >= 0) m.c1377 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x137 = Var(within=Reals,bounds=(0,None),initialize=0) m.x138 = Var(within=Reals,bounds=(0,None),initialize=0) m.x139 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c116 = Constraint(expr= m.x26 - m.x236 - m.x239 == 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x18 = Var(within=Reals,bounds=(0,None),initialize=0) m.x19 = Var(within=Reals,bounds=(0,None),initialize=0) m.x20 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b617 >= 0) m.c1419 = Constraint(expr= m.b606 - m.b618 >=", "4.45628648004517*m.b601 <= 4.45628648004517) m.c107 = 
Constraint(expr= - 0.75*m.x236 + m.x260", "m.b697 <= 0) m.c1295 = Constraint(expr= m.b608 - m.b698 <=", "- 20*m.b631 <= 0) m.c398 = Constraint(expr= m.x377 + 20*m.b629", "Constraint(expr= m.b653 - m.b654 <= 0) m.c1071 = Constraint(expr= m.b653", "- 8*m.b729 - m.b730 - 2*m.b731 - 5*m.b732 - 2*m.b733", "m.c1323 = Constraint(expr= - m.b635 + m.b636 - m.b726 <=", "m.x186 - m.x549 - m.x552 == 0) m.c826 = Constraint(expr=", "m.c513 = Constraint(expr= m.x96 - m.x387 - m.x390 == 0)", "- m.b665 - m.b666 + m.b667 - m.b757 <= 0)", "30*m.b609 <= 30) m.c196 = Constraint(expr= m.x259 + 30*m.b610 <=", "Constraint(expr= m.b644 - m.b645 <= 0) m.c1062 = Constraint(expr= m.b644", "Var(within=Binary,bounds=(0,1),initialize=0) m.b767 = Var(within=Binary,bounds=(0,1),initialize=0) m.b768 = Var(within=Binary,bounds=(0,1),initialize=0) m.b769 = Var(within=Binary,bounds=(0,1),initialize=0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x148 = Var(within=Reals,bounds=(0,None),initialize=0) m.x149 = Var(within=Reals,bounds=(0,None),initialize=0) m.x150 = Var(within=Reals,bounds=(0,None),initialize=0)", "<= 0) m.c806 = Constraint(expr= m.x545 + 0.705049913072943*m.b671 <= 0.705049913072943)", "m.b654 = Var(within=Binary,bounds=(0,1),initialize=0) m.b655 = Var(within=Binary,bounds=(0,1),initialize=0) m.b656 = Var(within=Binary,bounds=(0,1),initialize=0) m.b657", "0) m.c1390 = Constraint(expr= - m.b619 + m.b637 + m.b640", "0) m.c1057 = Constraint(expr= m.b639 - m.b640 <= 0) m.c1058", "= Constraint(expr= m.b741 + m.b742 <= 1) m.c1215 = Constraint(expr=", "= Constraint(expr= m.x505 + 0.940066550763924*m.b667 <= 0.940066550763924) m.c740 = Constraint(expr=", "m.c793 = Constraint(expr= m.x547 == 0) m.c794 = Constraint(expr= m.x569", "7*m.b719 + m.x809 == 0) m.c957 = Constraint(expr= 2*m.b720 +", "= Constraint(expr= m.b674 - m.b675 <= 0) m.c1092 = Constraint(expr=", "- m.x187 == 0) m.c50 = Constraint(expr= m.x179 - m.x188", "= Constraint(expr= m.b618 - m.b619 <= 0) m.c1037 = Constraint(expr=", 
"+ m.b763 <= 1) m.c1255 = Constraint(expr= m.b761 + m.b762", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x322 = Var(within=Reals,bounds=(0,None),initialize=0) m.x323 = Var(within=Reals,bounds=(0,None),initialize=0) m.x324 =", "- m.x5 - m.x8 == 0) m.c3 = Constraint(expr= m.x3", "= Constraint(expr= - m.b616 + m.b634 >= 0) m.c1385 =", "Constraint(expr= m.b725 + m.b726 <= 1) m.c1184 = Constraint(expr= m.b726", "m.b721 = Var(within=Binary,bounds=(0,1),initialize=0) m.b722 = Var(within=Binary,bounds=(0,1),initialize=0) m.b723 = Var(within=Binary,bounds=(0,1),initialize=0) m.b724", "- m.x159 == 0) m.c43 = Constraint(expr= m.x154 - m.x157", "- m.x304 - m.x307 == 0) m.c323 = Constraint(expr= m.x74", "+ m.x537 == 0) m.c748 = Constraint(expr= - m.x508 +", "= Constraint(expr= m.b723 + m.b724 <= 1) m.c1181 = Constraint(expr=", "m.x587 + 13.5*m.b680 <= 13.5) m.c894 = Constraint(expr= m.x588 +", "m.b624 + m.b642 + m.b645 >= 0) m.c1399 = Constraint(expr=", "Var(within=Reals,bounds=(None,None),initialize=0) m.x822 = Var(within=Reals,bounds=(None,None),initialize=0) m.x823 = Var(within=Reals,bounds=(None,None),initialize=0) m.x824 = Var(within=Reals,bounds=(None,None),initialize=0)", "m.x120 - m.x435 - m.x438 == 0) m.c484 = Constraint(expr=", "+ 0.999*m.b659)))*(0.001 + 0.999* m.b659) <= 0) m.c666 = Constraint(expr=(m.x489/(0.001", "Constraint(expr= m.x149 - m.x488 - m.x491 == 0) m.c678 =", "= Constraint(expr= m.x563 == 0) m.c900 = Constraint(expr= m.x564 ==", "m.b744 <= 0) m.c1342 = Constraint(expr= - m.b653 - m.b654", "m.b724 <= 1) m.c1179 = Constraint(expr= m.b722 + m.b724 <=", "Constraint(expr= m.b710 + m.b711 <= 1) m.c1154 = Constraint(expr= m.b711", "Constraint(expr= m.b713 + m.b715 <= 1) m.c1162 = Constraint(expr= m.b714", "m.c812 = Constraint(expr= m.x569 + 0.666992981045719*m.b671 <= 0.666992981045719) m.c813 =", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b611 = Var(within=Binary,bounds=(0,1),initialize=0) m.b612 = Var(within=Binary,bounds=(0,1),initialize=0) 
m.b613 =", "m.c937 = Constraint(expr= 6*m.b700 + m.x790 == 0) m.c938 =", "m.b696 <= 0) m.c1294 = Constraint(expr= - m.b605 - m.b606", "Constraint(expr= m.x388 - 9*m.b643 <= 0) m.c521 = Constraint(expr= m.x389", "= Constraint(expr= m.b713 + m.b715 <= 1) m.c1159 = Constraint(expr=", "m.x793 == 0) m.c941 = Constraint(expr= 4*m.b704 + m.x794 ==", "+ 0.666992981045719*m.b673 <= 0.666992981045719) m.c815 = Constraint(expr=(m.x572/(0.001 + 0.999*m.b674) -", "= Constraint(expr= 3*m.b734 + m.x824 == 0) m.c972 = Constraint(expr=", "m.b626 - m.b628 <= 0) m.c1045 = Constraint(expr= m.b627 -", "m.b735 + m.b736 <= 1) m.c1205 = Constraint(expr= m.b737 +", "= Constraint(expr= m.b677 - m.b767 <= 0) m.c1365 = Constraint(expr=", ">= 0) m.c1413 = Constraint(expr= m.b603 - m.b612 >= 0)", "= Constraint(expr= m.b647 - m.b648 <= 0) m.c1065 = Constraint(expr=", "m.b613 - m.b703 <= 0) m.c1301 = Constraint(expr= m.b614 -", "Constraint(expr= m.x287 + 3.34221486003388*m.b611 <= 3.34221486003388) m.c222 = Constraint(expr= m.x288", "30*m.x155 + 40*m.x156 + 40*m.x157 - m.x170 - m.x171 -", "+ 2.54515263975353*m.b617 <= 2.54515263975353) m.c276 = Constraint(expr= m.x276 + 2.54515263975353*m.b618", "- m.x318 - m.x324 == 0) m.c382 = Constraint(expr= m.x64", "Constraint(expr= m.x467 == 0) m.c618 = Constraint(expr= m.x468 == 0)", ">= 0) m.c1388 = Constraint(expr= - m.b617 + m.b635 +", "0.999* m.b605) <= 0) m.c135 = Constraint(expr=(m.x267/(0.001 + 0.999*m.b606) -", "m.b653 - m.b654 + m.b655 - m.b745 <= 0) m.c1343", "- m.x340 - m.x343 == 0) m.c272 = Constraint(expr= m.x269", "0.572481933717686) m.c461 = Constraint(expr=(m.x434/(0.001 + 0.999*m.b638) - 0.65*log(1 + m.x368/(0.001", "m.x443 + 9*m.b641 <= 9) m.c528 = Constraint(expr= m.x444 +", "= Constraint(expr= m.b745 + m.x835 == 0) m.c983 = Constraint(expr=", "Var(within=Reals,bounds=(0,20),initialize=0) m.x92 = Var(within=Reals,bounds=(0,None),initialize=0) m.x93 = Var(within=Reals,bounds=(0,None),initialize=0) m.x94 = 
Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= 8*m.b757 + m.x847 == 0) m.c995 = Constraint(expr= 10*m.b758", "1) m.c1193 = Constraint(expr= m.b731 + m.b732 <= 1) m.c1194", "m.b646 >= 0) m.c1400 = Constraint(expr= - m.b626 + m.b647", "<= 3.04984759446376) m.c605 = Constraint(expr= m.x458 - 1.11894339953103*m.b650 <= 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x215 = Var(within=Reals,bounds=(0,None),initialize=0) m.x216 = Var(within=Reals,bounds=(0,None),initialize=0) m.x217 = Var(within=Reals,bounds=(0,None),initialize=0)", "+ 1.18887736200171*m.b657 <= 1.18887736200171) m.c658 = Constraint(expr= m.x475 + 1.18887736200171*m.b658", "Constraint(expr= m.x419 == 0) m.c378 = Constraint(expr= m.x420 == 0)", "m.c911 = Constraint(expr= m.x560 - 15*m.b683 <= 0) m.c912 =", "+ m.x152 == 0) m.c39 = Constraint(expr= - m.x147 -", "m.x201 = Var(within=Reals,bounds=(0,None),initialize=0) m.x202 = Var(within=Reals,bounds=(0,None),initialize=0) m.x203 = Var(within=Reals,bounds=(0,None),initialize=0) m.x204", "= Constraint(expr= m.x131 - m.x458 - m.x461 == 0) m.c597", "m.b605 - m.b606 <= 0) m.c1023 = Constraint(expr= m.b605 -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x177 = Var(within=Reals,bounds=(0,None),initialize=0) m.x178 = Var(within=Reals,bounds=(0,None),initialize=0) m.x179 =", "m.b763 <= 1) m.c1257 = Constraint(expr= m.b761 + m.b763 <=", "m.x81 = Var(within=Reals,bounds=(0,None),initialize=0) m.x82 = Var(within=Reals,bounds=(0,None),initialize=0) m.x83 = Var(within=Reals,bounds=(0,None),initialize=0) m.x84", "m.x72 = Var(within=Reals,bounds=(0,None),initialize=0) m.x73 = Var(within=Reals,bounds=(0,None),initialize=0) m.x74 = Var(within=Reals,bounds=(0,None),initialize=0) m.x75", "0) m.c493 = Constraint(expr= m.x382 - 33.5*m.b640 <= 0) m.c494", "= Constraint(expr= m.x395 + 9*m.b644 <= 9) m.c549 = Constraint(expr=", "m.c681 = Constraint(expr= m.x477 - 1.18887736200171*m.b660 <= 0) m.c682 =", "0.940066550763924*m.b664 <= 
0.940066550763924) m.c713 = Constraint(expr= m.x518 - 0.705049913072943*m.b662 <=", "m.x817 == 0) m.c965 = Constraint(expr= 4*m.b728 + m.x818 ==", "= Constraint(expr= m.x404 - 3.04984759446376*m.b650 <= 0) m.c600 = Constraint(expr=", "Constraint(expr= m.b714 + m.b715 <= 1) m.c1163 = Constraint(expr= m.b716", "log(1 + m.x528/(0.001 + 0.999*m.b678)))*(0.001 + 0.999*m.b678) <= 0) m.c844", "111 804 0 0 0 0 # # Variable counts", "0) m.c11 = Constraint(expr= m.x23 - m.x26 - m.x29 -", "Constraint(expr= m.b618 - m.b619 <= 0) m.c1037 = Constraint(expr= m.b620", "10*m.b758 - 6*m.b759 - 3*m.b760 - 4*m.b761 - 8*m.b762 -", "m.c252 = Constraint(expr= m.x327 - 1.32154609891348*m.b615 <= 0) m.c253 =", "Constraint(expr= m.x78 - m.x102 - m.x105 - m.x108 == 0)", "0.999*m.b634) - log(1 + m.x331/(0.001 + 0.999*m.b634)))*(0.001 + 0.999*m.b634) <=", "2.30162356062425*m.b640 <= 0) m.c500 = Constraint(expr= m.x437 + 2.30162356062425*m.b638 <=", "15*m.b623 <= 0) m.c327 = Constraint(expr= m.x303 - 15*m.b624 <=", "m.b655 - m.b661 >= 0) m.c1472 = Constraint(expr= m.b662 -", "+ m.b601 - m.b607 >= 0) m.c1409 = Constraint(expr= m.b596", "0.999*m.b640) - 0.65*log(1 + m.x382/(0.001 + 0.999*m.b640)))*(0.001 + 0.999* m.b640)", "= Constraint(expr= m.b680 - m.b770 <= 0) m.c1368 = Constraint(expr=", "m.b661) <= 0) m.c668 = Constraint(expr= m.x479 == 0) m.c669", "m.b771 + m.b772 <= 1) m.c1277 = Constraint(expr= m.b773 +", "- m.x471 - m.x474 == 0) m.c649 = Constraint(expr= m.x142", "- m.x105 - m.x108 == 0) m.c31 = Constraint(expr= m.x79", "m.x145 = Var(within=Reals,bounds=(0,None),initialize=0) m.x146 = Var(within=Reals,bounds=(0,None),initialize=0) m.x147 = Var(within=Reals,bounds=(0,None),initialize=0) m.x148", "m.x555 = Var(within=Reals,bounds=(0,None),initialize=0) m.x556 = Var(within=Reals,bounds=(0,None),initialize=0) m.x557 = Var(within=Reals,bounds=(0,None),initialize=0) m.x558", "m.c199 = Constraint(expr= m.x280 - 15*m.b610 <= 0) m.c200 =", "m.x814 = 
Var(within=Reals,bounds=(None,None),initialize=0) m.x815 = Var(within=Reals,bounds=(None,None),initialize=0) m.x816 = Var(within=Reals,bounds=(None,None),initialize=0) m.x817", "m.c123 = Constraint(expr= m.x237 - 4.45628648004517*m.b603 <= 0) m.c124 =", "+ m.x847 == 0) m.c995 = Constraint(expr= 10*m.b758 + m.x848", "m.c180 = Constraint(expr= m.x36 - m.x255 - m.x258 == 0)", "<= 1) m.c1149 = Constraint(expr= m.b707 + m.b709 <= 1)", "Constraint(expr= m.b711 + m.b712 <= 1) m.c1155 = Constraint(expr= m.b710", "m.c1287 = Constraint(expr= - m.b599 + m.b600 - m.b690 <=", "- m.b639 + m.b640 - m.b730 <= 0) m.c1328 =", "m.b596 + m.b599 - m.b605 >= 0) m.c1407 = Constraint(expr=", "0) m.c1098 = Constraint(expr= m.b680 - m.b682 <= 0) m.c1099", "- m.x433 == 0) m.c449 = Constraint(expr= m.x362 - 1.26558121681553*m.b635", "0) m.c382 = Constraint(expr= m.x64 - m.x319 - m.x325 ==", "Constraint(expr= m.x457 == 0) m.c566 = Constraint(expr= m.x101 - m.x398", "Constraint(expr= m.x582 == 0) m.c850 = Constraint(expr= m.x583 == 0)", "m.b698 + m.b699 <= 1) m.c1128 = Constraint(expr= m.b698 +", "<= 1) m.c1106 = Constraint(expr= m.b687 + m.b688 <= 1)", "m.b739 <= 1) m.c1211 = Constraint(expr= m.b740 + m.b741 <=", "2*m.b731 + m.x821 == 0) m.c969 = Constraint(expr= 5*m.b732 +", "m.c574 = Constraint(expr= m.x400 - 3.04984759446376*m.b649 <= 0) m.c575 =", "m.x368 = Var(within=Reals,bounds=(0,None),initialize=0) m.x369 = Var(within=Reals,bounds=(0,None),initialize=0) m.x370 = Var(within=Reals,bounds=(0,None),initialize=0) m.x371", "Constraint(expr= m.x136 - m.x139 == 0) m.c35 = Constraint(expr= m.x137", "Var(within=Reals,bounds=(0,None),initialize=0) m.x57 = Var(within=Reals,bounds=(0,None),initialize=0) m.x58 = Var(within=Reals,bounds=(0,None),initialize=0) m.x59 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x211 = Var(within=Reals,bounds=(0,None),initialize=0) m.x212 = Var(within=Reals,bounds=(0,None),initialize=0) m.x213 =", "0) m.c1081 = 
Constraint(expr= m.b663 - m.b664 <= 0) m.c1082", "Var(within=Reals,bounds=(0,None),initialize=0) m.x127 = Var(within=Reals,bounds=(0,None),initialize=0) m.x128 = Var(within=Reals,bounds=(0,None),initialize=0) m.x129 = Var(within=Reals,bounds=(0,None),initialize=0)", "+ m.x271/(0.001 + 0.999*m.b619)))*(0.001 + 0.999*m.b619) <= 0) m.c260 =", "- 4*m.b775, sense=maximize) m.c2 = Constraint(expr= m.x2 - m.x5 -", "<= 1) m.c1241 = Constraint(expr= m.b755 + m.b756 <= 1)", "Constraint(expr= m.b672 - m.b673 <= 0) m.c1091 = Constraint(expr= m.b674", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b645 = Var(within=Binary,bounds=(0,1),initialize=0) m.b646 = Var(within=Binary,bounds=(0,1),initialize=0) m.b647 =", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x843 = Var(within=Reals,bounds=(None,None),initialize=0) m.x844 = Var(within=Reals,bounds=(None,None),initialize=0) m.x845 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x498 = Var(within=Reals,bounds=(0,None),initialize=0) m.x499 = Var(within=Reals,bounds=(0,None),initialize=0) m.x500 =", "m.x349 == 0) m.c293 = Constraint(expr= m.x53 - m.x296 -", "= Constraint(expr= m.x17 - m.x20 - m.x23 == 0) m.c9", "+ m.b681 - m.b771 <= 0) m.c1369 = Constraint(expr= -", "= Constraint(expr= m.b647 - m.b649 <= 0) m.c1066 = Constraint(expr=", "m.b702 <= 0) m.c1300 = Constraint(expr= - m.b611 - m.b612", "m.x334 + 1.32154609891348*m.b616 <= 1.32154609891348) m.c257 = Constraint(expr=(m.x338/(0.001 + 0.999*m.b617)", "m.x819 == 0) m.c967 = Constraint(expr= m.b730 + m.x820 ==", "= Constraint(expr= m.x474 == 0) m.c643 = Constraint(expr= m.x475 ==", "5*m.b747 - 2*m.b748 - 9*m.b749 - 2*m.b750 - 9*m.b751 -", "m.x817 = Var(within=Reals,bounds=(None,None),initialize=0) m.x818 = Var(within=Reals,bounds=(None,None),initialize=0) m.x819 = Var(within=Reals,bounds=(None,None),initialize=0) m.x820", "m.x215 == 0) m.c63 = Constraint(expr= m.x6 - m.x213 -", "m.c203 = Constraint(expr=(m.x314/(0.001 + 0.999*m.b611) - 1.25*log(1 + 
m.x284/(0.001 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x497 = Var(within=Reals,bounds=(0,None),initialize=0) m.x498 = Var(within=Reals,bounds=(0,None),initialize=0) m.x499 =", "0) m.c372 = Constraint(expr= m.x324 == 0) m.c373 = Constraint(expr=", "m.x452 - m.x455 == 0) m.c570 = Constraint(expr= m.x129 -", "- m.b652 <= 0) m.c1070 = Constraint(expr= m.b653 - m.b654", "15*m.b627 <= 0) m.c355 = Constraint(expr= m.x310 - 15*m.b628 <=", "m.x789 = Var(within=Reals,bounds=(None,None),initialize=0) m.x790 = Var(within=Reals,bounds=(None,None),initialize=0) m.x791 = Var(within=Reals,bounds=(None,None),initialize=0) m.x792", "+ 0.999*m.b658) <= 0) m.c641 = Constraint(expr= m.x473 == 0)", "- 0.994083415506506*m.b677 <= 0) m.c858 = Constraint(expr= m.x528 - 0.994083415506506*m.b678", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b695 = Var(within=Binary,bounds=(0,1),initialize=0) m.b696 = Var(within=Binary,bounds=(0,1),initialize=0) m.b697 =", "- m.x451 == 0) m.c545 = Constraint(expr= m.x392 - 9*m.b644", "Constraint(expr= - m.b641 + m.b642 - m.b732 <= 0) m.c1330", "m.c777 = Constraint(expr= m.x513 - 30*m.b669 <= 0) m.c778 =", "0.999*m.b617)))*(0.001 + 0.999*m.b617) <= 0) m.c258 = Constraint(expr=(m.x339/(0.001 + 0.999*m.b618)", "m.b752 + m.b754 <= 1) m.c1237 = Constraint(expr= m.b752 +", "Constraint(expr= m.b605 - m.b695 <= 0) m.c1293 = Constraint(expr= -", "m.x380/(0.001 + 0.999*m.b638)))*(0.001 + 0.999* m.b638) <= 0) m.c465 =", "m.c315 = Constraint(expr= m.x306 == 0) m.c316 = Constraint(expr= m.x307", "Var(within=Reals,bounds=(0,None),initialize=0) m.x63 = Var(within=Reals,bounds=(0,None),initialize=0) m.x64 = Var(within=Reals,bounds=(0,None),initialize=0) m.x65 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x234 == 0) m.c94 = Constraint(expr= m.x16 - m.x232 -", "= Constraint(expr= - m.b656 - m.b657 + m.b658 - m.b748", "Var(within=Reals,bounds=(0,None),initialize=0) m.x295 = Var(within=Reals,bounds=(0,None),initialize=0) m.x296 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x297 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c854 = Constraint(expr= m.x203 - m.x578 - m.x581 == 0)", "m.x430 - m.x433 == 0) m.c449 = Constraint(expr= m.x362 -", "= Constraint(expr= m.x433 + 0.572481933717686*m.b637 <= 0.572481933717686) m.c461 = Constraint(expr=(m.x434/(0.001", "m.x295 == 0) m.c236 = Constraint(expr= m.x332 == 0) m.c237", "m.c428 = Constraint(expr= m.x422 - 0.842233385663186*m.b632 <= 0) m.c429 =", "m.c908 = Constraint(expr= m.x209 - m.x590 - m.x593 == 0)", "0.75*log(1 + m.x400/(0.001 + 0.999*m.b649)))*(0.001 + 0.999* m.b649) <= 0)", "m.b660 - m.b750 <= 0) m.c1348 = Constraint(expr= - m.b659", "Constraint(expr= m.x198 - m.x567 - m.x570 == 0) m.c802 =", "m.x324 == 0) m.c373 = Constraint(expr= m.x325 == 0) m.c374", "m.b736 <= 1) m.c1203 = Constraint(expr= m.b734 + m.b736 <=", "<= 0.940066550763924) m.c774 = Constraint(expr= m.x510 + 0.940066550763924*m.b669 <= 0.940066550763924)", "m.x93 = Var(within=Reals,bounds=(0,None),initialize=0) m.x94 = Var(within=Reals,bounds=(0,None),initialize=0) m.x95 = Var(within=Reals,bounds=(0,None),initialize=0) m.x96", "Var(within=Reals,bounds=(0,None),initialize=0) m.x362 = Var(within=Reals,bounds=(0,None),initialize=0) m.x363 = Var(within=Reals,bounds=(0,None),initialize=0) m.x364 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b681 = Var(within=Binary,bounds=(0,1),initialize=0) m.b682 = Var(within=Binary,bounds=(0,1),initialize=0) m.b683 = Var(within=Binary,bounds=(0,1),initialize=0) m.b684", "- m.x96 - m.x99 == 0) m.c28 = Constraint(expr= m.x76", "= Constraint(expr= m.x146 - m.x482 - m.x485 == 0) m.c651", "== 0) m.c447 = Constraint(expr= m.x117 - m.x429 - m.x432", "0) m.c687 = Constraint(expr= m.x489 - 0.940066550763924*m.b660 <= 0) m.c688", "Constraint(expr= m.b642 - m.b643 <= 0) m.c1061 = Constraint(expr= m.b644", "= Constraint(expr= m.x173 - m.x182 - m.x185 == 0) m.c48", "- 9*m.b766 - 4*m.b767 - 8*m.b768 - 6*m.b769 - 2*m.b770", "= 
Constraint(expr= m.x229 == 0) m.c62 = Constraint(expr= m.x5 -", "0) m.c226 = Constraint(expr= m.x316 - 1.83548069293539*m.b613 <= 0) m.c227", "Constraint(expr= m.x510 + 0.940066550763924*m.b669 <= 0.940066550763924) m.c775 = Constraint(expr= m.x511", "m.x177 = Var(within=Reals,bounds=(0,None),initialize=0) m.x178 = Var(within=Reals,bounds=(0,None),initialize=0) m.x179 = Var(within=Reals,bounds=(0,None),initialize=0) m.x180", "m.x437 + 2.30162356062425*m.b638 <= 2.30162356062425) m.c501 = Constraint(expr= m.x438 +", "- 0.690184503917672*m.b677 <= 0) m.c864 = Constraint(expr= m.x579 - 0.690184503917672*m.b678", "Constraint(expr= m.b627 - m.b651 >= 0) m.c1453 = Constraint(expr= m.b628", "m.x466 - m.x469 == 0) m.c626 = Constraint(expr= m.x410 -", "m.b700 <= 1) m.c1129 = Constraint(expr= m.b698 + m.b699 <=", "m.x404 = Var(within=Reals,bounds=(0,None),initialize=0) m.x405 = Var(within=Reals,bounds=(0,None),initialize=0) m.x406 = Var(within=Reals,bounds=(0,None),initialize=0) m.x407", "- 4.45628648004517*m.b600 <= 0) m.c103 = Constraint(expr= m.x232 - 4.45628648004517*m.b601", "= Constraint(expr= - m.b653 - m.b654 + m.b655 - m.b745", "+ 0.940066550763924*m.b667 <= 0.940066550763924) m.c740 = Constraint(expr= m.x524 - 0.994083415506506*m.b665", "m.c331 = Constraint(expr= m.x307 + 15*m.b625 <= 15) m.c332 =", "m.c1055 = Constraint(expr= m.b638 - m.b639 <= 0) m.c1056 =", "m.b674 - m.b764 <= 0) m.c1362 = Constraint(expr= - m.b674", "0.999*m.b616)))*(0.001 + 0.999* m.b616) <= 0) m.c233 = Constraint(expr= m.x293", "+ m.b666 - m.b756 <= 0) m.c1354 = Constraint(expr= -", "- m.b675 + m.b676 - m.b766 <= 0) m.c1364 =", "m.x471 = Var(within=Reals,bounds=(0,None),initialize=0) m.x472 = Var(within=Reals,bounds=(0,None),initialize=0) m.x473 = Var(within=Reals,bounds=(0,None),initialize=0) m.x474", "- 3.34221486003388*m.b602 <= 0) m.c129 = Constraint(expr= m.x261 - 3.34221486003388*m.b603", "m.c1138 = Constraint(expr= m.b702 + m.b703 <= 1) m.c1139 =", "Constraint(expr= m.x593 + 9*m.b683 
<= 9) m.c921 = Constraint(expr= m.x594", "0) m.c983 = Constraint(expr= 2*m.b746 + m.x836 == 0) m.c984", "sense=maximize) m.c2 = Constraint(expr= m.x2 - m.x5 - m.x8 ==", "+ m.x842 == 0) m.c990 = Constraint(expr= 8*m.b753 + m.x843", "m.c238 = Constraint(expr= m.x334 == 0) m.c239 = Constraint(expr= m.x50", "m.c1080 = Constraint(expr= m.b662 - m.b664 <= 0) m.c1081 =", "+ 0.999*m.b649) - 0.75*log(1 + m.x400/(0.001 + 0.999*m.b649)))*(0.001 + 0.999*", "m.x574 - 0.480234946352917*m.b676 <= 0) m.c839 = Constraint(expr= m.x575 +", "= Constraint(expr= m.b626 - m.b650 >= 0) m.c1452 = Constraint(expr=", "- m.x137 == 0) m.c33 = Constraint(expr= m.x135 - m.x138", "<= 1) m.c1144 = Constraint(expr= m.b705 + m.b706 <= 1)", "0) m.c779 = Constraint(expr= m.x515 + 30*m.b668 <= 30) m.c780", "- m.b681 >= 0) m.c1483 = Constraint(expr= m.b670 - m.b682", "Constraint(expr= 2*m.b712 + m.x802 == 0) m.c950 = Constraint(expr= 4*m.b713", "- 1.04900943706034*m.b648 <= 0) m.c580 = Constraint(expr= m.x454 - 1.04900943706034*m.b649", "1) m.c1210 = Constraint(expr= m.b738 + m.b739 <= 1) m.c1211", "m.x382 = Var(within=Reals,bounds=(0,None),initialize=0) m.x383 = Var(within=Reals,bounds=(0,None),initialize=0) m.x384 = Var(within=Reals,bounds=(0,None),initialize=0) m.x385", "+ m.b615 - m.b705 <= 0) m.c1303 = Constraint(expr= -", "m.x364 = Var(within=Reals,bounds=(0,None),initialize=0) m.x365 = Var(within=Reals,bounds=(0,None),initialize=0) m.x366 = Var(within=Reals,bounds=(0,None),initialize=0) m.x367", "Constraint(expr= m.b665 - m.b666 <= 0) m.c1083 = Constraint(expr= m.b665", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b636 = Var(within=Binary,bounds=(0,1),initialize=0) m.b637 = Var(within=Binary,bounds=(0,1),initialize=0) m.b638 =", "m.b644 - m.b645 <= 0) m.c1062 = Constraint(expr= m.b644 -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x410 = Var(within=Reals,bounds=(0,None),initialize=0) m.x411 = Var(within=Reals,bounds=(0,None),initialize=0) m.x412 =", "m.x441 == 0) m.c505 = 
Constraint(expr= - m.x388 + m.x442", "m.b731 + m.b732 <= 1) m.c1194 = Constraint(expr= m.b731 +", "0) m.c1288 = Constraint(expr= - m.b599 - m.b600 + m.b601", "240*m.x208 + 350*m.x209 + 250*m.x210 + 300*m.x211 - 5*m.b686 -", "m.c835 = Constraint(expr= m.x553 + 0.705049913072943*m.b676 <= 0.705049913072943) m.c836 =", "m.b614 >= 0) m.c1377 = Constraint(expr= - m.b603 + m.b612", "0) m.c118 = Constraint(expr= m.x28 - m.x238 - m.x241 ==", "0) m.c507 = Constraint(expr= m.x390 == 0) m.c508 = Constraint(expr=", "m.b653 >= 0) m.c1401 = Constraint(expr= - m.b627 + m.b648", "m.c692 = Constraint(expr= - 0.75*m.x494 + m.x518 == 0) m.c693", "m.b616 + m.b634 >= 0) m.c1385 = Constraint(expr= - m.b605", "m.x36 - 2*m.x37 - 10*m.x86 - 5*m.x87 - 5*m.x88 -", "- m.x339 - m.x342 == 0) m.c271 = Constraint(expr= m.x70", "= Constraint(expr= - m.b644 + m.b645 - m.b735 <= 0)", "Constraint(expr= m.x294 + 3.34221486003388*m.b615 <= 3.34221486003388) m.c250 = Constraint(expr= m.x295", "0) m.c1059 = Constraint(expr= m.b641 - m.b643 <= 0) m.c1060", "+ m.x544/(0.001 + 0.999*m.b673)))*(0.001 + 0.999* m.b673) <= 0) m.c791", "m.x8 == 0) m.c3 = Constraint(expr= m.x3 - m.x6 -", "1) m.c1230 = Constraint(expr= m.b749 + m.b751 <= 1) m.c1231", "= Constraint(expr= m.x553 == 0) m.c821 = Constraint(expr= m.x575 ==", "== 0) m.c729 = Constraint(expr= m.x165 - m.x501 - m.x504", "m.x502 - m.x505 == 0) m.c731 = Constraint(expr= m.x176 -", "Constraint(expr= m.b671 - m.b761 <= 0) m.c1359 = Constraint(expr= -", "+ m.b748 <= 1) m.c1228 = Constraint(expr= m.b747 + m.b748", "m.b644 - m.b646 <= 0) m.c1063 = Constraint(expr= m.b645 -", "= Constraint(expr= m.x217 == 0) m.c59 = Constraint(expr= m.x227 ==", "Constraint(expr= m.x407 == 0) m.c588 = Constraint(expr= m.x408 == 0)", "m.c1172 = Constraint(expr= m.b720 + m.b721 <= 1) m.c1173 =", "+ 350*m.x199 + 290*m.x200 + 405*m.x201 + 190*m.x202 + 280*m.x203", "9*m.b641 <= 9) m.c528 = Constraint(expr= m.x444 + 9*m.b642 <=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x383 
= Var(within=Reals,bounds=(0,None),initialize=0) m.x384 = Var(within=Reals,bounds=(0,None),initialize=0) m.x385 =", "0) m.c1033 = Constraint(expr= m.b615 - m.b616 <= 0) m.c1034", "- 3.34221486003388*m.b616 <= 0) m.c248 = Constraint(expr= m.x293 + 3.34221486003388*m.b614", "m.c1374 = Constraint(expr= m.b597 + m.b600 == 1) m.c1375 =", "4.45628648004517) m.c190 = Constraint(expr= m.x253 + 4.45628648004517*m.b610 <= 4.45628648004517) m.c191", "m.b746 + m.b747 <= 1) m.c1224 = Constraint(expr= m.b746 +", "= Constraint(expr= m.x171 - m.x513 - m.x516 == 0) m.c766", "m.b710 + m.b711 <= 1) m.c1152 = Constraint(expr= m.b710 +", "Var(within=Binary,bounds=(0,1),initialize=0) m.b749 = Var(within=Binary,bounds=(0,1),initialize=0) m.b750 = Var(within=Binary,bounds=(0,1),initialize=0) m.b751 = Var(within=Binary,bounds=(0,1),initialize=0)", "0) m.c139 = Constraint(expr= m.x247 == 0) m.c140 = Constraint(expr=", "m.x553 + 0.705049913072943*m.b676 <= 0.705049913072943) m.c836 = Constraint(expr= m.x572 -", "0) m.c619 = Constraint(expr= m.x469 == 0) m.c620 = Constraint(expr=", "Var(within=Binary,bounds=(0,1),initialize=0) m.b740 = Var(within=Binary,bounds=(0,1),initialize=0) m.b741 = Var(within=Binary,bounds=(0,1),initialize=0) m.b742 = Var(within=Binary,bounds=(0,1),initialize=0)", "== 0) m.c756 = Constraint(expr= m.x516 == 0) m.c757 =", "m.x79 - m.x103 - m.x106 - m.x109 == 0) m.c32", "m.b693 = Var(within=Binary,bounds=(0,1),initialize=0) m.b694 = Var(within=Binary,bounds=(0,1),initialize=0) m.b695 = Var(within=Binary,bounds=(0,1),initialize=0) m.b696", "m.c175 = Constraint(expr= m.x283 == 0) m.c176 = Constraint(expr= m.x32", "+ 0.999* m.b639) <= 0) m.c466 = Constraint(expr=(m.x436/(0.001 + 0.999*m.b640)", "m.x286/(0.001 + 0.999*m.b613)))*(0.001 + 0.999* m.b613) <= 0) m.c206 =", "m.x316 - 1.83548069293539*m.b613 <= 0) m.c227 = Constraint(expr= m.x320 +", "m.c8 = Constraint(expr= m.x17 - m.x20 - m.x23 == 0)", "- 0.690184503917672*m.b679 <= 0) m.c866 = Constraint(expr= m.x581 + 
0.690184503917672*m.b677", "== 0) m.c269 = Constraint(expr= m.x68 - m.x338 - m.x341", "Var(within=Reals,bounds=(0,None),initialize=0) m.x34 = Var(within=Reals,bounds=(0,None),initialize=0) m.x35 = Var(within=Reals,bounds=(0,30),initialize=0) m.x36 = Var(within=Reals,bounds=(0,30),initialize=0)", "0.999*m.b598) <= 0) m.c56 = Constraint(expr= m.x215 == 0) m.c57", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x575 = Var(within=Reals,bounds=(0,None),initialize=0) m.x576 = Var(within=Reals,bounds=(0,None),initialize=0) m.x577 =", "= Constraint(expr= m.b731 + m.b733 <= 1) m.c1195 = Constraint(expr=", "<= 0) m.c1288 = Constraint(expr= - m.b599 - m.b600 +", "m.b727 <= 1) m.c1185 = Constraint(expr= m.b725 + m.b727 <=", "= Constraint(expr= m.x443 + 9*m.b641 <= 9) m.c528 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x246 = Var(within=Reals,bounds=(0,None),initialize=0) m.x247 = Var(within=Reals,bounds=(0,None),initialize=0) m.x248 =", "m.x291/(0.001 + 0.999*m.b615)))*(0.001 + 0.999* m.b615) <= 0) m.c232 =", "= Constraint(expr= m.x310 - 15*m.b628 <= 0) m.c356 = Constraint(expr=", "- m.b630 + m.b631 - m.b721 <= 0) m.c1319 =", "+ m.x527/(0.001 + 0.999*m.b677)))*(0.001 + 0.999*m.b677) <= 0) m.c843 =", "m.x388 - m.x391 == 0) m.c515 = Constraint(expr= m.x122 -", "m.c684 = Constraint(expr= m.x480 + 1.18887736200171*m.b660 <= 1.18887736200171) m.c685 =", "<= 0) m.c1362 = Constraint(expr= - m.b674 + m.b675 -", "m.x455 == 0) m.c564 = Constraint(expr= m.x456 == 0) m.c565", "m.c483 = Constraint(expr= m.x120 - m.x435 - m.x438 == 0)", "0.999*m.b640)))*(0.001 + 0.999* m.b640) <= 0) m.c464 = Constraint(expr=(m.x434/(0.001 +", "0.999*m.b613)))*(0.001 + 0.999* m.b613) <= 0) m.c206 = Constraint(expr= m.x287", "m.c1393 = Constraint(expr= - m.b610 + m.b622 + m.b625 +", "= Constraint(expr= - m.x13 - m.x16 + m.x19 == 0)", "= Constraint(expr= m.x332 + 1.32154609891348*m.b614 <= 1.32154609891348) m.c255 = Constraint(expr=", "m.x14 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x15 = Var(within=Reals,bounds=(0,None),initialize=0) m.x16 = Var(within=Reals,bounds=(0,None),initialize=0) m.x17", "= Constraint(expr= m.x289 + 3.34221486003388*m.b613 <= 3.34221486003388) m.c224 = Constraint(expr=", "0) m.c832 = Constraint(expr= m.x550 - 0.705049913072943*m.b676 <= 0) m.c833", "m.x229 + 3.71357206670431*m.b598 <= 3.71357206670431) m.c80 = Constraint(expr=(m.x230/(0.001 + 0.999*m.b599)", "m.c1283 = Constraint(expr= m.b596 - m.b686 <= 0) m.c1284 =", "<= 1) m.c1111 = Constraint(expr= m.b689 + m.b690 <= 1)", "0) m.c653 = Constraint(expr= m.x470 - 1.18887736200171*m.b656 <= 0) m.c654", "m.c231 = Constraint(expr=(m.x327/(0.001 + 0.999*m.b615) - 0.9*log(1 + m.x291/(0.001 +", "0) m.c559 = Constraint(expr=(m.x454/(0.001 + 0.999*m.b649) - 0.75*log(1 + m.x400/(0.001", "m.c1412 = Constraint(expr= m.b602 - m.b611 >= 0) m.c1413 =", "+ m.b601 == 1) m.c1376 = Constraint(expr= - m.b602 +", "Constraint(expr= m.x225 - 3.71357206670431*m.b597 <= 0) m.c76 = Constraint(expr= m.x226", "- m.x512 - m.x515 == 0) m.c765 = Constraint(expr= m.x171", "m.x95 - m.x386 - m.x389 == 0) m.c513 = Constraint(expr=", "Constraint(expr= m.x419 + 20*m.b629 <= 20) m.c405 = Constraint(expr= m.x420", "Constraint(expr= m.b737 + m.b738 <= 1) m.c1208 = Constraint(expr= m.b738", "Var(within=Reals,bounds=(0,None),initialize=0) m.x427 = Var(within=Reals,bounds=(0,None),initialize=0) m.x428 = Var(within=Reals,bounds=(0,None),initialize=0) m.x429 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b655 <= 0) m.c1072 = Constraint(expr= m.b654 - m.b655 <=", "+ m.b611 + m.b614 >= 0) m.c1377 = Constraint(expr= -", "m.c578 = Constraint(expr= m.x452 - 1.04900943706034*m.b647 <= 0) m.c579 =", "= Constraint(expr= m.x559 == 0) m.c875 = Constraint(expr= m.x587 ==", "m.x393 - 9*m.b645 <= 0) m.c547 = Constraint(expr= m.x394 -", "- 1.83548069293539*m.b629 <= 0) m.c390 = Constraint(expr= m.x318 - 1.83548069293539*m.b630", "8*m.b729 - m.b730 - 2*m.b731 - 5*m.b732 
- 2*m.b733 -", "<= 0) m.c627 = Constraint(expr= m.x411 - 3.04984759446376*m.b654 <= 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x513 = Var(within=Reals,bounds=(0,None),initialize=0) m.x514 = Var(within=Reals,bounds=(0,None),initialize=0) m.x515 = Var(within=Reals,bounds=(0,None),initialize=0)", "1) m.c1265 = Constraint(expr= m.b767 + m.b768 <= 1) m.c1266", "== 0) m.c240 = Constraint(expr= m.x51 - m.x291 - m.x294", "+ 20*m.b631 <= 20) m.c407 = Constraint(expr=(m.x422/(0.001 + 0.999*m.b632) -", "== 0) m.c241 = Constraint(expr= m.x52 - m.x292 - m.x295", "== 0) m.c706 = Constraint(expr= m.x175 - m.x520 - m.x523", "m.b651) <= 0) m.c586 = Constraint(expr=(m.x460/(0.001 + 0.999*m.b652) - 0.8*log(1", "+ m.x860 == 0) m.c1008 = Constraint(expr= m.b771 + m.x861", "+ 15*m.b627 <= 15) m.c358 = Constraint(expr= m.x313 + 15*m.b628", "0.999* m.b637) <= 0) m.c437 = Constraint(expr= m.x365 == 0)", "m.c720 = Constraint(expr=(m.x525/(0.001 + 0.999*m.b666) - 1.5*log(1 + m.x501/(0.001 +", "m.c912 = Constraint(expr= m.x561 - 15*m.b684 <= 0) m.c913 =", "Constraint(expr= m.b719 + m.b721 <= 1) m.c1174 = Constraint(expr= m.b720", "<= 0) m.c864 = Constraint(expr= m.x579 - 0.690184503917672*m.b678 <= 0)", "m.c994 = Constraint(expr= 8*m.b757 + m.x847 == 0) m.c995 =", "9*m.b644 <= 9) m.c549 = Constraint(expr= m.x396 + 9*m.b645 <=", "= Constraint(expr= m.x467 + 1.18887736200171*m.b653 <= 1.18887736200171) m.c636 = Constraint(expr=", "= Constraint(expr= m.x523 + 0.705049913072943*m.b664 <= 0.705049913072943) m.c719 = Constraint(expr=(m.x524/(0.001", "Constraint(expr= m.b674 - m.b764 <= 0) m.c1362 = Constraint(expr= -", "= Constraint(expr= m.x451 + 9*m.b646 <= 9) m.c557 = Constraint(expr=(m.x452/(0.001", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b623 = Var(within=Binary,bounds=(0,1),initialize=0) m.b624 = Var(within=Binary,bounds=(0,1),initialize=0) m.b625 =", "== 0) m.c27 = Constraint(expr= m.x75 - m.x96 - m.x99", "- 15*m.b610 <= 0) m.c200 = Constraint(expr= m.x281 + 15*m.b608", 
"0.999* m.b615) <= 0) m.c232 = Constraint(expr=(m.x328/(0.001 + 0.999*m.b616) -", "0) m.c295 = Constraint(expr= m.x55 - m.x298 - m.x301 ==", "m.x519 - 0.705049913072943*m.b663 <= 0) m.c715 = Constraint(expr= m.x520 -", "1) m.c1111 = Constraint(expr= m.b689 + m.b690 <= 1) m.c1112", "= Constraint(expr= m.x578 - 0.690184503917672*m.b677 <= 0) m.c864 = Constraint(expr=", "Constraint(expr= m.x100 - m.x394 - m.x397 == 0) m.c542 =", "m.c1176 = Constraint(expr= m.b722 + m.b724 <= 1) m.c1177 =", "Constraint(expr= m.b713 + m.b714 <= 1) m.c1158 = Constraint(expr= m.b713", "1) m.c1143 = Constraint(expr= m.b704 + m.b706 <= 1) m.c1144", "= Constraint(expr=(m.x230/(0.001 + 0.999*m.b599) - 1.2*log(1 + m.x218/(0.001 + 0.999*m.b599)))*(0.001", "Constraint(expr= m.x51 - m.x291 - m.x294 == 0) m.c241 =", "m.x29 - m.x242 - m.x245 == 0) m.c144 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x152 = Var(within=Reals,bounds=(0,None),initialize=0) m.x153 = Var(within=Reals,bounds=(0,None),initialize=0) m.x154 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= 8*m.b753 + m.x843 == 0) m.c991 = Constraint(expr=", "m.x519 == 0) m.c694 = Constraint(expr= - 0.75*m.x496 + m.x520", "m.x530 == 0) m.c732 = Constraint(expr= m.x177 - m.x525 -", "= Constraint(expr=(m.x226/(0.001 + 0.999*m.b598) - log(1 + m.x214/(0.001 + 0.999*m.b598)))*(0.001", "m.x393 + m.x447 == 0) m.c532 = Constraint(expr= - m.x394", "m.x93 == 0) m.c25 = Constraint(expr= - m.x73 - m.x91", "= Constraint(expr= m.x214 - 40*m.b598 <= 0) m.c71 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x104 = Var(within=Reals,bounds=(0,None),initialize=0) m.x105 = Var(within=Reals,bounds=(0,None),initialize=0) m.x106 =", "== 0) m.c565 = Constraint(expr= m.x457 == 0) m.c566 =", "== 0) m.c28 = Constraint(expr= m.x76 - m.x97 - m.x100", "20*m.x124 + 20*m.x125 + 30*m.x126 + 35*m.x127 + 25*m.x128 +", "Var(within=Reals,bounds=(None,None),initialize=0) m.x783 = 
Var(within=Reals,bounds=(None,None),initialize=0) m.x784 = Var(within=Reals,bounds=(None,None),initialize=0) m.x785 = Var(within=Reals,bounds=(None,None),initialize=0)", "m.b669 <= 0) m.c1086 = Constraint(expr= m.b668 - m.b670 <=", "Total E G L N X C B # 1486", "= Objective(expr= - m.x2 - m.x3 - m.x4 + 5*m.x20", "Constraint(expr= m.b596 - m.b597 <= 0) m.c1014 = Constraint(expr= m.b596", "Constraint(expr= m.x98 - m.x392 - m.x395 == 0) m.c540 =", "<= 1) m.c1276 = Constraint(expr= m.b771 + m.b772 <= 1)", "counts # Total const NL DLL # 3373 3193 180", "0) m.c388 = Constraint(expr= m.x112 - m.x418 - m.x421 ==", "+ m.b702 <= 1) m.c1136 = Constraint(expr= m.b702 + m.b703", "= Constraint(expr= m.x540 == 0) m.c760 = Constraint(expr= m.x541 ==", "m.c833 = Constraint(expr= m.x551 + 0.705049913072943*m.b674 <= 0.705049913072943) m.c834 =", "m.c475 = Constraint(expr= m.x439 == 0) m.c476 = Constraint(expr= m.x83", "m.b612) <= 0) m.c205 = Constraint(expr=(m.x316/(0.001 + 0.999*m.b613) - 1.25*log(1", "m.b641 - m.b642 <= 0) m.c1059 = Constraint(expr= m.b641 -", "== 0) m.c851 = Constraint(expr= m.x176 - m.x527 - m.x533", "m.x57 - m.x303 - m.x306 == 0) m.c322 = Constraint(expr=", "- m.b754 <= 0) m.c1352 = Constraint(expr= m.b665 - m.b755", "= Constraint(expr= m.x401 + 3.04984759446376*m.b647 <= 3.04984759446376) m.c576 = Constraint(expr=", "- m.x412 - m.x415 == 0) m.c623 = Constraint(expr= m.x134", "9*m.b684 <= 9) m.c922 = Constraint(expr= m.x595 + 9*m.b685 <=", "m.c148 = Constraint(expr= m.x43 - m.x268 - m.x274 == 0)", "Constraint(expr= m.b749 + m.b750 <= 1) m.c1230 = Constraint(expr= m.b749", "m.x467 + 1.18887736200171*m.b653 <= 1.18887736200171) m.c636 = Constraint(expr= m.x468 +", "Constraint(expr= m.x490 - 0.940066550763924*m.b661 <= 0) m.c689 = Constraint(expr= m.x491", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x236 = Var(within=Reals,bounds=(0,None),initialize=0) m.x237 = Var(within=Reals,bounds=(0,None),initialize=0) m.x238 =", "+ 0.999* m.b676) <= 0) m.c818 = 
Constraint(expr= m.x551 ==", "m.x532 == 0) m.c734 = Constraint(expr= m.x500 - 0.940066550763924*m.b665 <=", "m.b665 - m.b666 <= 0) m.c1083 = Constraint(expr= m.b665 -", "m.x17 == 0) m.c6 = Constraint(expr= - m.x12 - m.x15", "Constraint(expr= - m.x250 + m.x280 == 0) m.c164 = Constraint(expr=", "+ 1.83548069293539*m.b613 <= 1.83548069293539) m.c230 = Constraint(expr=(m.x326/(0.001 + 0.999*m.b614) -", "0.572481933717686*m.b635 <= 0) m.c456 = Constraint(expr= m.x429 - 0.572481933717686*m.b636 <=", "Constraint(expr= m.b653 - m.b743 <= 0) m.c1341 = Constraint(expr= -", "m.c39 = Constraint(expr= - m.x147 - m.x150 + m.x153 ==", "m.c336 = Constraint(expr= m.x354 + 9*m.b624 <= 9) m.c337 =", "- m.x490 - m.x493 == 0) m.c680 = Constraint(expr= m.x476", "m.c755 = Constraint(expr= m.x515 == 0) m.c756 = Constraint(expr= m.x516", "<= 0) m.c709 = Constraint(expr= m.x496 - 0.940066550763924*m.b664 <= 0)", "= Constraint(expr= m.b641 - m.b643 <= 0) m.c1060 = Constraint(expr=", "m.x401 + 3.04984759446376*m.b647 <= 3.04984759446376) m.c576 = Constraint(expr= m.x402 +", "Constraint(expr= m.b773 + m.b775 <= 1) m.c1282 = Constraint(expr= m.b774", "0) m.c187 = Constraint(expr= m.x250 - 4.45628648004517*m.b610 <= 0) m.c188", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b769 = Var(within=Binary,bounds=(0,1),initialize=0) m.b770 = Var(within=Binary,bounds=(0,1),initialize=0) m.b771 =", "m.x535 + 0.994083415506506*m.b679 <= 0.994083415506506) m.c863 = Constraint(expr= m.x578 -", "1) m.c1150 = Constraint(expr= m.b708 + m.b709 <= 1) m.c1151", "4.45628648004517) m.c155 = Constraint(expr= m.x266 - 2.54515263975353*m.b605 <= 0) m.c156", "= Constraint(expr= m.x76 - m.x352 - m.x355 == 0) m.c326", "+ 390*m.x198 + 350*m.x199 + 290*m.x200 + 405*m.x201 + 190*m.x202", "m.c1111 = Constraint(expr= m.b689 + m.b690 <= 1) m.c1112 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x506 = Var(within=Reals,bounds=(0,None),initialize=0) m.x507 = Var(within=Reals,bounds=(0,None),initialize=0) m.x508 = 
Var(within=Reals,bounds=(0,None),initialize=0)", "- m.x3 - m.x4 + 5*m.x20 + 10*m.x21 + 5*m.x22", "1) m.c1177 = Constraint(expr= m.b722 + m.b723 <= 1) m.c1178", "m.c1200 = Constraint(expr= m.b734 + m.b736 <= 1) m.c1201 =", "m.x590 = Var(within=Reals,bounds=(0,None),initialize=0) m.x591 = Var(within=Reals,bounds=(0,None),initialize=0) m.x592 = Var(within=Reals,bounds=(0,None),initialize=0) m.x593", "Constraint(expr= m.x535 + 0.994083415506506*m.b679 <= 0.994083415506506) m.c863 = Constraint(expr= m.x578", "0.999* m.b667) <= 0) m.c722 = Constraint(expr= m.x503 == 0)", "== 0) m.c176 = Constraint(expr= m.x32 - m.x248 - m.x251", "+ 0.999* m.b613) <= 0) m.c206 = Constraint(expr= m.x287 ==", "m.x130 = Var(within=Reals,bounds=(0,None),initialize=0) m.x131 = Var(within=Reals,bounds=(0,None),initialize=0) m.x132 = Var(within=Reals,bounds=(0,None),initialize=0) m.x133", "7*m.b764 - 3*m.b765 - 9*m.b766 - 4*m.b767 - 8*m.b768 -", "- 3*m.b772 - 8*m.b773 - 3*m.b774 - 4*m.b775, sense=maximize) m.c2", "<= 0) m.c246 = Constraint(expr= m.x291 - 3.34221486003388*m.b615 <= 0)", "+ 3.04984759446376*m.b627 <= 3.04984759446376) m.c364 = Constraint(expr= m.x361 + 3.04984759446376*m.b628", "Constraint(expr= m.x68 - m.x338 - m.x341 == 0) m.c270 =", "1.11894339953103) m.c609 = Constraint(expr= m.x462 + 1.11894339953103*m.b651 <= 1.11894339953103) m.c610", "- m.b759 <= 0) m.c1357 = Constraint(expr= - m.b668 -", "m.c273 = Constraint(expr= m.x270 - 2.54515263975353*m.b618 <= 0) m.c274 =", "30*m.b668 <= 0) m.c777 = Constraint(expr= m.x513 - 30*m.b669 <=", "0) m.c1446 = Constraint(expr= m.b624 - m.b645 >= 0) m.c1447", "m.x330 = Var(within=Reals,bounds=(0,None),initialize=0) m.x331 = Var(within=Reals,bounds=(0,None),initialize=0) m.x332 = Var(within=Reals,bounds=(0,None),initialize=0) m.x333", "Constraint(expr= 5*m.b732 + m.x822 == 0) m.c970 = Constraint(expr= 2*m.b733", "m.x204 = Var(within=Reals,bounds=(0,None),initialize=0) m.x205 = Var(within=Reals,bounds=(0,None),initialize=0) m.x206 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x207", "= Constraint(expr= m.x424 - 0.842233385663186*m.b634 <= 0) m.c431 = Constraint(expr=", "Constraint(expr= m.x445 + 9*m.b643 <= 9) m.c530 = Constraint(expr= -", "m.x286 - m.x289 == 0) m.c215 = Constraint(expr= m.x62 -", "Constraint(expr= m.x361 + 3.04984759446376*m.b628 <= 3.04984759446376) m.c365 = Constraint(expr= -", "<= 0) m.c552 = Constraint(expr= m.x447 - 9*m.b645 <= 0)", "m.x180 - m.x189 - m.x192 - m.x195 == 0) m.c52", "- m.x416 - m.x419 == 0) m.c387 = Constraint(expr= m.x111", "Constraint(expr= m.b608 - m.b620 >= 0) m.c1422 = Constraint(expr= m.b609", "Constraint(expr= 4*m.b775 + m.x865 == 0) m.c1013 = Constraint(expr= m.b596", "Constraint(expr= m.x74 - m.x95 - m.x98 == 0) m.c27 =", "m.b704 <= 0) m.c1302 = Constraint(expr= - m.b614 + m.b615", "m.c642 = Constraint(expr= m.x474 == 0) m.c643 = Constraint(expr= m.x475", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x848 = Var(within=Reals,bounds=(None,None),initialize=0) m.x849 = Var(within=Reals,bounds=(None,None),initialize=0) m.x850 =", "m.x136 - m.x139 == 0) m.c35 = Constraint(expr= m.x137 -", "m.b735 <= 0) m.c1333 = Constraint(expr= - m.b644 - m.b645", "Constraint(expr= 4*m.b694 + m.x784 == 0) m.c932 = Constraint(expr= 10*m.b695", "= Constraint(expr= - m.b627 + m.b648 + m.b651 + m.b654", "m.x591 == 0) m.c898 = Constraint(expr= - 0.6*m.x562 + m.x592", "0) m.c1447 = Constraint(expr= m.b625 - m.b646 >= 0) m.c1448", "m.x73 - m.x346 - m.x349 == 0) m.c299 = Constraint(expr=", "m.x503 + 0.940066550763924*m.b665 <= 0.940066550763924) m.c738 = Constraint(expr= m.x504 +", "0) m.c163 = Constraint(expr= - m.x250 + m.x280 == 0)", "0) m.c323 = Constraint(expr= m.x74 - m.x350 - m.x353 ==", "== 0) m.c122 = Constraint(expr= m.x236 - 4.45628648004517*m.b602 <= 0)", "= Constraint(expr= m.x230 - 4.45628648004517*m.b599 <= 0) m.c102 = Constraint(expr=", "m.x413 == 0) m.c615 = Constraint(expr= m.x414 == 0) m.c616", "= Constraint(expr= m.b771 + m.b772 <= 1) m.c1275 
= Constraint(expr=", "+ m.b691 <= 1) m.c1114 = Constraint(expr= m.b690 + m.b691", "0.940066550763924) m.c690 = Constraint(expr= m.x492 + 0.940066550763924*m.b660 <= 0.940066550763924) m.c691", "m.c697 = Constraint(expr= m.x499 == 0) m.c698 = Constraint(expr= m.x521", "40*m.x118 + 30*m.x119 + 20*m.x120 + 20*m.x121 + 35*m.x122 +", "m.x392 - 9*m.b644 <= 0) m.c546 = Constraint(expr= m.x393 -", "m.x229 == 0) m.c62 = Constraint(expr= m.x5 - m.x212 -", "m.c195 = Constraint(expr= m.x258 + 30*m.b609 <= 30) m.c196 =", "m.c1290 = Constraint(expr= - m.b602 + m.b603 - m.b693 <=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x441 = Var(within=Reals,bounds=(0,None),initialize=0) m.x442 = Var(within=Reals,bounds=(0,None),initialize=0) m.x443 = Var(within=Reals,bounds=(0,None),initialize=0)", "Var(within=Binary,bounds=(0,1),initialize=0) m.b685 = Var(within=Binary,bounds=(0,1),initialize=0) m.b686 = Var(within=Binary,bounds=(0,1),initialize=0) m.b687 = Var(within=Binary,bounds=(0,1),initialize=0)", "1.18887736200171*m.b656 <= 1.18887736200171) m.c657 = Constraint(expr= m.x474 + 1.18887736200171*m.b657 <=", "m.b727 <= 1) m.c1183 = Constraint(expr= m.b725 + m.b726 <=", "+ 1.26558121681553*m.b617 <= 1.26558121681553) m.c282 = Constraint(expr= m.x342 + 1.26558121681553*m.b618", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x316 = Var(within=Reals,bounds=(0,None),initialize=0) m.x317 = Var(within=Reals,bounds=(0,None),initialize=0) m.x318 =", "m.b711 <= 1) m.c1152 = Constraint(expr= m.b710 + m.b712 <=", "= Constraint(expr= m.x10 - m.x220 - m.x223 == 0) m.c92", "<= 0) m.c1062 = Constraint(expr= m.b644 - m.b646 <= 0)", "m.c726 = Constraint(expr= m.x531 == 0) m.c727 = Constraint(expr= m.x532", "+ 13.5*m.b621 <= 13.5) m.c310 = Constraint(expr= m.x349 + 13.5*m.b622", "m.b665 + m.b677 >= 0) m.c1464 = Constraint(expr= - m.b666", "1) m.c1203 = Constraint(expr= m.b734 + m.b736 <= 1) m.c1204", "<= 0.940066550763924) m.c740 = Constraint(expr= m.x524 - 0.994083415506506*m.b665 <= 0)", "+ 
m.x543/(0.001 + 0.999*m.b672)))*(0.001 + 0.999* m.b672) <= 0) m.c790", "0.999*m.b679) - log(1 + m.x529/(0.001 + 0.999*m.b679)))*(0.001 + 0.999*m.b679) <=", "Constraint(expr= m.x151 - m.x490 - m.x493 == 0) m.c680 =", "<= 0) m.c1094 = Constraint(expr= m.b677 - m.b678 <= 0)", "m.x200 - m.x572 - m.x575 == 0) m.c828 = Constraint(expr=", "<= 0) m.c666 = Constraint(expr=(m.x489/(0.001 + 0.999*m.b660) - 1.2*log(1 +", "Constraint(expr= 3*m.b736 + m.x826 == 0) m.c974 = Constraint(expr= 5*m.b737", "= Constraint(expr= - m.x73 - m.x91 + m.x94 == 0)", "m.b601 <= 0) m.c1019 = Constraint(expr= m.b602 - m.b603 <=", "- 1.18887736200171*m.b657 <= 0) m.c655 = Constraint(expr= m.x472 - 1.18887736200171*m.b658", "Constraint(expr= m.b624 - m.b625 <= 0) m.c1043 = Constraint(expr= m.b626", "Var(within=Reals,bounds=(0,None),initialize=0) m.x12 = Var(within=Reals,bounds=(0,None),initialize=0) m.x13 = Var(within=Reals,bounds=(0,None),initialize=0) m.x14 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b745 - 2*m.b746 - 5*m.b747 - 2*m.b748 - 9*m.b749 -", "m.c1389 = Constraint(expr= - m.b618 + m.b636 + m.b639 >=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x31 = Var(within=Reals,bounds=(0,None),initialize=0) m.x32 = Var(within=Reals,bounds=(0,None),initialize=0) m.x33 = Var(within=Reals,bounds=(0,None),initialize=0)", "== 0) m.c438 = Constraint(expr= m.x366 == 0) m.c439 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x392 = Var(within=Reals,bounds=(0,None),initialize=0) m.x393 = Var(within=Reals,bounds=(0,None),initialize=0) m.x394 = Var(within=Reals,bounds=(0,None),initialize=0)", "+ 0.999*m.b634)))*(0.001 + 0.999*m.b634) <= 0) m.c410 = Constraint(expr= m.x335", "- 1.32154609891348*m.b632 <= 0) m.c423 = Constraint(expr= m.x330 - 1.32154609891348*m.b633", "m.b713 = Var(within=Binary,bounds=(0,1),initialize=0) m.b714 = Var(within=Binary,bounds=(0,1),initialize=0) m.b715 = Var(within=Binary,bounds=(0,1),initialize=0) m.b716", "- m.x242 - m.x245 == 0) m.c144 = Constraint(expr= 
m.x30", "m.b701 <= 0) m.c1299 = Constraint(expr= - m.b611 + m.b612", "- m.x267 - m.x273 == 0) m.c148 = Constraint(expr= m.x43", "m.x577 = Var(within=Reals,bounds=(0,None),initialize=0) m.x578 = Var(within=Reals,bounds=(0,None),initialize=0) m.x579 = Var(within=Reals,bounds=(0,None),initialize=0) m.x580", "+ m.x536 == 0) m.c750 = Constraint(expr= - 0.5*m.x513 +", "= Constraint(expr= m.x147 - m.x483 - m.x486 == 0) m.c652", "m.c1019 = Constraint(expr= m.b602 - m.b603 <= 0) m.c1020 =", "m.b749 + m.b750 <= 1) m.c1230 = Constraint(expr= m.b749 +", "m.c1376 = Constraint(expr= - m.b602 + m.b611 + m.b614 >=", "+ m.b697 <= 1) m.c1125 = Constraint(expr= m.b695 + m.b697", "m.b658 <= 0) m.c1076 = Constraint(expr= m.b659 - m.b660 <=", "m.x847 = Var(within=Reals,bounds=(None,None),initialize=0) m.x848 = Var(within=Reals,bounds=(None,None),initialize=0) m.x849 = Var(within=Reals,bounds=(None,None),initialize=0) m.x850", "15*m.b621 <= 15) m.c304 = Constraint(expr= m.x301 + 15*m.b622 <=", "m.x99 == 0) m.c28 = Constraint(expr= m.x76 - m.x97 -", "Constraint(expr= m.x434 - 2.30162356062425*m.b638 <= 0) m.c498 = Constraint(expr= m.x435", "Constraint(expr= m.x525 - 0.994083415506506*m.b666 <= 0) m.c742 = Constraint(expr= m.x526", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b644 = Var(within=Binary,bounds=(0,1),initialize=0) m.b645 = Var(within=Binary,bounds=(0,1),initialize=0) m.b646 =", "m.c442 = Constraint(expr= m.x433 == 0) m.c443 = Constraint(expr= m.x80", "= Constraint(expr=(m.x567/(0.001 + 0.999*m.b672) - 1.25*log(1 + m.x543/(0.001 + 0.999*m.b672)))*(0.001", "- m.x89 + m.x92 == 0) m.c24 = Constraint(expr= -", "m.x292/(0.001 + 0.999*m.b616)))*(0.001 + 0.999* m.b616) <= 0) m.c233 =", "m.c244 = Constraint(expr= m.x67 - m.x328 - m.x334 == 0)", "m.x423 - 0.842233385663186*m.b633 <= 0) m.c430 = Constraint(expr= m.x424 -", "m.x579 - 0.690184503917672*m.b678 <= 0) m.c865 = Constraint(expr= m.x580 -", "m.c1246 = Constraint(expr= m.b756 + m.b757 <= 1) m.c1247 =", "m.x367 == 0) m.c440 = 
Constraint(expr= m.x431 == 0) m.c441", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b681 = Var(within=Binary,bounds=(0,1),initialize=0) m.b682 = Var(within=Binary,bounds=(0,1),initialize=0) m.b683 =", "- 1.83548069293539*m.b613 <= 0) m.c227 = Constraint(expr= m.x320 + 1.83548069293539*m.b611", "+ m.b772 <= 1) m.c1277 = Constraint(expr= m.b773 + m.b774", "== 0) m.c907 = Constraint(expr= m.x193 - m.x562 - m.x565", "= Constraint(expr= - m.x146 - m.x149 + m.x152 == 0)", "m.x170 - m.x512 - m.x515 == 0) m.c765 = Constraint(expr=", "4.45628648004517*m.b606 <= 0) m.c151 = Constraint(expr= m.x244 - 4.45628648004517*m.b607 <=", "Constraint(expr= - 0.9*m.x317 + m.x416 == 0) m.c366 = Constraint(expr=", "+ 0.999* m.b653) <= 0) m.c612 = Constraint(expr=(m.x465/(0.001 + 0.999*m.b654)", "m.x525 = Var(within=Reals,bounds=(0,None),initialize=0) m.x526 = Var(within=Reals,bounds=(0,None),initialize=0) m.x527 = Var(within=Reals,bounds=(0,None),initialize=0) m.x528", "- m.b666 + m.b678 >= 0) m.c1465 = Constraint(expr= -", "- m.x392 - m.x395 == 0) m.c540 = Constraint(expr= m.x99", "m.c944 = Constraint(expr= 5*m.b707 + m.x797 == 0) m.c945 =", "= Constraint(expr= m.x173 - m.x518 - m.x521 == 0) m.c705", "- m.b661 >= 0) m.c1472 = Constraint(expr= m.b662 - m.b671", "log(1 + m.x329/(0.001 + 0.999*m.b632)))*(0.001 + 0.999*m.b632) <= 0) m.c408", "== 0) m.c85 = Constraint(expr= m.x223 == 0) m.c86 =", "m.x546 == 0) m.c799 = Constraint(expr= m.x184 - m.x544 -", "+ m.b656 + m.b659 >= 0) m.c1458 = Constraint(expr= -", "m.x308 = Var(within=Reals,bounds=(0,None),initialize=0) m.x309 = Var(within=Reals,bounds=(0,None),initialize=0) m.x310 = Var(within=Reals,bounds=(0,None),initialize=0) m.x311", "Constraint(expr= m.x12 - m.x225 - m.x228 == 0) m.c67 =", "0.65*log(1 + m.x380/(0.001 + 0.999*m.b638)))*(0.001 + 0.999* m.b638) <= 0)", "0) m.c466 = Constraint(expr=(m.x436/(0.001 + 0.999*m.b640) - 0.65*log(1 + m.x382/(0.001", "m.b641 - m.b643 <= 0) m.c1060 = Constraint(expr= m.b642 -", "9) m.c337 = 
Constraint(expr= m.x355 + 9*m.b625 <= 9) m.c338", "2.30162356062425*m.b640 <= 2.30162356062425) m.c503 = Constraint(expr= - m.x386 + m.x440", "- 1.2*log(1 + m.x219/(0.001 + 0.999*m.b600)))*(0.001 + 0.999* m.b600) <=", "m.b694 = Var(within=Binary,bounds=(0,1),initialize=0) m.b695 = Var(within=Binary,bounds=(0,1),initialize=0) m.b696 = Var(within=Binary,bounds=(0,1),initialize=0) m.b697", "+ 9*m.b642 <= 9) m.c529 = Constraint(expr= m.x445 + 9*m.b643", "m.x798 = Var(within=Reals,bounds=(None,None),initialize=0) m.x799 = Var(within=Reals,bounds=(None,None),initialize=0) m.x800 = Var(within=Reals,bounds=(None,None),initialize=0) m.x801", "= Constraint(expr=(m.x314/(0.001 + 0.999*m.b611) - 1.25*log(1 + m.x284/(0.001 + 0.999*m.b611)))*(0.001", "m.b720 <= 1) m.c1170 = Constraint(expr= m.b719 + m.b721 <=", "- 0.842233385663186*m.b634 <= 0) m.c431 = Constraint(expr= m.x425 + 0.842233385663186*m.b632", "= Constraint(expr= 8*m.b741 + m.x831 == 0) m.c979 = Constraint(expr=", "= Constraint(expr= m.b774 + m.b775 <= 1) m.c1281 = Constraint(expr=", "15*m.b683 <= 15) m.c915 = Constraint(expr= m.x564 + 15*m.b684 <=", "= Constraint(expr= m.x325 + 1.83548069293539*m.b631 <= 1.83548069293539) m.c395 = Constraint(expr=", "m.x321 = Var(within=Reals,bounds=(0,None),initialize=0) m.x322 = Var(within=Reals,bounds=(0,None),initialize=0) m.x323 = Var(within=Reals,bounds=(0,None),initialize=0) m.x324", "= Constraint(expr= m.x501 - 0.940066550763924*m.b666 <= 0) m.c736 = Constraint(expr=", "- m.b679 <= 0) m.c1096 = Constraint(expr= m.b678 - m.b679", "0) m.c694 = Constraint(expr= - 0.75*m.x496 + m.x520 == 0)", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b716 = Var(within=Binary,bounds=(0,1),initialize=0) m.b717 = Var(within=Binary,bounds=(0,1),initialize=0) m.b718 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x550 = Var(within=Reals,bounds=(0,None),initialize=0) m.x551 = Var(within=Reals,bounds=(0,None),initialize=0) m.x552 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x810 
= Var(within=Reals,bounds=(None,None),initialize=0) m.x811 = Var(within=Reals,bounds=(None,None),initialize=0) m.x812 = Var(within=Reals,bounds=(None,None),initialize=0) m.x813", "<= 2.30162356062425) m.c503 = Constraint(expr= - m.x386 + m.x440 ==", "m.c873 = Constraint(expr= m.x558 == 0) m.c874 = Constraint(expr= m.x559", "m.x77 - m.x356 - m.x359 == 0) m.c351 = Constraint(expr=", "m.x440 - m.x443 == 0) m.c516 = Constraint(expr= m.x123 -", "= Constraint(expr= m.b767 + m.b769 <= 1) m.c1267 = Constraint(expr=", "+ 1.18887736200171*m.b658 <= 1.18887736200171) m.c659 = Constraint(expr= m.x482 - 0.78338879230327*m.b656", "= Constraint(expr= m.x431 == 0) m.c441 = Constraint(expr= m.x432 ==", "- m.x328 - m.x334 == 0) m.c245 = Constraint(expr= m.x290", "= Constraint(expr= m.x436 - 2.30162356062425*m.b640 <= 0) m.c500 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x268 = Var(within=Reals,bounds=(0,None),initialize=0) m.x269 = Var(within=Reals,bounds=(0,None),initialize=0) m.x270 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x542 = Var(within=Reals,bounds=(0,None),initialize=0) m.x543 = Var(within=Reals,bounds=(0,None),initialize=0) m.x544 =", "- 1.18887736200171*m.b655 <= 0) m.c635 = Constraint(expr= m.x467 + 1.18887736200171*m.b653", "m.c204 = Constraint(expr=(m.x315/(0.001 + 0.999*m.b612) - 1.25*log(1 + m.x285/(0.001 +", "= Constraint(expr= m.x489 - 0.940066550763924*m.b660 <= 0) m.c688 = Constraint(expr=", "m.c716 = Constraint(expr= m.x521 + 0.705049913072943*m.b662 <= 0.705049913072943) m.c717 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x100 = Var(within=Reals,bounds=(0,None),initialize=0) m.x101 = Var(within=Reals,bounds=(0,None),initialize=0) m.x102 =", "Constraint(expr= m.b665 - m.b755 <= 0) m.c1353 = Constraint(expr= -", "m.x245 = Var(within=Reals,bounds=(0,None),initialize=0) m.x246 = Var(within=Reals,bounds=(0,None),initialize=0) m.x247 = Var(within=Reals,bounds=(0,None),initialize=0) m.x248", "0) m.c9 = 
Constraint(expr= m.x18 - m.x21 - m.x24 ==", "Constraint(expr= - m.b611 + m.b612 - m.b702 <= 0) m.c1300", "= Constraint(expr= m.x263 + 3.34221486003388*m.b602 <= 3.34221486003388) m.c132 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x270 = Var(within=Reals,bounds=(0,None),initialize=0) m.x271 = Var(within=Reals,bounds=(0,None),initialize=0) m.x272 =", "m.x215 = Var(within=Reals,bounds=(0,None),initialize=0) m.x216 = Var(within=Reals,bounds=(0,None),initialize=0) m.x217 = Var(within=Reals,bounds=(0,None),initialize=0) m.x218", "= Constraint(expr= m.x18 - m.x21 - m.x24 == 0) m.c10", "m.x273 + 2.54515263975353*m.b606 <= 2.54515263975353) m.c160 = Constraint(expr= m.x274 +", "<= 3.34221486003388) m.c224 = Constraint(expr= m.x314 - 1.83548069293539*m.b611 <= 0)", "8*m.b689 + m.x779 == 0) m.c927 = Constraint(expr= 7*m.b690 +", "m.c1025 = Constraint(expr= m.b608 - m.b609 <= 0) m.c1026 =", "<= 0) m.c1349 = Constraint(expr= m.b662 - m.b752 <= 0)", "m.c1407 = Constraint(expr= m.b597 + m.b600 - m.b606 >= 0)", "m.c1363 = Constraint(expr= - m.b674 - m.b675 + m.b676 -", "<= 0) m.c75 = Constraint(expr= m.x225 - 3.71357206670431*m.b597 <= 0)", "m.b729 = Var(within=Binary,bounds=(0,1),initialize=0) m.b730 = Var(within=Binary,bounds=(0,1),initialize=0) m.b731 = Var(within=Binary,bounds=(0,1),initialize=0) m.b732", "= Constraint(expr=(m.x484/(0.001 + 0.999*m.b658) - log(1 + m.x472/(0.001 + 0.999*m.b658)))*(0.001", "+ 0.705049913072943*m.b676 <= 0.705049913072943) m.c836 = Constraint(expr= m.x572 - 0.480234946352917*m.b674", "m.b637) <= 0) m.c437 = Constraint(expr= m.x365 == 0) m.c438", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b655 = Var(within=Binary,bounds=(0,1),initialize=0) m.b656 = Var(within=Binary,bounds=(0,1),initialize=0) m.b657 =", "5*m.b732 + m.x822 == 0) m.c970 = Constraint(expr= 2*m.b733 +", "Constraint(expr= m.x208 - m.x586 - m.x589 == 0) m.c884 =", "m.b610 = Var(within=Binary,bounds=(0,1),initialize=0) m.b611 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b612 = Var(within=Binary,bounds=(0,1),initialize=0) m.b613", "= Constraint(expr= - m.b626 + m.b647 + m.b650 + m.b653", "m.x288 == 0) m.c208 = Constraint(expr= m.x289 == 0) m.c209", "= Constraint(expr= m.x200 - m.x572 - m.x575 == 0) m.c828", "+ m.x262 == 0) m.c110 = Constraint(expr= m.x239 == 0)", "- 9*m.b644 <= 0) m.c552 = Constraint(expr= m.x447 - 9*m.b645", "- 3.04984759446376*m.b655 <= 0) m.c629 = Constraint(expr= m.x413 + 3.04984759446376*m.b653", "Var(within=Binary,bounds=(0,1),initialize=0) m.b600 = Var(within=Binary,bounds=(0,1),initialize=0) m.b601 = Var(within=Binary,bounds=(0,1),initialize=0) m.b602 = Var(within=Binary,bounds=(0,1),initialize=0)", "<= 1) m.c1190 = Constraint(expr= m.b729 + m.b730 <= 1)", "m.x460 - m.x463 == 0) m.c599 = Constraint(expr= m.x404 -", "<= 1) m.c1188 = Constraint(expr= m.b728 + m.b730 <= 1)", "4*m.b742 - m.b743 - 4*m.b744 - m.b745 - 2*m.b746 -", "= Constraint(expr= m.b647 - m.b737 <= 0) m.c1335 = Constraint(expr=", "+ 1.32154609891348*m.b633 <= 1.32154609891348) m.c427 = Constraint(expr= m.x337 + 1.32154609891348*m.b634", "== 0) m.c963 = Constraint(expr= 6*m.b726 + m.x816 == 0)", "+ m.b657 - m.b747 <= 0) m.c1345 = Constraint(expr= -", "Nonzero counts # Total const NL DLL # 3373 3193", "m.b711 = Var(within=Binary,bounds=(0,1),initialize=0) m.b712 = Var(within=Binary,bounds=(0,1),initialize=0) m.b713 = Var(within=Binary,bounds=(0,1),initialize=0) m.b714", "m.x92 == 0) m.c24 = Constraint(expr= - m.x72 - m.x90", "4*m.b775 + m.x865 == 0) m.c1013 = Constraint(expr= m.b596 -", "m.c136 = Constraint(expr=(m.x268/(0.001 + 0.999*m.b607) - 1.5*log(1 + m.x244/(0.001 +", "Constraint(expr= m.x301 == 0) m.c290 = Constraint(expr= m.x347 == 0)", "m.x505 == 0) m.c731 = Constraint(expr= m.x176 - m.x524 -", "m.c1028 = Constraint(expr= m.b611 - m.b612 <= 0) m.c1029 =", "m.x90 = Var(within=Reals,bounds=(0,20),initialize=0) m.x91 = Var(within=Reals,bounds=(0,20),initialize=0) m.x92 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x93", "m.c903 = Constraint(expr= m.x594 == 0) m.c904 = Constraint(expr= m.x595", "m.x5 - m.x8 == 0) m.c3 = Constraint(expr= m.x3 -", "m.x75 - m.x351 - m.x354 == 0) m.c325 = Constraint(expr=", "m.b654 >= 0) m.c1402 = Constraint(expr= - m.b628 + m.b649", "= Constraint(expr= m.b605 - m.b695 <= 0) m.c1293 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x71 = Var(within=Reals,bounds=(0,None),initialize=0) m.x72 = Var(within=Reals,bounds=(0,None),initialize=0) m.x73 =", "= Constraint(expr= m.x148 - m.x484 - m.x487 == 0) m.c653", "Var(within=Reals,bounds=(0,None),initialize=0) m.x110 = Var(within=Reals,bounds=(0,None),initialize=0) m.x111 = Var(within=Reals,bounds=(0,None),initialize=0) m.x112 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x836 = Var(within=Reals,bounds=(None,None),initialize=0) m.x837 = Var(within=Reals,bounds=(None,None),initialize=0) m.x838 =", "+ m.b769 <= 1) m.c1269 = Constraint(expr= m.b767 + m.b769", "Constraint(expr= m.x265 == 0) m.c116 = Constraint(expr= m.x26 - m.x236", "m.b669 - m.b670 <= 0) m.c1088 = Constraint(expr= m.b671 -", "- m.b608 - m.b609 + m.b610 - m.b700 <= 0)", "m.b603 + m.b612 + m.b615 >= 0) m.c1378 = Constraint(expr=", "Constraint(expr= m.x372 + 1.26558121681553*m.b639 <= 1.26558121681553) m.c490 = Constraint(expr= m.x373", "0) m.c735 = Constraint(expr= m.x501 - 0.940066550763924*m.b666 <= 0) m.c736", "5*m.b737 + m.x827 == 0) m.c975 = Constraint(expr= 7*m.b738 +", "m.b671 + m.b674 >= 0) m.c1461 = Constraint(expr= - m.b663", "1) m.c1254 = Constraint(expr= m.b761 + m.b763 <= 1) m.c1255", "Constraint(expr= m.x396 == 0) m.c535 = Constraint(expr= m.x397 == 0)", "Constraint(expr= - m.b608 + m.b620 + m.b623 + m.b626 >=", "= Constraint(expr= m.x227 == 0) m.c60 = Constraint(expr= m.x228 ==", "m.x392 - m.x395 == 0) m.c540 = Constraint(expr= m.x99 -", "0) m.c580 = Constraint(expr= m.x454 - 
1.04900943706034*m.b649 <= 0) m.c581", "= Constraint(expr= m.b749 + m.b750 <= 1) m.c1232 = Constraint(expr=", "<= 1) m.c1105 = Constraint(expr= m.b686 + m.b687 <= 1)", "m.x271 = Var(within=Reals,bounds=(0,None),initialize=0) m.x272 = Var(within=Reals,bounds=(0,None),initialize=0) m.x273 = Var(within=Reals,bounds=(0,None),initialize=0) m.x274", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x312 = Var(within=Reals,bounds=(0,None),initialize=0) m.x313 = Var(within=Reals,bounds=(0,None),initialize=0) m.x314 =", "- m.x375 + m.x417 == 0) m.c370 = Constraint(expr= -", "0) m.c225 = Constraint(expr= m.x315 - 1.83548069293539*m.b612 <= 0) m.c226", "- m.x569 == 0) m.c801 = Constraint(expr= m.x198 - m.x567", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x418 = Var(within=Reals,bounds=(0,None),initialize=0) m.x419 = Var(within=Reals,bounds=(0,None),initialize=0) m.x420 =", "m.c358 = Constraint(expr= m.x313 + 15*m.b628 <= 15) m.c359 =", "m.b638 - m.b639 + m.b640 - m.b730 <= 0) m.c1328", "m.x381 - m.x384 == 0) m.c481 = Constraint(expr= m.x94 -", "m.c214 = Constraint(expr= m.x49 - m.x286 - m.x289 == 0)", "Constraint(expr= m.b741 + m.b742 <= 1) m.c1217 = Constraint(expr= m.b743", "= Constraint(expr= m.x45 - m.x54 - m.x57 - m.x60 ==", "m.x528/(0.001 + 0.999*m.b678)))*(0.001 + 0.999*m.b678) <= 0) m.c844 = Constraint(expr=(m.x580/(0.001", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x227 = Var(within=Reals,bounds=(0,None),initialize=0) m.x228 = Var(within=Reals,bounds=(0,None),initialize=0) m.x229 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x294 = Var(within=Reals,bounds=(0,None),initialize=0) m.x295 = Var(within=Reals,bounds=(0,None),initialize=0) m.x296 =", "1.83548069293539*m.b612 <= 1.83548069293539) m.c229 = Constraint(expr= m.x322 + 1.83548069293539*m.b613 <=", "m.b767 <= 0) m.c1365 = Constraint(expr= - m.b677 + m.b678", "m.x152 = Var(within=Reals,bounds=(0,None),initialize=0) m.x153 = Var(within=Reals,bounds=(0,None),initialize=0) m.x154 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x155", "m.c319 = Constraint(expr= m.x355 == 0) m.c320 = Constraint(expr= m.x56", "m.x828 == 0) m.c976 = Constraint(expr= 6*m.b739 + m.x829 ==", "m.x827 = Var(within=Reals,bounds=(None,None),initialize=0) m.x828 = Var(within=Reals,bounds=(None,None),initialize=0) m.x829 = Var(within=Reals,bounds=(None,None),initialize=0) m.x830", "Constraint(expr= 6*m.b691 + m.x781 == 0) m.c929 = Constraint(expr= 6*m.b692", "Constraint(expr= 10*m.b699 + m.x789 == 0) m.c937 = Constraint(expr= 6*m.b700", "m.b691 <= 1) m.c1113 = Constraint(expr= m.b689 + m.b691 <=", "m.x347 = Var(within=Reals,bounds=(0,None),initialize=0) m.x348 = Var(within=Reals,bounds=(0,None),initialize=0) m.x349 = Var(within=Reals,bounds=(0,None),initialize=0) m.x350", "Constraint(expr= m.x295 == 0) m.c236 = Constraint(expr= m.x332 == 0)", "== 0) m.c825 = Constraint(expr= m.x186 - m.x549 - m.x552", "10*m.b699 + m.x789 == 0) m.c937 = Constraint(expr= 6*m.b700 +", "Constraint(expr= m.b597 + m.b600 == 1) m.c1375 = Constraint(expr= m.b598", "+ 5*m.x22 - 2*m.x35 - m.x36 - 2*m.x37 - 10*m.x86", "<= 3.04984759446376) m.c632 = Constraint(expr= m.x464 - 1.18887736200171*m.b653 <= 0)", "Constraint(expr= m.x371 == 0) m.c468 = Constraint(expr= m.x372 == 0)", "m.c1157 = Constraint(expr= m.b713 + m.b714 <= 1) m.c1158 =", "0) m.c567 = Constraint(expr= m.x102 - m.x399 - m.x402 ==", "+ 0.940066550763924*m.b670 <= 0.940066550763924) m.c776 = Constraint(expr= m.x512 - 30*m.b668", "m.x585 == 0) m.c871 = Constraint(expr= - 0.9*m.x556 + m.x586", "m.c894 = Constraint(expr= m.x588 + 13.5*m.b681 <= 13.5) m.c895 =", "m.b688 <= 1) m.c1108 = Constraint(expr= m.b687 + m.b688 <=", "Constraint(expr= m.x473 == 0) m.c642 = Constraint(expr= m.x474 == 0)", "Constraint(expr= m.x491 + 0.940066550763924*m.b659 <= 0.940066550763924) m.c690 = Constraint(expr= m.x492", "m.c223 = Constraint(expr= m.x289 + 3.34221486003388*m.b613 <= 3.34221486003388) m.c224 =", "<= 1.18887736200171) m.c686 = Constraint(expr= 
m.x488 - 0.940066550763924*m.b659 <= 0)", "m.c1244 = Constraint(expr= m.b756 + m.b757 <= 1) m.c1245 =", "m.b608 - m.b623 >= 0) m.c1425 = Constraint(expr= m.b609 -", "0) m.c119 = Constraint(expr= m.x38 - m.x260 - m.x263 ==", "Constraint(expr=(m.x230/(0.001 + 0.999*m.b599) - 1.2*log(1 + m.x218/(0.001 + 0.999*m.b599)))*(0.001 +", "m.x301 == 0) m.c296 = Constraint(expr= m.x71 - m.x344 -", "= Constraint(expr= m.b686 + m.b688 <= 1) m.c1108 = Constraint(expr=", "m.b732 + m.b733 <= 1) m.c1199 = Constraint(expr= m.b734 +", "m.x179 = Var(within=Reals,bounds=(0,None),initialize=0) m.x180 = Var(within=Reals,bounds=(0,None),initialize=0) m.x181 = Var(within=Reals,bounds=(0,None),initialize=0) m.x182", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x55 = Var(within=Reals,bounds=(0,None),initialize=0) m.x56 = Var(within=Reals,bounds=(0,None),initialize=0) m.x57 =", "m.c809 = Constraint(expr= m.x566 - 0.666992981045719*m.b671 <= 0) m.c810 =", "m.b615) <= 0) m.c232 = Constraint(expr=(m.x328/(0.001 + 0.999*m.b616) - 0.9*log(1", "m.x806 == 0) m.c954 = Constraint(expr= 9*m.b717 + m.x807 ==", "m.b647) <= 0) m.c558 = Constraint(expr=(m.x453/(0.001 + 0.999*m.b648) - 0.75*log(1", "m.x126 - m.x447 - m.x450 == 0) m.c544 = Constraint(expr=", ">= 0) m.c1480 = Constraint(expr= m.b667 - m.b679 >= 0)", "m.c117 = Constraint(expr= m.x27 - m.x237 - m.x240 == 0)", "m.x356 - 3.04984759446376*m.b626 <= 0) m.c360 = Constraint(expr= m.x357 -", "Constraint(expr= m.b710 + m.b712 <= 1) m.c1153 = Constraint(expr= m.b710", "m.b602 - m.b614 >= 0) m.c1416 = Constraint(expr= m.b603 -", "= Constraint(expr= m.x564 == 0) m.c901 = Constraint(expr= m.x565 ==", "m.x406 - 3.04984759446376*m.b652 <= 0) m.c602 = Constraint(expr= m.x407 +", "m.c1339 = Constraint(expr= - m.b650 - m.b651 + m.b652 -", "- m.b608 + m.b620 + m.b623 + m.b626 >= 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x11 = Var(within=Reals,bounds=(0,None),initialize=0) m.x12 = Var(within=Reals,bounds=(0,None),initialize=0) m.x13 = 
Var(within=Reals,bounds=(0,None),initialize=0)", "m.b691 = Var(within=Binary,bounds=(0,1),initialize=0) m.b692 = Var(within=Binary,bounds=(0,1),initialize=0) m.b693 = Var(within=Binary,bounds=(0,1),initialize=0) m.b694", "== 0) m.c378 = Constraint(expr= m.x420 == 0) m.c379 =", "Var(within=Binary,bounds=(0,1),initialize=0) m.b704 = Var(within=Binary,bounds=(0,1),initialize=0) m.b705 = Var(within=Binary,bounds=(0,1),initialize=0) m.b706 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.c1335 = Constraint(expr= - m.b647 + m.b648 - m.b738 <=", "+ m.x291/(0.001 + 0.999*m.b615)))*(0.001 + 0.999* m.b615) <= 0) m.c232", "9*m.b721 - 3*m.b722 - m.b723 - 9*m.b724 - 2*m.b725 -", "= Constraint(expr= m.x215 == 0) m.c57 = Constraint(expr= m.x216 ==", "Var(within=Reals,bounds=(0,None),initialize=0) m.x104 = Var(within=Reals,bounds=(0,None),initialize=0) m.x105 = Var(within=Reals,bounds=(0,None),initialize=0) m.x106 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x368 - 1.26558121681553*m.b638 <= 0) m.c486 = Constraint(expr= m.x369 -", "<= 0) m.c1321 = Constraint(expr= - m.b632 - m.b633 +", "m.c1372 = Constraint(expr= - m.b683 - m.b684 + m.b685 -", "= Constraint(expr= m.x575 == 0) m.c822 = Constraint(expr= m.x576 ==", "<= 0) m.c602 = Constraint(expr= m.x407 + 3.04984759446376*m.b650 <= 3.04984759446376)", "- m.x469 == 0) m.c626 = Constraint(expr= m.x410 - 3.04984759446376*m.b653", "0.999*m.b659)))*(0.001 + 0.999* m.b659) <= 0) m.c666 = Constraint(expr=(m.x489/(0.001 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x189 = Var(within=Reals,bounds=(0,None),initialize=0) m.x190 = Var(within=Reals,bounds=(0,None),initialize=0) m.x191 =", "m.x835 = Var(within=Reals,bounds=(None,None),initialize=0) m.x836 = Var(within=Reals,bounds=(None,None),initialize=0) m.x837 = Var(within=Reals,bounds=(None,None),initialize=0) m.x838", "m.x497 == 0) m.c696 = Constraint(expr= m.x498 == 0) m.c697", "Constraint(expr= m.b650 - m.b740 <= 0) m.c1338 = Constraint(expr= -", "+ 15*m.b610 <= 15) 
m.c203 = Constraint(expr=(m.x314/(0.001 + 0.999*m.b611) -", "0) m.c1335 = Constraint(expr= - m.b647 + m.b648 - m.b738", "- m.x548 - m.x551 == 0) m.c825 = Constraint(expr= m.x186", "m.c80 = Constraint(expr=(m.x230/(0.001 + 0.999*m.b599) - 1.2*log(1 + m.x218/(0.001 +", "40*m.b598 <= 0) m.c71 = Constraint(expr= m.x215 + 40*m.b596 <=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x133 = Var(within=Reals,bounds=(0,None),initialize=0) m.x134 = Var(within=Reals,bounds=(0,None),initialize=0) m.x135 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x179 - m.x536 - m.x539 == 0) m.c768 = Constraint(expr=", "0) m.c727 = Constraint(expr= m.x532 == 0) m.c728 = Constraint(expr=", "m.b647 - m.b648 <= 0) m.c1065 = Constraint(expr= m.b647 -", "m.b629 >= 0) m.c1431 = Constraint(expr= m.b612 - m.b630 >=", "0.705049913072943*m.b662 <= 0) m.c714 = Constraint(expr= m.x519 - 0.705049913072943*m.b663 <=", "Constraint(expr= m.b668 - m.b758 <= 0) m.c1356 = Constraint(expr= -", "= Constraint(expr= m.x15 - m.x231 - m.x234 == 0) m.c94", "== 0) m.c952 = Constraint(expr= 4*m.b715 + m.x805 == 0)", "+ 0.999* m.b606) <= 0) m.c136 = Constraint(expr=(m.x268/(0.001 + 0.999*m.b607)", "= Constraint(expr= m.x529 - 0.994083415506506*m.b679 <= 0) m.c860 = Constraint(expr=", "<= 1) m.c1224 = Constraint(expr= m.b746 + m.b748 <= 1)", "m.x273 = Var(within=Reals,bounds=(0,None),initialize=0) m.x274 = Var(within=Reals,bounds=(0,None),initialize=0) m.x275 = Var(within=Reals,bounds=(0,None),initialize=0) m.x276", "Constraint(expr= m.b723 + m.b724 <= 1) m.c1181 = Constraint(expr= m.b725", "m.b765 <= 1) m.c1262 = Constraint(expr= m.b765 + m.b766 <=", "m.x197 = Var(within=Reals,bounds=(0,None),initialize=0) m.x198 = Var(within=Reals,bounds=(0,None),initialize=0) m.x199 = Var(within=Reals,bounds=(0,None),initialize=0) m.x200", "+ 0.999*m.b614) - 0.9*log(1 + m.x290/(0.001 + 0.999*m.b614)))*(0.001 + 0.999*", "- m.b747 <= 0) m.c1345 = Constraint(expr= - m.b656 -", "m.x836 = 
Var(within=Reals,bounds=(None,None),initialize=0) m.x837 = Var(within=Reals,bounds=(None,None),initialize=0) m.x838 = Var(within=Reals,bounds=(None,None),initialize=0) m.x839", "<= 0) m.c640 = Constraint(expr=(m.x484/(0.001 + 0.999*m.b658) - log(1 +", "Constraint(expr= m.x218 - 40*m.b599 <= 0) m.c96 = Constraint(expr= m.x219", "- m.b650 - m.b651 + m.b652 - m.b742 <= 0)", "m.x403 == 0) m.c569 = Constraint(expr= m.x128 - m.x452 -", "1) m.c1151 = Constraint(expr= m.b710 + m.b711 <= 1) m.c1152", "0.999*m.b677) <= 0) m.c843 = Constraint(expr=(m.x579/(0.001 + 0.999*m.b678) - log(1", "Var(within=Reals,bounds=(0,30),initialize=0) m.x171 = Var(within=Reals,bounds=(0,30),initialize=0) m.x172 = Var(within=Reals,bounds=(0,30),initialize=0) m.x173 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b659 - m.b749 <= 0) m.c1347 = Constraint(expr= - m.b659", "- log(1 + m.x214/(0.001 + 0.999*m.b598)))*(0.001 + 0.999*m.b598) <= 0)", "m.x256 = Var(within=Reals,bounds=(0,None),initialize=0) m.x257 = Var(within=Reals,bounds=(0,None),initialize=0) m.x258 = Var(within=Reals,bounds=(0,None),initialize=0) m.x259", "m.x241 = Var(within=Reals,bounds=(0,None),initialize=0) m.x242 = Var(within=Reals,bounds=(0,None),initialize=0) m.x243 = Var(within=Reals,bounds=(0,None),initialize=0) m.x244", "m.b683 >= 0) m.c1485 = Constraint(expr= m.b669 - m.b684 >=", "Var(within=Binary,bounds=(0,1),initialize=0) m.b616 = Var(within=Binary,bounds=(0,1),initialize=0) m.b617 = Var(within=Binary,bounds=(0,1),initialize=0) m.b618 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.b597 + m.b600 == 1) m.c1375 = Constraint(expr= m.b598 +", "m.x18 = Var(within=Reals,bounds=(0,None),initialize=0) m.x19 = Var(within=Reals,bounds=(0,None),initialize=0) m.x20 = Var(within=Reals,bounds=(0,None),initialize=0) m.x21", "0) m.c20 = Constraint(expr= m.x68 - m.x80 - m.x83 ==", "<= 2.30162356062425) m.c501 = Constraint(expr= m.x438 + 2.30162356062425*m.b639 <= 2.30162356062425)", "= Constraint(expr= m.b598 + m.b601 - m.b610 
>= 0) m.c1412", "m.x58 = Var(within=Reals,bounds=(0,None),initialize=0) m.x59 = Var(within=Reals,bounds=(0,None),initialize=0) m.x60 = Var(within=Reals,bounds=(0,None),initialize=0) m.x61", "m.x576 = Var(within=Reals,bounds=(0,None),initialize=0) m.x577 = Var(within=Reals,bounds=(0,None),initialize=0) m.x578 = Var(within=Reals,bounds=(0,None),initialize=0) m.x579", "Constraint(expr= m.x285 - 3.34221486003388*m.b612 <= 0) m.c220 = Constraint(expr= m.x286", "= Constraint(expr= m.x456 + 1.04900943706034*m.b648 <= 1.04900943706034) m.c583 = Constraint(expr=", "m.b674) <= 0) m.c816 = Constraint(expr=(m.x573/(0.001 + 0.999*m.b675) - 0.9*log(1", "m.b693 + m.b694 <= 1) m.c1119 = Constraint(expr= m.b692 +", "m.x147 = Var(within=Reals,bounds=(0,None),initialize=0) m.x148 = Var(within=Reals,bounds=(0,None),initialize=0) m.x149 = Var(within=Reals,bounds=(0,None),initialize=0) m.x150", "m.c861 = Constraint(expr= m.x534 + 0.994083415506506*m.b678 <= 0.994083415506506) m.c862 =", "m.b640 - m.b730 <= 0) m.c1328 = Constraint(expr= m.b641 -", "- 1.26558121681553*m.b639 <= 0) m.c487 = Constraint(expr= m.x370 - 1.26558121681553*m.b640", "m.x338 - m.x341 == 0) m.c270 = Constraint(expr= m.x69 -", "+ m.x812 == 0) m.c960 = Constraint(expr= m.b723 + m.x813", "- m.b596 - m.b597 + m.b598 - m.b688 <= 0)", "m.c608 = Constraint(expr= m.x461 + 1.11894339953103*m.b650 <= 1.11894339953103) m.c609 =", "- m.b614 - m.b615 + m.b616 - m.b706 <= 0)", "m.x509 = Var(within=Reals,bounds=(0,None),initialize=0) m.x510 = Var(within=Reals,bounds=(0,None),initialize=0) m.x511 = Var(within=Reals,bounds=(0,None),initialize=0) m.x512", "m.x56 - m.x59 == 0) m.c18 = Constraint(expr= m.x45 -", "m.x174 - m.x183 - m.x186 == 0) m.c49 = Constraint(expr=", "Constraint(expr= m.x403 == 0) m.c563 = Constraint(expr= m.x455 == 0)", "Constraint(expr= m.b624 - m.b642 >= 0) m.c1444 = Constraint(expr= m.b625", "0) m.c1359 = Constraint(expr= - m.b671 + m.b672 - m.b762", "m.c36 = Constraint(expr= m.x138 - m.x141 - m.x144 == 0)", "= 
Constraint(expr= m.b629 - m.b630 <= 0) m.c1047 = Constraint(expr=", "= Constraint(expr= m.x585 - 13.5*m.b681 <= 0) m.c892 = Constraint(expr=", "= Constraint(expr=(m.x338/(0.001 + 0.999*m.b617) - log(1 + m.x269/(0.001 + 0.999*m.b617)))*(0.001", "0) m.c755 = Constraint(expr= m.x515 == 0) m.c756 = Constraint(expr=", "m.b675 >= 0) m.c1477 = Constraint(expr= m.b664 - m.b676 >=", "m.x435 = Var(within=Reals,bounds=(0,None),initialize=0) m.x436 = Var(within=Reals,bounds=(0,None),initialize=0) m.x437 = Var(within=Reals,bounds=(0,None),initialize=0) m.x438", "- 1.32154609891348*m.b633 <= 0) m.c424 = Constraint(expr= m.x331 - 1.32154609891348*m.b634", "m.c1210 = Constraint(expr= m.b738 + m.b739 <= 1) m.c1211 =", "= Constraint(expr= m.x92 - m.x380 - m.x383 == 0) m.c480", "<= 1) m.c1281 = Constraint(expr= m.b773 + m.b775 <= 1)", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b630 = Var(within=Binary,bounds=(0,1),initialize=0) m.b631 = Var(within=Binary,bounds=(0,1),initialize=0) m.b632 =", "0) m.c265 = Constraint(expr= m.x343 == 0) m.c266 = Constraint(expr=", "0) m.c220 = Constraint(expr= m.x286 - 3.34221486003388*m.b613 <= 0) m.c221", "Constraint(expr= - m.b620 - m.b621 + m.b622 - m.b712 <=", "m.b617 + m.b635 + m.b638 >= 0) m.c1389 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x572 = Var(within=Reals,bounds=(0,None),initialize=0) m.x573 = Var(within=Reals,bounds=(0,None),initialize=0) m.x574 = Var(within=Reals,bounds=(0,None),initialize=0)", "== 0) m.c243 = Constraint(expr= m.x66 - m.x327 - m.x333", "- m.x324 == 0) m.c382 = Constraint(expr= m.x64 - m.x319", "0) m.c942 = Constraint(expr= 3*m.b705 + m.x795 == 0) m.c943", "m.b611 + m.b614 >= 0) m.c1377 = Constraint(expr= - m.b603", "Constraint(expr= m.b610 - m.b625 >= 0) m.c1427 = Constraint(expr= m.b608", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x197 = Var(within=Reals,bounds=(0,None),initialize=0) m.x198 = Var(within=Reals,bounds=(0,None),initialize=0) m.x199 =", "== 0) m.c617 = 
Constraint(expr= m.x467 == 0) m.c618 =", "m.c1265 = Constraint(expr= m.b767 + m.b768 <= 1) m.c1266 =", "9*m.b646 <= 9) m.c551 = Constraint(expr= m.x446 - 9*m.b644 <=", "- m.b655 + m.b658 + m.b661 >= 0) m.c1460 =", "m.b672) <= 0) m.c790 = Constraint(expr=(m.x568/(0.001 + 0.999*m.b673) - 1.25*log(1", "+ m.x370/(0.001 + 0.999*m.b640)))*(0.001 + 0.999* m.b640) <= 0) m.c464", "0.690184503917672*m.b678 <= 0.690184503917672) m.c868 = Constraint(expr= m.x583 + 0.690184503917672*m.b679 <=", "== 0) m.c566 = Constraint(expr= m.x101 - m.x398 - m.x401", "<= 0) m.c1059 = Constraint(expr= m.b641 - m.b643 <= 0)", "m.c1085 = Constraint(expr= m.b668 - m.b669 <= 0) m.c1086 =", "m.b766 = Var(within=Binary,bounds=(0,1),initialize=0) m.b767 = Var(within=Binary,bounds=(0,1),initialize=0) m.b768 = Var(within=Binary,bounds=(0,1),initialize=0) m.b769", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x319 = Var(within=Reals,bounds=(0,None),initialize=0) m.x320 = Var(within=Reals,bounds=(0,None),initialize=0) m.x321 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x567 = Var(within=Reals,bounds=(0,None),initialize=0) m.x568 = Var(within=Reals,bounds=(0,None),initialize=0) m.x569 = Var(within=Reals,bounds=(0,None),initialize=0)", "Var(within=Reals,bounds=(None,None),initialize=0) m.x781 = Var(within=Reals,bounds=(None,None),initialize=0) m.x782 = Var(within=Reals,bounds=(None,None),initialize=0) m.x783 = Var(within=Reals,bounds=(None,None),initialize=0)", "= Constraint(expr= m.b734 + m.b736 <= 1) m.c1201 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x301 = Var(within=Reals,bounds=(0,None),initialize=0) m.x302 = Var(within=Reals,bounds=(0,None),initialize=0) m.x303 = Var(within=Reals,bounds=(0,None),initialize=0)", "- m.b615 + m.b633 >= 0) m.c1384 = Constraint(expr= -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x319 = Var(within=Reals,bounds=(0,None),initialize=0) m.x320 = Var(within=Reals,bounds=(0,None),initialize=0) m.x321 = 
Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= m.x309 - 15*m.b627 <= 0) m.c355 = Constraint(expr=", "m.x133 = Var(within=Reals,bounds=(0,None),initialize=0) m.x134 = Var(within=Reals,bounds=(0,None),initialize=0) m.x135 = Var(within=Reals,bounds=(0,None),initialize=0) m.x136", "Constraint(expr= 9*m.b693 + m.x783 == 0) m.c931 = Constraint(expr= 4*m.b694", "- m.x574 - m.x577 == 0) m.c830 = Constraint(expr= m.x548", "- m.b657 + m.b658 - m.b748 <= 0) m.c1346 =", "1) m.c1130 = Constraint(expr= m.b699 + m.b700 <= 1) m.c1131", "m.x498 == 0) m.c703 = Constraint(expr= m.x163 - m.x496 -", "m.x239 == 0) m.c111 = Constraint(expr= m.x240 == 0) m.c112", "- m.b661 <= 0) m.c1078 = Constraint(expr= m.b660 - m.b661", "Var(within=Reals,bounds=(0,None),initialize=0) m.x241 = Var(within=Reals,bounds=(0,None),initialize=0) m.x242 = Var(within=Reals,bounds=(0,None),initialize=0) m.x243 = Var(within=Reals,bounds=(0,None),initialize=0)", "<= 0) m.c661 = Constraint(expr= m.x484 - 0.78338879230327*m.b658 <= 0)", "= Constraint(expr= 5*m.b711 + m.x801 == 0) m.c949 = Constraint(expr=", "m.b682 - m.b772 <= 0) m.c1370 = Constraint(expr= m.b683 -", "== 0) m.c346 = Constraint(expr= m.x361 == 0) m.c347 =", "- 9*m.b721 - 3*m.b722 - m.b723 - 9*m.b724 - 2*m.b725", "30*m.b610 <= 30) m.c197 = Constraint(expr= m.x278 - 15*m.b608 <=", "== 0) m.c92 = Constraint(expr= m.x14 - m.x230 - m.x233", "m.x454 - 1.04900943706034*m.b649 <= 0) m.c581 = Constraint(expr= m.x455 +", "0) m.c200 = Constraint(expr= m.x281 + 15*m.b608 <= 15) m.c201", "m.x51 - m.x291 - m.x294 == 0) m.c241 = Constraint(expr=", "m.c970 = Constraint(expr= 2*m.b733 + m.x823 == 0) m.c971 =", "- m.x563 == 0) m.c906 = Constraint(expr= m.x192 - m.x561", "m.x111 - m.x417 - m.x420 == 0) m.c388 = Constraint(expr=", "m.x113 = Var(within=Reals,bounds=(0,None),initialize=0) m.x114 = Var(within=Reals,bounds=(0,None),initialize=0) m.x115 = Var(within=Reals,bounds=(0,None),initialize=0) m.x116", "<= 0) m.c1309 = Constraint(expr= - m.b620 
- m.b621 +", "m.c1379 = Constraint(expr= - m.b611 + m.b629 >= 0) m.c1380", "Var(within=Reals,bounds=(0,None),initialize=0) m.x532 = Var(within=Reals,bounds=(0,None),initialize=0) m.x533 = Var(within=Reals,bounds=(0,None),initialize=0) m.x534 = Var(within=Reals,bounds=(0,None),initialize=0)", "== 0) m.c730 = Constraint(expr= m.x166 - m.x502 - m.x505", "0) m.c951 = Constraint(expr= 7*m.b714 + m.x804 == 0) m.c952", "0) m.c1470 = Constraint(expr= m.b654 - m.b660 >= 0) m.c1471", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x147 = Var(within=Reals,bounds=(0,None),initialize=0) m.x148 = Var(within=Reals,bounds=(0,None),initialize=0) m.x149 =", "= Constraint(expr= m.b740 + m.b742 <= 1) m.c1213 = Constraint(expr=", "0) m.c125 = Constraint(expr= m.x239 + 4.45628648004517*m.b602 <= 4.45628648004517) m.c126", "m.x23 - m.x26 - m.x29 - m.x32 == 0) m.c12", "= Constraint(expr= m.x541 + 15*m.b670 <= 15) m.c788 = Constraint(expr=(m.x566/(0.001", "m.b688 <= 0) m.c1286 = Constraint(expr= m.b599 - m.b689 <=", "Constraint(expr= m.b671 - m.b673 <= 0) m.c1090 = Constraint(expr= m.b672", "0) m.c540 = Constraint(expr= m.x99 - m.x393 - m.x396 ==", "m.c356 = Constraint(expr= m.x311 + 15*m.b626 <= 15) m.c357 =", "+ m.x850 == 0) m.c998 = Constraint(expr= 4*m.b761 + m.x851", "m.c173 = Constraint(expr= m.x281 == 0) m.c174 = Constraint(expr= m.x282", "- m.x353 == 0) m.c324 = Constraint(expr= m.x75 - m.x351", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b675 = Var(within=Binary,bounds=(0,1),initialize=0) m.b676 = Var(within=Binary,bounds=(0,1),initialize=0) m.b677 =", "0) m.c1476 = Constraint(expr= m.b663 - m.b675 >= 0) m.c1477", "m.x217 == 0) m.c59 = Constraint(expr= m.x227 == 0) m.c60", "1.26558121681553) m.c282 = Constraint(expr= m.x342 + 1.26558121681553*m.b618 <= 1.26558121681553) m.c283", "Constraint(expr= m.b677 - m.b678 <= 0) m.c1095 = Constraint(expr= m.b677", "= Constraint(expr= m.b606 - m.b618 >= 0) m.c1420 = Constraint(expr=", "- 0.5*m.x256 + m.x280 == 0) m.c167 = 
Constraint(expr= m.x251", "0) m.c651 = Constraint(expr= m.x147 - m.x483 - m.x486 ==", "0) m.c1085 = Constraint(expr= m.b668 - m.b669 <= 0) m.c1086", "m.x395 = Var(within=Reals,bounds=(0,None),initialize=0) m.x396 = Var(within=Reals,bounds=(0,None),initialize=0) m.x397 = Var(within=Reals,bounds=(0,None),initialize=0) m.x398", "1) m.c1204 = Constraint(expr= m.b735 + m.b736 <= 1) m.c1205", "= Constraint(expr= m.x258 + 30*m.b609 <= 30) m.c196 = Constraint(expr=", "m.x443 == 0) m.c510 = Constraint(expr= m.x444 == 0) m.c511", "m.x590 - m.x593 == 0) m.c909 = Constraint(expr= m.x210 -", "m.x312 == 0) m.c349 = Constraint(expr= m.x61 - m.x310 -", "<= 0) m.c355 = Constraint(expr= m.x310 - 15*m.b628 <= 0)", "1) m.c1159 = Constraint(expr= m.b713 + m.b714 <= 1) m.c1160", "15*m.x131 + 20*m.x132 + 20*m.x133 + 30*m.x155 + 40*m.x156 +", "m.c472 = Constraint(expr= m.x385 == 0) m.c473 = Constraint(expr= m.x437", "5*m.x89 - 5*m.x90 - 5*m.x91 + 40*m.x110 + 30*m.x111 +", "Constraint(expr= m.b699 + m.b700 <= 1) m.c1131 = Constraint(expr= m.b698", "<= 0.940066550763924) m.c712 = Constraint(expr= m.x499 + 0.940066550763924*m.b664 <= 0.940066550763924)", "== 0) m.c167 = Constraint(expr= m.x251 == 0) m.c168 =", "0) m.c169 = Constraint(expr= m.x253 == 0) m.c170 = Constraint(expr=", "= Constraint(expr= m.x301 + 15*m.b622 <= 15) m.c305 = Constraint(expr=", "+ 15*m.b624 <= 15) m.c331 = Constraint(expr= m.x307 + 15*m.b625", "- m.b682 <= 0) m.c1100 = Constraint(expr= m.b683 - m.b684", "m.b624 - m.b625 <= 0) m.c1043 = Constraint(expr= m.b626 -", "m.c1165 = Constraint(expr= m.b716 + m.b717 <= 1) m.c1166 =", "= Constraint(expr= m.b628 - m.b655 >= 0) m.c1457 = Constraint(expr=", "m.x359 + 3.04984759446376*m.b626 <= 3.04984759446376) m.c363 = Constraint(expr= m.x360 +", "Var(within=Reals,bounds=(0,None),initialize=0) m.x77 = Var(within=Reals,bounds=(0,None),initialize=0) m.x78 = Var(within=Reals,bounds=(0,None),initialize=0) m.x79 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x536 - 15*m.b668 <= 
0) m.c783 = Constraint(expr= m.x537 -", "m.b658 - m.b748 <= 0) m.c1346 = Constraint(expr= m.b659 -", "0) m.c61 = Constraint(expr= m.x229 == 0) m.c62 = Constraint(expr=", "m.x536 = Var(within=Reals,bounds=(0,None),initialize=0) m.x537 = Var(within=Reals,bounds=(0,None),initialize=0) m.x538 = Var(within=Reals,bounds=(0,None),initialize=0) m.x539", "Constraint(expr= m.b719 + m.b721 <= 1) m.c1171 = Constraint(expr= m.b719", "0) m.c1320 = Constraint(expr= - m.b632 + m.b633 - m.b723", "m.c834 = Constraint(expr= m.x552 + 0.705049913072943*m.b675 <= 0.705049913072943) m.c835 =", "= Constraint(expr= m.x333 == 0) m.c238 = Constraint(expr= m.x334 ==", "0) m.c1014 = Constraint(expr= m.b596 - m.b598 <= 0) m.c1015", "Var(within=Binary,bounds=(0,1),initialize=0) m.b724 = Var(within=Binary,bounds=(0,1),initialize=0) m.b725 = Var(within=Binary,bounds=(0,1),initialize=0) m.b726 = Var(within=Binary,bounds=(0,1),initialize=0)", "<= 0) m.c1046 = Constraint(expr= m.b629 - m.b630 <= 0)", "m.x121 = Var(within=Reals,bounds=(0,None),initialize=0) m.x122 = Var(within=Reals,bounds=(0,None),initialize=0) m.x123 = Var(within=Reals,bounds=(0,None),initialize=0) m.x124", "m.b651 - m.b741 <= 0) m.c1339 = Constraint(expr= - m.b650", "= Constraint(expr= 4*m.b713 + m.x803 == 0) m.c951 = Constraint(expr=", "m.c182 = Constraint(expr= m.x44 - m.x278 - m.x281 == 0)", "0) m.c1009 = Constraint(expr= 3*m.b772 + m.x862 == 0) m.c1010", "- m.x233 == 0) m.c93 = Constraint(expr= m.x15 - m.x231", "+ 9*m.b624 <= 9) m.c337 = Constraint(expr= m.x355 + 9*m.b625", "0.999*m.b658) - log(1 + m.x472/(0.001 + 0.999*m.b658)))*(0.001 + 0.999*m.b658) <=", "+ 0.994083415506506*m.b679 <= 0.994083415506506) m.c863 = Constraint(expr= m.x578 - 0.690184503917672*m.b677", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b617 = Var(within=Binary,bounds=(0,1),initialize=0) m.b618 = Var(within=Binary,bounds=(0,1),initialize=0) m.b619 =", "<= 15) m.c303 = Constraint(expr= m.x300 + 15*m.b621 <= 15)", "m.x462 + 1.11894339953103*m.b651 <= 
1.11894339953103) m.c610 = Constraint(expr= m.x463 +", "- m.x24 == 0) m.c10 = Constraint(expr= m.x19 - m.x22", "m.x277 = Var(within=Reals,bounds=(0,None),initialize=0) m.x278 = Var(within=Reals,bounds=(0,None),initialize=0) m.x279 = Var(within=Reals,bounds=(0,None),initialize=0) m.x280", "- m.x343 == 0) m.c272 = Constraint(expr= m.x269 - 2.54515263975353*m.b617", "<= 15) m.c358 = Constraint(expr= m.x313 + 15*m.b628 <= 15)", "0) m.c1357 = Constraint(expr= - m.b668 - m.b669 + m.b670", "- 2*m.b720 - 9*m.b721 - 3*m.b722 - m.b723 - 9*m.b724", "m.c761 = Constraint(expr= m.x167 - m.x506 - m.x509 == 0)", "m.x415 == 0) m.c623 = Constraint(expr= m.x134 - m.x464 -", "Constraint(expr= m.b743 + m.b745 <= 1) m.c1222 = Constraint(expr= m.b744", "+ m.b671 + m.b674 >= 0) m.c1461 = Constraint(expr= -", "Constraint(expr= m.x307 + 15*m.b625 <= 15) m.c332 = Constraint(expr= m.x350", "m.b609 >= 0) m.c1411 = Constraint(expr= m.b598 + m.b601 -", "m.b760 <= 1) m.c1252 = Constraint(expr= m.b759 + m.b760 <=", "- m.x484 - m.x487 == 0) m.c653 = Constraint(expr= m.x470", "m.x450 + 9*m.b645 <= 9) m.c556 = Constraint(expr= m.x451 +", "m.b709 <= 0) m.c1307 = Constraint(expr= m.b620 - m.b710 <=", "= Constraint(expr= m.b729 + m.b730 <= 1) m.c1191 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x516 = Var(within=Reals,bounds=(0,None),initialize=0) m.x517 = Var(within=Reals,bounds=(0,None),initialize=0) m.x518 =", "- m.x295 == 0) m.c242 = Constraint(expr= m.x65 - m.x326", "m.c1215 = Constraint(expr= m.b740 + m.b742 <= 1) m.c1216 =", "m.b661 >= 0) m.c1460 = Constraint(expr= - m.b662 + m.b671", "+ m.b703 <= 1) m.c1139 = Constraint(expr= m.b704 + m.b705", "m.c226 = Constraint(expr= m.x316 - 1.83548069293539*m.b613 <= 0) m.c227 =", "m.c334 = Constraint(expr= m.x352 - 9*m.b625 <= 0) m.c335 =", "0.480234946352917*m.b676 <= 0.480234946352917) m.c842 = Constraint(expr=(m.x578/(0.001 + 0.999*m.b677) - log(1", "1.32154609891348*m.b614 <= 1.32154609891348) m.c255 = Constraint(expr= 
m.x333 + 1.32154609891348*m.b615 <=", "Constraint(expr= m.x296 - 15*m.b620 <= 0) m.c300 = Constraint(expr= m.x297", "m.x233 = Var(within=Reals,bounds=(0,None),initialize=0) m.x234 = Var(within=Reals,bounds=(0,None),initialize=0) m.x235 = Var(within=Reals,bounds=(0,None),initialize=0) m.x236", "0) m.c546 = Constraint(expr= m.x393 - 9*m.b645 <= 0) m.c547", "m.x796 == 0) m.c944 = Constraint(expr= 5*m.b707 + m.x797 ==", "1) m.c1192 = Constraint(expr= m.b729 + m.b730 <= 1) m.c1193", "Constraint(expr=(m.x489/(0.001 + 0.999*m.b660) - 1.2*log(1 + m.x477/(0.001 + 0.999*m.b660)))*(0.001 +", "Constraint(expr= m.x574 - 0.480234946352917*m.b676 <= 0) m.c839 = Constraint(expr= m.x575", "<= 0) m.c1316 = Constraint(expr= m.b629 - m.b719 <= 0)", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b671 = Var(within=Binary,bounds=(0,1),initialize=0) m.b672 = Var(within=Binary,bounds=(0,1),initialize=0) m.b673 =", "- m.b626 + m.b647 + m.b650 + m.b653 >= 0)", "Constraint(expr= m.b611 - m.b701 <= 0) m.c1299 = Constraint(expr= -", "<= 0) m.c131 = Constraint(expr= m.x263 + 3.34221486003388*m.b602 <= 3.34221486003388)", "+ m.x788 == 0) m.c936 = Constraint(expr= 10*m.b699 + m.x789", "- m.b620 + m.b638 >= 0) m.c1395 = Constraint(expr= -", ">= 0) m.c1396 = Constraint(expr= - m.b622 + m.b640 >=", "m.c184 = Constraint(expr= m.x46 - m.x280 - m.x283 == 0)", "Constraint(expr= - m.b611 + m.b629 >= 0) m.c1380 = Constraint(expr=", "Constraint(expr= m.x457 + 1.04900943706034*m.b649 <= 1.04900943706034) m.c584 = Constraint(expr=(m.x458/(0.001 +", "0) m.c1410 = Constraint(expr= m.b597 + m.b600 - m.b609 >=", "Constraint(expr= m.x526 - 0.994083415506506*m.b667 <= 0) m.c743 = Constraint(expr= m.x530", "- m.x164 - m.x167 == 0) m.c45 = Constraint(expr= m.x159", "<= 3.04984759446376) m.c365 = Constraint(expr= - 0.9*m.x317 + m.x416 ==", "Var(within=Reals,bounds=(0,None),initialize=0) m.x297 = Var(within=Reals,bounds=(0,None),initialize=0) m.x298 = Var(within=Reals,bounds=(0,None),initialize=0) m.x299 = 
Var(within=Reals,bounds=(0,None),initialize=0)", "m.c656 = Constraint(expr= m.x473 + 1.18887736200171*m.b656 <= 1.18887736200171) m.c657 =", "Constraint(expr=(m.x484/(0.001 + 0.999*m.b658) - log(1 + m.x472/(0.001 + 0.999*m.b658)))*(0.001 +", "0.705049913072943) m.c717 = Constraint(expr= m.x522 + 0.705049913072943*m.b663 <= 0.705049913072943) m.c718", "Var(within=Reals,bounds=(0,None),initialize=0) m.x260 = Var(within=Reals,bounds=(0,None),initialize=0) m.x261 = Var(within=Reals,bounds=(0,None),initialize=0) m.x262 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c1465 = Constraint(expr= - m.b667 + m.b679 >= 0) m.c1466", "+ 1.32154609891348*m.b615 <= 1.32154609891348) m.c256 = Constraint(expr= m.x334 + 1.32154609891348*m.b616", "<= 1.04900943706034) m.c584 = Constraint(expr=(m.x458/(0.001 + 0.999*m.b650) - 0.8*log(1 +", "- 1.1*log(1 + m.x308/(0.001 + 0.999*m.b626)))*(0.001 + 0.999* m.b626) <=", "0) m.c656 = Constraint(expr= m.x473 + 1.18887736200171*m.b656 <= 1.18887736200171) m.c657", "sos1 sos2 scont sint # 865 685 180 0 0", "Var(within=Reals,bounds=(0,None),initialize=0) m.x311 = Var(within=Reals,bounds=(0,None),initialize=0) m.x312 = Var(within=Reals,bounds=(0,None),initialize=0) m.x313 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x6 = Var(within=Reals,bounds=(0,None),initialize=0) m.x7 = Var(within=Reals,bounds=(0,None),initialize=0) m.x8 =", "m.b671 - m.b672 <= 0) m.c1089 = Constraint(expr= m.b671 -", "<= 1) m.c1128 = Constraint(expr= m.b698 + m.b700 <= 1)", "+ m.b601 - m.b610 >= 0) m.c1412 = Constraint(expr= m.b602", "0) m.c957 = Constraint(expr= 2*m.b720 + m.x810 == 0) m.c958", "Var(within=Reals,bounds=(0,None),initialize=0) m.x351 = Var(within=Reals,bounds=(0,None),initialize=0) m.x352 = Var(within=Reals,bounds=(0,None),initialize=0) m.x353 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x122 - m.x440 - m.x443 == 0) m.c516 = Constraint(expr=", "== 0) m.c940 = Constraint(expr= 4*m.b703 + 
m.x793 == 0)", "= Constraint(expr= m.b692 + m.b693 <= 1) m.c1118 = Constraint(expr=", "== 0) m.c1001 = Constraint(expr= 7*m.b764 + m.x854 == 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x345 = Var(within=Reals,bounds=(0,None),initialize=0) m.x346 = Var(within=Reals,bounds=(0,None),initialize=0) m.x347 =", "= Constraint(expr= - m.b671 + m.b672 - m.b762 <= 0)", "m.x348 + 13.5*m.b621 <= 13.5) m.c310 = Constraint(expr= m.x349 +", "+ 30*m.x119 + 20*m.x120 + 20*m.x121 + 35*m.x122 + 50*m.x123", "m.c529 = Constraint(expr= m.x445 + 9*m.b643 <= 9) m.c530 =", "Constraint(expr= m.x291 - 3.34221486003388*m.b615 <= 0) m.c247 = Constraint(expr= m.x292", "m.c867 = Constraint(expr= m.x582 + 0.690184503917672*m.b678 <= 0.690184503917672) m.c868 =", "1.32154609891348*m.b633 <= 0) m.c424 = Constraint(expr= m.x331 - 1.32154609891348*m.b634 <=", "m.b622 <= 0) m.c1039 = Constraint(expr= m.b621 - m.b622 <=", "m.b634 >= 0) m.c1385 = Constraint(expr= - m.b605 + m.b617", "MINLP written by GAMS Convert at 01/15/21 11:37:33 # #", "0) m.c347 = Constraint(expr= m.x59 - m.x308 - m.x311 ==", "1.04900943706034*m.b649 <= 0) m.c581 = Constraint(expr= m.x455 + 1.04900943706034*m.b647 <=", "m.c1446 = Constraint(expr= m.b624 - m.b645 >= 0) m.c1447 =", "1) m.c1221 = Constraint(expr= m.b743 + m.b745 <= 1) m.c1222", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b733 = Var(within=Binary,bounds=(0,1),initialize=0) m.b734 = Var(within=Binary,bounds=(0,1),initialize=0) m.b735 =", "m.c687 = Constraint(expr= m.x489 - 0.940066550763924*m.b660 <= 0) m.c688 =", "290*m.x206 + 300*m.x207 + 240*m.x208 + 350*m.x209 + 250*m.x210 +", "Var(within=Reals,bounds=(0,None),initialize=0) m.x180 = Var(within=Reals,bounds=(0,None),initialize=0) m.x181 = Var(within=Reals,bounds=(0,None),initialize=0) m.x182 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= - m.b620 - m.b621 + m.b622 - m.b712", "= Constraint(expr= m.x444 + 9*m.b642 <= 9) m.c529 = Constraint(expr=", "1) m.c1158 = Constraint(expr= 
m.b713 + m.b715 <= 1) m.c1159", "m.c1382 = Constraint(expr= - m.b614 + m.b632 >= 0) m.c1383", "m.x510 == 0) m.c754 = Constraint(expr= m.x511 == 0) m.c755", "= Constraint(expr= - 0.75*m.x496 + m.x520 == 0) m.c695 =", "Constraint(expr= m.b744 + m.b745 <= 1) m.c1221 = Constraint(expr= m.b743", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x357 = Var(within=Reals,bounds=(0,None),initialize=0) m.x358 = Var(within=Reals,bounds=(0,None),initialize=0) m.x359 =", "30*m.b610 <= 0) m.c194 = Constraint(expr= m.x257 + 30*m.b608 <=", "= Constraint(expr= - m.b677 + m.b678 - m.b768 <= 0)", "1) m.c1245 = Constraint(expr= m.b755 + m.b757 <= 1) m.c1246", "+ m.b674 >= 0) m.c1461 = Constraint(expr= - m.b663 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x371 = Var(within=Reals,bounds=(0,None),initialize=0) m.x372 = Var(within=Reals,bounds=(0,None),initialize=0) m.x373 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x240 = Var(within=Reals,bounds=(0,None),initialize=0) m.x241 = Var(within=Reals,bounds=(0,None),initialize=0) m.x242 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x213 - m.x216 == 0) m.c64 = Constraint(expr= m.x7 -", "- m.x568 - m.x571 == 0) m.c803 = Constraint(expr= m.x542", "Constraint(expr= m.b755 + m.b756 <= 1) m.c1242 = Constraint(expr= m.b755", "m.b677 + m.b678 - m.b768 <= 0) m.c1366 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x422 = Var(within=Reals,bounds=(0,None),initialize=0) m.x423 = Var(within=Reals,bounds=(0,None),initialize=0) m.x424 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b662 - m.b674 >= 0) m.c1476 = Constraint(expr= m.b663 -", "0 0 0 # FX 0 0 0 0 0", "= Constraint(expr= - m.b677 - m.b678 + m.b679 - m.b769", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b707 = Var(within=Binary,bounds=(0,1),initialize=0) m.b708 = Var(within=Binary,bounds=(0,1),initialize=0) m.b709 =", "- m.x551 == 0) m.c825 = Constraint(expr= m.x186 - m.x549", "0.999*m.b635)))*(0.001 + 0.999* m.b635) <= 0) m.c435 = 
Constraint(expr=(m.x429/(0.001 +", "= Constraint(expr= m.b603 - m.b612 >= 0) m.c1414 = Constraint(expr=", "+ 0.999* m.b607) <= 0) m.c137 = Constraint(expr= m.x245 ==", "- m.x72 - m.x90 + m.x93 == 0) m.c25 =", "0.999*m.b673) - 1.25*log(1 + m.x544/(0.001 + 0.999*m.b673)))*(0.001 + 0.999* m.b673)", "m.b690 <= 1) m.c1110 = Constraint(expr= m.b689 + m.b691 <=", "2.54515263975353) m.c278 = Constraint(expr= m.x338 - 1.26558121681553*m.b617 <= 0) m.c279", "9) m.c524 = Constraint(expr= m.x440 - 9*m.b641 <= 0) m.c525", "m.x833 = Var(within=Reals,bounds=(None,None),initialize=0) m.x834 = Var(within=Reals,bounds=(None,None),initialize=0) m.x835 = Var(within=Reals,bounds=(None,None),initialize=0) m.x836", "m.x212 - m.x215 == 0) m.c63 = Constraint(expr= m.x6 -", "= Constraint(expr= m.x416 - 20*m.b629 <= 0) m.c402 = Constraint(expr=", "m.b654 + m.b655 - m.b745 <= 0) m.c1343 = Constraint(expr=", "m.x846 == 0) m.c994 = Constraint(expr= 8*m.b757 + m.x847 ==", "m.b759 <= 1) m.c1250 = Constraint(expr= m.b759 + m.b760 <=", "Constraint(expr= m.b755 + m.b757 <= 1) m.c1243 = Constraint(expr= m.b755", "0) m.c1352 = Constraint(expr= m.b665 - m.b755 <= 0) m.c1353", "m.c371 = Constraint(expr= m.x323 == 0) m.c372 = Constraint(expr= m.x324", "Constraint(expr= m.x111 - m.x417 - m.x420 == 0) m.c388 =", "m.x594 == 0) m.c910 = Constraint(expr= m.x211 - m.x592 -", "m.x198 = Var(within=Reals,bounds=(0,None),initialize=0) m.x199 = Var(within=Reals,bounds=(0,None),initialize=0) m.x200 = Var(within=Reals,bounds=(0,None),initialize=0) m.x201", "m.x415 = Var(within=Reals,bounds=(0,None),initialize=0) m.x416 = Var(within=Reals,bounds=(0,None),initialize=0) m.x417 = Var(within=Reals,bounds=(0,None),initialize=0) m.x418", "m.x527 - 0.994083415506506*m.b677 <= 0) m.c858 = Constraint(expr= m.x528 -", "m.b636 <= 0) m.c1053 = Constraint(expr= m.b635 - m.b637 <=", "= Constraint(expr= m.x480 == 0) m.c670 = Constraint(expr= m.x481 ==", "m.c1126 = Constraint(expr= m.b696 + m.b697 <= 1) m.c1127 =", "m.b732 <= 1) 
m.c1194 = Constraint(expr= m.b731 + m.b733 <=", "= Constraint(expr= - m.b638 - m.b639 + m.b640 - m.b730", "+ m.x813 == 0) m.c961 = Constraint(expr= 9*m.b724 + m.x814", "- m.x108 == 0) m.c31 = Constraint(expr= m.x79 - m.x103", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b620 = Var(within=Binary,bounds=(0,1),initialize=0) m.b621 = Var(within=Binary,bounds=(0,1),initialize=0) m.b622 =", "Var(within=Reals,bounds=(None,None),initialize=0) m.x796 = Var(within=Reals,bounds=(None,None),initialize=0) m.x797 = Var(within=Reals,bounds=(None,None),initialize=0) m.x798 = Var(within=Reals,bounds=(None,None),initialize=0)", "= Var(within=Reals,bounds=(0,30),initialize=0) m.x171 = Var(within=Reals,bounds=(0,30),initialize=0) m.x172 = Var(within=Reals,bounds=(0,30),initialize=0) m.x173 =", "m.x88 - m.x376 - m.x379 == 0) m.c386 = Constraint(expr=", "m.b756 <= 0) m.c1354 = Constraint(expr= - m.b665 - m.b666", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x209 = Var(within=Reals,bounds=(0,None),initialize=0) m.x210 = Var(within=Reals,bounds=(0,None),initialize=0) m.x211 =", "m.c249 = Constraint(expr= m.x294 + 3.34221486003388*m.b615 <= 3.34221486003388) m.c250 =", "- m.x378 == 0) m.c385 = Constraint(expr= m.x88 - m.x376", "Constraint(expr= m.x92 - m.x380 - m.x383 == 0) m.c480 =", "m.x281 == 0) m.c183 = Constraint(expr= m.x45 - m.x279 -", "+ m.b627 - m.b717 <= 0) m.c1315 = Constraint(expr= -", "m.b760 <= 1) m.c1249 = Constraint(expr= m.b758 + m.b759 <=", "m.b674 + m.b675 - m.b765 <= 0) m.c1363 = Constraint(expr=", "m.c1369 = Constraint(expr= - m.b680 - m.b681 + m.b682 -", "m.c1391 = Constraint(expr= - m.b608 + m.b620 + m.b623 +", "1) m.c1274 = Constraint(expr= m.b771 + m.b772 <= 1) m.c1275", "Var(within=Reals,bounds=(0,None),initialize=0) m.x404 = Var(within=Reals,bounds=(0,None),initialize=0) m.x405 = Var(within=Reals,bounds=(0,None),initialize=0) m.x406 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x262 - 3.34221486003388*m.b604 <= 0) m.c131 = Constraint(expr= 
m.x263 +", "= Constraint(expr= m.x342 + 1.26558121681553*m.b618 <= 1.26558121681553) m.c283 = Constraint(expr=", "m.c586 = Constraint(expr=(m.x460/(0.001 + 0.999*m.b652) - 0.8*log(1 + m.x406/(0.001 +", "= Constraint(expr= m.x251 == 0) m.c168 = Constraint(expr= m.x252 ==", "<= 0) m.c221 = Constraint(expr= m.x287 + 3.34221486003388*m.b611 <= 3.34221486003388)", "== 0) m.c323 = Constraint(expr= m.x74 - m.x350 - m.x353", "Constraint(expr= m.x215 + 40*m.b596 <= 40) m.c72 = Constraint(expr= m.x216", "== 0) m.c621 = Constraint(expr= m.x108 - m.x411 - m.x414", "= Constraint(expr= - 0.9*m.x319 + m.x418 == 0) m.c368 =", "<= 1) m.c1217 = Constraint(expr= m.b743 + m.b744 <= 1)", "= Constraint(expr= m.x486 == 0) m.c646 = Constraint(expr= m.x487 ==", "Constraint(expr= - 0.5*m.x514 + m.x538 == 0) m.c752 = Constraint(expr=", "== 0) m.c383 = Constraint(expr= m.x86 - m.x374 - m.x377", "Constraint(expr= 5*m.b737 + m.x827 == 0) m.c975 = Constraint(expr= 7*m.b738", "- m.x196 == 0) m.c53 = Constraint(expr=(m.x224/(0.001 + 0.999*m.b596) -", "1) m.c1251 = Constraint(expr= m.b758 + m.b760 <= 1) m.c1252", "0.999* m.b635) <= 0) m.c435 = Constraint(expr=(m.x429/(0.001 + 0.999*m.b636) -", "= Constraint(expr= m.x62 - m.x317 - m.x323 == 0) m.c381", "Constraint(expr= m.b604 - m.b613 >= 0) m.c1415 = Constraint(expr= m.b602", "m.x580 = Var(within=Reals,bounds=(0,None),initialize=0) m.x581 = Var(within=Reals,bounds=(0,None),initialize=0) m.x582 = Var(within=Reals,bounds=(0,None),initialize=0) m.x583", "m.b611 = Var(within=Binary,bounds=(0,1),initialize=0) m.b612 = Var(within=Binary,bounds=(0,1),initialize=0) m.b613 = Var(within=Binary,bounds=(0,1),initialize=0) m.b614", "1.2*log(1 + m.x219/(0.001 + 0.999*m.b600)))*(0.001 + 0.999* m.b600) <= 0)", "= Constraint(expr= m.x379 == 0) m.c377 = Constraint(expr= m.x419 ==", "m.b630 >= 0) m.c1381 = Constraint(expr= - m.b613 + m.b631", "m.b725 <= 0) m.c1323 = Constraint(expr= - m.b635 + m.b636", "1) m.c1183 = Constraint(expr= m.b725 + m.b726 <= 1) m.c1184", 
"Constraint(expr= m.b674 - m.b676 <= 0) m.c1093 = Constraint(expr= m.b675", "0) m.c27 = Constraint(expr= m.x75 - m.x96 - m.x99 ==", "m.c122 = Constraint(expr= m.x236 - 4.45628648004517*m.b602 <= 0) m.c123 =", "<= 0) m.c683 = Constraint(expr= m.x479 + 1.18887736200171*m.b659 <= 1.18887736200171)", "Constraint(expr= 7*m.b763 + m.x853 == 0) m.c1001 = Constraint(expr= 7*m.b764", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b684 = Var(within=Binary,bounds=(0,1),initialize=0) m.b685 = Var(within=Binary,bounds=(0,1),initialize=0) m.b686 =", "Constraint(expr= m.x436 - 2.30162356062425*m.b640 <= 0) m.c500 = Constraint(expr= m.x437", "= Constraint(expr= m.b644 - m.b645 <= 0) m.c1062 = Constraint(expr=", "0) m.c1469 = Constraint(expr= m.b653 - m.b659 >= 0) m.c1470", "0.999*m.b661) - 1.2*log(1 + m.x478/(0.001 + 0.999*m.b661)))*(0.001 + 0.999* m.b661)", "m.c1263 = Constraint(expr= m.b764 + m.b766 <= 1) m.c1264 =", "2*m.b720 - 9*m.b721 - 3*m.b722 - m.b723 - 9*m.b724 -", "m.c385 = Constraint(expr= m.x88 - m.x376 - m.x379 == 0)", "= Constraint(expr= m.x484 - 0.78338879230327*m.b658 <= 0) m.c662 = Constraint(expr=", "Var(within=Binary,bounds=(0,1),initialize=0) m.b725 = Var(within=Binary,bounds=(0,1),initialize=0) m.b726 = Var(within=Binary,bounds=(0,1),initialize=0) m.b727 = Var(within=Binary,bounds=(0,1),initialize=0)", "== 0) m.c109 = Constraint(expr= - 0.75*m.x238 + m.x262 ==", "m.c262 = Constraint(expr= m.x277 == 0) m.c263 = Constraint(expr= m.x341", "20*m.b630 <= 0) m.c403 = Constraint(expr= m.x418 - 20*m.b631 <=", "0) m.c729 = Constraint(expr= m.x165 - m.x501 - m.x504 ==", "<= 1) m.c1218 = Constraint(expr= m.b743 + m.b745 <= 1)", "m.x493 == 0) m.c680 = Constraint(expr= m.x476 - 1.18887736200171*m.b659 <=", "= Constraint(expr= m.b674 - m.b676 <= 0) m.c1093 = Constraint(expr=", "= Constraint(expr= m.b716 + m.b718 <= 1) m.c1165 = Constraint(expr=", "0) m.c275 = Constraint(expr= m.x275 + 2.54515263975353*m.b617 <= 2.54515263975353) m.c276", 
"Var(within=Reals,bounds=(0,None),initialize=0) m.x381 = Var(within=Reals,bounds=(0,None),initialize=0) m.x382 = Var(within=Reals,bounds=(0,None),initialize=0) m.x383 = Var(within=Reals,bounds=(0,None),initialize=0)", "- 0.65*log(1 + m.x370/(0.001 + 0.999*m.b640)))*(0.001 + 0.999* m.b640) <=", "0.999*m.b612)))*(0.001 + 0.999* m.b612) <= 0) m.c205 = Constraint(expr=(m.x316/(0.001 +", "m.b615 >= 0) m.c1378 = Constraint(expr= - m.b604 + m.b613", "1) m.c1148 = Constraint(expr= m.b708 + m.b709 <= 1) m.c1149", "Constraint(expr= m.x245 + 4.45628648004517*m.b605 <= 4.45628648004517) m.c153 = Constraint(expr= m.x246", "- m.x291 - m.x294 == 0) m.c241 = Constraint(expr= m.x52", "m.x139 = Var(within=Reals,bounds=(0,None),initialize=0) m.x140 = Var(within=Reals,bounds=(0,None),initialize=0) m.x141 = Var(within=Reals,bounds=(0,None),initialize=0) m.x142", "- 0.9*m.x298 + m.x346 == 0) m.c287 = Constraint(expr= m.x299", "= Constraint(expr= m.x544 - 0.705049913072943*m.b673 <= 0) m.c806 = Constraint(expr=", "Constraint(expr= m.b744 + m.b745 <= 1) m.c1223 = Constraint(expr= m.b746", "+ m.b676 >= 0) m.c1463 = Constraint(expr= - m.b665 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x549 = Var(within=Reals,bounds=(0,None),initialize=0) m.x550 = Var(within=Reals,bounds=(0,None),initialize=0) m.x551 =", "0) m.c124 = Constraint(expr= m.x238 - 4.45628648004517*m.b604 <= 0) m.c125", "0.999* m.b671) <= 0) m.c789 = Constraint(expr=(m.x567/(0.001 + 0.999*m.b672) -", "Constraint(expr= 6*m.b698 + m.x788 == 0) m.c936 = Constraint(expr= 10*m.b699", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x796 = Var(within=Reals,bounds=(None,None),initialize=0) m.x797 = Var(within=Reals,bounds=(None,None),initialize=0) m.x798 =", "sint # 865 685 180 0 0 0 0 0", "- m.x500 - m.x503 == 0) m.c729 = Constraint(expr= m.x165", "+ 0.480234946352917*m.b674 <= 0.480234946352917) m.c840 = Constraint(expr= m.x576 + 0.480234946352917*m.b675", "== 0) m.c644 = Constraint(expr= m.x485 == 0) m.c645 =", 
"Var(within=Reals,bounds=(0,None),initialize=0) m.x522 = Var(within=Reals,bounds=(0,None),initialize=0) m.x523 = Var(within=Reals,bounds=(0,None),initialize=0) m.x524 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x14 + m.x17 == 0) m.c6 = Constraint(expr= - m.x12", "= Constraint(expr= m.x330 - 1.32154609891348*m.b633 <= 0) m.c424 = Constraint(expr=", "m.c682 = Constraint(expr= m.x478 - 1.18887736200171*m.b661 <= 0) m.c683 =", "m.b689 + m.b690 <= 1) m.c1112 = Constraint(expr= m.b690 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x465 = Var(within=Reals,bounds=(0,None),initialize=0) m.x466 = Var(within=Reals,bounds=(0,None),initialize=0) m.x467 =", "Constraint(expr= m.x227 + 3.71357206670431*m.b596 <= 3.71357206670431) m.c78 = Constraint(expr= m.x228", "0) m.c586 = Constraint(expr=(m.x460/(0.001 + 0.999*m.b652) - 0.8*log(1 + m.x406/(0.001", "m.c607 = Constraint(expr= m.x460 - 1.11894339953103*m.b652 <= 0) m.c608 =", "m.x540 == 0) m.c769 = Constraint(expr= m.x181 - m.x538 -", "0.994083415506506*m.b665 <= 0) m.c741 = Constraint(expr= m.x525 - 0.994083415506506*m.b666 <=", "= Constraint(expr= m.b737 + m.b738 <= 1) m.c1206 = Constraint(expr=", "= Constraint(expr= m.b689 + m.b690 <= 1) m.c1112 = Constraint(expr=", "= Constraint(expr=(m.x422/(0.001 + 0.999*m.b632) - log(1 + m.x329/(0.001 + 0.999*m.b632)))*(0.001", "m.c142 = Constraint(expr= m.x274 == 0) m.c143 = Constraint(expr= m.x29", "Constraint(expr= m.b603 - m.b612 >= 0) m.c1414 = Constraint(expr= m.b604", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x113 = Var(within=Reals,bounds=(0,None),initialize=0) m.x114 = Var(within=Reals,bounds=(0,None),initialize=0) m.x115 =", "m.c21 = Constraint(expr= m.x69 - m.x81 - m.x84 == 0)", "m.c636 = Constraint(expr= m.x468 + 1.18887736200171*m.b654 <= 1.18887736200171) m.c637 =", "Constraint(expr= - m.b653 - m.b654 + m.b655 - m.b745 <=", "= Constraint(expr= m.b598 + m.b601 - m.b607 >= 0) m.c1409", "+ m.b751 <= 1) m.c1231 = Constraint(expr= m.b749 + m.b750", 
"m.b755 = Var(within=Binary,bounds=(0,1),initialize=0) m.b756 = Var(within=Binary,bounds=(0,1),initialize=0) m.b757 = Var(within=Binary,bounds=(0,1),initialize=0) m.b758", "= Constraint(expr= m.x252 == 0) m.c169 = Constraint(expr= m.x253 ==", "Var(within=Reals,bounds=(0,None),initialize=0) m.x153 = Var(within=Reals,bounds=(0,None),initialize=0) m.x154 = Var(within=Reals,bounds=(0,None),initialize=0) m.x155 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x565 = Var(within=Reals,bounds=(0,None),initialize=0) m.x566 = Var(within=Reals,bounds=(0,None),initialize=0) m.x567 =", "m.x161 - m.x164 - m.x167 == 0) m.c45 = Constraint(expr=", "2*m.b746 - 5*m.b747 - 2*m.b748 - 9*m.b749 - 2*m.b750 -", "m.b671 - m.b673 <= 0) m.c1090 = Constraint(expr= m.b672 -", "<= 1) m.c1248 = Constraint(expr= m.b758 + m.b760 <= 1)", "+ m.b653 >= 0) m.c1401 = Constraint(expr= - m.b627 +", "m.x20 = Var(within=Reals,bounds=(0,None),initialize=0) m.x21 = Var(within=Reals,bounds=(0,None),initialize=0) m.x22 = Var(within=Reals,bounds=(0,None),initialize=0) m.x23", "m.x48 - m.x51 == 0) m.c16 = Constraint(expr= m.x40 -", "0) m.c544 = Constraint(expr= m.x127 - m.x448 - m.x451 ==", "0.8*log(1 + m.x405/(0.001 + 0.999*m.b651)))*(0.001 + 0.999* m.b651) <= 0)", "m.b773 <= 0) m.c1371 = Constraint(expr= - m.b683 + m.b684", "== 0) m.c823 = Constraint(expr= m.x577 == 0) m.c824 =", "m.x273 == 0) m.c148 = Constraint(expr= m.x43 - m.x268 -", "- 1.2*log(1 + m.x476/(0.001 + 0.999*m.b659)))*(0.001 + 0.999* m.b659) <=", "m.c108 = Constraint(expr= - 0.75*m.x237 + m.x261 == 0) m.c109", "m.b610 <= 0) m.c1028 = Constraint(expr= m.b611 - m.b612 <=", "m.c93 = Constraint(expr= m.x15 - m.x231 - m.x234 == 0)", "Constraint(expr= m.x73 - m.x346 - m.x349 == 0) m.c299 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x233 = Var(within=Reals,bounds=(0,None),initialize=0) m.x234 = Var(within=Reals,bounds=(0,None),initialize=0) m.x235 = 
Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x800 = Var(within=Reals,bounds=(None,None),initialize=0) m.x801 = Var(within=Reals,bounds=(None,None),initialize=0) m.x802 =", "0) m.c822 = Constraint(expr= m.x576 == 0) m.c823 = Constraint(expr=", "m.c29 = Constraint(expr= m.x77 - m.x101 - m.x104 - m.x107", "0.999*m.b649) - 0.75*log(1 + m.x400/(0.001 + 0.999*m.b649)))*(0.001 + 0.999* m.b649)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x349 = Var(within=Reals,bounds=(0,None),initialize=0) m.x350 = Var(within=Reals,bounds=(0,None),initialize=0) m.x351 =", "1.18887736200171) m.c684 = Constraint(expr= m.x480 + 1.18887736200171*m.b660 <= 1.18887736200171) m.c685", "m.b743 + m.b744 <= 1) m.c1220 = Constraint(expr= m.b744 +", "= Constraint(expr= m.x283 == 0) m.c176 = Constraint(expr= m.x32 -", "== 0) m.c108 = Constraint(expr= - 0.75*m.x237 + m.x261 ==", "m.b703 = Var(within=Binary,bounds=(0,1),initialize=0) m.b704 = Var(within=Binary,bounds=(0,1),initialize=0) m.b705 = Var(within=Binary,bounds=(0,1),initialize=0) m.b706", "0) m.c39 = Constraint(expr= - m.x147 - m.x150 + m.x153", "Constraint(expr= m.x300 == 0) m.c289 = Constraint(expr= m.x301 == 0)", "m.b741 <= 1) m.c1214 = Constraint(expr= m.b741 + m.b742 <=", "+ m.b736 <= 1) m.c1204 = Constraint(expr= m.b735 + m.b736", "m.x108 = Var(within=Reals,bounds=(0,None),initialize=0) m.x109 = Var(within=Reals,bounds=(0,None),initialize=0) m.x110 = Var(within=Reals,bounds=(0,None),initialize=0) m.x111", "m.b634 = Var(within=Binary,bounds=(0,1),initialize=0) m.b635 = Var(within=Binary,bounds=(0,1),initialize=0) m.b636 = Var(within=Binary,bounds=(0,1),initialize=0) m.b637", "m.c677 = Constraint(expr= m.x149 - m.x488 - m.x491 == 0)", "0.666992981045719) m.c815 = Constraint(expr=(m.x572/(0.001 + 0.999*m.b674) - 0.9*log(1 + m.x548/(0.001", "m.b726 = Var(within=Binary,bounds=(0,1),initialize=0) m.b727 = Var(within=Binary,bounds=(0,1),initialize=0) m.b728 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b729", "= Constraint(expr= m.x411 - 3.04984759446376*m.b654 <= 0) m.c628 = Constraint(expr=", "m.c1143 = Constraint(expr= m.b704 + m.b706 <= 1) m.c1144 =", "<= 0) m.c1307 = Constraint(expr= m.b620 - m.b710 <= 0)", "m.c1360 = Constraint(expr= - m.b671 - m.b672 + m.b673 -", "m.c780 = Constraint(expr= m.x516 + 30*m.b669 <= 30) m.c781 =", "m.b692 + m.b693 <= 1) m.c1118 = Constraint(expr= m.b693 +", "m.c851 = Constraint(expr= m.x176 - m.x527 - m.x533 == 0)", "m.x521 == 0) m.c705 = Constraint(expr= m.x174 - m.x519 -", "- m.x275 == 0) m.c267 = Constraint(expr= m.x42 - m.x270", "m.b723 <= 1) m.c1176 = Constraint(expr= m.b722 + m.b724 <=", "0) m.c1062 = Constraint(expr= m.b644 - m.b646 <= 0) m.c1063", "- m.b655 >= 0) m.c1457 = Constraint(expr= - m.b653 +", "= Constraint(expr= - m.x11 - m.x14 + m.x17 == 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x520 = Var(within=Reals,bounds=(0,None),initialize=0) m.x521 = Var(within=Reals,bounds=(0,None),initialize=0) m.x522 =", "m.c86 = Constraint(expr= m.x233 == 0) m.c87 = Constraint(expr= m.x234", "1) m.c1188 = Constraint(expr= m.b728 + m.b730 <= 1) m.c1189", "Constraint(expr= m.b618 - m.b639 >= 0) m.c1441 = Constraint(expr= m.b619", "m.x148 = Var(within=Reals,bounds=(0,None),initialize=0) m.x149 = Var(within=Reals,bounds=(0,None),initialize=0) m.x150 = Var(within=Reals,bounds=(0,None),initialize=0) m.x151", "Var(within=Binary,bounds=(0,1),initialize=0) m.b670 = Var(within=Binary,bounds=(0,1),initialize=0) m.b671 = Var(within=Binary,bounds=(0,1),initialize=0) m.b672 = Var(within=Binary,bounds=(0,1),initialize=0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x450 = Var(within=Reals,bounds=(0,None),initialize=0) m.x451 = Var(within=Reals,bounds=(0,None),initialize=0) m.x452 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x491 = Var(within=Reals,bounds=(0,None),initialize=0) m.x492 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x493 =", "m.c438 = Constraint(expr= m.x366 == 0) m.c439 = Constraint(expr= m.x367", "== 0) m.c696 = Constraint(expr= m.x498 == 0) m.c697 =", "0) m.c785 = Constraint(expr= m.x539 + 15*m.b668 <= 15) m.c786", "Var(within=Reals,bounds=(0,None),initialize=0) m.x500 = Var(within=Reals,bounds=(0,None),initialize=0) m.x501 = Var(within=Reals,bounds=(0,None),initialize=0) m.x502 = Var(within=Reals,bounds=(0,None),initialize=0)", "- m.b667 <= 0) m.c1084 = Constraint(expr= m.b666 - m.b667", "1) m.c1186 = Constraint(expr= m.b726 + m.b727 <= 1) m.c1187", "m.b714 + m.b715 <= 1) m.c1163 = Constraint(expr= m.b716 +", "<= 1) m.c1246 = Constraint(expr= m.b756 + m.b757 <= 1)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x143 = Var(within=Reals,bounds=(0,None),initialize=0) m.x144 = Var(within=Reals,bounds=(0,None),initialize=0) m.x145 =", "= Constraint(expr= m.x167 - m.x506 - m.x509 == 0) m.c762", "<= 1) m.c1137 = Constraint(expr= m.b701 + m.b703 <= 1)", "Constraint(expr= m.x383 == 0) m.c471 = Constraint(expr= m.x384 == 0)", "m.b768 <= 1) m.c1266 = Constraint(expr= m.b767 + m.b769 <=", "m.b650 - m.b740 <= 0) m.c1338 = Constraint(expr= - m.b650", ">= 0) m.c1458 = Constraint(expr= - m.b654 + m.b657 +", "<= 0) m.c135 = Constraint(expr=(m.x267/(0.001 + 0.999*m.b606) - 1.5*log(1 +", "m.x111 = Var(within=Reals,bounds=(0,None),initialize=0) m.x112 = Var(within=Reals,bounds=(0,None),initialize=0) m.x113 = Var(within=Reals,bounds=(0,None),initialize=0) m.x114", "- m.x223 == 0) m.c92 = Constraint(expr= m.x14 - m.x230", "= Constraint(expr= m.x549 - 0.705049913072943*m.b675 <= 0) m.c832 = Constraint(expr=", "m.c924 = Constraint(expr= 4*m.b687 + m.x777 == 0) m.c925 =", "- m.b675 >= 0) m.c1477 = Constraint(expr= m.b664 - m.b676", "Total cont binary integer sos1 sos2 scont sint # 865", "- 40*m.b596 <= 0) m.c69 = Constraint(expr= m.x213 - 40*m.b597", "m.x101 - m.x398 - m.x401 == 0) m.c567 = Constraint(expr=", "0.705049913072943) m.c809 = 
Constraint(expr= m.x566 - 0.666992981045719*m.b671 <= 0) m.c810", "m.b742 <= 1) m.c1216 = Constraint(expr= m.b741 + m.b742 <=", "+ 0.999*m.b649)))*(0.001 + 0.999* m.b649) <= 0) m.c560 = Constraint(expr=", "- m.x407 == 0) m.c594 = Constraint(expr= m.x105 - m.x405", "m.x46 - m.x280 - m.x283 == 0) m.c185 = Constraint(expr=", "= Constraint(expr= m.x563 + 15*m.b683 <= 15) m.c915 = Constraint(expr=", "m.c1478 = Constraint(expr= m.b665 - m.b677 >= 0) m.c1479 =", "= Constraint(expr= m.x317 - 1.83548069293539*m.b629 <= 0) m.c390 = Constraint(expr=", "m.x447 == 0) m.c532 = Constraint(expr= - m.x394 + m.x448", "0) m.c98 = Constraint(expr= m.x221 + 40*m.b599 <= 40) m.c99", "- 5*m.b737 - 7*m.b738 - 6*m.b739 - 2*m.b740 - 8*m.b741", "+ m.x864 == 0) m.c1012 = Constraint(expr= 4*m.b775 + m.x865", "m.c1075 = Constraint(expr= m.b657 - m.b658 <= 0) m.c1076 =", "Var(within=Reals,bounds=(None,None),initialize=0) m.x812 = Var(within=Reals,bounds=(None,None),initialize=0) m.x813 = Var(within=Reals,bounds=(None,None),initialize=0) m.x814 = Var(within=Reals,bounds=(None,None),initialize=0)", "+ m.x838 == 0) m.c986 = Constraint(expr= 9*m.b749 + m.x839", "0) m.c1485 = Constraint(expr= m.b669 - m.b684 >= 0) m.c1486", "0) m.c13 = Constraint(expr= m.x25 - m.x28 - m.x31 -", "Constraint(expr= - m.b596 - m.b597 + m.b598 - m.b688 <=", "- 0.994083415506506*m.b678 <= 0) m.c859 = Constraint(expr= m.x529 - 0.994083415506506*m.b679", "m.b659 >= 0) m.c1458 = Constraint(expr= - m.b654 + m.b657", "m.x103 - m.x106 - m.x109 == 0) m.c32 = Constraint(expr=", "0) m.c292 = Constraint(expr= m.x349 == 0) m.c293 = Constraint(expr=", ">= 0) m.c1467 = Constraint(expr= m.b654 - m.b657 >= 0)", "m.c1359 = Constraint(expr= - m.b671 + m.b672 - m.b762 <=", "m.c691 = Constraint(expr= m.x493 + 0.940066550763924*m.b661 <= 0.940066550763924) m.c692 =", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x858 = Var(within=Reals,bounds=(None,None),initialize=0) m.x859 = Var(within=Reals,bounds=(None,None),initialize=0) m.x860 
=", "Constraint(expr= m.x246 == 0) m.c139 = Constraint(expr= m.x247 == 0)", "m.x396 == 0) m.c541 = Constraint(expr= m.x100 - m.x394 -", "m.b698 + m.b700 <= 1) m.c1132 = Constraint(expr= m.b699 +", "+ 0.999*m.b652)))*(0.001 + 0.999* m.b652) <= 0) m.c587 = Constraint(expr=", "m.b608 + m.b620 + m.b623 + m.b626 >= 0) m.c1392", "m.b618 - m.b619 <= 0) m.c1037 = Constraint(expr= m.b620 -", "+ 3.34221486003388*m.b604 <= 3.34221486003388) m.c134 = Constraint(expr=(m.x266/(0.001 + 0.999*m.b605) -", "m.x97 - m.x388 - m.x391 == 0) m.c515 = Constraint(expr=", "0) m.c1409 = Constraint(expr= m.b596 + m.b599 - m.b608 >=", "m.x538 == 0) m.c749 = Constraint(expr= - 0.5*m.x512 + m.x536", "= Constraint(expr= m.x480 + 1.18887736200171*m.b660 <= 1.18887736200171) m.c685 = Constraint(expr=", "+ m.b636 - m.b726 <= 0) m.c1324 = Constraint(expr= -", "0) m.c730 = Constraint(expr= m.x166 - m.x502 - m.x505 ==", "m.b660 <= 0) m.c1077 = Constraint(expr= m.b659 - m.b661 <=", "m.x286 = Var(within=Reals,bounds=(0,None),initialize=0) m.x287 = Var(within=Reals,bounds=(0,None),initialize=0) m.x288 = Var(within=Reals,bounds=(0,None),initialize=0) m.x289", "m.c1395 = Constraint(expr= - m.b621 + m.b639 >= 0) m.c1396", "m.x196 == 0) m.c53 = Constraint(expr=(m.x224/(0.001 + 0.999*m.b596) - log(1", "- m.b741 <= 0) m.c1339 = Constraint(expr= - m.b650 -", "0) m.c207 = Constraint(expr= m.x288 == 0) m.c208 = Constraint(expr=", "= Constraint(expr= 2*m.b748 + m.x838 == 0) m.c986 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x313 = Var(within=Reals,bounds=(0,None),initialize=0) m.x314 = Var(within=Reals,bounds=(0,None),initialize=0) m.x315 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.b767 + m.b768 <= 1) m.c1268 = Constraint(expr= m.b768", "13.5*m.b680 <= 0) m.c891 = Constraint(expr= m.x585 - 13.5*m.b681 <=", "m.b599 = Var(within=Binary,bounds=(0,1),initialize=0) m.b600 = Var(within=Binary,bounds=(0,1),initialize=0) m.b601 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b602", "Constraint(expr= m.b633 - m.b634 <= 0) m.c1052 = Constraint(expr= m.b635", "= Constraint(expr= m.x377 + 20*m.b629 <= 20) m.c399 = Constraint(expr=", "= Constraint(expr= m.x88 - m.x376 - m.x379 == 0) m.c386", "m.b736 <= 1) m.c1205 = Constraint(expr= m.b737 + m.b738 <=", "m.b737 = Var(within=Binary,bounds=(0,1),initialize=0) m.b738 = Var(within=Binary,bounds=(0,1),initialize=0) m.b739 = Var(within=Binary,bounds=(0,1),initialize=0) m.b740", "m.x32 == 0) m.c12 = Constraint(expr= m.x24 - m.x27 -", "Constraint(expr= m.x71 - m.x344 - m.x347 == 0) m.c297 =", "Var(within=Binary,bounds=(0,1),initialize=0) m.b674 = Var(within=Binary,bounds=(0,1),initialize=0) m.b675 = Var(within=Binary,bounds=(0,1),initialize=0) m.b676 = Var(within=Binary,bounds=(0,1),initialize=0)", "Constraint(expr= m.x129 - m.x453 - m.x456 == 0) m.c571 =", "m.x445 = Var(within=Reals,bounds=(0,None),initialize=0) m.x446 = Var(within=Reals,bounds=(0,None),initialize=0) m.x447 = Var(within=Reals,bounds=(0,None),initialize=0) m.x448", "m.c4 = Constraint(expr= m.x4 - m.x7 - m.x10 == 0)", "m.x275 == 0) m.c261 = Constraint(expr= m.x276 == 0) m.c262", "m.c90 = Constraint(expr= m.x9 - m.x219 - m.x222 == 0)", "== 0) m.c648 = Constraint(expr= m.x141 - m.x471 - m.x474", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x210 = Var(within=Reals,bounds=(0,None),initialize=0) m.x211 = Var(within=Reals,bounds=(0,None),initialize=0) m.x212 =", "m.x442 = Var(within=Reals,bounds=(0,None),initialize=0) m.x443 = Var(within=Reals,bounds=(0,None),initialize=0) m.x444 = Var(within=Reals,bounds=(0,None),initialize=0) m.x445", "+ m.b639 >= 0) m.c1390 = Constraint(expr= - m.b619 +", "m.c1239 = Constraint(expr= m.b752 + m.b754 <= 1) m.c1240 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x257 = Var(within=Reals,bounds=(0,None),initialize=0) m.x258 = Var(within=Reals,bounds=(0,None),initialize=0) m.x259 =", "= Constraint(expr= m.x187 - m.x550 - m.x553 == 0) m.c827", "- 
log(1 + m.x330/(0.001 + 0.999*m.b633)))*(0.001 + 0.999*m.b633) <= 0)", "= Constraint(expr= m.x68 - m.x338 - m.x341 == 0) m.c270", "m.c440 = Constraint(expr= m.x431 == 0) m.c441 = Constraint(expr= m.x432", "= Constraint(expr= m.b696 + m.b697 <= 1) m.c1125 = Constraint(expr=", "m.b734 + m.b736 <= 1) m.c1201 = Constraint(expr= m.b734 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x68 = Var(within=Reals,bounds=(0,None),initialize=0) m.x69 = Var(within=Reals,bounds=(0,None),initialize=0) m.x70 =", "= Constraint(expr= m.x141 - m.x471 - m.x474 == 0) m.c649", "m.x475 = Var(within=Reals,bounds=(0,None),initialize=0) m.x476 = Var(within=Reals,bounds=(0,None),initialize=0) m.x477 = Var(within=Reals,bounds=(0,None),initialize=0) m.x478", "- 1.11894339953103*m.b652 <= 0) m.c608 = Constraint(expr= m.x461 + 1.11894339953103*m.b650", "m.b714 <= 1) m.c1158 = Constraint(expr= m.b713 + m.b715 <=", "Constraint(expr= m.b726 + m.b727 <= 1) m.c1185 = Constraint(expr= m.b725", "m.c1328 = Constraint(expr= m.b641 - m.b731 <= 0) m.c1329 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x397 = Var(within=Reals,bounds=(0,None),initialize=0) m.x398 = Var(within=Reals,bounds=(0,None),initialize=0) m.x399 = Var(within=Reals,bounds=(0,None),initialize=0)", "Var(within=Reals,bounds=(0,30),initialize=0) m.x173 = Var(within=Reals,bounds=(0,None),initialize=0) m.x174 = Var(within=Reals,bounds=(0,None),initialize=0) m.x175 = Var(within=Reals,bounds=(0,None),initialize=0)", "0.999* m.b599) <= 0) m.c81 = Constraint(expr=(m.x231/(0.001 + 0.999*m.b600) -", "= Constraint(expr= m.x382 - 33.5*m.b640 <= 0) m.c494 = Constraint(expr=", "Constraint(expr= m.b654 - m.b655 <= 0) m.c1073 = Constraint(expr= m.b656", "m.c28 = Constraint(expr= m.x76 - m.x97 - m.x100 == 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x360 = Var(within=Reals,bounds=(0,None),initialize=0) m.x361 = Var(within=Reals,bounds=(0,None),initialize=0) m.x362 = Var(within=Reals,bounds=(0,None),initialize=0)", "0.9*log(1 + 
m.x549/(0.001 + 0.999*m.b675)))*(0.001 + 0.999* m.b675) <= 0)", "= Constraint(expr= m.b666 - m.b667 <= 0) m.c1085 = Constraint(expr=", "Constraint(expr= m.x300 + 15*m.b621 <= 15) m.c304 = Constraint(expr= m.x301", "Constraint(expr= m.x76 - m.x97 - m.x100 == 0) m.c29 =", "1) m.c1147 = Constraint(expr= m.b707 + m.b708 <= 1) m.c1148", "m.x404 - m.x407 == 0) m.c594 = Constraint(expr= m.x105 -", "+ m.b718 <= 1) m.c1167 = Constraint(expr= m.b716 + m.b718", "Constraint(expr= m.b639 - m.b640 <= 0) m.c1058 = Constraint(expr= m.b641", "m.x536 == 0) m.c750 = Constraint(expr= - 0.5*m.x513 + m.x537", "= Constraint(expr= - m.b664 + m.b673 + m.b676 >= 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x403 = Var(within=Reals,bounds=(0,None),initialize=0) m.x404 = Var(within=Reals,bounds=(0,None),initialize=0) m.x405 =", "= Constraint(expr= m.x444 == 0) m.c511 = Constraint(expr= m.x445 ==", "== 0) m.c955 = Constraint(expr= 3*m.b718 + m.x808 == 0)", "9*m.b644 <= 0) m.c546 = Constraint(expr= m.x393 - 9*m.b645 <=", "== 0) m.c265 = Constraint(expr= m.x343 == 0) m.c266 =", "m.x294 == 0) m.c235 = Constraint(expr= m.x295 == 0) m.c236", "+ 0.842233385663186*m.b632 <= 0.842233385663186) m.c432 = Constraint(expr= m.x426 + 0.842233385663186*m.b633", "m.b652 = Var(within=Binary,bounds=(0,1),initialize=0) m.b653 = Var(within=Binary,bounds=(0,1),initialize=0) m.b654 = Var(within=Binary,bounds=(0,1),initialize=0) m.b655", "Constraint(expr= m.x238 - 4.45628648004517*m.b604 <= 0) m.c125 = Constraint(expr= m.x239", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x53 = Var(within=Reals,bounds=(0,None),initialize=0) m.x54 = Var(within=Reals,bounds=(0,None),initialize=0) m.x55 =", "= Constraint(expr= 6*m.b739 + m.x829 == 0) m.c977 = Constraint(expr=", "m.x543 - 0.705049913072943*m.b672 <= 0) m.c805 = Constraint(expr= m.x544 -", "0.999* m.b674) <= 0) m.c816 = Constraint(expr=(m.x573/(0.001 + 0.999*m.b675) -", "m.x575 == 0) m.c828 = Constraint(expr= m.x201 - m.x573 -", "0) m.c1077 = 
Constraint(expr= m.b659 - m.b661 <= 0) m.c1078", "m.b646 = Var(within=Binary,bounds=(0,1),initialize=0) m.b647 = Var(within=Binary,bounds=(0,1),initialize=0) m.b648 = Var(within=Binary,bounds=(0,1),initialize=0) m.b649", "m.c690 = Constraint(expr= m.x492 + 0.940066550763924*m.b660 <= 0.940066550763924) m.c691 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x403 = Var(within=Reals,bounds=(0,None),initialize=0) m.x404 = Var(within=Reals,bounds=(0,None),initialize=0) m.x405 = Var(within=Reals,bounds=(0,None),initialize=0)", "+ m.x18 == 0) m.c7 = Constraint(expr= - m.x13 -", "== 0) m.c986 = Constraint(expr= 9*m.b749 + m.x839 == 0)", "Constraint(expr= - m.b623 - m.b624 + m.b625 - m.b715 <=", "Constraint(expr= m.b659 - m.b661 <= 0) m.c1078 = Constraint(expr= m.b660", "= Constraint(expr= - 0.5*m.x256 + m.x280 == 0) m.c167 =", "Constraint(expr= m.x431 + 0.572481933717686*m.b635 <= 0.572481933717686) m.c459 = Constraint(expr= m.x432", "- m.b762 <= 0) m.c1360 = Constraint(expr= - m.b671 -", "m.x122 = Var(within=Reals,bounds=(0,None),initialize=0) m.x123 = Var(within=Reals,bounds=(0,None),initialize=0) m.x124 = Var(within=Reals,bounds=(0,None),initialize=0) m.x125", "Var(within=Binary,bounds=(0,1),initialize=0) m.b696 = Var(within=Binary,bounds=(0,1),initialize=0) m.b697 = Var(within=Binary,bounds=(0,1),initialize=0) m.b698 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.x436 - m.x439 == 0) m.c485 = Constraint(expr= m.x368 -", "m.b721 <= 1) m.c1174 = Constraint(expr= m.b720 + m.b721 <=", "m.b738 <= 1) m.c1206 = Constraint(expr= m.b737 + m.b739 <=", "+ m.b610 - m.b700 <= 0) m.c1298 = Constraint(expr= m.b611", "m.c1423 = Constraint(expr= m.b610 - m.b622 >= 0) m.c1424 =", "0) m.c720 = Constraint(expr=(m.x525/(0.001 + 0.999*m.b666) - 1.5*log(1 + m.x501/(0.001", "m.b690 + m.b691 <= 1) m.c1115 = Constraint(expr= m.b692 +", "m.x552 = Var(within=Reals,bounds=(0,None),initialize=0) m.x553 = Var(within=Reals,bounds=(0,None),initialize=0) m.x554 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x555", "- m.x283 == 0) m.c185 = Constraint(expr= m.x248 - 4.45628648004517*m.b608", "= Constraint(expr= m.b758 + m.b759 <= 1) m.c1250 = Constraint(expr=", "m.x839 = Var(within=Reals,bounds=(None,None),initialize=0) m.x840 = Var(within=Reals,bounds=(None,None),initialize=0) m.x841 = Var(within=Reals,bounds=(None,None),initialize=0) m.x842", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x863 = Var(within=Reals,bounds=(None,None),initialize=0) m.x864 = Var(within=Reals,bounds=(None,None),initialize=0) m.x865 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x45 = Var(within=Reals,bounds=(0,None),initialize=0) m.x46 = Var(within=Reals,bounds=(0,None),initialize=0) m.x47 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x325 + 1.83548069293539*m.b631 <= 1.83548069293539) m.c395 = Constraint(expr= m.x374 -", "<= 0) m.c494 = Constraint(expr= m.x383 + 33.5*m.b638 <= 33.5)", "2.54515263975353) m.c161 = Constraint(expr= - m.x248 + m.x278 == 0)", "Constraint(expr= - m.b666 + m.b678 >= 0) m.c1465 = Constraint(expr=", "m.c1102 = Constraint(expr= m.b684 - m.b685 <= 0) m.c1103 =", ">= 0) m.c1456 = Constraint(expr= m.b628 - m.b655 >= 0)", "- 0.940066550763924*m.b661 <= 0) m.c689 = Constraint(expr= m.x491 + 0.940066550763924*m.b659", "m.b611 + m.b612 - m.b702 <= 0) m.c1300 = Constraint(expr=", "- 3*m.b736 - 5*m.b737 - 7*m.b738 - 6*m.b739 - 2*m.b740", "= Constraint(expr= - 0.75*m.x495 + m.x519 == 0) m.c694 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x140 = Var(within=Reals,bounds=(0,None),initialize=0) m.x141 = Var(within=Reals,bounds=(0,None),initialize=0) m.x142 = Var(within=Reals,bounds=(0,None),initialize=0)", "0.999* m.b649) <= 0) m.c560 = Constraint(expr= m.x401 == 0)", "<= 0.940066550763924) m.c776 = Constraint(expr= m.x512 - 30*m.b668 <= 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x574 = Var(within=Reals,bounds=(0,None),initialize=0) m.x575 = Var(within=Reals,bounds=(0,None),initialize=0) 
m.x576 = Var(within=Reals,bounds=(0,None),initialize=0)", ">= 0) m.c1464 = Constraint(expr= - m.b666 + m.b678 >=", "Var(within=Binary,bounds=(0,1),initialize=0) m.b709 = Var(within=Binary,bounds=(0,1),initialize=0) m.b710 = Var(within=Binary,bounds=(0,1),initialize=0) m.b711 = Var(within=Binary,bounds=(0,1),initialize=0)", "Constraint(expr= m.x219 - 40*m.b600 <= 0) m.c97 = Constraint(expr= m.x220", "Constraint(expr= m.x468 == 0) m.c619 = Constraint(expr= m.x469 == 0)", "m.x785 == 0) m.c933 = Constraint(expr= 9*m.b696 + m.x786 ==", "== 0) m.c6 = Constraint(expr= - m.x12 - m.x15 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x8 = Var(within=Reals,bounds=(0,None),initialize=0) m.x9 = Var(within=Reals,bounds=(0,None),initialize=0) m.x10 =", "== 0) m.c58 = Constraint(expr= m.x217 == 0) m.c59 =", "0) m.c705 = Constraint(expr= m.x174 - m.x519 - m.x522 ==", "+ 1.32154609891348*m.b616 <= 1.32154609891348) m.c257 = Constraint(expr=(m.x338/(0.001 + 0.999*m.b617) -", "= Constraint(expr= m.x318 - 1.83548069293539*m.b630 <= 0) m.c391 = Constraint(expr=", "= Constraint(expr= 9*m.b717 + m.x807 == 0) m.c955 = Constraint(expr=", "Var(within=Binary,bounds=(0,1),initialize=0) m.b687 = Var(within=Binary,bounds=(0,1),initialize=0) m.b688 = Var(within=Binary,bounds=(0,1),initialize=0) m.b689 = Var(within=Binary,bounds=(0,1),initialize=0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x44 = Var(within=Reals,bounds=(0,None),initialize=0) m.x45 = Var(within=Reals,bounds=(0,None),initialize=0) m.x46 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c1038 = Constraint(expr= m.b620 - m.b622 <= 0) m.c1039", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x407 = Var(within=Reals,bounds=(0,None),initialize=0) m.x408 = Var(within=Reals,bounds=(0,None),initialize=0) m.x409 =", "Constraint(expr= m.x52 - m.x292 - m.x295 == 0) m.c242 =", "= Constraint(expr=(m.x572/(0.001 + 0.999*m.b674) - 0.9*log(1 + m.x548/(0.001 + 0.999*m.b674)))*(0.001", "= 
Var(within=Reals,bounds=(0,None),initialize=0) m.x120 = Var(within=Reals,bounds=(0,None),initialize=0) m.x121 = Var(within=Reals,bounds=(0,None),initialize=0) m.x122 =", "0) m.c267 = Constraint(expr= m.x42 - m.x270 - m.x276 ==", "m.x304 - 15*m.b625 <= 0) m.c329 = Constraint(expr= m.x305 +", "- m.b647 + m.b648 - m.b738 <= 0) m.c1336 =", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b773 = Var(within=Binary,bounds=(0,1),initialize=0) m.b774 = Var(within=Binary,bounds=(0,1),initialize=0) m.b775 =", "0) m.c167 = Constraint(expr= m.x251 == 0) m.c168 = Constraint(expr=", "m.b723 + m.x813 == 0) m.c961 = Constraint(expr= 9*m.b724 +", "<= 0) m.c1075 = Constraint(expr= m.b657 - m.b658 <= 0)", "m.x31 - m.x34 == 0) m.c14 = Constraint(expr= m.x38 -", "m.c107 = Constraint(expr= - 0.75*m.x236 + m.x260 == 0) m.c108", "m.c1174 = Constraint(expr= m.b720 + m.b721 <= 1) m.c1175 =", "m.x812 == 0) m.c960 = Constraint(expr= m.b723 + m.x813 ==", "0) m.c723 = Constraint(expr= m.x504 == 0) m.c724 = Constraint(expr=", "Constraint(expr= 2*m.b706 + m.x796 == 0) m.c944 = Constraint(expr= 5*m.b707", "m.x481 == 0) m.c677 = Constraint(expr= m.x149 - m.x488 -", "= Constraint(expr= m.b707 + m.b708 <= 1) m.c1146 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x549 = Var(within=Reals,bounds=(0,None),initialize=0) m.x550 = Var(within=Reals,bounds=(0,None),initialize=0) m.x551 = Var(within=Reals,bounds=(0,None),initialize=0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x538 = Var(within=Reals,bounds=(0,None),initialize=0) m.x539 = Var(within=Reals,bounds=(0,None),initialize=0) m.x540 = Var(within=Reals,bounds=(0,None),initialize=0)", "8*m.b753 - 4*m.b754 - 2*m.b755 - 3*m.b756 - 8*m.b757 -", "0) m.c40 = Constraint(expr= - m.x148 - m.x151 + m.x154", "+ 0.572481933717686*m.b635 <= 0.572481933717686) m.c459 = Constraint(expr= m.x432 + 0.572481933717686*m.b636", "- 2.30162356062425*m.b638 <= 0) m.c498 = Constraint(expr= m.x435 - 2.30162356062425*m.b639", "= Constraint(expr= 
m.x254 - 30*m.b608 <= 0) m.c192 = Constraint(expr=", "<= 0) m.c791 = Constraint(expr= m.x545 == 0) m.c792 =", "m.c850 = Constraint(expr= m.x583 == 0) m.c851 = Constraint(expr= m.x176", "= Constraint(expr= m.b734 + m.b736 <= 1) m.c1204 = Constraint(expr=", "= Constraint(expr= - m.x507 + m.x537 == 0) m.c748 =", "15*m.b682 <= 0) m.c887 = Constraint(expr= m.x557 + 15*m.b680 <=", "m.b701 + m.b703 <= 1) m.c1135 = Constraint(expr= m.b701 +", "m.b734 <= 0) m.c1332 = Constraint(expr= - m.b644 + m.b645", "m.x373 + 1.26558121681553*m.b640 <= 1.26558121681553) m.c491 = Constraint(expr= m.x380 -", "= Constraint(expr= m.x336 + 1.32154609891348*m.b633 <= 1.32154609891348) m.c427 = Constraint(expr=", "m.c1341 = Constraint(expr= - m.b653 + m.b654 - m.b744 <=", "- m.b645 >= 0) m.c1447 = Constraint(expr= m.b625 - m.b646", "0) m.c520 = Constraint(expr= m.x388 - 9*m.b643 <= 0) m.c521", "m.c352 = Constraint(expr= m.x79 - m.x358 - m.x361 == 0)", "+ 9*m.b641 <= 9) m.c522 = Constraint(expr= m.x390 + 9*m.b642", "- m.x591 - m.x594 == 0) m.c910 = Constraint(expr= m.x211", "- 0.940066550763924*m.b670 <= 0) m.c773 = Constraint(expr= m.x509 + 0.940066550763924*m.b668", "m.b639 + m.b640 - m.b730 <= 0) m.c1328 = Constraint(expr=", "m.x157 = Var(within=Reals,bounds=(0,None),initialize=0) m.x158 = Var(within=Reals,bounds=(0,None),initialize=0) m.x159 = Var(within=Reals,bounds=(0,None),initialize=0) m.x160", "- 9*m.b625 <= 0) m.c335 = Constraint(expr= m.x353 + 9*m.b623", "0) m.c374 = Constraint(expr= m.x377 == 0) m.c375 = Constraint(expr=", "- 15*m.b683 <= 0) m.c912 = Constraint(expr= m.x561 - 15*m.b684", "Var(within=Reals,bounds=(0,None),initialize=0) m.x578 = Var(within=Reals,bounds=(0,None),initialize=0) m.x579 = Var(within=Reals,bounds=(0,None),initialize=0) m.x580 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr=(m.x226/(0.001 + 0.999*m.b598) - log(1 + m.x214/(0.001 + 0.999*m.b598)))*(0.001 +", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x824 = 
Var(within=Reals,bounds=(None,None),initialize=0) m.x825 = Var(within=Reals,bounds=(None,None),initialize=0) m.x826 =", "Constraint(expr= m.x240 == 0) m.c112 = Constraint(expr= m.x241 == 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x389 = Var(within=Reals,bounds=(0,None),initialize=0) m.x390 = Var(within=Reals,bounds=(0,None),initialize=0) m.x391 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x584 - 13.5*m.b680 <= 0) m.c891 = Constraint(expr= m.x585 -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x174 = Var(within=Reals,bounds=(0,None),initialize=0) m.x175 = Var(within=Reals,bounds=(0,None),initialize=0) m.x176 =", "<= 15) m.c916 = Constraint(expr= m.x565 + 15*m.b685 <= 15)", "m.x229 == 0) m.c68 = Constraint(expr= m.x212 - 40*m.b596 <=", "- m.b681 <= 0) m.c1098 = Constraint(expr= m.b680 - m.b682", "Var(within=Reals,bounds=(0,None),initialize=0) m.x155 = Var(within=Reals,bounds=(0,None),initialize=0) m.x156 = Var(within=Reals,bounds=(0,None),initialize=0) m.x157 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x247 + 4.45628648004517*m.b607 <= 4.45628648004517) m.c155 = Constraint(expr= m.x266 -", "20*m.b630 <= 0) m.c397 = Constraint(expr= m.x376 - 20*m.b631 <=", "Constraint(expr= m.x413 + 3.04984759446376*m.b653 <= 3.04984759446376) m.c630 = Constraint(expr= m.x414", "Var(within=Reals,bounds=(0,None),initialize=0) m.x19 = Var(within=Reals,bounds=(0,None),initialize=0) m.x20 = Var(within=Reals,bounds=(0,None),initialize=0) m.x21 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x199 - m.x568 - m.x571 == 0) m.c803 =", "m.b709 <= 1) m.c1149 = Constraint(expr= m.b707 + m.b709 <=", "+ m.b675 - m.b765 <= 0) m.c1363 = Constraint(expr= -", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x846 = Var(within=Reals,bounds=(None,None),initialize=0) m.x847 = Var(within=Reals,bounds=(None,None),initialize=0) m.x848 =", "== 0) m.c1008 = Constraint(expr= m.b771 + m.x861 == 0)", "- 9*m.b642 <= 0) m.c520 = Constraint(expr= 
m.x388 - 9*m.b643", "m.x155 - m.x158 == 0) m.c42 = Constraint(expr= m.x153 -", "= Constraint(expr= m.x299 == 0) m.c288 = Constraint(expr= m.x300 ==", "m.c653 = Constraint(expr= m.x470 - 1.18887736200171*m.b656 <= 0) m.c654 =", "Var(within=Binary,bounds=(0,1),initialize=0) m.b775 = Var(within=Binary,bounds=(0,1),initialize=0) m.x776 = Var(within=Reals,bounds=(None,None),initialize=0) m.x777 = Var(within=Reals,bounds=(None,None),initialize=0)", "m.x234 == 0) m.c88 = Constraint(expr= m.x235 == 0) m.c89", "- m.b616 <= 0) m.c1034 = Constraint(expr= m.b617 - m.b618", "m.x308 - m.x311 == 0) m.c348 = Constraint(expr= m.x60 -", "m.b636 >= 0) m.c1438 = Constraint(expr= m.b619 - m.b637 >=", "4.45628648004517*m.b603 <= 4.45628648004517) m.c127 = Constraint(expr= m.x241 + 4.45628648004517*m.b604 <=", "= Constraint(expr= m.x9 - m.x219 - m.x222 == 0) m.c91", "0.5*m.x255 + m.x279 == 0) m.c166 = Constraint(expr= - 0.5*m.x256", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b626 = Var(within=Binary,bounds=(0,1),initialize=0) m.b627 = Var(within=Binary,bounds=(0,1),initialize=0) m.b628 =", "m.x815 == 0) m.c963 = Constraint(expr= 6*m.b726 + m.x816 ==", "Var(within=Binary,bounds=(0,1),initialize=0) m.b754 = Var(within=Binary,bounds=(0,1),initialize=0) m.b755 = Var(within=Binary,bounds=(0,1),initialize=0) m.b756 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.b712 <= 1) m.c1155 = Constraint(expr= m.b710 + m.b712 <=", "m.x131 = Var(within=Reals,bounds=(0,None),initialize=0) m.x132 = Var(within=Reals,bounds=(0,None),initialize=0) m.x133 = Var(within=Reals,bounds=(0,None),initialize=0) m.x134", "m.x330 - m.x336 == 0) m.c418 = Constraint(expr= m.x67 -", "m.c1450 = Constraint(expr= m.b628 - m.b649 >= 0) m.c1451 =", "= Constraint(expr= - m.x12 - m.x15 + m.x18 == 0)", "m.x218/(0.001 + 0.999*m.b599)))*(0.001 + 0.999* m.b599) <= 0) m.c81 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x543 = Var(within=Reals,bounds=(0,None),initialize=0) m.x544 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x545 = Var(within=Reals,bounds=(0,None),initialize=0)", "40) m.c72 = Constraint(expr= m.x216 + 40*m.b597 <= 40) m.c73", "Var(within=Reals,bounds=(0,None),initialize=0) m.x279 = Var(within=Reals,bounds=(0,None),initialize=0) m.x280 = Var(within=Reals,bounds=(0,None),initialize=0) m.x281 = Var(within=Reals,bounds=(0,None),initialize=0)", "3.34221486003388*m.b604 <= 3.34221486003388) m.c134 = Constraint(expr=(m.x266/(0.001 + 0.999*m.b605) - 1.5*log(1", "m.c486 = Constraint(expr= m.x369 - 1.26558121681553*m.b639 <= 0) m.c487 =", "m.c943 = Constraint(expr= 2*m.b706 + m.x796 == 0) m.c944 =", "<= 9) m.c923 = Constraint(expr= 5*m.b686 + m.x776 == 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x568 = Var(within=Reals,bounds=(0,None),initialize=0) m.x569 = Var(within=Reals,bounds=(0,None),initialize=0) m.x570 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c898 = Constraint(expr= - 0.6*m.x562 + m.x592 == 0) m.c899", "- m.x528 - m.x534 == 0) m.c853 = Constraint(expr= m.x178", "= Constraint(expr= m.x169 - m.x508 - m.x511 == 0) m.c764", "Var(within=Reals,bounds=(0,None),initialize=0) m.x580 = Var(within=Reals,bounds=(0,None),initialize=0) m.x581 = Var(within=Reals,bounds=(0,None),initialize=0) m.x582 = Var(within=Reals,bounds=(0,None),initialize=0)", "- 9*m.b646 <= 0) m.c548 = Constraint(expr= m.x395 + 9*m.b644", "m.x805 = Var(within=Reals,bounds=(None,None),initialize=0) m.x806 = Var(within=Reals,bounds=(None,None),initialize=0) m.x807 = Var(within=Reals,bounds=(None,None),initialize=0) m.x808", "Constraint(expr= m.x185 - m.x548 - m.x551 == 0) m.c825 =", "Constraint(expr= m.b627 - m.b654 >= 0) m.c1456 = Constraint(expr= m.b628", "= Constraint(expr= m.x587 == 0) m.c876 = Constraint(expr= m.x588 ==", "Var(within=Reals,bounds=(0,None),initialize=0) m.x220 = Var(within=Reals,bounds=(0,None),initialize=0) m.x221 = Var(within=Reals,bounds=(0,None),initialize=0) m.x222 = Var(within=Reals,bounds=(0,None),initialize=0)", 
"m.x84 - m.x369 - m.x372 == 0) m.c478 = Constraint(expr=", "== 0) m.c211 = Constraint(expr= m.x322 == 0) m.c212 =", "= Constraint(expr= m.x383 + 33.5*m.b638 <= 33.5) m.c495 = Constraint(expr=", "= Constraint(expr= m.x106 - m.x406 - m.x409 == 0) m.c596", "0) m.c930 = Constraint(expr= 9*m.b693 + m.x783 == 0) m.c931", "m.b759 <= 0) m.c1357 = Constraint(expr= - m.b668 - m.b669", "<= 0.666992981045719) m.c815 = Constraint(expr=(m.x572/(0.001 + 0.999*m.b674) - 0.9*log(1 +", "m.c1468 = Constraint(expr= m.b655 - m.b658 >= 0) m.c1469 =", "m.c512 = Constraint(expr= m.x95 - m.x386 - m.x389 == 0)", "m.b672 <= 0) m.c1089 = Constraint(expr= m.b671 - m.b673 <=", "0) m.c44 = Constraint(expr= m.x158 - m.x161 - m.x164 -", "0) m.c70 = Constraint(expr= m.x214 - 40*m.b598 <= 0) m.c71", "m.c357 = Constraint(expr= m.x312 + 15*m.b627 <= 15) m.c358 =", "m.c1054 = Constraint(expr= m.b636 - m.b637 <= 0) m.c1055 =", "- 2*m.b712 - 4*m.b713 - 7*m.b714 - 4*m.b715 - 3*m.b716", "- m.x277 == 0) m.c269 = Constraint(expr= m.x68 - m.x338", "0) m.c981 = Constraint(expr= 4*m.b744 + m.x834 == 0) m.c982", "m.c1011 = Constraint(expr= 3*m.b774 + m.x864 == 0) m.c1012 =", "0.9*log(1 + m.x291/(0.001 + 0.999*m.b615)))*(0.001 + 0.999* m.b615) <= 0)", "- m.x384 == 0) m.c481 = Constraint(expr= m.x94 - m.x382", "Constraint(expr= m.b677 - m.b767 <= 0) m.c1365 = Constraint(expr= -", "1) m.c1118 = Constraint(expr= m.b693 + m.b694 <= 1) m.c1119", "Var(within=Binary,bounds=(0,1),initialize=0) m.b612 = Var(within=Binary,bounds=(0,1),initialize=0) m.b613 = Var(within=Binary,bounds=(0,1),initialize=0) m.b614 = Var(within=Binary,bounds=(0,1),initialize=0)", "Constraint(expr= m.x190 - m.x556 - m.x559 == 0) m.c881 =", "4.45628648004517*m.b603 <= 0) m.c124 = Constraint(expr= m.x238 - 4.45628648004517*m.b604 <=", "- m.b603 + m.b604 - m.b694 <= 0) m.c1292 =", "0) m.c1008 = Constraint(expr= m.b771 + m.x861 == 0) m.c1009", "- 6*m.b708 - 7*m.b709 - 2*m.b710 - 5*m.b711 - 2*m.b712", "- m.x158 == 0) m.c42 = Constraint(expr= m.x153 - 
m.x156", "- 9*m.b645 <= 0) m.c553 = Constraint(expr= m.x448 - 9*m.b646", "0.994083415506506) m.c744 = Constraint(expr= m.x531 + 0.994083415506506*m.b666 <= 0.994083415506506) m.c745", "- 2*m.b710 - 5*m.b711 - 2*m.b712 - 4*m.b713 - 7*m.b714", "Var(within=Reals,bounds=(0,None),initialize=0) m.x509 = Var(within=Reals,bounds=(0,None),initialize=0) m.x510 = Var(within=Reals,bounds=(0,None),initialize=0) m.x511 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b730 <= 0) m.c1328 = Constraint(expr= m.b641 - m.b731 <=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x284 = Var(within=Reals,bounds=(0,None),initialize=0) m.x285 = Var(within=Reals,bounds=(0,None),initialize=0) m.x286 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x154 = Var(within=Reals,bounds=(0,None),initialize=0) m.x155 = Var(within=Reals,bounds=(0,None),initialize=0) m.x156 = Var(within=Reals,bounds=(0,None),initialize=0) m.x157", "m.x844 = Var(within=Reals,bounds=(None,None),initialize=0) m.x845 = Var(within=Reals,bounds=(None,None),initialize=0) m.x846 = Var(within=Reals,bounds=(None,None),initialize=0) m.x847", "= Constraint(expr= m.x437 + 2.30162356062425*m.b638 <= 2.30162356062425) m.c501 = Constraint(expr=", "= Constraint(expr= m.b632 - m.b722 <= 0) m.c1320 = Constraint(expr=", "Constraint(expr= m.x503 + 0.940066550763924*m.b665 <= 0.940066550763924) m.c738 = Constraint(expr= m.x504", "0.666992981045719) m.c814 = Constraint(expr= m.x571 + 0.666992981045719*m.b673 <= 0.666992981045719) m.c815", "0) m.c494 = Constraint(expr= m.x383 + 33.5*m.b638 <= 33.5) m.c495", "m.b772 <= 1) m.c1273 = Constraint(expr= m.b770 + m.b771 <=", "m.b614 - m.b704 <= 0) m.c1302 = Constraint(expr= - m.b614", "1) m.c1205 = Constraint(expr= m.b737 + m.b738 <= 1) m.c1206", "m.c1457 = Constraint(expr= - m.b653 + m.b656 + m.b659 >=", "m.x572 = Var(within=Reals,bounds=(0,None),initialize=0) m.x573 = Var(within=Reals,bounds=(0,None),initialize=0) m.x574 = Var(within=Reals,bounds=(0,None),initialize=0) m.x575", 
"Constraint(expr= m.x172 - m.x514 - m.x517 == 0) m.c767 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x160 = Var(within=Reals,bounds=(0,None),initialize=0) m.x161 = Var(within=Reals,bounds=(0,None),initialize=0) m.x162 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x190 = Var(within=Reals,bounds=(0,None),initialize=0) m.x191 = Var(within=Reals,bounds=(0,None),initialize=0) m.x192 =", "m.b650) <= 0) m.c585 = Constraint(expr=(m.x459/(0.001 + 0.999*m.b651) - 0.8*log(1", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x850 = Var(within=Reals,bounds=(None,None),initialize=0) m.x851 = Var(within=Reals,bounds=(None,None),initialize=0) m.x852 =", "m.c910 = Constraint(expr= m.x211 - m.x592 - m.x595 == 0)", "m.x442 - m.x445 == 0) m.c518 = Constraint(expr= m.x386 -", "+ 0.999*m.b647)))*(0.001 + 0.999* m.b647) <= 0) m.c558 = Constraint(expr=(m.x453/(0.001", "Var(within=Reals,bounds=(0,None),initialize=0) m.x202 = Var(within=Reals,bounds=(0,None),initialize=0) m.x203 = Var(within=Reals,bounds=(0,None),initialize=0) m.x204 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x434 - m.x437 == 0) m.c483 = Constraint(expr= m.x120 -", "- 2.54515263975353*m.b605 <= 0) m.c156 = Constraint(expr= m.x267 - 2.54515263975353*m.b606", "Constraint(expr= m.b680 - m.b770 <= 0) m.c1368 = Constraint(expr= -", "m.c1362 = Constraint(expr= - m.b674 + m.b675 - m.b765 <=", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x806 = Var(within=Reals,bounds=(None,None),initialize=0) m.x807 = Var(within=Reals,bounds=(None,None),initialize=0) m.x808 =", "at 01/15/21 11:37:33 # # Equation counts # Total E", "== 0) m.c388 = Constraint(expr= m.x112 - m.x418 - m.x421", "0) m.c1372 = Constraint(expr= - m.b683 - m.b684 + m.b685", "m.x217 + 40*m.b598 <= 40) m.c74 = Constraint(expr= m.x224 -", "0) m.c166 = Constraint(expr= - 0.5*m.x256 + m.x280 == 0)", "m.c1234 = Constraint(expr= m.b750 + m.b751 <= 1) m.c1235 =", "- m.x576 == 0) m.c829 = 
Constraint(expr= m.x202 - m.x574", "m.x376 + m.x418 == 0) m.c371 = Constraint(expr= m.x323 ==", "Constraint(expr= 3*m.b760 + m.x850 == 0) m.c998 = Constraint(expr= 4*m.b761", "= Constraint(expr= m.b714 + m.b715 <= 1) m.c1163 = Constraint(expr=", "m.b637 + m.b640 >= 0) m.c1391 = Constraint(expr= - m.b608", "Constraint(expr= m.b616 - m.b634 >= 0) m.c1436 = Constraint(expr= m.b617", ">= 0) m.c1451 = Constraint(expr= m.b626 - m.b650 >= 0)", ">= 0) m.c1471 = Constraint(expr= m.b655 - m.b661 >= 0)", "- m.x236 - m.x239 == 0) m.c117 = Constraint(expr= m.x27", "== 0) m.c966 = Constraint(expr= 8*m.b729 + m.x819 == 0)", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b660 = Var(within=Binary,bounds=(0,1),initialize=0) m.b661 = Var(within=Binary,bounds=(0,1),initialize=0) m.b662 =", "m.x258 + 30*m.b609 <= 30) m.c196 = Constraint(expr= m.x259 +", "+ 10*m.x116 + 30*m.x117 + 40*m.x118 + 30*m.x119 + 20*m.x120", "= Constraint(expr= m.x455 == 0) m.c564 = Constraint(expr= m.x456 ==", "0.999*m.b638)))*(0.001 + 0.999* m.b638) <= 0) m.c462 = Constraint(expr=(m.x435/(0.001 +", "- m.x464 - m.x467 == 0) m.c624 = Constraint(expr= m.x135", "- m.x8 == 0) m.c3 = Constraint(expr= m.x3 - m.x6", "0) m.c940 = Constraint(expr= 4*m.b703 + m.x793 == 0) m.c941", "+ m.x398/(0.001 + 0.999*m.b647)))*(0.001 + 0.999* m.b647) <= 0) m.c558", "- m.x303 - m.x306 == 0) m.c322 = Constraint(expr= m.x58", "m.b686 + m.b688 <= 1) m.c1108 = Constraint(expr= m.b687 +", "Var(within=Reals,bounds=(0,None),initialize=0) m.x71 = Var(within=Reals,bounds=(0,None),initialize=0) m.x72 = Var(within=Reals,bounds=(0,None),initialize=0) m.x73 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c1045 = Constraint(expr= m.b627 - m.b628 <= 0) m.c1046", "Var(within=Reals,bounds=(0,None),initialize=0) m.x24 = Var(within=Reals,bounds=(0,None),initialize=0) m.x25 = Var(within=Reals,bounds=(0,None),initialize=0) m.x26 = Var(within=Reals,bounds=(0,None),initialize=0)", "- 2*m.b740 - 8*m.b741 - 4*m.b742 - m.b743 - 4*m.b744", "m.c347 
= Constraint(expr= m.x59 - m.x308 - m.x311 == 0)", "<= 0) m.c77 = Constraint(expr= m.x227 + 3.71357206670431*m.b596 <= 3.71357206670431)", "= Constraint(expr= m.x35 - m.x254 - m.x257 == 0) m.c180", "= Constraint(expr= - m.b650 + m.b651 - m.b741 <= 0)", "<= 0) m.c396 = Constraint(expr= m.x375 - 20*m.b630 <= 0)", "m.c1373 = Constraint(expr= m.b596 + m.b599 == 1) m.c1374 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x541 = Var(within=Reals,bounds=(0,None),initialize=0) m.x542 = Var(within=Reals,bounds=(0,None),initialize=0) m.x543 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x582 = Var(within=Reals,bounds=(0,None),initialize=0) m.x583 = Var(within=Reals,bounds=(0,None),initialize=0) m.x584 =", "<= 0) m.c56 = Constraint(expr= m.x215 == 0) m.c57 =", "- 9*m.b624 <= 0) m.c334 = Constraint(expr= m.x352 - 9*m.b625", "1.83548069293539*m.b613 <= 1.83548069293539) m.c230 = Constraint(expr=(m.x326/(0.001 + 0.999*m.b614) - 0.9*log(1", "m.b774 + m.b775 <= 1) m.c1281 = Constraint(expr= m.b773 +", ">= 0) m.c1436 = Constraint(expr= m.b617 - m.b635 >= 0)", "+ 0.999*m.b613)))*(0.001 + 0.999* m.b613) <= 0) m.c206 = Constraint(expr=", "m.c1203 = Constraint(expr= m.b734 + m.b736 <= 1) m.c1204 =", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b646 = Var(within=Binary,bounds=(0,1),initialize=0) m.b647 = Var(within=Binary,bounds=(0,1),initialize=0) m.b648 =", "+ 0.999*m.b615) - 0.9*log(1 + m.x291/(0.001 + 0.999*m.b615)))*(0.001 + 0.999*", "Constraint(expr= - m.x394 + m.x448 == 0) m.c533 = Constraint(expr=", "m.c1087 = Constraint(expr= m.b669 - m.b670 <= 0) m.c1088 =", "== 0) m.c41 = Constraint(expr= m.x152 - m.x155 - m.x158", "Var(within=Reals,bounds=(0,None),initialize=0) m.x9 = Var(within=Reals,bounds=(0,None),initialize=0) m.x10 = Var(within=Reals,bounds=(0,None),initialize=0) m.x11 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c266 = Constraint(expr= m.x41 - m.x269 - m.x275 ==", "== 0) m.c983 = Constraint(expr= 2*m.b746 + m.x836 == 0)", "m.x239 + 
4.45628648004517*m.b602 <= 4.45628648004517) m.c126 = Constraint(expr= m.x240 +", "m.x458 - 1.11894339953103*m.b650 <= 0) m.c606 = Constraint(expr= m.x459 -", "m.c481 = Constraint(expr= m.x94 - m.x382 - m.x385 == 0)", "= Constraint(expr= m.x8 - m.x218 - m.x221 == 0) m.c90", "m.b600 + m.b601 - m.b691 <= 0) m.c1289 = Constraint(expr=", "m.x497 + 0.940066550763924*m.b662 <= 0.940066550763924) m.c711 = Constraint(expr= m.x498 +", "m.x24 = Var(within=Reals,bounds=(0,None),initialize=0) m.x25 = Var(within=Reals,bounds=(0,None),initialize=0) m.x26 = Var(within=Reals,bounds=(0,None),initialize=0) m.x27", "= Constraint(expr= m.x562 - 15*m.b685 <= 0) m.c914 = Constraint(expr=", "== 0) m.c268 = Constraint(expr= m.x43 - m.x271 - m.x277", "+ m.b729 <= 1) m.c1190 = Constraint(expr= m.b729 + m.b730", "0) m.c622 = Constraint(expr= m.x109 - m.x412 - m.x415 ==", "Constraint(expr= m.x277 == 0) m.c263 = Constraint(expr= m.x341 == 0)", "+ m.x810 == 0) m.c958 = Constraint(expr= 9*m.b721 + m.x811", "0.705049913072943) m.c808 = Constraint(expr= m.x547 + 0.705049913072943*m.b673 <= 0.705049913072943) m.c809", "m.x812 = Var(within=Reals,bounds=(None,None),initialize=0) m.x813 = Var(within=Reals,bounds=(None,None),initialize=0) m.x814 = Var(within=Reals,bounds=(None,None),initialize=0) m.x815", "m.c967 = Constraint(expr= m.b730 + m.x820 == 0) m.c968 =", "m.x462 == 0) m.c592 = Constraint(expr= m.x463 == 0) m.c593", "- 6*m.b698 - 10*m.b699 - 6*m.b700 - 7*m.b701 - 7*m.b702", "= Constraint(expr= m.x332 == 0) m.c237 = Constraint(expr= m.x333 ==", "Var(within=Reals,bounds=(0,None),initialize=0) m.x481 = Var(within=Reals,bounds=(0,None),initialize=0) m.x482 = Var(within=Reals,bounds=(0,None),initialize=0) m.x483 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x102 - m.x399 - m.x402 == 0) m.c568 =", "0.666992981045719*m.b671 <= 0.666992981045719) m.c813 = Constraint(expr= m.x570 + 0.666992981045719*m.b672 <=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x409 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x410 = Var(within=Reals,bounds=(0,None),initialize=0) m.x411 =", "- m.x439 == 0) m.c485 = Constraint(expr= m.x368 - 1.26558121681553*m.b638", "- 0.705049913072943*m.b662 <= 0) m.c714 = Constraint(expr= m.x519 - 0.705049913072943*m.b663", "<= 1) m.c1145 = Constraint(expr= m.b707 + m.b708 <= 1)", "- 33.5*m.b638 <= 0) m.c492 = Constraint(expr= m.x381 - 33.5*m.b639", "m.x166 = Var(within=Reals,bounds=(0,None),initialize=0) m.x167 = Var(within=Reals,bounds=(0,None),initialize=0) m.x168 = Var(within=Reals,bounds=(0,None),initialize=0) m.x169", "- 0.994083415506506*m.b666 <= 0) m.c742 = Constraint(expr= m.x526 - 0.994083415506506*m.b667", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b678 = Var(within=Binary,bounds=(0,1),initialize=0) m.b679 = Var(within=Binary,bounds=(0,1),initialize=0) m.b680 =", "3.34221486003388) m.c222 = Constraint(expr= m.x288 + 3.34221486003388*m.b612 <= 3.34221486003388) m.c223", "Var(within=Binary,bounds=(0,1),initialize=0) m.b711 = Var(within=Binary,bounds=(0,1),initialize=0) m.b712 = Var(within=Binary,bounds=(0,1),initialize=0) m.b713 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.b624 <= 0) m.c1041 = Constraint(expr= m.b623 - m.b625 <=", "- m.b632 >= 0) m.c1434 = Constraint(expr= m.b615 - m.b633", "= Constraint(expr= 2*m.b746 + m.x836 == 0) m.c984 = Constraint(expr=", "0) m.c817 = Constraint(expr=(m.x574/(0.001 + 0.999*m.b676) - 0.9*log(1 + m.x550/(0.001", "== 0) m.c387 = Constraint(expr= m.x111 - m.x417 - m.x420", "m.b758 = Var(within=Binary,bounds=(0,1),initialize=0) m.b759 = Var(within=Binary,bounds=(0,1),initialize=0) m.b760 = Var(within=Binary,bounds=(0,1),initialize=0) m.b761", "== 0) m.c758 = Constraint(expr= m.x539 == 0) m.c759 =", "m.c1413 = Constraint(expr= m.b603 - m.b612 >= 0) m.c1414 =", "m.b676 >= 0) m.c1478 = Constraint(expr= m.b665 - m.b677 >=", "<= 1) m.c1203 = Constraint(expr= m.b734 + m.b736 <= 1)", "log(1 + m.x472/(0.001 + 0.999*m.b658)))*(0.001 + 0.999*m.b658) <= 0) 
m.c641", "4.45628648004517*m.b608 <= 4.45628648004517) m.c189 = Constraint(expr= m.x252 + 4.45628648004517*m.b609 <=", "350*m.x209 + 250*m.x210 + 300*m.x211 - 5*m.b686 - 4*m.b687 -", "m.c698 = Constraint(expr= m.x521 == 0) m.c699 = Constraint(expr= m.x522", "m.x430 - 0.572481933717686*m.b637 <= 0) m.c458 = Constraint(expr= m.x431 +", "m.c271 = Constraint(expr= m.x70 - m.x340 - m.x343 == 0)", "- m.x381 - m.x384 == 0) m.c481 = Constraint(expr= m.x94", "- m.x538 - m.x541 == 0) m.c770 = Constraint(expr= m.x506", "Constraint(expr= m.x342 == 0) m.c265 = Constraint(expr= m.x343 == 0)", "m.x582 == 0) m.c856 = Constraint(expr= m.x205 - m.x580 -", "0.999*m.b660) - 1.2*log(1 + m.x477/(0.001 + 0.999*m.b660)))*(0.001 + 0.999* m.b660)", "+ 0.999* m.b626) <= 0) m.c339 = Constraint(expr=(m.x357/(0.001 + 0.999*m.b627)", "0) m.c445 = Constraint(expr= m.x82 - m.x364 - m.x367 ==", "= Constraint(expr= m.x57 - m.x303 - m.x306 == 0) m.c322", "m.x439 = Var(within=Reals,bounds=(0,None),initialize=0) m.x440 = Var(within=Reals,bounds=(0,None),initialize=0) m.x441 = Var(within=Reals,bounds=(0,None),initialize=0) m.x442", "= Constraint(expr= m.x185 - m.x548 - m.x551 == 0) m.c825", "- m.x470 - m.x473 == 0) m.c648 = Constraint(expr= m.x141", "m.x363/(0.001 + 0.999*m.b636)))*(0.001 + 0.999* m.b636) <= 0) m.c436 =", "== 0) m.c794 = Constraint(expr= m.x569 == 0) m.c795 =", "- 4*m.b704 - 3*m.b705 - 2*m.b706 - 5*m.b707 - 6*m.b708", "m.c1410 = Constraint(expr= m.b597 + m.b600 - m.b609 >= 0)", "20*m.b631 <= 20) m.c401 = Constraint(expr= m.x416 - 20*m.b629 <=", "m.x80 = Var(within=Reals,bounds=(0,None),initialize=0) m.x81 = Var(within=Reals,bounds=(0,None),initialize=0) m.x82 = Var(within=Reals,bounds=(0,None),initialize=0) m.x83", "Constraint(expr= m.x47 - m.x284 - m.x287 == 0) m.c213 =", "m.x805 == 0) m.c953 = Constraint(expr= 3*m.b716 + m.x806 ==", "Constraint(expr= m.x384 == 0) m.c472 = Constraint(expr= m.x385 == 0)", "0) m.c398 = Constraint(expr= m.x377 + 20*m.b629 <= 20) m.c399", "- m.x518 - 
m.x521 == 0) m.c705 = Constraint(expr= m.x174", "== 0) m.c5 = Constraint(expr= - m.x11 - m.x14 +", "m.x559 == 0) m.c875 = Constraint(expr= m.x587 == 0) m.c876", "Var(within=Reals,bounds=(0,None),initialize=0) m.x22 = Var(within=Reals,bounds=(0,None),initialize=0) m.x23 = Var(within=Reals,bounds=(0,None),initialize=0) m.x24 = Var(within=Reals,bounds=(0,None),initialize=0)", "+ 9*m.b645 <= 9) m.c556 = Constraint(expr= m.x451 + 9*m.b646", "Var(within=Reals,bounds=(0,None),initialize=0) m.x493 = Var(within=Reals,bounds=(0,None),initialize=0) m.x494 = Var(within=Reals,bounds=(0,None),initialize=0) m.x495 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b600 - m.b601 <= 0) m.c1019 = Constraint(expr= m.b602 -", "m.x463 == 0) m.c593 = Constraint(expr= m.x104 - m.x404 -", "0) m.c998 = Constraint(expr= 4*m.b761 + m.x851 == 0) m.c999", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x102 = Var(within=Reals,bounds=(0,None),initialize=0) m.x103 = Var(within=Reals,bounds=(0,None),initialize=0) m.x104 =", "m.x60 = Var(within=Reals,bounds=(0,None),initialize=0) m.x61 = Var(within=Reals,bounds=(0,None),initialize=0) m.x62 = Var(within=Reals,bounds=(0,None),initialize=0) m.x63", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x300 = Var(within=Reals,bounds=(0,None),initialize=0) m.x301 = Var(within=Reals,bounds=(0,None),initialize=0) m.x302 =", "- m.x456 == 0) m.c571 = Constraint(expr= m.x130 - m.x454", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x482 = Var(within=Reals,bounds=(0,None),initialize=0) m.x483 = Var(within=Reals,bounds=(0,None),initialize=0) m.x484 =", "0) m.c366 = Constraint(expr= - 0.9*m.x318 + m.x417 == 0)", "m.x280 = Var(within=Reals,bounds=(0,None),initialize=0) m.x281 = Var(within=Reals,bounds=(0,None),initialize=0) m.x282 = Var(within=Reals,bounds=(0,None),initialize=0) m.x283", "Constraint(expr= m.b638 - m.b728 <= 0) m.c1326 = Constraint(expr= -", "0) m.c891 = Constraint(expr= m.x585 - 13.5*m.b681 <= 0) m.c892", "= Constraint(expr= m.x328 - 
1.32154609891348*m.b616 <= 0) m.c254 = Constraint(expr=", "Var(within=Binary,bounds=(0,1),initialize=0) m.b690 = Var(within=Binary,bounds=(0,1),initialize=0) m.b691 = Var(within=Binary,bounds=(0,1),initialize=0) m.b692 = Var(within=Binary,bounds=(0,1),initialize=0)", "Constraint(expr= m.x571 + 0.666992981045719*m.b673 <= 0.666992981045719) m.c815 = Constraint(expr=(m.x572/(0.001 +", "0) m.c971 = Constraint(expr= 3*m.b734 + m.x824 == 0) m.c972", "Var(within=Reals,bounds=(None,None),initialize=0) m.x807 = Var(within=Reals,bounds=(None,None),initialize=0) m.x808 = Var(within=Reals,bounds=(None,None),initialize=0) m.x809 = Var(within=Reals,bounds=(None,None),initialize=0)", "- m.b687 <= 0) m.c1285 = Constraint(expr= - m.b596 -", "m.x345 - m.x348 == 0) m.c298 = Constraint(expr= m.x73 -", "m.x581 + 0.690184503917672*m.b677 <= 0.690184503917672) m.c867 = Constraint(expr= m.x582 +", "m.c974 = Constraint(expr= 5*m.b737 + m.x827 == 0) m.c975 =", "== 0) m.c956 = Constraint(expr= 7*m.b719 + m.x809 == 0)", "m.b621 <= 0) m.c1038 = Constraint(expr= m.b620 - m.b622 <=", "m.x542 - m.x545 == 0) m.c798 = Constraint(expr= m.x183 -", "0.940066550763924*m.b667 <= 0.940066550763924) m.c740 = Constraint(expr= m.x524 - 0.994083415506506*m.b665 <=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x536 = Var(within=Reals,bounds=(0,None),initialize=0) m.x537 = Var(within=Reals,bounds=(0,None),initialize=0) m.x538 =", "Constraint(expr= m.x414 + 3.04984759446376*m.b654 <= 3.04984759446376) m.c631 = Constraint(expr= m.x415", "+ 0.999* m.b674) <= 0) m.c816 = Constraint(expr=(m.x573/(0.001 + 0.999*m.b675)", "<= 30) m.c197 = Constraint(expr= m.x278 - 15*m.b608 <= 0)", "<= 1) m.c1249 = Constraint(expr= m.b758 + m.b759 <= 1)", "20*m.b629 <= 0) m.c396 = Constraint(expr= m.x375 - 20*m.b630 <=", "Var(within=Binary,bounds=(0,1),initialize=0) m.b742 = Var(within=Binary,bounds=(0,1),initialize=0) m.b743 = Var(within=Binary,bounds=(0,1),initialize=0) m.b744 = 
Var(within=Binary,bounds=(0,1),initialize=0)", "Constraint(expr= m.x227 == 0) m.c60 = Constraint(expr= m.x228 == 0)", "Constraint(expr= 7*m.b738 + m.x828 == 0) m.c976 = Constraint(expr= 6*m.b739", "<= 0) m.c866 = Constraint(expr= m.x581 + 0.690184503917672*m.b677 <= 0.690184503917672)", "<= 0) m.c1027 = Constraint(expr= m.b609 - m.b610 <= 0)", "- m.x390 == 0) m.c514 = Constraint(expr= m.x97 - m.x388", "= Constraint(expr= m.x520 - 0.705049913072943*m.b664 <= 0) m.c716 = Constraint(expr=", "- 33.5*m.b640 <= 0) m.c494 = Constraint(expr= m.x383 + 33.5*m.b638", "m.b722 = Var(within=Binary,bounds=(0,1),initialize=0) m.b723 = Var(within=Binary,bounds=(0,1),initialize=0) m.b724 = Var(within=Binary,bounds=(0,1),initialize=0) m.b725", "m.x429 - 0.572481933717686*m.b636 <= 0) m.c457 = Constraint(expr= m.x430 -", "0) m.c88 = Constraint(expr= m.x235 == 0) m.c89 = Constraint(expr=", "= Constraint(expr=(m.x267/(0.001 + 0.999*m.b606) - 1.5*log(1 + m.x243/(0.001 + 0.999*m.b606)))*(0.001", "m.x437 == 0) m.c474 = Constraint(expr= m.x438 == 0) m.c475", "== 0) m.c237 = Constraint(expr= m.x333 == 0) m.c238 =", "m.c1212 = Constraint(expr= m.b740 + m.b742 <= 1) m.c1213 =", "+ m.b745 <= 1) m.c1223 = Constraint(expr= m.b746 + m.b747", "0) m.c908 = Constraint(expr= m.x209 - m.x590 - m.x593 ==", "m.x452 = Var(within=Reals,bounds=(0,None),initialize=0) m.x453 = Var(within=Reals,bounds=(0,None),initialize=0) m.x454 = Var(within=Reals,bounds=(0,None),initialize=0) m.x455", "m.c79 = Constraint(expr= m.x229 + 3.71357206670431*m.b598 <= 3.71357206670431) m.c80 =", "m.c146 = Constraint(expr= m.x41 - m.x266 - m.x272 == 0)", "0) m.c773 = Constraint(expr= m.x509 + 0.940066550763924*m.b668 <= 0.940066550763924) m.c774", "<= 1) m.c1131 = Constraint(expr= m.b698 + m.b700 <= 1)", "Constraint(expr= m.b759 + m.b760 <= 1) m.c1253 = Constraint(expr= m.b761", "- m.b654 + m.b657 + m.b660 >= 0) m.c1459 =", "Constraint(expr=(m.x435/(0.001 + 0.999*m.b639) - 0.65*log(1 + m.x369/(0.001 + 0.999*m.b639)))*(0.001 +", 
"== 0) m.c972 = Constraint(expr= 4*m.b735 + m.x825 == 0)", "= Constraint(expr= m.b743 + m.x833 == 0) m.c981 = Constraint(expr=", "- 0.65*log(1 + m.x369/(0.001 + 0.999*m.b639)))*(0.001 + 0.999* m.b639) <=", "Constraint(expr= m.x233 == 0) m.c87 = Constraint(expr= m.x234 == 0)", "+ m.b738 <= 1) m.c1206 = Constraint(expr= m.b737 + m.b739", "m.x329/(0.001 + 0.999*m.b632)))*(0.001 + 0.999*m.b632) <= 0) m.c408 = Constraint(expr=(m.x423/(0.001", "m.x567 = Var(within=Reals,bounds=(0,None),initialize=0) m.x568 = Var(within=Reals,bounds=(0,None),initialize=0) m.x569 = Var(within=Reals,bounds=(0,None),initialize=0) m.x570", "0) m.c60 = Constraint(expr= m.x228 == 0) m.c61 = Constraint(expr=", "m.x447 - m.x450 == 0) m.c544 = Constraint(expr= m.x127 -", "Constraint(expr= 9*m.b721 + m.x811 == 0) m.c959 = Constraint(expr= 3*m.b722", "m.x75 - m.x96 - m.x99 == 0) m.c28 = Constraint(expr=", "m.b635 + m.b638 >= 0) m.c1389 = Constraint(expr= - m.b618", "+ m.x399/(0.001 + 0.999*m.b648)))*(0.001 + 0.999* m.b648) <= 0) m.c559", "0) m.c1458 = Constraint(expr= - m.b654 + m.b657 + m.b660", "+ 0.999*m.b633) <= 0) m.c409 = Constraint(expr=(m.x424/(0.001 + 0.999*m.b634) -", "<= 0) m.c586 = Constraint(expr=(m.x460/(0.001 + 0.999*m.b652) - 0.8*log(1 +", "1) m.c1218 = Constraint(expr= m.b743 + m.b745 <= 1) m.c1219", "m.b638 = Var(within=Binary,bounds=(0,1),initialize=0) m.b639 = Var(within=Binary,bounds=(0,1),initialize=0) m.b640 = Var(within=Binary,bounds=(0,1),initialize=0) m.b641", "4.45628648004517*m.b610 <= 4.45628648004517) m.c191 = Constraint(expr= m.x254 - 30*m.b608 <=", "= Constraint(expr= m.x387 - 9*m.b642 <= 0) m.c520 = Constraint(expr=", "0.999*m.b636) - 0.7*log(1 + m.x363/(0.001 + 0.999*m.b636)))*(0.001 + 0.999* m.b636)", "0.940066550763924) m.c776 = Constraint(expr= m.x512 - 30*m.b668 <= 0) m.c777", "m.c1476 = Constraint(expr= m.b663 - m.b675 >= 0) m.c1477 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x301 = Var(within=Reals,bounds=(0,None),initialize=0) m.x302 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x303 =", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x808 = Var(within=Reals,bounds=(None,None),initialize=0) m.x809 = Var(within=Reals,bounds=(None,None),initialize=0) m.x810 =", "m.c113 = Constraint(expr= m.x263 == 0) m.c114 = Constraint(expr= m.x264", "= Constraint(expr= m.x258 == 0) m.c172 = Constraint(expr= m.x259 ==", "== 0) m.c185 = Constraint(expr= m.x248 - 4.45628648004517*m.b608 <= 0)", "0) m.c438 = Constraint(expr= m.x366 == 0) m.c439 = Constraint(expr=", "Var(within=Binary,bounds=(0,1),initialize=0) m.b741 = Var(within=Binary,bounds=(0,1),initialize=0) m.b742 = Var(within=Binary,bounds=(0,1),initialize=0) m.b743 = Var(within=Binary,bounds=(0,1),initialize=0)", "- m.b654 <= 0) m.c1071 = Constraint(expr= m.b653 - m.b655", "0) m.c565 = Constraint(expr= m.x457 == 0) m.c566 = Constraint(expr=", "0.480234946352917*m.b675 <= 0.480234946352917) m.c841 = Constraint(expr= m.x577 + 0.480234946352917*m.b676 <=", "m.b603 <= 0) m.c1020 = Constraint(expr= m.b602 - m.b604 <=", "+ m.b751 <= 1) m.c1234 = Constraint(expr= m.b750 + m.b751", "0) m.c903 = Constraint(expr= m.x594 == 0) m.c904 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x216 = Var(within=Reals,bounds=(0,None),initialize=0) m.x217 = Var(within=Reals,bounds=(0,None),initialize=0) m.x218 = Var(within=Reals,bounds=(0,None),initialize=0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x159 = Var(within=Reals,bounds=(0,None),initialize=0) m.x160 = Var(within=Reals,bounds=(0,None),initialize=0) m.x161 = Var(within=Reals,bounds=(0,None),initialize=0)", "== 0) m.c235 = Constraint(expr= m.x295 == 0) m.c236 =", "m.b650 + m.b651 - m.b741 <= 0) m.c1339 = Constraint(expr=", "- m.x509 == 0) m.c762 = Constraint(expr= m.x168 - m.x507", "- m.b654 >= 0) m.c1456 = Constraint(expr= m.b628 - m.b655", "m.c1469 = Constraint(expr= m.b653 - m.b659 >= 0) m.c1470 =", "Constraint(expr= m.x305 == 0) m.c315 = Constraint(expr= m.x306 == 0)", "<= 0) 
m.c1302 = Constraint(expr= - m.b614 + m.b615 -", "== 0) m.c61 = Constraint(expr= m.x229 == 0) m.c62 =", "15*m.x113 + 20*m.x114 + 25*m.x115 + 10*m.x116 + 30*m.x117 +", "+ m.x858 == 0) m.c1006 = Constraint(expr= 6*m.b769 + m.x859", "= Constraint(expr= m.b605 - m.b607 <= 0) m.c1024 = Constraint(expr=", "- m.b627 + m.b628 - m.b718 <= 0) m.c1316 =", "1.32154609891348*m.b615 <= 1.32154609891348) m.c256 = Constraint(expr= m.x334 + 1.32154609891348*m.b616 <=", "0.666992981045719) m.c813 = Constraint(expr= m.x570 + 0.666992981045719*m.b672 <= 0.666992981045719) m.c814", "1) m.c1138 = Constraint(expr= m.b702 + m.b703 <= 1) m.c1139", "+ 4.45628648004517*m.b610 <= 4.45628648004517) m.c191 = Constraint(expr= m.x254 - 30*m.b608", "<= 0) m.c1306 = Constraint(expr= - m.b617 - m.b618 +", "== 0) m.c517 = Constraint(expr= m.x124 - m.x442 - m.x445", "Constraint(expr= m.x476 - 1.18887736200171*m.b659 <= 0) m.c681 = Constraint(expr= m.x477", "+ 0.999*m.b672)))*(0.001 + 0.999* m.b672) <= 0) m.c790 = Constraint(expr=(m.x568/(0.001", "<= 9) m.c922 = Constraint(expr= m.x595 + 9*m.b685 <= 9)", "m.c975 = Constraint(expr= 7*m.b738 + m.x828 == 0) m.c976 =", "m.b604 - m.b616 >= 0) m.c1418 = Constraint(expr= m.b605 -", "Constraint(expr= m.x288 == 0) m.c208 = Constraint(expr= m.x289 == 0)", "= Constraint(expr= m.b629 - m.b631 <= 0) m.c1048 = Constraint(expr=", "m.x341 == 0) m.c264 = Constraint(expr= m.x342 == 0) m.c265", "- m.x278 - m.x281 == 0) m.c183 = Constraint(expr= m.x45", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x502 = Var(within=Reals,bounds=(0,None),initialize=0) m.x503 = Var(within=Reals,bounds=(0,None),initialize=0) m.x504 =", "0) m.c854 = Constraint(expr= m.x203 - m.x578 - m.x581 ==", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x441 = Var(within=Reals,bounds=(0,None),initialize=0) m.x442 = Var(within=Reals,bounds=(0,None),initialize=0) m.x443 =", "m.c276 = Constraint(expr= m.x276 + 2.54515263975353*m.b618 <= 2.54515263975353) m.c277 =", "+ m.b633 - m.b723 <= 0) 
m.c1321 = Constraint(expr= -", "= Constraint(expr= - 0.9*m.x297 + m.x345 == 0) m.c286 =", "m.b710 <= 0) m.c1308 = Constraint(expr= - m.b620 + m.b621", "m.c415 = Constraint(expr= m.x427 == 0) m.c416 = Constraint(expr= m.x65", "= Constraint(expr= m.b740 + m.b741 <= 1) m.c1212 = Constraint(expr=", "1.18887736200171*m.b660 <= 0) m.c682 = Constraint(expr= m.x478 - 1.18887736200171*m.b661 <=", "0) m.c442 = Constraint(expr= m.x433 == 0) m.c443 = Constraint(expr=", "+ m.x477/(0.001 + 0.999*m.b660)))*(0.001 + 0.999* m.b660) <= 0) m.c667", "= Constraint(expr= m.b629 - m.b719 <= 0) m.c1317 = Constraint(expr=", "0) m.c342 = Constraint(expr= m.x312 == 0) m.c343 = Constraint(expr=", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x845 = Var(within=Reals,bounds=(None,None),initialize=0) m.x846 = Var(within=Reals,bounds=(None,None),initialize=0) m.x847 =", "- m.b622 <= 0) m.c1039 = Constraint(expr= m.b621 - m.b622", "m.b609 = Var(within=Binary,bounds=(0,1),initialize=0) m.b610 = Var(within=Binary,bounds=(0,1),initialize=0) m.b611 = Var(within=Binary,bounds=(0,1),initialize=0) m.b612", "m.b626 - m.b627 <= 0) m.c1044 = Constraint(expr= m.b626 -", "m.x94 - m.x382 - m.x385 == 0) m.c482 = Constraint(expr=", "= Constraint(expr= m.b654 - m.b655 <= 0) m.c1073 = Constraint(expr=", "= Constraint(expr= m.b597 + m.b600 == 1) m.c1375 = Constraint(expr=", "- 1.32154609891348*m.b615 <= 0) m.c253 = Constraint(expr= m.x328 - 1.32154609891348*m.b616", "- 15*m.b681 <= 0) m.c886 = Constraint(expr= m.x556 - 15*m.b682", "== 0) m.c616 = Constraint(expr= m.x415 == 0) m.c617 =", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x803 = Var(within=Reals,bounds=(None,None),initialize=0) m.x804 = Var(within=Reals,bounds=(None,None),initialize=0) m.x805 =", "<= 1) m.c1191 = Constraint(expr= m.b728 + m.b730 <= 1)", "== 0) m.c802 = Constraint(expr= m.x199 - m.x568 - m.x571", "= Constraint(expr= m.x275 == 0) m.c261 = Constraint(expr= m.x276 ==", "Var(within=Reals,bounds=(0,None),initialize=0) m.x592 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x593 = Var(within=Reals,bounds=(0,None),initialize=0) m.x594 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x23 = Var(within=Reals,bounds=(0,None),initialize=0) m.x24 = Var(within=Reals,bounds=(0,None),initialize=0) m.x25 =", "m.c876 = Constraint(expr= m.x588 == 0) m.c877 = Constraint(expr= m.x589", "= Constraint(expr= m.x368 - 1.26558121681553*m.b638 <= 0) m.c486 = Constraint(expr=", "# # Nonzero counts # Total const NL DLL #", "- 0.78338879230327*m.b656 <= 0) m.c660 = Constraint(expr= m.x483 - 0.78338879230327*m.b657", "<= 0) m.c339 = Constraint(expr=(m.x357/(0.001 + 0.999*m.b627) - 1.1*log(1 +", "m.b604 - m.b613 >= 0) m.c1415 = Constraint(expr= m.b602 -", "- 4*m.b728 - 8*m.b729 - m.b730 - 2*m.b731 - 5*m.b732", "= Constraint(expr= m.x166 - m.x502 - m.x505 == 0) m.c731", "= Constraint(expr= - m.b623 + m.b624 - m.b714 <= 0)", "= Constraint(expr= 4*m.b728 + m.x818 == 0) m.c966 = Constraint(expr=", "= Constraint(expr= m.x307 == 0) m.c317 = Constraint(expr= m.x353 ==", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x309 = Var(within=Reals,bounds=(0,None),initialize=0) m.x310 = Var(within=Reals,bounds=(0,None),initialize=0) m.x311 =", "= Constraint(expr= m.b750 + m.b751 <= 1) m.c1233 = Constraint(expr=", "- m.b644 >= 0) m.c1446 = Constraint(expr= m.b624 - m.b645", "m.x140 - m.x143 == 0) m.c36 = Constraint(expr= m.x138 -", "0) m.c991 = Constraint(expr= 4*m.b754 + m.x844 == 0) m.c992", "90*m.x195 + 120*m.x196 + 285*m.x197 + 390*m.x198 + 350*m.x199 +", "+ m.x308/(0.001 + 0.999*m.b626)))*(0.001 + 0.999* m.b626) <= 0) m.c339", "m.x859 = Var(within=Reals,bounds=(None,None),initialize=0) m.x860 = Var(within=Reals,bounds=(None,None),initialize=0) m.x861 = Var(within=Reals,bounds=(None,None),initialize=0) m.x862", "Constraint(expr= m.x108 - m.x411 - m.x414 == 0) m.c622 =", "m.x477/(0.001 + 0.999*m.b660)))*(0.001 + 0.999* m.b660) <= 0) m.c667 =", "Constraint(expr= m.x493 
+ 0.940066550763924*m.b661 <= 0.940066550763924) m.c692 = Constraint(expr= -", "m.c713 = Constraint(expr= m.x518 - 0.705049913072943*m.b662 <= 0) m.c714 =", "0.940066550763924*m.b666 <= 0.940066550763924) m.c739 = Constraint(expr= m.x505 + 0.940066550763924*m.b667 <=", "Constraint(expr= m.b690 + m.b691 <= 1) m.c1113 = Constraint(expr= m.b689", "- m.b602 + m.b611 + m.b614 >= 0) m.c1377 =", "15*m.b608 <= 0) m.c198 = Constraint(expr= m.x279 - 15*m.b609 <=", "== 0) m.c938 = Constraint(expr= 7*m.b701 + m.x791 == 0)", "= Constraint(expr= m.b764 + m.b766 <= 1) m.c1264 = Constraint(expr=", "- m.x348 == 0) m.c298 = Constraint(expr= m.x73 - m.x346", "= Constraint(expr= m.b725 + m.b727 <= 1) m.c1186 = Constraint(expr=", "<= 0) m.c1101 = Constraint(expr= m.b683 - m.b685 <= 0)", "= Constraint(expr= m.x217 + 40*m.b598 <= 40) m.c74 = Constraint(expr=", "m.c1418 = Constraint(expr= m.b605 - m.b617 >= 0) m.c1419 =", "m.x165 - m.x168 == 0) m.c46 = Constraint(expr= m.x160 -", "- 5*m.b732 - 2*m.b733 - 3*m.b734 - 4*m.b735 - 3*m.b736", "m.x321 == 0) m.c211 = Constraint(expr= m.x322 == 0) m.c212", "- m.x29 - m.x32 == 0) m.c12 = Constraint(expr= m.x24", "Constraint(expr= - m.b629 - m.b630 + m.b631 - m.b721 <=", "0) m.c762 = Constraint(expr= m.x168 - m.x507 - m.x510 ==", "<= 0) m.c1016 = Constraint(expr= m.b599 - m.b600 <= 0)", "Constraint(expr= m.b615 - m.b633 >= 0) m.c1435 = Constraint(expr= m.b616", "m.b758 + m.b759 <= 1) m.c1250 = Constraint(expr= m.b759 +", "m.x421 == 0) m.c389 = Constraint(expr= m.x317 - 1.83548069293539*m.b629 <=", "m.x93 - m.x381 - m.x384 == 0) m.c481 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x312 = Var(within=Reals,bounds=(0,None),initialize=0) m.x313 = Var(within=Reals,bounds=(0,None),initialize=0) m.x314 = Var(within=Reals,bounds=(0,None),initialize=0)", "== 0) m.c442 = Constraint(expr= m.x433 == 0) m.c443 =", "m.c45 = Constraint(expr= m.x159 - m.x162 - m.x165 - m.x168", "- m.b666 <= 0) m.c1083 = Constraint(expr= m.b665 - m.b667", 
"m.x80 - m.x362 - m.x365 == 0) m.c444 = Constraint(expr=", "m.x533 + 0.994083415506506*m.b677 <= 0.994083415506506) m.c861 = Constraint(expr= m.x534 +", "Constraint(expr= m.x170 - m.x512 - m.x515 == 0) m.c765 =", "m.b672 + m.b675 >= 0) m.c1462 = Constraint(expr= - m.b664", "= Constraint(expr= m.x110 - m.x416 - m.x419 == 0) m.c387", "Constraint(expr= m.b767 + m.b769 <= 1) m.c1270 = Constraint(expr= m.b768", "m.c1477 = Constraint(expr= m.b664 - m.b676 >= 0) m.c1478 =", "+ 0.999*m.b607) - 1.5*log(1 + m.x244/(0.001 + 0.999*m.b607)))*(0.001 + 0.999*", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b760 = Var(within=Binary,bounds=(0,1),initialize=0) m.b761 = Var(within=Binary,bounds=(0,1),initialize=0) m.b762 =", "= Constraint(expr= m.x525 - 0.994083415506506*m.b666 <= 0) m.c742 = Constraint(expr=", "- m.x232 - m.x235 == 0) m.c95 = Constraint(expr= m.x218", "<= 1.26558121681553) m.c283 = Constraint(expr= m.x343 + 1.26558121681553*m.b619 <= 1.26558121681553)", "= Constraint(expr= m.x476 - 1.18887736200171*m.b659 <= 0) m.c681 = Constraint(expr=", "== 0) m.c962 = Constraint(expr= 2*m.b725 + m.x815 == 0)", "Constraint(expr= m.b698 + m.b699 <= 1) m.c1128 = Constraint(expr= m.b698", "0) m.c801 = Constraint(expr= m.x198 - m.x567 - m.x570 ==", "= Constraint(expr= m.x421 == 0) m.c380 = Constraint(expr= m.x62 -", "Constraint(expr= m.x432 + 0.572481933717686*m.b636 <= 0.572481933717686) m.c460 = Constraint(expr= m.x433", "Constraint(expr= m.x137 - m.x140 - m.x143 == 0) m.c36 =", "13.5*m.b621 <= 13.5) m.c310 = Constraint(expr= m.x349 + 13.5*m.b622 <=", "m.b660 - m.b661 <= 0) m.c1079 = Constraint(expr= m.b662 -", "m.c1293 = Constraint(expr= - m.b605 + m.b606 - m.b696 <=", "- m.b738 <= 0) m.c1336 = Constraint(expr= - m.b647 -", "# Total const NL DLL # 3373 3193 180 0", "1) m.c1176 = Constraint(expr= m.b722 + m.b724 <= 1) m.c1177", "m.b764 + m.b765 <= 1) m.c1260 = Constraint(expr= m.b764 +", "- m.b648 >= 0) m.c1450 = Constraint(expr= m.b628 - m.b649", "- m.b625 <= 0) m.c1042 = 
Constraint(expr= m.b624 - m.b625", "m.x212 - 40*m.b596 <= 0) m.c69 = Constraint(expr= m.x213 -", "m.c1209 = Constraint(expr= m.b737 + m.b739 <= 1) m.c1210 =", "<= 0.705049913072943) m.c835 = Constraint(expr= m.x553 + 0.705049913072943*m.b676 <= 0.705049913072943)", "== 0) m.c880 = Constraint(expr= m.x190 - m.x556 - m.x559", "0) m.c646 = Constraint(expr= m.x487 == 0) m.c647 = Constraint(expr=", "m.b726 <= 1) m.c1182 = Constraint(expr= m.b725 + m.b727 <=", "4*m.b728 + m.x818 == 0) m.c966 = Constraint(expr= 8*m.b729 +", "== 0) m.c270 = Constraint(expr= m.x69 - m.x339 - m.x342", "m.x548/(0.001 + 0.999*m.b674)))*(0.001 + 0.999* m.b674) <= 0) m.c816 =", "m.x452 - 1.04900943706034*m.b647 <= 0) m.c579 = Constraint(expr= m.x453 -", "15*m.b609 <= 0) m.c199 = Constraint(expr= m.x280 - 15*m.b610 <=", "2*m.b712 + m.x802 == 0) m.c950 = Constraint(expr= 4*m.b713 +", "m.c1302 = Constraint(expr= - m.b614 + m.b615 - m.b705 <=", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b664 = Var(within=Binary,bounds=(0,1),initialize=0) m.b665 = Var(within=Binary,bounds=(0,1),initialize=0) m.b666 =", "m.c57 = Constraint(expr= m.x216 == 0) m.c58 = Constraint(expr= m.x217", "Constraint(expr= m.x456 + 1.04900943706034*m.b648 <= 1.04900943706034) m.c583 = Constraint(expr= m.x457", "1) m.c1226 = Constraint(expr= m.b747 + m.b748 <= 1) m.c1227", "= Constraint(expr= m.b708 + m.b709 <= 1) m.c1149 = Constraint(expr=", "m.b614 >= 0) m.c1416 = Constraint(expr= m.b603 - m.b615 >=", "<= 0) m.c248 = Constraint(expr= m.x293 + 3.34221486003388*m.b614 <= 3.34221486003388)", "= Constraint(expr= m.x373 == 0) m.c470 = Constraint(expr= m.x383 ==", "m.x426 == 0) m.c415 = Constraint(expr= m.x427 == 0) m.c416", "m.x457 + 1.04900943706034*m.b649 <= 1.04900943706034) m.c584 = Constraint(expr=(m.x458/(0.001 + 0.999*m.b650)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x51 = Var(within=Reals,bounds=(0,None),initialize=0) m.x52 = Var(within=Reals,bounds=(0,None),initialize=0) m.x53 = 
Var(within=Reals,bounds=(0,None),initialize=0)", "- m.b667 + m.b679 >= 0) m.c1466 = Constraint(expr= m.b653", "0) m.c1484 = Constraint(expr= m.b668 - m.b683 >= 0) m.c1485", "+ 20*m.x125 + 30*m.x126 + 35*m.x127 + 25*m.x128 + 50*m.x129", "m.b697 <= 1) m.c1127 = Constraint(expr= m.b698 + m.b699 <=", "m.c124 = Constraint(expr= m.x238 - 4.45628648004517*m.b604 <= 0) m.c125 =", "<= 1) m.c1267 = Constraint(expr= m.b767 + m.b768 <= 1)", "m.b612 >= 0) m.c1414 = Constraint(expr= m.b604 - m.b613 >=", "m.b614 - m.b632 >= 0) m.c1434 = Constraint(expr= m.b615 -", "0.940066550763924*m.b660 <= 0) m.c688 = Constraint(expr= m.x490 - 0.940066550763924*m.b661 <=", "- m.b679 <= 0) m.c1097 = Constraint(expr= m.b680 - m.b681", "+ 0.999*m.b601)))*(0.001 + 0.999* m.b601) <= 0) m.c83 = Constraint(expr=", "m.c555 = Constraint(expr= m.x450 + 9*m.b645 <= 9) m.c556 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x197 = Var(within=Reals,bounds=(0,None),initialize=0) m.x198 = Var(within=Reals,bounds=(0,None),initialize=0) m.x199 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c385 = Constraint(expr= m.x88 - m.x376 - m.x379 ==", "0) m.c588 = Constraint(expr= m.x408 == 0) m.c589 = Constraint(expr=", "== 0) m.c539 = Constraint(expr= m.x98 - m.x392 - m.x395", "m.b683 + m.b684 - m.b774 <= 0) m.c1372 = Constraint(expr=", "= Constraint(expr= m.b773 + m.b775 <= 1) m.c1282 = Constraint(expr=", "== 0) m.c980 = Constraint(expr= m.b743 + m.x833 == 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x261 = Var(within=Reals,bounds=(0,None),initialize=0) m.x262 = Var(within=Reals,bounds=(0,None),initialize=0) m.x263 =", "== 0) m.c1000 = Constraint(expr= 7*m.b763 + m.x853 == 0)", "m.x481 == 0) m.c671 = Constraint(expr= m.x491 == 0) m.c672", "+ m.b670 - m.b760 <= 0) m.c1358 = Constraint(expr= m.b671", "m.x355 = Var(within=Reals,bounds=(0,None),initialize=0) m.x356 = Var(within=Reals,bounds=(0,None),initialize=0) m.x357 = Var(within=Reals,bounds=(0,None),initialize=0) m.x358", "m.x50 - m.x290 - 
m.x293 == 0) m.c240 = Constraint(expr=", "Constraint(expr= m.x398 - 3.04984759446376*m.b647 <= 0) m.c573 = Constraint(expr= m.x399", "== 0) m.c57 = Constraint(expr= m.x216 == 0) m.c58 =", "Constraint(expr= m.b765 + m.b766 <= 1) m.c1265 = Constraint(expr= m.b767", "Constraint(expr= m.x489 - 0.940066550763924*m.b660 <= 0) m.c688 = Constraint(expr= m.x490", "- m.b638 + m.b639 - m.b729 <= 0) m.c1327 =", "== 0) m.c724 = Constraint(expr= m.x505 == 0) m.c725 =", "m.b641 >= 0) m.c1443 = Constraint(expr= m.b624 - m.b642 >=", "0) m.c1441 = Constraint(expr= m.b619 - m.b640 >= 0) m.c1442", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x569 = Var(within=Reals,bounds=(0,None),initialize=0) m.x570 = Var(within=Reals,bounds=(0,None),initialize=0) m.x571 =", "== 0) m.c849 = Constraint(expr= m.x582 == 0) m.c850 =", "Constraint(expr= m.b671 - m.b672 <= 0) m.c1089 = Constraint(expr= m.b671", "+ m.b601 - m.b691 <= 0) m.c1289 = Constraint(expr= m.b602", "= Constraint(expr= 9*m.b749 + m.x839 == 0) m.c987 = Constraint(expr=", "== 0) m.c993 = Constraint(expr= 3*m.b756 + m.x846 == 0)", "- m.b604 + m.b613 + m.b616 >= 0) m.c1379 =", "- m.x257 == 0) m.c180 = Constraint(expr= m.x36 - m.x255", "Constraint(expr= m.x251 + 4.45628648004517*m.b608 <= 4.45628648004517) m.c189 = Constraint(expr= m.x252", "== 0) m.c218 = Constraint(expr= m.x284 - 3.34221486003388*m.b611 <= 0)", "Constraint(expr= m.x334 + 1.32154609891348*m.b616 <= 1.32154609891348) m.c257 = Constraint(expr=(m.x338/(0.001 +", "== 0) m.c469 = Constraint(expr= m.x373 == 0) m.c470 =", "Constraint(expr= m.x504 == 0) m.c724 = Constraint(expr= m.x505 == 0)", "== 0) m.c965 = Constraint(expr= 4*m.b728 + m.x818 == 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x470 = Var(within=Reals,bounds=(0,None),initialize=0) m.x471 = Var(within=Reals,bounds=(0,None),initialize=0) m.x472 =", "m.x574 = Var(within=Reals,bounds=(0,None),initialize=0) m.x575 = Var(within=Reals,bounds=(0,None),initialize=0) m.x576 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x577", "m.x505 == 0) m.c725 = Constraint(expr= m.x530 == 0) m.c726", "0) m.c1337 = Constraint(expr= m.b650 - m.b740 <= 0) m.c1338", "Constraint(expr= m.b626 - m.b653 >= 0) m.c1455 = Constraint(expr= m.b627", "<= 0.78338879230327) m.c664 = Constraint(expr= m.x487 + 0.78338879230327*m.b658 <= 0.78338879230327)", "+ 0.940066550763924*m.b668 <= 0.940066550763924) m.c774 = Constraint(expr= m.x510 + 0.940066550763924*m.b669", "Var(within=Reals,bounds=(0,None),initialize=0) m.x308 = Var(within=Reals,bounds=(0,None),initialize=0) m.x309 = Var(within=Reals,bounds=(0,None),initialize=0) m.x310 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c823 = Constraint(expr= m.x577 == 0) m.c824 = Constraint(expr=", "0) m.c420 = Constraint(expr= m.x114 - m.x423 - m.x426 ==", "m.b601 - m.b604 >= 0) m.c1406 = Constraint(expr= m.b596 +", "= Constraint(expr= m.x499 + 0.940066550763924*m.b664 <= 0.940066550763924) m.c713 = Constraint(expr=", "2*m.b755 + m.x845 == 0) m.c993 = Constraint(expr= 3*m.b756 +", "m.b638 + m.b639 - m.b729 <= 0) m.c1327 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x351 = Var(within=Reals,bounds=(0,None),initialize=0) m.x352 = Var(within=Reals,bounds=(0,None),initialize=0) m.x353 =", "m.x419 + 20*m.b629 <= 20) m.c405 = Constraint(expr= m.x420 +", "+ 0.999* m.b600) <= 0) m.c82 = Constraint(expr=(m.x232/(0.001 + 0.999*m.b601)", "Constraint(expr= m.b619 - m.b637 >= 0) m.c1439 = Constraint(expr= m.b617", "0.999*m.b612) - 1.25*log(1 + m.x285/(0.001 + 0.999*m.b612)))*(0.001 + 0.999* m.b612)", "m.c269 = Constraint(expr= m.x68 - m.x338 - m.x341 == 0)", "0.78338879230327) m.c664 = Constraint(expr= m.x487 + 0.78338879230327*m.b658 <= 0.78338879230327) m.c665", "1) m.c1201 = Constraint(expr= m.b734 + m.b735 <= 1) m.c1202", "m.x825 == 0) m.c973 = Constraint(expr= 3*m.b736 + m.x826 ==", "+ 0.994083415506506*m.b677 <= 0.994083415506506) m.c861 = Constraint(expr= m.x534 + 0.994083415506506*m.b678", 
"= Constraint(expr= m.x380 - 33.5*m.b638 <= 0) m.c492 = Constraint(expr=", "- m.x286 - m.x289 == 0) m.c215 = Constraint(expr= m.x62", "0) m.c750 = Constraint(expr= - 0.5*m.x513 + m.x537 == 0)", "0) m.c234 = Constraint(expr= m.x294 == 0) m.c235 = Constraint(expr=", "m.c589 = Constraint(expr= m.x409 == 0) m.c590 = Constraint(expr= m.x461", "= Constraint(expr= m.x466 - 1.18887736200171*m.b655 <= 0) m.c635 = Constraint(expr=", "Constraint(expr= - m.b608 + m.b609 - m.b699 <= 0) m.c1297", "Constraint(expr= m.b623 - m.b713 <= 0) m.c1311 = Constraint(expr= -", "- 8*m.b741 - 4*m.b742 - m.b743 - 4*m.b744 - m.b745", "= Constraint(expr= m.x244 - 4.45628648004517*m.b607 <= 0) m.c152 = Constraint(expr=", "m.c236 = Constraint(expr= m.x332 == 0) m.c237 = Constraint(expr= m.x333", "== 0) m.c93 = Constraint(expr= m.x15 - m.x231 - m.x234", "m.x79 = Var(within=Reals,bounds=(0,None),initialize=0) m.x80 = Var(within=Reals,bounds=(0,None),initialize=0) m.x81 = Var(within=Reals,bounds=(0,None),initialize=0) m.x82", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x778 = Var(within=Reals,bounds=(None,None),initialize=0) m.x779 = Var(within=Reals,bounds=(None,None),initialize=0) m.x780 =", "Constraint(expr= m.x80 - m.x362 - m.x365 == 0) m.c444 =", "- m.x529 - m.x535 == 0) m.c854 = Constraint(expr= m.x203", "m.x228 + 3.71357206670431*m.b597 <= 3.71357206670431) m.c79 = Constraint(expr= m.x229 +", "m.c940 = Constraint(expr= 4*m.b703 + m.x793 == 0) m.c941 =", "+ m.b679 - m.b769 <= 0) m.c1367 = Constraint(expr= m.b680", "m.x543 - m.x546 == 0) m.c799 = Constraint(expr= m.x184 -", "m.b660) <= 0) m.c667 = Constraint(expr=(m.x490/(0.001 + 0.999*m.b661) - 1.2*log(1", "= Constraint(expr= m.b725 + m.b727 <= 1) m.c1183 = Constraint(expr=", "m.b747 <= 0) m.c1345 = Constraint(expr= - m.b656 - m.b657", "m.c1151 = Constraint(expr= m.b710 + m.b711 <= 1) m.c1152 =", "0.999*m.b596) <= 0) m.c54 = Constraint(expr=(m.x225/(0.001 + 0.999*m.b597) - log(1", "= Var(within=Reals,bounds=(0,None),initialize=0) 
m.x109 = Var(within=Reals,bounds=(0,None),initialize=0) m.x110 = Var(within=Reals,bounds=(0,None),initialize=0) m.x111 =", "<= 0) m.c1360 = Constraint(expr= - m.b671 - m.b672 +", "Constraint(expr= m.x112 - m.x418 - m.x421 == 0) m.c389 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x368 = Var(within=Reals,bounds=(0,None),initialize=0) m.x369 = Var(within=Reals,bounds=(0,None),initialize=0) m.x370 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x487 + 0.78338879230327*m.b658 <= 0.78338879230327) m.c665 = Constraint(expr=(m.x488/(0.001 +", "Constraint(expr= m.x258 + 30*m.b609 <= 30) m.c196 = Constraint(expr= m.x259", "- 0.690184503917672*m.b678 <= 0) m.c865 = Constraint(expr= m.x580 - 0.690184503917672*m.b679", "= Constraint(expr= m.b737 + m.b739 <= 1) m.c1207 = Constraint(expr=", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b682 = Var(within=Binary,bounds=(0,1),initialize=0) m.b683 = Var(within=Binary,bounds=(0,1),initialize=0) m.b684 =", "= Constraint(expr= m.x390 == 0) m.c508 = Constraint(expr= m.x391 ==", "= Constraint(expr= m.x207 - m.x585 - m.x588 == 0) m.c883", "m.x219 - 40*m.b600 <= 0) m.c97 = Constraint(expr= m.x220 -", "m.x63 - m.x315 - m.x321 == 0) m.c217 = Constraint(expr=", "= Constraint(expr= m.b746 + m.b748 <= 1) m.c1228 = Constraint(expr=", "Constraint(expr= m.b635 - m.b637 <= 0) m.c1054 = Constraint(expr= m.b636", "- m.b618 >= 0) m.c1420 = Constraint(expr= m.b607 - m.b619", "50*m.x123 + 20*m.x124 + 20*m.x125 + 30*m.x126 + 35*m.x127 +", "+ 0.999*m.b616) - 0.9*log(1 + m.x292/(0.001 + 0.999*m.b616)))*(0.001 + 0.999*", "9*m.b643 <= 0) m.c521 = Constraint(expr= m.x389 + 9*m.b641 <=", "m.c570 = Constraint(expr= m.x129 - m.x453 - m.x456 == 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x139 = Var(within=Reals,bounds=(0,None),initialize=0) m.x140 = Var(within=Reals,bounds=(0,None),initialize=0) m.x141 = Var(within=Reals,bounds=(0,None),initialize=0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x211 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x212 = Var(within=Reals,bounds=(0,None),initialize=0) m.x213 = Var(within=Reals,bounds=(0,None),initialize=0)", "0.999*m.b597)))*(0.001 + 0.999*m.b597) <= 0) m.c55 = Constraint(expr=(m.x226/(0.001 + 0.999*m.b598)", "m.x106 - m.x406 - m.x409 == 0) m.c596 = Constraint(expr=", "m.c332 = Constraint(expr= m.x350 - 9*m.b623 <= 0) m.c333 =", "0) m.c1424 = Constraint(expr= m.b608 - m.b623 >= 0) m.c1425", "0) m.c417 = Constraint(expr= m.x66 - m.x330 - m.x336 ==", "Constraint(expr= m.b660 - m.b661 <= 0) m.c1079 = Constraint(expr= m.b662", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b638 = Var(within=Binary,bounds=(0,1),initialize=0) m.b639 = Var(within=Binary,bounds=(0,1),initialize=0) m.b640 =", "m.x261 - m.x264 == 0) m.c121 = Constraint(expr= m.x40 -", "m.x537 == 0) m.c751 = Constraint(expr= - 0.5*m.x514 + m.x538", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x114 = Var(within=Reals,bounds=(0,None),initialize=0) m.x115 = Var(within=Reals,bounds=(0,None),initialize=0) m.x116 =", "<= 0) m.c1076 = Constraint(expr= m.b659 - m.b660 <= 0)", ">= 0) m.c1423 = Constraint(expr= m.b610 - m.b622 >= 0)", "- m.x374 + m.x416 == 0) m.c369 = Constraint(expr= -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x62 = Var(within=Reals,bounds=(0,None),initialize=0) m.x63 = Var(within=Reals,bounds=(0,None),initialize=0) m.x64 =", "m.x387 - m.x390 == 0) m.c514 = Constraint(expr= m.x97 -", "= Constraint(expr= m.x75 - m.x351 - m.x354 == 0) m.c325", "== 0) m.c946 = Constraint(expr= 7*m.b709 + m.x799 == 0)", "+ 15*m.b625 <= 15) m.c332 = Constraint(expr= m.x350 - 9*m.b623", "m.c1104 = Constraint(expr= m.b686 + m.b688 <= 1) m.c1105 =", "Constraint(expr=(m.x224/(0.001 + 0.999*m.b596) - log(1 + m.x212/(0.001 + 0.999*m.b596)))*(0.001 +", "m.b746 + m.b748 <= 1) m.c1225 = Constraint(expr= m.b746 +", "<= 0) m.c1048 = Constraint(expr= m.b630 - m.b631 <= 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x499 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x500 = Var(within=Reals,bounds=(0,None),initialize=0) m.x501 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= m.x433 == 0) m.c443 = Constraint(expr= m.x80 -", "m.b660 >= 0) m.c1471 = Constraint(expr= m.b655 - m.b661 >=", "- m.x427 == 0) m.c422 = Constraint(expr= m.x329 - 1.32154609891348*m.b632", "- 4.45628648004517*m.b602 <= 0) m.c123 = Constraint(expr= m.x237 - 4.45628648004517*m.b603", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x572 = Var(within=Reals,bounds=(0,None),initialize=0) m.x573 = Var(within=Reals,bounds=(0,None),initialize=0) m.x574 =", "= Constraint(expr= m.x338 - 1.26558121681553*m.b617 <= 0) m.c279 = Constraint(expr=", "== 0) m.c448 = Constraint(expr= m.x118 - m.x430 - m.x433", "Constraint(expr= - m.x248 + m.x278 == 0) m.c162 = Constraint(expr=", "= Constraint(expr= m.x307 + 15*m.b625 <= 15) m.c332 = Constraint(expr=", "m.x444 + 9*m.b642 <= 9) m.c529 = Constraint(expr= m.x445 +", "m.c1240 = Constraint(expr= m.b753 + m.b754 <= 1) m.c1241 =", "+ m.b646 >= 0) m.c1400 = Constraint(expr= - m.b626 +", "m.x571 == 0) m.c803 = Constraint(expr= m.x542 - 0.705049913072943*m.b671 <=", "== 0) m.c1009 = Constraint(expr= 3*m.b772 + m.x862 == 0)", "m.x243 - 4.45628648004517*m.b606 <= 0) m.c151 = Constraint(expr= m.x244 -", "15*m.b683 <= 0) m.c912 = Constraint(expr= m.x561 - 15*m.b684 <=", "m.x519 - m.x522 == 0) m.c706 = Constraint(expr= m.x175 -", "m.b695 <= 0) m.c1293 = Constraint(expr= - m.b605 + m.b606", "Var(within=Reals,bounds=(0,None),initialize=0) m.x540 = Var(within=Reals,bounds=(0,None),initialize=0) m.x541 = Var(within=Reals,bounds=(0,None),initialize=0) m.x542 = Var(within=Reals,bounds=(0,None),initialize=0)", "2*m.b710 - 5*m.b711 - 2*m.b712 - 4*m.b713 - 7*m.b714 -", "9) m.c549 = Constraint(expr= m.x396 + 9*m.b645 <= 9) m.c550", "+ m.x548/(0.001 + 0.999*m.b674)))*(0.001 + 0.999* m.b674) <= 0) m.c816", "- m.x71 - m.x89 + m.x92 == 0) m.c24 =", "= 
Var(within=Reals,bounds=(None,None),initialize=0) m.x857 = Var(within=Reals,bounds=(None,None),initialize=0) m.x858 = Var(within=Reals,bounds=(None,None),initialize=0) m.x859 =", "<= 0) m.c1332 = Constraint(expr= - m.b644 + m.b645 -", "m.b597 + m.b600 - m.b603 >= 0) m.c1405 = Constraint(expr=", "= Constraint(expr= m.x357 - 3.04984759446376*m.b627 <= 0) m.c361 = Constraint(expr=", "m.b641 + m.b644 >= 0) m.c1398 = Constraint(expr= - m.b624", "Constraint(expr= m.b770 + m.b771 <= 1) m.c1272 = Constraint(expr= m.b770", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x52 = Var(within=Reals,bounds=(0,None),initialize=0) m.x53 = Var(within=Reals,bounds=(0,None),initialize=0) m.x54 =", "m.x507 - 0.940066550763924*m.b669 <= 0) m.c772 = Constraint(expr= m.x508 -", "= Constraint(expr= m.x595 == 0) m.c905 = Constraint(expr= m.x191 -", "0) m.c1087 = Constraint(expr= m.b669 - m.b670 <= 0) m.c1088", "0) m.c1353 = Constraint(expr= - m.b665 + m.b666 - m.b756", "<= 30) m.c196 = Constraint(expr= m.x259 + 30*m.b610 <= 30)", "20*m.b631 <= 0) m.c398 = Constraint(expr= m.x377 + 20*m.b629 <=", "has removed 1 variable and 1 equation from pyomo.environ import", "<= 0) m.c1025 = Constraint(expr= m.b608 - m.b609 <= 0)", "== 0) m.c596 = Constraint(expr= m.x131 - m.x458 - m.x461", "m.b694 <= 1) m.c1120 = Constraint(expr= m.b693 + m.b694 <=", "m.x368 - m.x371 == 0) m.c477 = Constraint(expr= m.x84 -", "m.x394 = Var(within=Reals,bounds=(0,None),initialize=0) m.x395 = Var(within=Reals,bounds=(0,None),initialize=0) m.x396 = Var(within=Reals,bounds=(0,None),initialize=0) m.x397", "- m.b630 >= 0) m.c1432 = Constraint(expr= m.b613 - m.b631", "Var(within=Reals,bounds=(0,None),initialize=0) m.x256 = Var(within=Reals,bounds=(0,None),initialize=0) m.x257 = Var(within=Reals,bounds=(0,None),initialize=0) m.x258 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x492 == 0) m.c673 = Constraint(expr= m.x493 == 0) m.c674", "0) m.c764 = Constraint(expr= m.x170 - m.x512 - m.x515 ==", "== 0) m.c926 = 
Constraint(expr= 8*m.b689 + m.x779 == 0)", "m.x826 == 0) m.c974 = Constraint(expr= 5*m.b737 + m.x827 ==", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x324 = Var(within=Reals,bounds=(0,None),initialize=0) m.x325 = Var(within=Reals,bounds=(0,None),initialize=0) m.x326 =", "m.x338 = Var(within=Reals,bounds=(0,None),initialize=0) m.x339 = Var(within=Reals,bounds=(0,None),initialize=0) m.x340 = Var(within=Reals,bounds=(0,None),initialize=0) m.x341", "Constraint(expr= m.x401 == 0) m.c561 = Constraint(expr= m.x402 == 0)", "0) m.c933 = Constraint(expr= 9*m.b696 + m.x786 == 0) m.c934", "Constraint(expr= - m.x508 + m.x538 == 0) m.c749 = Constraint(expr=", "= Constraint(expr= m.x470 - 1.18887736200171*m.b656 <= 0) m.c654 = Constraint(expr=", "+ m.b718 <= 1) m.c1168 = Constraint(expr= m.b717 + m.b718", "m.x85 = Var(within=Reals,bounds=(0,None),initialize=0) m.x86 = Var(within=Reals,bounds=(0,20),initialize=0) m.x87 = Var(within=Reals,bounds=(0,20),initialize=0) m.x88", "m.c1471 = Constraint(expr= m.b655 - m.b661 >= 0) m.c1472 =", "m.b650 = Var(within=Binary,bounds=(0,1),initialize=0) m.b651 = Var(within=Binary,bounds=(0,1),initialize=0) m.b652 = Var(within=Binary,bounds=(0,1),initialize=0) m.b653", "m.c48 = Constraint(expr= m.x174 - m.x183 - m.x186 == 0)", "Constraint(expr= m.x530 + 0.994083415506506*m.b665 <= 0.994083415506506) m.c744 = Constraint(expr= m.x531", "0) m.c905 = Constraint(expr= m.x191 - m.x560 - m.x563 ==", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x225 = Var(within=Reals,bounds=(0,None),initialize=0) m.x226 = Var(within=Reals,bounds=(0,None),initialize=0) m.x227 =", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x783 = Var(within=Reals,bounds=(None,None),initialize=0) m.x784 = Var(within=Reals,bounds=(None,None),initialize=0) m.x785 =", "Constraint(expr= m.x366 + 1.26558121681553*m.b636 <= 1.26558121681553) m.c454 = Constraint(expr= m.x367", "Constraint(expr= m.b668 - m.b669 <= 0) m.c1086 = Constraint(expr= m.b668", "9*m.b751 + m.x841 
== 0) m.c989 = Constraint(expr= 5*m.b752 +", "2.30162356062425) m.c503 = Constraint(expr= - m.x386 + m.x440 == 0)", "m.b626 = Var(within=Binary,bounds=(0,1),initialize=0) m.b627 = Var(within=Binary,bounds=(0,1),initialize=0) m.b628 = Var(within=Binary,bounds=(0,1),initialize=0) m.b629", "m.b671 >= 0) m.c1473 = Constraint(expr= m.b663 - m.b672 >=", "Constraint(expr= m.x33 - m.x249 - m.x252 == 0) m.c178 =", "0) m.c287 = Constraint(expr= m.x299 == 0) m.c288 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x250 = Var(within=Reals,bounds=(0,None),initialize=0) m.x251 = Var(within=Reals,bounds=(0,None),initialize=0) m.x252 =", "- 8*m.b762 - 7*m.b763 - 7*m.b764 - 3*m.b765 - 9*m.b766", "+ 0.999*m.b632) - log(1 + m.x329/(0.001 + 0.999*m.b632)))*(0.001 + 0.999*m.b632)", "= Constraint(expr= m.x186 - m.x549 - m.x552 == 0) m.c826", "<= 1) m.c1231 = Constraint(expr= m.b749 + m.b750 <= 1)", "- 4*m.b761 - 8*m.b762 - 7*m.b763 - 7*m.b764 - 3*m.b765", "Constraint(expr= m.x402 + 3.04984759446376*m.b648 <= 3.04984759446376) m.c577 = Constraint(expr= m.x403", "scont sint # 865 685 180 0 0 0 0", "<= 20) m.c400 = Constraint(expr= m.x379 + 20*m.b631 <= 20)", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b669 = Var(within=Binary,bounds=(0,1),initialize=0) m.b670 = Var(within=Binary,bounds=(0,1),initialize=0) m.b671 =", "Constraint(expr= m.x228 == 0) m.c61 = Constraint(expr= m.x229 == 0)", "- 3.04984759446376*m.b651 <= 0) m.c601 = Constraint(expr= m.x406 - 3.04984759446376*m.b652", "m.x470/(0.001 + 0.999*m.b656)))*(0.001 + 0.999*m.b656) <= 0) m.c639 = Constraint(expr=(m.x483/(0.001", "m.b702 + m.b703 <= 1) m.c1137 = Constraint(expr= m.b701 +", "m.x494 - 0.940066550763924*m.b662 <= 0) m.c708 = Constraint(expr= m.x495 -", "0) m.c1401 = Constraint(expr= - m.b627 + m.b648 + m.b651", "Var(within=Reals,bounds=(0,None),initialize=0) m.x261 = Var(within=Reals,bounds=(0,None),initialize=0) m.x262 = Var(within=Reals,bounds=(0,None),initialize=0) m.x263 = 
Var(within=Reals,bounds=(0,None),initialize=0)", "40*m.b597 <= 40) m.c73 = Constraint(expr= m.x217 + 40*m.b598 <=", "0.690184503917672*m.b678 <= 0) m.c865 = Constraint(expr= m.x580 - 0.690184503917672*m.b679 <=", "m.x567 - m.x570 == 0) m.c802 = Constraint(expr= m.x199 -", "m.c708 = Constraint(expr= m.x495 - 0.940066550763924*m.b663 <= 0) m.c709 =", "- m.x91 + m.x94 == 0) m.c26 = Constraint(expr= m.x74", "0) m.c697 = Constraint(expr= m.x499 == 0) m.c698 = Constraint(expr=", "m.x508 - 0.940066550763924*m.b670 <= 0) m.c773 = Constraint(expr= m.x509 +", "m.b677 - m.b679 <= 0) m.c1096 = Constraint(expr= m.b678 -", "m.x15 = Var(within=Reals,bounds=(0,None),initialize=0) m.x16 = Var(within=Reals,bounds=(0,None),initialize=0) m.x17 = Var(within=Reals,bounds=(0,None),initialize=0) m.x18", "0) m.c6 = Constraint(expr= - m.x12 - m.x15 + m.x18", "m.x53 - m.x296 - m.x299 == 0) m.c294 = Constraint(expr=", "m.c1277 = Constraint(expr= m.b773 + m.b774 <= 1) m.c1278 =", "- m.b642 + m.b643 - m.b733 <= 0) m.c1331 =", "m.c270 = Constraint(expr= m.x69 - m.x339 - m.x342 == 0)", "0) m.c345 = Constraint(expr= m.x360 == 0) m.c346 = Constraint(expr=", "= Constraint(expr= m.x593 + 9*m.b683 <= 9) m.c921 = Constraint(expr=", "0.78338879230327*m.b656 <= 0) m.c660 = Constraint(expr= m.x483 - 0.78338879230327*m.b657 <=", "= Constraint(expr= m.x555 - 15*m.b681 <= 0) m.c886 = Constraint(expr=", "m.x260 == 0) m.c108 = Constraint(expr= - 0.75*m.x237 + m.x261", "Var(within=Binary,bounds=(0,1),initialize=0) m.b733 = Var(within=Binary,bounds=(0,1),initialize=0) m.b734 = Var(within=Binary,bounds=(0,1),initialize=0) m.b735 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.c20 = Constraint(expr= m.x68 - m.x80 - m.x83 == 0)", "= Constraint(expr= 7*m.b738 + m.x828 == 0) m.c976 = Constraint(expr=", "<= 1) m.c1134 = Constraint(expr= m.b701 + m.b703 <= 1)", "- m.x31 - m.x34 == 0) m.c14 = Constraint(expr= m.x38", "m.b628) <= 0) m.c341 = Constraint(expr= m.x311 == 0) m.c342", "m = ConcreteModel() m.x2 = 
Var(within=Reals,bounds=(0,40),initialize=0) m.x3 = Var(within=Reals,bounds=(0,40),initialize=0) m.x4", "+ m.x284/(0.001 + 0.999*m.b611)))*(0.001 + 0.999* m.b611) <= 0) m.c204", "m.x425 == 0) m.c414 = Constraint(expr= m.x426 == 0) m.c415", "m.x116 = Var(within=Reals,bounds=(0,None),initialize=0) m.x117 = Var(within=Reals,bounds=(0,None),initialize=0) m.x118 = Var(within=Reals,bounds=(0,None),initialize=0) m.x119", "m.x334 == 0) m.c245 = Constraint(expr= m.x290 - 3.34221486003388*m.b614 <=", "<= 0) m.c1091 = Constraint(expr= m.b674 - m.b675 <= 0)", "== 0) m.c850 = Constraint(expr= m.x583 == 0) m.c851 =", "- m.x503 == 0) m.c729 = Constraint(expr= m.x165 - m.x501", "- 0.940066550763924*m.b668 <= 0) m.c771 = Constraint(expr= m.x507 - 0.940066550763924*m.b669", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b663 = Var(within=Binary,bounds=(0,1),initialize=0) m.b664 = Var(within=Binary,bounds=(0,1),initialize=0) m.b665 =", "- m.b631 <= 0) m.c1048 = Constraint(expr= m.b630 - m.b631", "= Constraint(expr= m.x491 == 0) m.c672 = Constraint(expr= m.x492 ==", "- 1.26558121681553*m.b637 <= 0) m.c452 = Constraint(expr= m.x365 + 1.26558121681553*m.b635", "m.x382/(0.001 + 0.999*m.b640)))*(0.001 + 0.999* m.b640) <= 0) m.c467 =", "m.x498 == 0) m.c697 = Constraint(expr= m.x499 == 0) m.c698", "0) m.c1423 = Constraint(expr= m.b610 - m.b622 >= 0) m.c1424", "Var(within=Reals,bounds=(0,None),initialize=0) m.x423 = Var(within=Reals,bounds=(0,None),initialize=0) m.x424 = Var(within=Reals,bounds=(0,None),initialize=0) m.x425 = Var(within=Reals,bounds=(0,None),initialize=0)", "- m.x546 == 0) m.c799 = Constraint(expr= m.x184 - m.x544", "<= 0) m.c613 = Constraint(expr=(m.x466/(0.001 + 0.999*m.b655) - 0.85*log(1 +", "1.18887736200171*m.b657 <= 0) m.c655 = Constraint(expr= m.x472 - 1.18887736200171*m.b658 <=", "m.b612 <= 0) m.c1029 = Constraint(expr= m.b611 - m.b613 <=", "+ m.b648 + m.b651 + m.b654 >= 0) m.c1402 =", "- m.b636 <= 0) m.c1053 = Constraint(expr= m.b635 - m.b637", "m.b754 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b755 = Var(within=Binary,bounds=(0,1),initialize=0) m.b756 = Var(within=Binary,bounds=(0,1),initialize=0) m.b757", "m.x473 + 1.18887736200171*m.b656 <= 1.18887736200171) m.c657 = Constraint(expr= m.x474 +", "0) m.c440 = Constraint(expr= m.x431 == 0) m.c441 = Constraint(expr=", "Constraint(expr= m.x312 + 15*m.b627 <= 15) m.c358 = Constraint(expr= m.x313", "+ m.x828 == 0) m.c976 = Constraint(expr= 6*m.b739 + m.x829", "- m.x477 - m.x480 == 0) m.c676 = Constraint(expr= m.x145", "m.b669 + m.b670 - m.b760 <= 0) m.c1358 = Constraint(expr=", "m.b611 + m.b629 >= 0) m.c1380 = Constraint(expr= - m.b612", "0) m.c784 = Constraint(expr= m.x538 - 15*m.b670 <= 0) m.c785", "- m.x184 - m.x187 == 0) m.c50 = Constraint(expr= m.x179", "+ m.b600 - m.b603 >= 0) m.c1405 = Constraint(expr= m.b598", "m.c290 = Constraint(expr= m.x347 == 0) m.c291 = Constraint(expr= m.x348", "m.x307 + 15*m.b625 <= 15) m.c332 = Constraint(expr= m.x350 -", "+ 0.999*m.b678)))*(0.001 + 0.999*m.b678) <= 0) m.c844 = Constraint(expr=(m.x580/(0.001 +", "Constraint(expr= m.x332 + 1.32154609891348*m.b614 <= 1.32154609891348) m.c255 = Constraint(expr= m.x333", "m.x587 = Var(within=Reals,bounds=(0,None),initialize=0) m.x588 = Var(within=Reals,bounds=(0,None),initialize=0) m.x589 = Var(within=Reals,bounds=(0,None),initialize=0) m.x590", "0) m.c25 = Constraint(expr= - m.x73 - m.x91 + m.x94", "Constraint(expr= m.b743 + m.b744 <= 1) m.c1218 = Constraint(expr= m.b743", "== 0) m.c697 = Constraint(expr= m.x499 == 0) m.c698 =", "- 0.480234946352917*m.b676 <= 0) m.c839 = Constraint(expr= m.x575 + 0.480234946352917*m.b674", "<= 0) m.c1313 = Constraint(expr= m.b626 - m.b716 <= 0)", "Constraint(expr= m.x492 == 0) m.c673 = Constraint(expr= m.x493 == 0)", "= Constraint(expr= m.x68 - m.x80 - m.x83 == 0) m.c21", "m.x334 = Var(within=Reals,bounds=(0,None),initialize=0) m.x335 = Var(within=Reals,bounds=(0,None),initialize=0) m.x336 = Var(within=Reals,bounds=(0,None),initialize=0) m.x337", 
"Constraint(expr= m.b708 + m.b709 <= 1) m.c1149 = Constraint(expr= m.b707", "+ m.b628 - m.b718 <= 0) m.c1316 = Constraint(expr= m.b629", "Var(within=Reals,bounds=(0,None),initialize=0) m.x52 = Var(within=Reals,bounds=(0,None),initialize=0) m.x53 = Var(within=Reals,bounds=(0,None),initialize=0) m.x54 = Var(within=Reals,bounds=(0,None),initialize=0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x378 = Var(within=Reals,bounds=(0,None),initialize=0) m.x379 = Var(within=Reals,bounds=(0,None),initialize=0) m.x380 = Var(within=Reals,bounds=(0,None),initialize=0)", "== 0) m.c942 = Constraint(expr= 3*m.b705 + m.x795 == 0)", "= Constraint(expr= 6*m.b708 + m.x798 == 0) m.c946 = Constraint(expr=", "Constraint(expr= 6*m.b759 + m.x849 == 0) m.c997 = Constraint(expr= 3*m.b760", "m.x241 == 0) m.c113 = Constraint(expr= m.x263 == 0) m.c114", "m.c1417 = Constraint(expr= m.b604 - m.b616 >= 0) m.c1418 =", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b727 = Var(within=Binary,bounds=(0,1),initialize=0) m.b728 = Var(within=Binary,bounds=(0,1),initialize=0) m.b729 =", "m.b770 + m.b772 <= 1) m.c1273 = Constraint(expr= m.b770 +", "<= 0) m.c662 = Constraint(expr= m.x485 + 0.78338879230327*m.b656 <= 0.78338879230327)", ">= 0) m.c1448 = Constraint(expr= m.b626 - m.b647 >= 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x283 = Var(within=Reals,bounds=(0,None),initialize=0) m.x284 = Var(within=Reals,bounds=(0,None),initialize=0) m.x285 =", "m.b759 <= 1) m.c1248 = Constraint(expr= m.b758 + m.b760 <=", "Constraint(expr=(m.x464/(0.001 + 0.999*m.b653) - 0.85*log(1 + m.x410/(0.001 + 0.999*m.b653)))*(0.001 +", "== 0) m.c35 = Constraint(expr= m.x137 - m.x140 - m.x143", "<= 0) m.c839 = Constraint(expr= m.x575 + 0.480234946352917*m.b674 <= 0.480234946352917)", "m.x95 - m.x98 == 0) m.c27 = Constraint(expr= m.x75 -", "m.c1016 = Constraint(expr= m.b599 - m.b600 <= 0) m.c1017 =", "+ m.b631 - m.b721 <= 0) m.c1319 = Constraint(expr= m.b632", "<= 0) m.c1342 = Constraint(expr= - m.b653 - 
m.b654 +", ">= 0) m.c1392 = Constraint(expr= - m.b609 + m.b621 +", "m.x432 == 0) m.c448 = Constraint(expr= m.x118 - m.x430 -", "== 0) m.c873 = Constraint(expr= m.x558 == 0) m.c874 =", "m.x50 == 0) m.c15 = Constraint(expr= m.x39 - m.x48 -", "m.x351 - 9*m.b624 <= 0) m.c334 = Constraint(expr= m.x352 -", "== 0) m.c702 = Constraint(expr= m.x162 - m.x495 - m.x498", "20*m.x114 + 25*m.x115 + 10*m.x116 + 30*m.x117 + 40*m.x118 +", "m.c1350 = Constraint(expr= - m.b662 + m.b663 - m.b753 <=", "m.x495 = Var(within=Reals,bounds=(0,None),initialize=0) m.x496 = Var(within=Reals,bounds=(0,None),initialize=0) m.x497 = Var(within=Reals,bounds=(0,None),initialize=0) m.x498", "0) m.c900 = Constraint(expr= m.x564 == 0) m.c901 = Constraint(expr=", "m.c932 = Constraint(expr= 10*m.b695 + m.x785 == 0) m.c933 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x340 = Var(within=Reals,bounds=(0,None),initialize=0) m.x341 = Var(within=Reals,bounds=(0,None),initialize=0) m.x342 =", "- m.b623 >= 0) m.c1425 = Constraint(expr= m.b609 - m.b624", "0.940066550763924*m.b662 <= 0) m.c708 = Constraint(expr= m.x495 - 0.940066550763924*m.b663 <=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x97 = Var(within=Reals,bounds=(0,None),initialize=0) m.x98 = Var(within=Reals,bounds=(0,None),initialize=0) m.x99 = Var(within=Reals,bounds=(0,None),initialize=0)", "- m.x365 == 0) m.c444 = Constraint(expr= m.x81 - m.x363", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x847 = Var(within=Reals,bounds=(None,None),initialize=0) m.x848 = Var(within=Reals,bounds=(None,None),initialize=0) m.x849 =", "m.c1365 = Constraint(expr= - m.b677 + m.b678 - m.b768 <=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x539 = Var(within=Reals,bounds=(0,None),initialize=0) m.x540 = Var(within=Reals,bounds=(0,None),initialize=0) m.x541 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c785 = Constraint(expr= m.x539 + 15*m.b668 <= 15) m.c786 =", "m.c714 = Constraint(expr= m.x519 - 0.705049913072943*m.b663 <= 0) m.c715 
=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x163 = Var(within=Reals,bounds=(0,None),initialize=0) m.x164 = Var(within=Reals,bounds=(0,None),initialize=0) m.x165 = Var(within=Reals,bounds=(0,None),initialize=0)", "- m.b602 >= 0) m.c1404 = Constraint(expr= m.b597 + m.b600", "<= 0) m.c70 = Constraint(expr= m.x214 - 40*m.b598 <= 0)", "== 0) m.c968 = Constraint(expr= 2*m.b731 + m.x821 == 0)", "- m.b693 <= 0) m.c1291 = Constraint(expr= - m.b602 -", "<= 3.04984759446376) m.c578 = Constraint(expr= m.x452 - 1.04900943706034*m.b647 <= 0)", "m.b647 - m.b648 + m.b649 - m.b739 <= 0) m.c1337", "<= 1) m.c1211 = Constraint(expr= m.b740 + m.b741 <= 1)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x193 = Var(within=Reals,bounds=(0,None),initialize=0) m.x194 = Var(within=Reals,bounds=(0,None),initialize=0) m.x195 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= m.x78 - m.x102 - m.x105 - m.x108 ==", "m.x843 == 0) m.c991 = Constraint(expr= 4*m.b754 + m.x844 ==", "Constraint(expr= m.x516 + 30*m.b669 <= 30) m.c781 = Constraint(expr= m.x517", "m.c591 = Constraint(expr= m.x462 == 0) m.c592 = Constraint(expr= m.x463", "m.x553 == 0) m.c827 = Constraint(expr= m.x200 - m.x572 -", "+ 0.999*m.b635)))*(0.001 + 0.999* m.b635) <= 0) m.c435 = Constraint(expr=(m.x429/(0.001", "= Constraint(expr= m.b669 - m.b681 >= 0) m.c1483 = Constraint(expr=", "m.c1463 = Constraint(expr= - m.b665 + m.b677 >= 0) m.c1464", "1.04900943706034*m.b647 <= 1.04900943706034) m.c582 = Constraint(expr= m.x456 + 1.04900943706034*m.b648 <=", "<= 0) m.c912 = Constraint(expr= m.x561 - 15*m.b684 <= 0)", "+ 1.11894339953103*m.b650 <= 1.11894339953103) m.c609 = Constraint(expr= m.x462 + 1.11894339953103*m.b651", "- m.x226 - m.x229 == 0) m.c68 = Constraint(expr= m.x212", "= Constraint(expr= m.x515 + 30*m.b668 <= 30) m.c780 = Constraint(expr=", "m.x220/(0.001 + 0.999*m.b601)))*(0.001 + 0.999* m.b601) <= 0) m.c83 =", "= Constraint(expr= m.b705 + m.b706 <= 1) m.c1143 = Constraint(expr=", "= 
Var(within=Reals,bounds=(0,None),initialize=0) m.x396 = Var(within=Reals,bounds=(0,None),initialize=0) m.x397 = Var(within=Reals,bounds=(0,None),initialize=0) m.x398 =", "0) m.c716 = Constraint(expr= m.x521 + 0.705049913072943*m.b662 <= 0.705049913072943) m.c717", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x45 = Var(within=Reals,bounds=(0,None),initialize=0) m.x46 = Var(within=Reals,bounds=(0,None),initialize=0) m.x47 =", "0) m.c1371 = Constraint(expr= - m.b683 + m.b684 - m.b774", "== 0) m.c66 = Constraint(expr= m.x12 - m.x225 - m.x228", "0) m.c858 = Constraint(expr= m.x528 - 0.994083415506506*m.b678 <= 0) m.c859", "m.b666 - m.b756 <= 0) m.c1354 = Constraint(expr= - m.b665", "- m.b725 <= 0) m.c1323 = Constraint(expr= - m.b635 +", "<= 0) m.c1372 = Constraint(expr= - m.b683 - m.b684 +", "= Constraint(expr= m.b615 - m.b633 >= 0) m.c1435 = Constraint(expr=", "m.x27 - m.x237 - m.x240 == 0) m.c118 = Constraint(expr=", "m.c945 = Constraint(expr= 6*m.b708 + m.x798 == 0) m.c946 =", "+ m.b765 <= 1) m.c1260 = Constraint(expr= m.b764 + m.b766", "= Constraint(expr= m.x546 + 0.705049913072943*m.b672 <= 0.705049913072943) m.c808 = Constraint(expr=", "m.c796 = Constraint(expr= m.x571 == 0) m.c797 = Constraint(expr= m.x182", "Var(within=Reals,bounds=(0,None),initialize=0) m.x149 = Var(within=Reals,bounds=(0,None),initialize=0) m.x150 = Var(within=Reals,bounds=(0,None),initialize=0) m.x151 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c273 = Constraint(expr= m.x270 - 2.54515263975353*m.b618 <= 0) m.c274", "m.c893 = Constraint(expr= m.x587 + 13.5*m.b680 <= 13.5) m.c894 =", "+ 0.999*m.b617)))*(0.001 + 0.999*m.b617) <= 0) m.c258 = Constraint(expr=(m.x339/(0.001 +", "0.7*log(1 + m.x362/(0.001 + 0.999*m.b635)))*(0.001 + 0.999* m.b635) <= 0)", "0.999*m.b678) - log(1 + m.x528/(0.001 + 0.999*m.b678)))*(0.001 + 0.999*m.b678) <=", "0.572481933717686*m.b637 <= 0) m.c458 = Constraint(expr= m.x431 + 0.572481933717686*m.b635 <=", "Constraint(expr= m.b598 + m.b601 - m.b607 >= 
0) m.c1409 =", "Constraint(expr= m.x132 - m.x459 - m.x462 == 0) m.c598 =", "- 0.6*m.x561 + m.x591 == 0) m.c898 = Constraint(expr= -", "m.b773 + m.b775 <= 1) m.c1282 = Constraint(expr= m.b774 +", "+ 3.04984759446376*m.b655 <= 3.04984759446376) m.c632 = Constraint(expr= m.x464 - 1.18887736200171*m.b653", "<= 1) m.c1117 = Constraint(expr= m.b692 + m.b693 <= 1)", "m.c318 = Constraint(expr= m.x354 == 0) m.c319 = Constraint(expr= m.x355", "25*m.x115 + 10*m.x116 + 30*m.x117 + 40*m.x118 + 30*m.x119 +", "6*m.b700 - 7*m.b701 - 7*m.b702 - 4*m.b703 - 4*m.b704 -", "m.x293 == 0) m.c234 = Constraint(expr= m.x294 == 0) m.c235", "33.5*m.b640 <= 33.5) m.c497 = Constraint(expr= m.x434 - 2.30162356062425*m.b638 <=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x381 = Var(within=Reals,bounds=(0,None),initialize=0) m.x382 = Var(within=Reals,bounds=(0,None),initialize=0) m.x383 =", "- m.b612 >= 0) m.c1414 = Constraint(expr= m.b604 - m.b613", "1.83548069293539) m.c395 = Constraint(expr= m.x374 - 20*m.b629 <= 0) m.c396", "= Constraint(expr= m.x347 == 0) m.c291 = Constraint(expr= m.x348 ==", "Constraint(expr= - 0.75*m.x494 + m.x518 == 0) m.c693 = Constraint(expr=", "8*m.b741 - 4*m.b742 - m.b743 - 4*m.b744 - m.b745 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x459 = Var(within=Reals,bounds=(0,None),initialize=0) m.x460 = Var(within=Reals,bounds=(0,None),initialize=0) m.x461 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x219 - m.x222 == 0) m.c91 = Constraint(expr= m.x10 -", "Constraint(expr= 3*m.b765 + m.x855 == 0) m.c1003 = Constraint(expr= 9*m.b766", "Constraint(expr= m.b602 - m.b603 <= 0) m.c1020 = Constraint(expr= m.b602", "m.b736 <= 0) m.c1334 = Constraint(expr= m.b647 - m.b737 <=", "= Constraint(expr= m.b705 + m.b706 <= 1) m.c1145 = Constraint(expr=", "m.c534 = Constraint(expr= m.x396 == 0) m.c535 = Constraint(expr= m.x397", "+ 15*m.b608 <= 15) m.c201 = Constraint(expr= m.x282 + 15*m.b609", "m.b652) <= 0) m.c587 = Constraint(expr= m.x407 == 0) m.c588", 
"Constraint(expr= m.b735 + m.b736 <= 1) m.c1203 = Constraint(expr= m.b734", "<= 1) m.c1247 = Constraint(expr= m.b758 + m.b759 <= 1)", "m.b624 - m.b645 >= 0) m.c1447 = Constraint(expr= m.b625 -", "m.x250 + m.x280 == 0) m.c164 = Constraint(expr= - 0.5*m.x254", "m.x118 = Var(within=Reals,bounds=(0,None),initialize=0) m.x119 = Var(within=Reals,bounds=(0,None),initialize=0) m.x120 = Var(within=Reals,bounds=(0,None),initialize=0) m.x121", "= Constraint(expr= m.x201 - m.x573 - m.x576 == 0) m.c829", "m.x30 - m.x243 - m.x246 == 0) m.c145 = Constraint(expr=", "m.x371 == 0) m.c477 = Constraint(expr= m.x84 - m.x369 -", "15*m.b670 <= 0) m.c785 = Constraint(expr= m.x539 + 15*m.b668 <=", "m.x376 = Var(within=Reals,bounds=(0,None),initialize=0) m.x377 = Var(within=Reals,bounds=(0,None),initialize=0) m.x378 = Var(within=Reals,bounds=(0,None),initialize=0) m.x379", "Constraint(expr= m.x294 == 0) m.c235 = Constraint(expr= m.x295 == 0)", "Constraint(expr= m.x468 + 1.18887736200171*m.b654 <= 1.18887736200171) m.c637 = Constraint(expr= m.x469", "- m.x220 - m.x223 == 0) m.c92 = Constraint(expr= m.x14", "m.c880 = Constraint(expr= m.x190 - m.x556 - m.x559 == 0)", "- 1.26558121681553*m.b638 <= 0) m.c486 = Constraint(expr= m.x369 - 1.26558121681553*m.b639", "- m.b727 <= 0) m.c1325 = Constraint(expr= m.b638 - m.b728", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x267 = Var(within=Reals,bounds=(0,None),initialize=0) m.x268 = Var(within=Reals,bounds=(0,None),initialize=0) m.x269 =", "m.b753 <= 1) m.c1236 = Constraint(expr= m.b752 + m.b754 <=", "m.x586 - m.x589 == 0) m.c884 = Constraint(expr= m.x554 -", "- 7*m.b764 - 3*m.b765 - 9*m.b766 - 4*m.b767 - 8*m.b768", "m.x67 - m.x328 - m.x334 == 0) m.c245 = Constraint(expr=", "m.c1070 = Constraint(expr= m.b653 - m.b654 <= 0) m.c1071 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x425 = Var(within=Reals,bounds=(0,None),initialize=0) m.x426 = Var(within=Reals,bounds=(0,None),initialize=0) m.x427 =", "m.b645 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b646 = Var(within=Binary,bounds=(0,1),initialize=0) m.b647 = Var(within=Binary,bounds=(0,1),initialize=0) m.b648", "= Constraint(expr= m.b720 + m.b721 <= 1) m.c1175 = Constraint(expr=", "= Constraint(expr= m.b743 + m.b745 <= 1) m.c1222 = Constraint(expr=", "6*m.b708 - 7*m.b709 - 2*m.b710 - 5*m.b711 - 2*m.b712 -", "m.x538 - m.x541 == 0) m.c770 = Constraint(expr= m.x506 -", "= Constraint(expr= m.x492 == 0) m.c673 = Constraint(expr= m.x493 ==", "0) m.c547 = Constraint(expr= m.x394 - 9*m.b646 <= 0) m.c548", "0) m.c634 = Constraint(expr= m.x466 - 1.18887736200171*m.b655 <= 0) m.c635", "Var(within=Binary,bounds=(0,1),initialize=0) m.b645 = Var(within=Binary,bounds=(0,1),initialize=0) m.b646 = Var(within=Binary,bounds=(0,1),initialize=0) m.b647 = Var(within=Binary,bounds=(0,1),initialize=0)", "= Constraint(expr= - m.b621 + m.b639 >= 0) m.c1396 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x191 = Var(within=Reals,bounds=(0,None),initialize=0) m.x192 = Var(within=Reals,bounds=(0,None),initialize=0) m.x193 =", "m.x406/(0.001 + 0.999*m.b652)))*(0.001 + 0.999* m.b652) <= 0) m.c587 =", "Constraint(expr= - m.b680 - m.b681 + m.b682 - m.b772 <=", "+ m.b625 + m.b628 >= 0) m.c1394 = Constraint(expr= -", "- m.x354 == 0) m.c325 = Constraint(expr= m.x76 - m.x352", "m.c91 = Constraint(expr= m.x10 - m.x220 - m.x223 == 0)", "m.b671 = Var(within=Binary,bounds=(0,1),initialize=0) m.b672 = Var(within=Binary,bounds=(0,1),initialize=0) m.b673 = Var(within=Binary,bounds=(0,1),initialize=0) m.b674", ">= 0) m.c1389 = Constraint(expr= - m.b618 + m.b636 +", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x817 = Var(within=Reals,bounds=(None,None),initialize=0) m.x818 = Var(within=Reals,bounds=(None,None),initialize=0) m.x819 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x39 = Var(within=Reals,bounds=(0,None),initialize=0) m.x40 = Var(within=Reals,bounds=(0,None),initialize=0) m.x41 =", "<= 0) m.c1347 = Constraint(expr= - 
m.b659 + m.b660 -", "= Constraint(expr= m.b632 - m.b634 <= 0) m.c1051 = Constraint(expr=", "m.b692 <= 0) m.c1290 = Constraint(expr= - m.b602 + m.b603", "m.x486 == 0) m.c646 = Constraint(expr= m.x487 == 0) m.c647", "m.c643 = Constraint(expr= m.x475 == 0) m.c644 = Constraint(expr= m.x485", "m.c1074 = Constraint(expr= m.b656 - m.b658 <= 0) m.c1075 =", "+ m.b618 - m.b708 <= 0) m.c1306 = Constraint(expr= -", "0) m.c1010 = Constraint(expr= 8*m.b773 + m.x863 == 0) m.c1011", "Var(within=Binary,bounds=(0,1),initialize=0) m.b723 = Var(within=Binary,bounds=(0,1),initialize=0) m.b724 = Var(within=Binary,bounds=(0,1),initialize=0) m.b725 = Var(within=Binary,bounds=(0,1),initialize=0)", "0) m.c1428 = Constraint(expr= m.b609 - m.b627 >= 0) m.c1429", "m.b657 + m.b658 - m.b748 <= 0) m.c1346 = Constraint(expr=", "== 0) m.c353 = Constraint(expr= m.x308 - 15*m.b626 <= 0)", "+ 0.999*m.b648)))*(0.001 + 0.999* m.b648) <= 0) m.c559 = Constraint(expr=(m.x454/(0.001", "by GAMS Convert at 01/15/21 11:37:33 # # Equation counts", "m.x175 = Var(within=Reals,bounds=(0,None),initialize=0) m.x176 = Var(within=Reals,bounds=(0,None),initialize=0) m.x177 = Var(within=Reals,bounds=(0,None),initialize=0) m.x178", "m.x560 = Var(within=Reals,bounds=(0,None),initialize=0) m.x561 = Var(within=Reals,bounds=(0,None),initialize=0) m.x562 = Var(within=Reals,bounds=(0,None),initialize=0) m.x563", "0) m.c103 = Constraint(expr= m.x232 - 4.45628648004517*m.b601 <= 0) m.c104", "m.x237 = Var(within=Reals,bounds=(0,None),initialize=0) m.x238 = Var(within=Reals,bounds=(0,None),initialize=0) m.x239 = Var(within=Reals,bounds=(0,None),initialize=0) m.x240", "Constraint(expr= m.b602 - m.b692 <= 0) m.c1290 = Constraint(expr= -", "+ 0.480234946352917*m.b676 <= 0.480234946352917) m.c842 = Constraint(expr=(m.x578/(0.001 + 0.999*m.b677) -", "10*m.b699 - 6*m.b700 - 7*m.b701 - 7*m.b702 - 4*m.b703 -", "m.x567 - 0.666992981045719*m.b672 <= 0) m.c811 = Constraint(expr= m.x568 -", "+ 0.994083415506506*m.b667 <= 0.994083415506506) 
m.c746 = Constraint(expr= - m.x506 +", "+ m.b625 - m.b715 <= 0) m.c1313 = Constraint(expr= m.b626", "0) m.c165 = Constraint(expr= - 0.5*m.x255 + m.x279 == 0)", "m.x512 - 30*m.b668 <= 0) m.c777 = Constraint(expr= m.x513 -", "- m.b633 <= 0) m.c1050 = Constraint(expr= m.b632 - m.b634", "m.x429 = Var(within=Reals,bounds=(0,None),initialize=0) m.x430 = Var(within=Reals,bounds=(0,None),initialize=0) m.x431 = Var(within=Reals,bounds=(0,None),initialize=0) m.x432", "0) m.c724 = Constraint(expr= m.x505 == 0) m.c725 = Constraint(expr=", ">= 0) m.c1484 = Constraint(expr= m.b668 - m.b683 >= 0)", "<= 0) m.c1353 = Constraint(expr= - m.b665 + m.b666 -", "m.c471 = Constraint(expr= m.x384 == 0) m.c472 = Constraint(expr= m.x385", "0) m.c754 = Constraint(expr= m.x511 == 0) m.c755 = Constraint(expr=", "Var(within=Binary,bounds=(0,1),initialize=0) m.b658 = Var(within=Binary,bounds=(0,1),initialize=0) m.b659 = Var(within=Binary,bounds=(0,1),initialize=0) m.b660 = Var(within=Binary,bounds=(0,1),initialize=0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x231 = Var(within=Reals,bounds=(0,None),initialize=0) m.x232 = Var(within=Reals,bounds=(0,None),initialize=0) m.x233 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x551 = Var(within=Reals,bounds=(0,None),initialize=0) m.x552 = Var(within=Reals,bounds=(0,None),initialize=0) m.x553 = Var(within=Reals,bounds=(0,None),initialize=0) m.x554", "m.x463 == 0) m.c599 = Constraint(expr= m.x404 - 3.04984759446376*m.b650 <=", "m.b598 = Var(within=Binary,bounds=(0,1),initialize=0) m.b599 = Var(within=Binary,bounds=(0,1),initialize=0) m.b600 = Var(within=Binary,bounds=(0,1),initialize=0) m.b601", "= Constraint(expr= m.x295 == 0) m.c236 = Constraint(expr= m.x332 ==", "m.c397 = Constraint(expr= m.x376 - 20*m.b631 <= 0) m.c398 =", "0.940066550763924*m.b669 <= 0.940066550763924) m.c775 = Constraint(expr= m.x511 + 0.940066550763924*m.b670 <=", "== 0) m.c970 = Constraint(expr= 2*m.b733 + m.x823 == 0)", "Constraint(expr= 2*m.b755 + m.x845 == 0) 
m.c993 = Constraint(expr= 3*m.b756", "- 3*m.b760 - 4*m.b761 - 8*m.b762 - 7*m.b763 - 7*m.b764", "m.b631 - m.b721 <= 0) m.c1319 = Constraint(expr= m.b632 -", "0) m.c778 = Constraint(expr= m.x514 - 30*m.b670 <= 0) m.c779", "3.34221486003388*m.b611 <= 0) m.c219 = Constraint(expr= m.x285 - 3.34221486003388*m.b612 <=", "m.x275 + 2.54515263975353*m.b617 <= 2.54515263975353) m.c276 = Constraint(expr= m.x276 +", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x827 = Var(within=Reals,bounds=(None,None),initialize=0) m.x828 = Var(within=Reals,bounds=(None,None),initialize=0) m.x829 =", "- 0.5*m.x254 + m.x278 == 0) m.c165 = Constraint(expr= -", "0) m.c552 = Constraint(expr= m.x447 - 9*m.b645 <= 0) m.c553", "+ 9*m.b623 <= 9) m.c336 = Constraint(expr= m.x354 + 9*m.b624", "m.x573 - m.x576 == 0) m.c829 = Constraint(expr= m.x202 -", "- m.x585 - m.x588 == 0) m.c883 = Constraint(expr= m.x208", "m.b627 >= 0) m.c1393 = Constraint(expr= - m.b610 + m.b622", "- m.x192 - m.x195 == 0) m.c52 = Constraint(expr= m.x181", "15*m.b626 <= 15) m.c357 = Constraint(expr= m.x312 + 15*m.b627 <=", "- m.x557 == 0) m.c879 = Constraint(expr= m.x189 - m.x555", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b775 = Var(within=Binary,bounds=(0,1),initialize=0) m.x776 = Var(within=Reals,bounds=(None,None),initialize=0) m.x777 =", "Constraint(expr= m.x272 == 0) m.c141 = Constraint(expr= m.x273 == 0)", "4*m.b744 - m.b745 - 2*m.b746 - 5*m.b747 - 2*m.b748 -", "Constraint(expr= - m.b674 - m.b675 + m.b676 - m.b766 <=", "m.x323 = Var(within=Reals,bounds=(0,None),initialize=0) m.x324 = Var(within=Reals,bounds=(0,None),initialize=0) m.x325 = Var(within=Reals,bounds=(0,None),initialize=0) m.x326", "- m.x244 - m.x247 == 0) m.c146 = Constraint(expr= m.x41", "m.c1198 = Constraint(expr= m.b732 + m.b733 <= 1) m.c1199 =", "m.x297 - 15*m.b621 <= 0) m.c301 = Constraint(expr= m.x298 -", "m.x390 = Var(within=Reals,bounds=(0,None),initialize=0) m.x391 = Var(within=Reals,bounds=(0,None),initialize=0) m.x392 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x393", "- 0.78338879230327*m.b657 <= 0) m.c661 = Constraint(expr= m.x484 - 0.78338879230327*m.b658", "15*m.b681 <= 15) m.c889 = Constraint(expr= m.x559 + 15*m.b682 <=", "= Constraint(expr= m.x559 + 15*m.b682 <= 15) m.c890 = Constraint(expr=", "m.b755 + m.b756 <= 1) m.c1244 = Constraint(expr= m.b756 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x244 = Var(within=Reals,bounds=(0,None),initialize=0) m.x245 = Var(within=Reals,bounds=(0,None),initialize=0) m.x246 =", "<= 0) m.c486 = Constraint(expr= m.x369 - 1.26558121681553*m.b639 <= 0)", "Constraint(expr= m.x543 - 0.705049913072943*m.b672 <= 0) m.c805 = Constraint(expr= m.x544", "0) m.c210 = Constraint(expr= m.x321 == 0) m.c211 = Constraint(expr=", ">= 0) m.c1465 = Constraint(expr= - m.b667 + m.b679 >=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x584 = Var(within=Reals,bounds=(0,None),initialize=0) m.x585 = Var(within=Reals,bounds=(0,None),initialize=0) m.x586 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x183 = Var(within=Reals,bounds=(0,None),initialize=0) m.x184 = Var(within=Reals,bounds=(0,None),initialize=0) m.x185 =", "= Constraint(expr= m.x528 - 0.994083415506506*m.b678 <= 0) m.c859 = Constraint(expr=", "- m.b618 + m.b619 - m.b709 <= 0) m.c1307 =", "m.x523 == 0) m.c707 = Constraint(expr= m.x494 - 0.940066550763924*m.b662 <=", "m.b675 + m.b676 - m.b766 <= 0) m.c1364 = Constraint(expr=", "- m.b750 <= 0) m.c1348 = Constraint(expr= - m.b659 -", "<= 1) m.c1269 = Constraint(expr= m.b767 + m.b769 <= 1)", "<= 1) m.c1176 = Constraint(expr= m.b722 + m.b724 <= 1)", "Constraint(expr= m.x542 - 0.705049913072943*m.b671 <= 0) m.c804 = Constraint(expr= m.x543", "Var(within=Binary,bounds=(0,1),initialize=0) m.b654 = Var(within=Binary,bounds=(0,1),initialize=0) m.b655 = Var(within=Binary,bounds=(0,1),initialize=0) m.b656 = Var(within=Binary,bounds=(0,1),initialize=0)", "== 0) m.c60 = Constraint(expr= m.x228 == 0) m.c61 =", "- m.x493 == 0) m.c680 = 
Constraint(expr= m.x476 - 1.18887736200171*m.b659", "m.x139 == 0) m.c35 = Constraint(expr= m.x137 - m.x140 -", "13.5) m.c309 = Constraint(expr= m.x348 + 13.5*m.b621 <= 13.5) m.c310", "+ 9*m.b641 <= 9) m.c528 = Constraint(expr= m.x444 + 9*m.b642", "= Constraint(expr= m.x574 - 0.480234946352917*m.b676 <= 0) m.c839 = Constraint(expr=", "- 0.940066550763924*m.b667 <= 0) m.c737 = Constraint(expr= m.x503 + 0.940066550763924*m.b665", "= Constraint(expr= m.b701 + m.b702 <= 1) m.c1136 = Constraint(expr=", "m.c112 = Constraint(expr= m.x241 == 0) m.c113 = Constraint(expr= m.x263", "Var(within=Binary,bounds=(0,1),initialize=0) m.b650 = Var(within=Binary,bounds=(0,1),initialize=0) m.b651 = Var(within=Binary,bounds=(0,1),initialize=0) m.b652 = Var(within=Binary,bounds=(0,1),initialize=0)", "= Constraint(expr= - m.x508 + m.x538 == 0) m.c749 =", "== 0) m.c1002 = Constraint(expr= 3*m.b765 + m.x855 == 0)", "+ m.b702 <= 1) m.c1134 = Constraint(expr= m.b701 + m.b703", "= Constraint(expr= m.b749 + m.b751 <= 1) m.c1234 = Constraint(expr=", "m.b641 + m.b642 - m.b732 <= 0) m.c1330 = Constraint(expr=", "m.c3 = Constraint(expr= m.x3 - m.x6 - m.x9 == 0)", "m.c901 = Constraint(expr= m.x565 == 0) m.c902 = Constraint(expr= m.x593", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x220 = Var(within=Reals,bounds=(0,None),initialize=0) m.x221 = Var(within=Reals,bounds=(0,None),initialize=0) m.x222 =", "= Constraint(expr= m.b770 + m.b771 <= 1) m.c1274 = Constraint(expr=", "1) m.c1215 = Constraint(expr= m.b740 + m.b742 <= 1) m.c1216", "= Constraint(expr= m.x80 - m.x362 - m.x365 == 0) m.c444", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x75 = Var(within=Reals,bounds=(0,None),initialize=0) m.x76 = Var(within=Reals,bounds=(0,None),initialize=0) m.x77 =", "m.c598 = Constraint(expr= m.x133 - m.x460 - m.x463 == 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x565 = Var(within=Reals,bounds=(0,None),initialize=0) m.x566 = Var(within=Reals,bounds=(0,None),initialize=0) m.x567 = 
Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= m.x188 - m.x554 - m.x557 == 0) m.c879", "== 1) m.c1376 = Constraint(expr= - m.b602 + m.b611 +", "0) m.c51 = Constraint(expr= m.x180 - m.x189 - m.x192 -", "Constraint(expr= m.x9 - m.x219 - m.x222 == 0) m.c91 =", "m.b636 = Var(within=Binary,bounds=(0,1),initialize=0) m.b637 = Var(within=Binary,bounds=(0,1),initialize=0) m.b638 = Var(within=Binary,bounds=(0,1),initialize=0) m.b639", "Constraint(expr= m.x143 - m.x476 - m.x479 == 0) m.c675 =", "m.x13 - m.x226 - m.x229 == 0) m.c68 = Constraint(expr=", "0) m.c907 = Constraint(expr= m.x193 - m.x562 - m.x565 ==", "- m.x398 - m.x401 == 0) m.c567 = Constraint(expr= m.x102", "m.b747 = Var(within=Binary,bounds=(0,1),initialize=0) m.b748 = Var(within=Binary,bounds=(0,1),initialize=0) m.b749 = Var(within=Binary,bounds=(0,1),initialize=0) m.b750", "0.999*m.b649)))*(0.001 + 0.999* m.b649) <= 0) m.c560 = Constraint(expr= m.x401", "4.45628648004517*m.b607 <= 4.45628648004517) m.c155 = Constraint(expr= m.x266 - 2.54515263975353*m.b605 <=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x577 = Var(within=Reals,bounds=(0,None),initialize=0) m.x578 = Var(within=Reals,bounds=(0,None),initialize=0) m.x579 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c1150 = Constraint(expr= m.b708 + m.b709 <= 1) m.c1151 =", "- m.b640 >= 0) m.c1442 = Constraint(expr= m.b623 - m.b641", "+ 0.999* m.b667) <= 0) m.c722 = Constraint(expr= m.x503 ==", "Constraint(expr= m.b698 + m.b700 <= 1) m.c1129 = Constraint(expr= m.b698", "m.c651 = Constraint(expr= m.x147 - m.x483 - m.x486 == 0)", "1) m.c1108 = Constraint(expr= m.b687 + m.b688 <= 1) m.c1109", "0) m.c1102 = Constraint(expr= m.b684 - m.b685 <= 0) m.c1103", "Constraint(expr= m.x289 + 3.34221486003388*m.b613 <= 3.34221486003388) m.c224 = Constraint(expr= m.x314", "m.x380 - m.x383 == 0) m.c480 = Constraint(expr= m.x93 -", "m.b659 = Var(within=Binary,bounds=(0,1),initialize=0) m.b660 = Var(within=Binary,bounds=(0,1),initialize=0) 
m.b661 = Var(within=Binary,bounds=(0,1),initialize=0) m.b662", "m.b690 <= 1) m.c1112 = Constraint(expr= m.b690 + m.b691 <=", "m.c820 = Constraint(expr= m.x553 == 0) m.c821 = Constraint(expr= m.x575", "Var(within=Reals,bounds=(0,None),initialize=0) m.x338 = Var(within=Reals,bounds=(0,None),initialize=0) m.x339 = Var(within=Reals,bounds=(0,None),initialize=0) m.x340 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.b717 + m.b718 <= 1) m.c1169 = Constraint(expr= m.b719", "Constraint(expr= m.b743 + m.b744 <= 1) m.c1220 = Constraint(expr= m.b744", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x852 = Var(within=Reals,bounds=(None,None),initialize=0) m.x853 = Var(within=Reals,bounds=(None,None),initialize=0) m.x854 =", "m.b670 - m.b682 >= 0) m.c1484 = Constraint(expr= m.b668 -", "m.x348 == 0) m.c292 = Constraint(expr= m.x349 == 0) m.c293", "= Constraint(expr= m.x109 - m.x412 - m.x415 == 0) m.c623", "= Constraint(expr= m.x51 - m.x291 - m.x294 == 0) m.c241", "== 0) m.c115 = Constraint(expr= m.x265 == 0) m.c116 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x354 = Var(within=Reals,bounds=(0,None),initialize=0) m.x355 = Var(within=Reals,bounds=(0,None),initialize=0) m.x356 = Var(within=Reals,bounds=(0,None),initialize=0)", "- m.x59 == 0) m.c18 = Constraint(expr= m.x45 - m.x54", "m.b770 + m.b772 <= 1) m.c1276 = Constraint(expr= m.b771 +", "== 0) m.c234 = Constraint(expr= m.x294 == 0) m.c235 =", "== 0) m.c588 = Constraint(expr= m.x408 == 0) m.c589 =", "- m.x421 == 0) m.c389 = Constraint(expr= m.x317 - 1.83548069293539*m.b629", "+ m.b700 <= 1) m.c1133 = Constraint(expr= m.b701 + m.b702", "+ m.b729 <= 1) m.c1188 = Constraint(expr= m.b728 + m.b730", "Constraint(expr= m.x273 + 2.54515263975353*m.b606 <= 2.54515263975353) m.c160 = Constraint(expr= m.x274", "m.x432 + 0.572481933717686*m.b636 <= 0.572481933717686) m.c460 = Constraint(expr= m.x433 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x376 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x377 = Var(within=Reals,bounds=(0,None),initialize=0) m.x378 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x10 = Var(within=Reals,bounds=(0,None),initialize=0) m.x11 = Var(within=Reals,bounds=(0,None),initialize=0) m.x12 =", "m.c484 = Constraint(expr= m.x121 - m.x436 - m.x439 == 0)", "m.b669 - m.b759 <= 0) m.c1357 = Constraint(expr= - m.b668", "- m.b617 >= 0) m.c1419 = Constraint(expr= m.b606 - m.b618", "== 0) m.c86 = Constraint(expr= m.x233 == 0) m.c87 =", "= Constraint(expr= - m.b612 + m.b630 >= 0) m.c1381 =", "# x b i s1s s2s sc si # Total", "== 0) m.c178 = Constraint(expr= m.x34 - m.x250 - m.x253", "- m.x248 + m.x278 == 0) m.c162 = Constraint(expr= -", "m.b659 - m.b661 <= 0) m.c1078 = Constraint(expr= m.b660 -", "20*m.x121 + 35*m.x122 + 50*m.x123 + 20*m.x124 + 20*m.x125 +", "Var(within=Reals,bounds=(0,None),initialize=0) m.x169 = Var(within=Reals,bounds=(0,None),initialize=0) m.x170 = Var(within=Reals,bounds=(0,30),initialize=0) m.x171 = Var(within=Reals,bounds=(0,30),initialize=0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x229 = Var(within=Reals,bounds=(0,None),initialize=0) m.x230 = Var(within=Reals,bounds=(0,None),initialize=0) m.x231 = Var(within=Reals,bounds=(0,None),initialize=0)", "1.11894339953103*m.b652 <= 1.11894339953103) m.c611 = Constraint(expr=(m.x464/(0.001 + 0.999*m.b653) - 0.85*log(1", "m.x19 - m.x22 - m.x25 == 0) m.c11 = Constraint(expr=", "m.x279 = Var(within=Reals,bounds=(0,None),initialize=0) m.x280 = Var(within=Reals,bounds=(0,None),initialize=0) m.x281 = Var(within=Reals,bounds=(0,None),initialize=0) m.x282", "- m.x594 == 0) m.c910 = Constraint(expr= m.x211 - m.x592", "m.b697 <= 1) m.c1126 = Constraint(expr= m.b696 + m.b697 <=", "m.x310 - 15*m.b628 <= 0) m.c356 = Constraint(expr= m.x311 +", "0) m.c927 = Constraint(expr= 7*m.b690 + m.x780 == 0) m.c928", "m.x54 - m.x57 - m.x60 == 0) m.c19 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x451 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x452 = Var(within=Reals,bounds=(0,None),initialize=0) m.x453 =", "Constraint(expr=(m.x434/(0.001 + 0.999*m.b638) - 0.65*log(1 + m.x368/(0.001 + 0.999*m.b638)))*(0.001 +", "15*m.b684 <= 15) m.c916 = Constraint(expr= m.x565 + 15*m.b685 <=", "- m.b610 <= 0) m.c1028 = Constraint(expr= m.b611 - m.b612", "1.04900943706034*m.b647 <= 0) m.c579 = Constraint(expr= m.x453 - 1.04900943706034*m.b648 <=", "20*m.b629 <= 20) m.c405 = Constraint(expr= m.x420 + 20*m.b630 <=", "- m.b647 >= 0) m.c1449 = Constraint(expr= m.b627 - m.b648", "Var(within=Binary,bounds=(0,1),initialize=0) m.b604 = Var(within=Binary,bounds=(0,1),initialize=0) m.b605 = Var(within=Binary,bounds=(0,1),initialize=0) m.b606 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.c169 = Constraint(expr= m.x253 == 0) m.c170 = Constraint(expr= m.x257", "0.999*m.b606)))*(0.001 + 0.999* m.b606) <= 0) m.c136 = Constraint(expr=(m.x268/(0.001 +", "0.999*m.b627) - 1.1*log(1 + m.x309/(0.001 + 0.999*m.b627)))*(0.001 + 0.999* m.b627)", "= Constraint(expr=(m.x454/(0.001 + 0.999*m.b649) - 0.75*log(1 + m.x400/(0.001 + 0.999*m.b649)))*(0.001", "Var(within=Reals,bounds=(0,None),initialize=0) m.x556 = Var(within=Reals,bounds=(0,None),initialize=0) m.x557 = Var(within=Reals,bounds=(0,None),initialize=0) m.x558 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x804 = Var(within=Reals,bounds=(None,None),initialize=0) m.x805 = Var(within=Reals,bounds=(None,None),initialize=0) m.x806 =", "m.c1046 = Constraint(expr= m.b629 - m.b630 <= 0) m.c1047 =", "= Constraint(expr= - m.b665 - m.b666 + m.b667 - m.b757", "0.999*m.b615)))*(0.001 + 0.999* m.b615) <= 0) m.c232 = Constraint(expr=(m.x328/(0.001 +", "+ m.b718 <= 1) m.c1165 = Constraint(expr= m.b716 + m.b717", "2.30162356062425*m.b638 <= 2.30162356062425) m.c501 = Constraint(expr= m.x438 + 2.30162356062425*m.b639 <=", "= Constraint(expr= m.x379 + 20*m.b631 <= 20) m.c401 = Constraint(expr=", 
">= 0) m.c1438 = Constraint(expr= m.b619 - m.b637 >= 0)", "- m.x590 - m.x593 == 0) m.c909 = Constraint(expr= m.x210", "- m.b652 <= 0) m.c1069 = Constraint(expr= m.b651 - m.b652", "+ 0.999*m.b616)))*(0.001 + 0.999* m.b616) <= 0) m.c233 = Constraint(expr=", "- m.b668 - m.b669 + m.b670 - m.b760 <= 0)", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x805 = Var(within=Reals,bounds=(None,None),initialize=0) m.x806 = Var(within=Reals,bounds=(None,None),initialize=0) m.x807 =", "0) m.c959 = Constraint(expr= 3*m.b722 + m.x812 == 0) m.c960", "m.x791 = Var(within=Reals,bounds=(None,None),initialize=0) m.x792 = Var(within=Reals,bounds=(None,None),initialize=0) m.x793 = Var(within=Reals,bounds=(None,None),initialize=0) m.x794", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b647 = Var(within=Binary,bounds=(0,1),initialize=0) m.b648 = Var(within=Binary,bounds=(0,1),initialize=0) m.b649 =", "<= 1) m.c1168 = Constraint(expr= m.b717 + m.b718 <= 1)", "Constraint(expr= m.x499 + 0.940066550763924*m.b664 <= 0.940066550763924) m.c713 = Constraint(expr= m.x518", "+ m.x269/(0.001 + 0.999*m.b617)))*(0.001 + 0.999*m.b617) <= 0) m.c258 =", "<= 40) m.c74 = Constraint(expr= m.x224 - 3.71357206670431*m.b596 <= 0)", "0) m.c850 = Constraint(expr= m.x583 == 0) m.c851 = Constraint(expr=", "0) m.c510 = Constraint(expr= m.x444 == 0) m.c511 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x377 = Var(within=Reals,bounds=(0,None),initialize=0) m.x378 = Var(within=Reals,bounds=(0,None),initialize=0) m.x379 = Var(within=Reals,bounds=(0,None),initialize=0)", "1) m.c1115 = Constraint(expr= m.b692 + m.b693 <= 1) m.c1116", "Var(within=Reals,bounds=(0,None),initialize=0) m.x398 = Var(within=Reals,bounds=(0,None),initialize=0) m.x399 = Var(within=Reals,bounds=(0,None),initialize=0) m.x400 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= m.x582 == 0) m.c850 = Constraint(expr= m.x583 ==", "= Constraint(expr= m.x37 - m.x256 - m.x259 == 0) m.c182", ">= 0) 
m.c1416 = Constraint(expr= m.b603 - m.b615 >= 0)", "- m.b694 <= 0) m.c1292 = Constraint(expr= m.b605 - m.b695", "= Constraint(expr= m.x474 + 1.18887736200171*m.b657 <= 1.18887736200171) m.c658 = Constraint(expr=", "- m.x369 - m.x372 == 0) m.c478 = Constraint(expr= m.x85", "- m.x98 == 0) m.c27 = Constraint(expr= m.x75 - m.x96", "- m.x313 == 0) m.c350 = Constraint(expr= m.x77 - m.x356", "2*m.b770 - m.b771 - 3*m.b772 - 8*m.b773 - 3*m.b774 -", "0) m.c1312 = Constraint(expr= - m.b623 - m.b624 + m.b625", "Var(within=Reals,bounds=(0,None),initialize=0) m.x198 = Var(within=Reals,bounds=(0,None),initialize=0) m.x199 = Var(within=Reals,bounds=(0,None),initialize=0) m.x200 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x434 = Var(within=Reals,bounds=(0,None),initialize=0) m.x435 = Var(within=Reals,bounds=(0,None),initialize=0) m.x436 = Var(within=Reals,bounds=(0,None),initialize=0) m.x437", "0.78338879230327*m.b658 <= 0.78338879230327) m.c665 = Constraint(expr=(m.x488/(0.001 + 0.999*m.b659) - 1.2*log(1", "Constraint(expr= m.x79 - m.x103 - m.x106 - m.x109 == 0)", "m.b661 <= 0) m.c1079 = Constraint(expr= m.b662 - m.b663 <=", "- m.x338 - m.x341 == 0) m.c270 = Constraint(expr= m.x69", "Var(within=Reals,bounds=(None,None),initialize=0) m.x821 = Var(within=Reals,bounds=(None,None),initialize=0) m.x822 = Var(within=Reals,bounds=(None,None),initialize=0) m.x823 = Var(within=Reals,bounds=(None,None),initialize=0)", "- m.x472 - m.x475 == 0) m.c650 = Constraint(expr= m.x146", "Var(within=Reals,bounds=(None,None),initialize=0) m.x805 = Var(within=Reals,bounds=(None,None),initialize=0) m.x806 = Var(within=Reals,bounds=(None,None),initialize=0) m.x807 = Var(within=Reals,bounds=(None,None),initialize=0)", "== 0) m.c263 = Constraint(expr= m.x341 == 0) m.c264 =", "m.x228 == 0) m.c61 = Constraint(expr= m.x229 == 0) m.c62", "0) m.c1391 = Constraint(expr= - m.b608 + m.b620 + m.b623", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x18 = Var(within=Reals,bounds=(0,None),initialize=0) 
m.x19 = Var(within=Reals,bounds=(0,None),initialize=0) m.x20 =", "= Constraint(expr= m.x41 - m.x269 - m.x275 == 0) m.c267", "= Constraint(expr= 6*m.b769 + m.x859 == 0) m.c1007 = Constraint(expr=", "m.c1045 = Constraint(expr= m.b627 - m.b628 <= 0) m.c1046 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x70 = Var(within=Reals,bounds=(0,None),initialize=0) m.x71 = Var(within=Reals,bounds=(0,None),initialize=0) m.x72 =", "Constraint(expr= m.x206 - m.x584 - m.x587 == 0) m.c882 =", "- 0.6*m.x560 + m.x590 == 0) m.c897 = Constraint(expr= -", "== 1) m.c1375 = Constraint(expr= m.b598 + m.b601 == 1)", "m.b756 <= 1) m.c1242 = Constraint(expr= m.b755 + m.b757 <=", "0) m.c1380 = Constraint(expr= - m.b612 + m.b630 >= 0)", "m.x498 = Var(within=Reals,bounds=(0,None),initialize=0) m.x499 = Var(within=Reals,bounds=(0,None),initialize=0) m.x500 = Var(within=Reals,bounds=(0,None),initialize=0) m.x501", "0) m.c270 = Constraint(expr= m.x69 - m.x339 - m.x342 ==", "- 4*m.b744 - m.b745 - 2*m.b746 - 5*m.b747 - 2*m.b748", "Var(within=Binary,bounds=(0,1),initialize=0) m.b757 = Var(within=Binary,bounds=(0,1),initialize=0) m.b758 = Var(within=Binary,bounds=(0,1),initialize=0) m.b759 = Var(within=Binary,bounds=(0,1),initialize=0)", "1.2*log(1 + m.x476/(0.001 + 0.999*m.b659)))*(0.001 + 0.999* m.b659) <= 0)", "= Constraint(expr= m.x531 == 0) m.c727 = Constraint(expr= m.x532 ==", "Constraint(expr= m.x570 + 0.666992981045719*m.b672 <= 0.666992981045719) m.c814 = Constraint(expr= m.x571", "- m.x284 - m.x287 == 0) m.c213 = Constraint(expr= m.x48", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x266 = Var(within=Reals,bounds=(0,None),initialize=0) m.x267 = Var(within=Reals,bounds=(0,None),initialize=0) m.x268 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x218 = Var(within=Reals,bounds=(0,None),initialize=0) m.x219 = Var(within=Reals,bounds=(0,None),initialize=0) m.x220 =", "m.b624 = Var(within=Binary,bounds=(0,1),initialize=0) m.b625 = Var(within=Binary,bounds=(0,1),initialize=0) 
m.b626 = Var(within=Binary,bounds=(0,1),initialize=0) m.b627", "Constraint(expr= m.x262 - 3.34221486003388*m.b604 <= 0) m.c131 = Constraint(expr= m.x263", "280*m.x203 + 400*m.x204 + 430*m.x205 + 290*m.x206 + 300*m.x207 +", "Constraint(expr= m.x115 - m.x424 - m.x427 == 0) m.c422 =", "m.x577 + 0.480234946352917*m.b676 <= 0.480234946352917) m.c842 = Constraint(expr=(m.x578/(0.001 + 0.999*m.b677)", "Constraint(expr= m.b683 - m.b773 <= 0) m.c1371 = Constraint(expr= -", "m.c1065 = Constraint(expr= m.b647 - m.b649 <= 0) m.c1066 =", "Constraint(expr= m.b732 + m.b733 <= 1) m.c1199 = Constraint(expr= m.b734", "= Constraint(expr= m.x421 + 20*m.b631 <= 20) m.c407 = Constraint(expr=(m.x422/(0.001", "== 0) m.c753 = Constraint(expr= m.x510 == 0) m.c754 =", "<= 1) m.c1127 = Constraint(expr= m.b698 + m.b699 <= 1)", "1) m.c1278 = Constraint(expr= m.b773 + m.b775 <= 1) m.c1279", "== 0) m.c299 = Constraint(expr= m.x296 - 15*m.b620 <= 0)", "= Constraint(expr= m.x298 - 15*m.b622 <= 0) m.c302 = Constraint(expr=", "Constraint(expr= m.x360 == 0) m.c346 = Constraint(expr= m.x361 == 0)", "+ m.x849 == 0) m.c997 = Constraint(expr= 3*m.b760 + m.x850", "m.x243/(0.001 + 0.999*m.b606)))*(0.001 + 0.999* m.b606) <= 0) m.c136 =", "Constraint(expr= m.x49 - m.x286 - m.x289 == 0) m.c215 =", "= Constraint(expr= m.x341 + 1.26558121681553*m.b617 <= 1.26558121681553) m.c282 = Constraint(expr=", "- 0.842233385663186*m.b632 <= 0) m.c429 = Constraint(expr= m.x423 - 0.842233385663186*m.b633", "= Constraint(expr= m.x69 - m.x81 - m.x84 == 0) m.c22", "- m.x32 == 0) m.c12 = Constraint(expr= m.x24 - m.x27", "= Constraint(expr= 4*m.b754 + m.x844 == 0) m.c992 = Constraint(expr=", "Constraint(expr= m.b659 - m.b660 <= 0) m.c1077 = Constraint(expr= m.b659", "0) m.c484 = Constraint(expr= m.x121 - m.x436 - m.x439 ==", "<= 0) m.c1337 = Constraint(expr= m.b650 - m.b740 <= 0)", "Constraint(expr= - 0.5*m.x512 + m.x536 == 0) m.c750 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x192 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x193 = Var(within=Reals,bounds=(0,None),initialize=0) m.x194 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b746 = Var(within=Binary,bounds=(0,1),initialize=0) m.b747 = Var(within=Binary,bounds=(0,1),initialize=0) m.b748 = Var(within=Binary,bounds=(0,1),initialize=0) m.b749", "= Constraint(expr= m.x432 + 0.572481933717686*m.b636 <= 0.572481933717686) m.c460 = Constraint(expr=", "m.x522 == 0) m.c706 = Constraint(expr= m.x175 - m.x520 -", "== 0) m.c798 = Constraint(expr= m.x183 - m.x543 - m.x546", "0) m.c1001 = Constraint(expr= 7*m.b764 + m.x854 == 0) m.c1002", "+ 40*m.x110 + 30*m.x111 + 15*m.x112 + 15*m.x113 + 20*m.x114", "1.26558121681553*m.b618 <= 0) m.c280 = Constraint(expr= m.x340 - 1.26558121681553*m.b619 <=", "+ m.x417 == 0) m.c370 = Constraint(expr= - m.x376 +", "m.x320 == 0) m.c210 = Constraint(expr= m.x321 == 0) m.c211", "m.b706 = Var(within=Binary,bounds=(0,1),initialize=0) m.b707 = Var(within=Binary,bounds=(0,1),initialize=0) m.b708 = Var(within=Binary,bounds=(0,1),initialize=0) m.b709", "+ m.x448 == 0) m.c533 = Constraint(expr= m.x395 == 0)", "m.c1191 = Constraint(expr= m.b728 + m.b730 <= 1) m.c1192 =", "m.x287 + 3.34221486003388*m.b611 <= 3.34221486003388) m.c222 = Constraint(expr= m.x288 +", "Constraint(expr= m.x501 - 0.940066550763924*m.b666 <= 0) m.c736 = Constraint(expr= m.x502", "1) m.c1173 = Constraint(expr= m.b719 + m.b721 <= 1) m.c1174", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x392 = Var(within=Reals,bounds=(0,None),initialize=0) m.x393 = Var(within=Reals,bounds=(0,None),initialize=0) m.x394 =", "m.x277 == 0) m.c263 = Constraint(expr= m.x341 == 0) m.c264", "m.x294 + 3.34221486003388*m.b615 <= 3.34221486003388) m.c250 = Constraint(expr= m.x295 +", "m.b732 <= 0) m.c1330 = Constraint(expr= - m.b641 - m.b642", "Var(within=Reals,bounds=(0,None),initialize=0) m.x350 = Var(within=Reals,bounds=(0,None),initialize=0) m.x351 = Var(within=Reals,bounds=(0,None),initialize=0) m.x352 = 
Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= - m.b638 - m.b639 + m.b640 - m.b730 <=", "= Constraint(expr= m.x267 - 2.54515263975353*m.b606 <= 0) m.c157 = Constraint(expr=", "0) m.c1097 = Constraint(expr= m.b680 - m.b681 <= 0) m.c1098", "m.x132 = Var(within=Reals,bounds=(0,None),initialize=0) m.x133 = Var(within=Reals,bounds=(0,None),initialize=0) m.x134 = Var(within=Reals,bounds=(0,None),initialize=0) m.x135", "Var(within=Reals,bounds=(0,None),initialize=0) m.x366 = Var(within=Reals,bounds=(0,None),initialize=0) m.x367 = Var(within=Reals,bounds=(0,None),initialize=0) m.x368 = Var(within=Reals,bounds=(0,None),initialize=0)", "1.2*log(1 + m.x477/(0.001 + 0.999*m.b660)))*(0.001 + 0.999* m.b660) <= 0)", "m.c1167 = Constraint(expr= m.b716 + m.b718 <= 1) m.c1168 =", "m.b765 <= 1) m.c1260 = Constraint(expr= m.b764 + m.b766 <=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x566 = Var(within=Reals,bounds=(0,None),initialize=0) m.x567 = Var(within=Reals,bounds=(0,None),initialize=0) m.x568 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x179 - m.x188 - m.x191 - m.x194 == 0) m.c51", "0) m.c935 = Constraint(expr= 6*m.b698 + m.x788 == 0) m.c936", "- 15*m.b668 <= 0) m.c783 = Constraint(expr= m.x537 - 15*m.b669", "m.x822 = Var(within=Reals,bounds=(None,None),initialize=0) m.x823 = Var(within=Reals,bounds=(None,None),initialize=0) m.x824 = Var(within=Reals,bounds=(None,None),initialize=0) m.x825", "1.83548069293539) m.c229 = Constraint(expr= m.x322 + 1.83548069293539*m.b613 <= 1.83548069293539) m.c230", "m.x447 - 9*m.b645 <= 0) m.c553 = Constraint(expr= m.x448 -", "m.x448 = Var(within=Reals,bounds=(0,None),initialize=0) m.x449 = Var(within=Reals,bounds=(0,None),initialize=0) m.x450 = Var(within=Reals,bounds=(0,None),initialize=0) m.x451", "- 0.705049913072943*m.b663 <= 0) m.c715 = Constraint(expr= m.x520 - 0.705049913072943*m.b664", "Var(within=Reals,bounds=(None,None),initialize=0) m.x799 = Var(within=Reals,bounds=(None,None),initialize=0) m.x800 
= Var(within=Reals,bounds=(None,None),initialize=0) m.x801 = Var(within=Reals,bounds=(None,None),initialize=0)", "m.c495 = Constraint(expr= m.x384 + 33.5*m.b639 <= 33.5) m.c496 =", "m.c1380 = Constraint(expr= - m.b612 + m.b630 >= 0) m.c1381", "= Constraint(expr= m.x26 - m.x236 - m.x239 == 0) m.c117", "Var(within=Binary,bounds=(0,1),initialize=0) m.b746 = Var(within=Binary,bounds=(0,1),initialize=0) m.b747 = Var(within=Binary,bounds=(0,1),initialize=0) m.b748 = Var(within=Binary,bounds=(0,1),initialize=0)", "0) m.c110 = Constraint(expr= m.x239 == 0) m.c111 = Constraint(expr=", "- m.x434 - m.x437 == 0) m.c483 = Constraint(expr= m.x120", "Var(within=Binary,bounds=(0,1),initialize=0) m.b751 = Var(within=Binary,bounds=(0,1),initialize=0) m.b752 = Var(within=Binary,bounds=(0,1),initialize=0) m.b753 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.c15 = Constraint(expr= m.x39 - m.x48 - m.x51 == 0)", "m.b735 <= 1) m.c1202 = Constraint(expr= m.b735 + m.b736 <=", "m.c837 = Constraint(expr= m.x573 - 0.480234946352917*m.b675 <= 0) m.c838 =", "= Constraint(expr= m.b626 - m.b647 >= 0) m.c1449 = Constraint(expr=", "m.x557 == 0) m.c879 = Constraint(expr= m.x189 - m.x555 -", "m.b763 <= 1) m.c1255 = Constraint(expr= m.b761 + m.b762 <=", "1) m.c1120 = Constraint(expr= m.b693 + m.b694 <= 1) m.c1121", "+ m.b600 - m.b690 <= 0) m.c1288 = Constraint(expr= -", "m.x583 == 0) m.c857 = Constraint(expr= m.x527 - 0.994083415506506*m.b677 <=", "m.x453 - 1.04900943706034*m.b648 <= 0) m.c580 = Constraint(expr= m.x454 -", "m.c631 = Constraint(expr= m.x415 + 3.04984759446376*m.b655 <= 3.04984759446376) m.c632 =", "Constraint(expr=(m.x488/(0.001 + 0.999*m.b659) - 1.2*log(1 + m.x476/(0.001 + 0.999*m.b659)))*(0.001 +", "m.x255 - m.x258 == 0) m.c181 = Constraint(expr= m.x37 -", "- 1.26558121681553*m.b636 <= 0) m.c451 = Constraint(expr= m.x364 - 1.26558121681553*m.b637", "model = m = ConcreteModel() m.x2 = Var(within=Reals,bounds=(0,40),initialize=0) m.x3 =", "- m.b707 <= 0) m.c1305 = Constraint(expr= 
- m.b617 +", "Constraint(expr= m.b602 - m.b614 >= 0) m.c1416 = Constraint(expr= m.b603", "m.x500/(0.001 + 0.999*m.b665)))*(0.001 + 0.999* m.b665) <= 0) m.c720 =", "<= 0) m.c1090 = Constraint(expr= m.b672 - m.b673 <= 0)", "= Constraint(expr= m.x405 - 3.04984759446376*m.b651 <= 0) m.c601 = Constraint(expr=", "m.x534 == 0) m.c847 = Constraint(expr= m.x535 == 0) m.c848", "0) m.c1018 = Constraint(expr= m.b600 - m.b601 <= 0) m.c1019", "Constraint(expr= m.b686 + m.b687 <= 1) m.c1106 = Constraint(expr= m.b687", "<= 0) m.c635 = Constraint(expr= m.x467 + 1.18887736200171*m.b653 <= 1.18887736200171)", "m.c630 = Constraint(expr= m.x414 + 3.04984759446376*m.b654 <= 3.04984759446376) m.c631 =", "0.940066550763924*m.b668 <= 0.940066550763924) m.c774 = Constraint(expr= m.x510 + 0.940066550763924*m.b669 <=", "0) m.c924 = Constraint(expr= 4*m.b687 + m.x777 == 0) m.c925", "0) m.c667 = Constraint(expr=(m.x490/(0.001 + 0.999*m.b661) - 1.2*log(1 + m.x478/(0.001", "- m.x496 - m.x499 == 0) m.c704 = Constraint(expr= m.x173", "= Constraint(expr= - m.b605 - m.b606 + m.b607 - m.b697", "- m.b700 <= 0) m.c1298 = Constraint(expr= m.b611 - m.b701", "+ 30*m.b608 <= 30) m.c195 = Constraint(expr= m.x258 + 30*m.b609", "+ m.x478/(0.001 + 0.999*m.b661)))*(0.001 + 0.999* m.b661) <= 0) m.c668", "m.b737 + m.b739 <= 1) m.c1210 = Constraint(expr= m.b738 +", "m.b644 = Var(within=Binary,bounds=(0,1),initialize=0) m.b645 = Var(within=Binary,bounds=(0,1),initialize=0) m.b646 = Var(within=Binary,bounds=(0,1),initialize=0) m.b647", "m.c1424 = Constraint(expr= m.b608 - m.b623 >= 0) m.c1425 =", "m.x148 - m.x151 + m.x154 == 0) m.c41 = Constraint(expr=", "m.x256 - m.x259 == 0) m.c182 = Constraint(expr= m.x44 -", "- 15*m.b623 <= 0) m.c327 = Constraint(expr= m.x303 - 15*m.b624", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b711 = Var(within=Binary,bounds=(0,1),initialize=0) m.b712 = Var(within=Binary,bounds=(0,1),initialize=0) m.b713 =", "0) m.c1420 = Constraint(expr= m.b607 - m.b619 >= 0) m.c1421", "m.x58 - 
m.x304 - m.x307 == 0) m.c323 = Constraint(expr=", "<= 0) m.c1358 = Constraint(expr= m.b671 - m.b761 <= 0)", "== 0) m.c26 = Constraint(expr= m.x74 - m.x95 - m.x98", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x822 = Var(within=Reals,bounds=(None,None),initialize=0) m.x823 = Var(within=Reals,bounds=(None,None),initialize=0) m.x824 =", "Var(within=Binary,bounds=(0,1),initialize=0) m.b664 = Var(within=Binary,bounds=(0,1),initialize=0) m.b665 = Var(within=Binary,bounds=(0,1),initialize=0) m.b666 = Var(within=Binary,bounds=(0,1),initialize=0)", "= Constraint(expr= m.x247 == 0) m.c140 = Constraint(expr= m.x272 ==", "0.705049913072943*m.b671 <= 0.705049913072943) m.c807 = Constraint(expr= m.x546 + 0.705049913072943*m.b672 <=", "= Constraint(expr= m.x565 + 15*m.b685 <= 15) m.c917 = Constraint(expr=", "+ 0.999*m.b601) - 1.2*log(1 + m.x220/(0.001 + 0.999*m.b601)))*(0.001 + 0.999*", "<= 0) m.c585 = Constraint(expr=(m.x459/(0.001 + 0.999*m.b651) - 0.8*log(1 +", "15) m.c917 = Constraint(expr= m.x590 - 9*m.b683 <= 0) m.c918", "m.c919 = Constraint(expr= m.x592 - 9*m.b685 <= 0) m.c920 =", "== 0) m.c217 = Constraint(expr= m.x64 - m.x316 - m.x322", "m.b716 + m.b718 <= 1) m.c1168 = Constraint(expr= m.b717 +", "Constraint(expr= m.x146 - m.x482 - m.x485 == 0) m.c651 =", "= Constraint(expr= m.x581 + 0.690184503917672*m.b677 <= 0.690184503917672) m.c867 = Constraint(expr=", "+ 1.26558121681553*m.b637 <= 1.26558121681553) m.c455 = Constraint(expr= m.x428 - 0.572481933717686*m.b635", "0) m.c880 = Constraint(expr= m.x190 - m.x556 - m.x559 ==", "- m.x351 - m.x354 == 0) m.c325 = Constraint(expr= m.x76", "= Constraint(expr= - m.b626 + m.b627 - m.b717 <= 0)", "m.c995 = Constraint(expr= 10*m.b758 + m.x848 == 0) m.c996 =", "m.c476 = Constraint(expr= m.x83 - m.x368 - m.x371 == 0)", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x787 = Var(within=Reals,bounds=(None,None),initialize=0) m.x788 = Var(within=Reals,bounds=(None,None),initialize=0) m.x789 =", "= Constraint(expr= m.x427 
+ 0.842233385663186*m.b634 <= 0.842233385663186) m.c434 = Constraint(expr=(m.x428/(0.001", "= Constraint(expr= m.x396 == 0) m.c535 = Constraint(expr= m.x397 ==", "m.x461 + 1.11894339953103*m.b650 <= 1.11894339953103) m.c609 = Constraint(expr= m.x462 +", "1.26558121681553) m.c489 = Constraint(expr= m.x372 + 1.26558121681553*m.b639 <= 1.26558121681553) m.c490", "m.b733 <= 0) m.c1331 = Constraint(expr= m.b644 - m.b734 <=", "m.b606 - m.b607 <= 0) m.c1025 = Constraint(expr= m.b608 -", "13.5*m.b620 <= 0) m.c306 = Constraint(expr= m.x345 - 13.5*m.b621 <=", "= Constraint(expr= m.x326 - 1.32154609891348*m.b614 <= 0) m.c252 = Constraint(expr=", "m.x305 == 0) m.c315 = Constraint(expr= m.x306 == 0) m.c316", "m.x47 = Var(within=Reals,bounds=(0,None),initialize=0) m.x48 = Var(within=Reals,bounds=(0,None),initialize=0) m.x49 = Var(within=Reals,bounds=(0,None),initialize=0) m.x50", "m.x502 - 0.940066550763924*m.b667 <= 0) m.c737 = Constraint(expr= m.x503 +", "m.b607 + m.b619 >= 0) m.c1388 = Constraint(expr= - m.b617", "m.x135 - m.x465 - m.x468 == 0) m.c625 = Constraint(expr=", "0.999* m.b614) <= 0) m.c231 = Constraint(expr=(m.x327/(0.001 + 0.999*m.b615) -", "Var(within=Binary,bounds=(0,1),initialize=0) m.b702 = Var(within=Binary,bounds=(0,1),initialize=0) m.b703 = Var(within=Binary,bounds=(0,1),initialize=0) m.b704 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.x162 = Var(within=Reals,bounds=(0,None),initialize=0) m.x163 = Var(within=Reals,bounds=(0,None),initialize=0) m.x164 = Var(within=Reals,bounds=(0,None),initialize=0) m.x165", "- m.x218 - m.x221 == 0) m.c90 = Constraint(expr= m.x9", "m.x416 - m.x419 == 0) m.c387 = Constraint(expr= m.x111 -", "- m.x560 - m.x563 == 0) m.c906 = Constraint(expr= m.x192", "m.x798 == 0) m.c946 = Constraint(expr= 7*m.b709 + m.x799 ==", "m.x531 == 0) m.c727 = Constraint(expr= m.x532 == 0) m.c728", "Constraint(expr=(m.x316/(0.001 + 0.999*m.b613) - 1.25*log(1 + m.x286/(0.001 + 0.999*m.b613)))*(0.001 +", "= Constraint(expr= m.x322 + 
1.83548069293539*m.b613 <= 1.83548069293539) m.c230 = Constraint(expr=(m.x326/(0.001", "- 15*m.b628 <= 0) m.c356 = Constraint(expr= m.x311 + 15*m.b626", "= Constraint(expr=(m.x579/(0.001 + 0.999*m.b678) - log(1 + m.x528/(0.001 + 0.999*m.b678)))*(0.001", "= Constraint(expr= m.b662 - m.b752 <= 0) m.c1350 = Constraint(expr=", "m.x528 = Var(within=Reals,bounds=(0,None),initialize=0) m.x529 = Var(within=Reals,bounds=(0,None),initialize=0) m.x530 = Var(within=Reals,bounds=(0,None),initialize=0) m.x531", "<= 1) m.c1264 = Constraint(expr= m.b765 + m.b766 <= 1)", "Constraint(expr= - m.b614 + m.b615 - m.b705 <= 0) m.c1303", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x282 = Var(within=Reals,bounds=(0,None),initialize=0) m.x283 = Var(within=Reals,bounds=(0,None),initialize=0) m.x284 =", "m.c841 = Constraint(expr= m.x577 + 0.480234946352917*m.b676 <= 0.480234946352917) m.c842 =", "m.x235 = Var(within=Reals,bounds=(0,None),initialize=0) m.x236 = Var(within=Reals,bounds=(0,None),initialize=0) m.x237 = Var(within=Reals,bounds=(0,None),initialize=0) m.x238", ">= 0) m.c1443 = Constraint(expr= m.b624 - m.b642 >= 0)", "Constraint(expr= m.x30 - m.x243 - m.x246 == 0) m.c145 =", "== 0) m.c883 = Constraint(expr= m.x208 - m.x586 - m.x589", "m.b616 <= 0) m.c1033 = Constraint(expr= m.b615 - m.b616 <=", "0) m.c932 = Constraint(expr= 10*m.b695 + m.x785 == 0) m.c933", "m.c185 = Constraint(expr= m.x248 - 4.45628648004517*m.b608 <= 0) m.c186 =", "m.c784 = Constraint(expr= m.x538 - 15*m.b670 <= 0) m.c785 =", "m.c1000 = Constraint(expr= 7*m.b763 + m.x853 == 0) m.c1001 =", "- log(1 + m.x329/(0.001 + 0.999*m.b632)))*(0.001 + 0.999*m.b632) <= 0)", "<= 0) m.c76 = Constraint(expr= m.x226 - 3.71357206670431*m.b598 <= 0)", "Constraint(expr= m.x229 + 3.71357206670431*m.b598 <= 3.71357206670431) m.c80 = Constraint(expr=(m.x230/(0.001 +", "+ m.b712 <= 1) m.c1156 = Constraint(expr= m.b711 + m.b712", "m.b690 <= 0) m.c1288 = Constraint(expr= - m.b599 - m.b600", "3.04984759446376*m.b652 <= 
3.04984759446376) m.c605 = Constraint(expr= m.x458 - 1.11894339953103*m.b650 <=", "m.c795 = Constraint(expr= m.x570 == 0) m.c796 = Constraint(expr= m.x571", "= Constraint(expr= m.x447 - 9*m.b645 <= 0) m.c553 = Constraint(expr=", "Constraint(expr= m.x116 - m.x428 - m.x431 == 0) m.c447 =", "m.c362 = Constraint(expr= m.x359 + 3.04984759446376*m.b626 <= 3.04984759446376) m.c363 =", "Constraint(expr= m.b650 - m.b652 <= 0) m.c1069 = Constraint(expr= m.b651", "<= 0.705049913072943) m.c717 = Constraint(expr= m.x522 + 0.705049913072943*m.b663 <= 0.705049913072943)", "Constraint(expr= m.x65 - m.x329 - m.x335 == 0) m.c417 =", "m.x534 + 0.994083415506506*m.b678 <= 0.994083415506506) m.c862 = Constraint(expr= m.x535 +", "Var(within=Binary,bounds=(0,1),initialize=0) m.b739 = Var(within=Binary,bounds=(0,1),initialize=0) m.b740 = Var(within=Binary,bounds=(0,1),initialize=0) m.b741 = Var(within=Binary,bounds=(0,1),initialize=0)", "== 0) m.c906 = Constraint(expr= m.x192 - m.x561 - m.x564", ">= 0) m.c1449 = Constraint(expr= m.b627 - m.b648 >= 0)", "= Constraint(expr= m.b684 - m.b685 <= 0) m.c1103 = Constraint(expr=", "m.b630 = Var(within=Binary,bounds=(0,1),initialize=0) m.b631 = Var(within=Binary,bounds=(0,1),initialize=0) m.b632 = Var(within=Binary,bounds=(0,1),initialize=0) m.b633", "+ m.x805 == 0) m.c953 = Constraint(expr= 3*m.b716 + m.x806", "<= 0) m.c587 = Constraint(expr= m.x407 == 0) m.c588 =", "m.b682 <= 0) m.c1100 = Constraint(expr= m.b683 - m.b684 <=", "0.5*m.x254 + m.x278 == 0) m.c165 = Constraint(expr= - 0.5*m.x255", "13.5) m.c310 = Constraint(expr= m.x349 + 13.5*m.b622 <= 13.5) m.c311", "m.c620 = Constraint(expr= m.x107 - m.x410 - m.x413 == 0)", "- m.b614 + m.b632 >= 0) m.c1383 = Constraint(expr= -", "Constraint(expr= m.b729 + m.b730 <= 1) m.c1191 = Constraint(expr= m.b728", "1.1*log(1 + m.x308/(0.001 + 0.999*m.b626)))*(0.001 + 0.999* m.b626) <= 0)", "m.b617 - m.b638 >= 0) m.c1440 = Constraint(expr= m.b618 -", "15) m.c358 = Constraint(expr= m.x313 + 15*m.b628 <= 15) 
m.c359", "m.c1357 = Constraint(expr= - m.b668 - m.b669 + m.b670 -", "4.45628648004517*m.b608 <= 0) m.c186 = Constraint(expr= m.x249 - 4.45628648004517*m.b609 <=", "m.b620 - m.b621 + m.b622 - m.b712 <= 0) m.c1310", "Constraint(expr= m.x11 - m.x224 - m.x227 == 0) m.c66 =", "0 0 0 0 0 0 # # Nonzero counts", "- m.x316 - m.x322 == 0) m.c218 = Constraint(expr= m.x284", "+ 3.34221486003388*m.b615 <= 3.34221486003388) m.c250 = Constraint(expr= m.x295 + 3.34221486003388*m.b616", "0) m.c1327 = Constraint(expr= - m.b638 - m.b639 + m.b640", "m.b713 + m.b715 <= 1) m.c1162 = Constraint(expr= m.b714 +", "0.999* m.b601) <= 0) m.c83 = Constraint(expr= m.x221 == 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x585 = Var(within=Reals,bounds=(0,None),initialize=0) m.x586 = Var(within=Reals,bounds=(0,None),initialize=0) m.x587 = Var(within=Reals,bounds=(0,None),initialize=0)", "<= 3.34221486003388) m.c132 = Constraint(expr= m.x264 + 3.34221486003388*m.b603 <= 3.34221486003388)", "- m.b745 <= 0) m.c1343 = Constraint(expr= m.b656 - m.b746", "m.b618 >= 0) m.c1387 = Constraint(expr= - m.b607 + m.b619", "m.c207 = Constraint(expr= m.x288 == 0) m.c208 = Constraint(expr= m.x289", "Var(within=Reals,bounds=(0,None),initialize=0) m.x436 = Var(within=Reals,bounds=(0,None),initialize=0) m.x437 = Var(within=Reals,bounds=(0,None),initialize=0) m.x438 = Var(within=Reals,bounds=(0,None),initialize=0)", "<= 2.54515263975353) m.c276 = Constraint(expr= m.x276 + 2.54515263975353*m.b618 <= 2.54515263975353)", "m.c817 = Constraint(expr=(m.x574/(0.001 + 0.999*m.b676) - 0.9*log(1 + m.x550/(0.001 +", "Constraint(expr= m.x86 - m.x374 - m.x377 == 0) m.c384 =", "m.c1177 = Constraint(expr= m.b722 + m.b723 <= 1) m.c1178 =", "m.c265 = Constraint(expr= m.x343 == 0) m.c266 = Constraint(expr= m.x41", "0) m.c1456 = Constraint(expr= m.b628 - m.b655 >= 0) m.c1457", "- m.x402 == 0) m.c568 = Constraint(expr= m.x103 - m.x400", "= Constraint(expr= m.b764 + m.b765 <= 1) m.c1260 = Constraint(expr=", "m.x463 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x464 = Var(within=Reals,bounds=(0,None),initialize=0) m.x465 = Var(within=Reals,bounds=(0,None),initialize=0) m.x466", "m.c1289 = Constraint(expr= m.b602 - m.b692 <= 0) m.c1290 =", "m.x5 - m.x212 - m.x215 == 0) m.c63 = Constraint(expr=", "13.5*m.b622 <= 13.5) m.c311 = Constraint(expr= - 0.6*m.x302 + m.x350", "0) m.c1047 = Constraint(expr= m.b629 - m.b631 <= 0) m.c1048", "<= 0) m.c1087 = Constraint(expr= m.b669 - m.b670 <= 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x534 = Var(within=Reals,bounds=(0,None),initialize=0) m.x535 = Var(within=Reals,bounds=(0,None),initialize=0) m.x536 =", "Constraint(expr= - m.x387 + m.x441 == 0) m.c505 = Constraint(expr=", "0) m.c1324 = Constraint(expr= - m.b635 - m.b636 + m.b637", "= Constraint(expr= m.x46 - m.x55 - m.x58 - m.x61 ==", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b714 = Var(within=Binary,bounds=(0,1),initialize=0) m.b715 = Var(within=Binary,bounds=(0,1),initialize=0) m.b716 =", "0) m.c326 = Constraint(expr= m.x302 - 15*m.b623 <= 0) m.c327", "1.11894339953103) m.c611 = Constraint(expr=(m.x464/(0.001 + 0.999*m.b653) - 0.85*log(1 + m.x410/(0.001", "<= 1.83548069293539) m.c393 = Constraint(expr= m.x324 + 1.83548069293539*m.b630 <= 1.83548069293539)", "= Constraint(expr= m.x591 - 9*m.b684 <= 0) m.c919 = Constraint(expr=", "m.c1062 = Constraint(expr= m.b644 - m.b646 <= 0) m.c1063 =", "m.x477 - 1.18887736200171*m.b660 <= 0) m.c682 = Constraint(expr= m.x478 -", "m.b702 <= 1) m.c1134 = Constraint(expr= m.b701 + m.b703 <=", "m.x431 == 0) m.c447 = Constraint(expr= m.x117 - m.x429 -", "= Constraint(expr= m.x400 - 3.04984759446376*m.b649 <= 0) m.c575 = Constraint(expr=", "m.x363 - 1.26558121681553*m.b636 <= 0) m.c451 = Constraint(expr= m.x364 -", "= Constraint(expr= m.x287 + 3.34221486003388*m.b611 <= 3.34221486003388) m.c222 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x121 = Var(within=Reals,bounds=(0,None),initialize=0) m.x122 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x123 = Var(within=Reals,bounds=(0,None),initialize=0)", "0.999* m.b627) <= 0) m.c340 = Constraint(expr=(m.x358/(0.001 + 0.999*m.b628) -", "- m.b764 <= 0) m.c1362 = Constraint(expr= - m.b674 +", "Constraint(expr= m.x570 == 0) m.c796 = Constraint(expr= m.x571 == 0)", "Constraint(expr= 9*m.b766 + m.x856 == 0) m.c1004 = Constraint(expr= 4*m.b767", "m.b611) <= 0) m.c204 = Constraint(expr=(m.x315/(0.001 + 0.999*m.b612) - 1.25*log(1", "m.c649 = Constraint(expr= m.x142 - m.x472 - m.x475 == 0)", "Constraint(expr= m.b623 - m.b625 <= 0) m.c1042 = Constraint(expr= m.b624", "+ m.b697 <= 1) m.c1126 = Constraint(expr= m.b696 + m.b697", "Var(within=Reals,bounds=(0,None),initialize=0) m.x384 = Var(within=Reals,bounds=(0,None),initialize=0) m.x385 = Var(within=Reals,bounds=(0,None),initialize=0) m.x386 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x65 - m.x326 - m.x332 == 0) m.c243 =", "m.c183 = Constraint(expr= m.x45 - m.x279 - m.x282 == 0)", "m.c1256 = Constraint(expr= m.b762 + m.b763 <= 1) m.c1257 =", "m.x8 - m.x218 - m.x221 == 0) m.c90 = Constraint(expr=", "m.b609 - m.b624 >= 0) m.c1426 = Constraint(expr= m.b610 -", "m.c395 = Constraint(expr= m.x374 - 20*m.b629 <= 0) m.c396 =", "Var(within=Reals,bounds=(0,20),initialize=0) m.x89 = Var(within=Reals,bounds=(0,20),initialize=0) m.x90 = Var(within=Reals,bounds=(0,20),initialize=0) m.x91 = Var(within=Reals,bounds=(0,20),initialize=0)", "m.c242 = Constraint(expr= m.x65 - m.x326 - m.x332 == 0)", "== 0) m.c541 = Constraint(expr= m.x100 - m.x394 - m.x397", "- m.x215 == 0) m.c63 = Constraint(expr= m.x6 - m.x213", "= Constraint(expr= m.x348 == 0) m.c292 = Constraint(expr= m.x349 ==", "m.c1251 = Constraint(expr= m.b758 + m.b760 <= 1) m.c1252 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x550 = Var(within=Reals,bounds=(0,None),initialize=0) m.x551 = Var(within=Reals,bounds=(0,None),initialize=0) m.x552 =", "m.x325 == 0) m.c374 = Constraint(expr= m.x377 == 0) 
m.c375", "0.999* m.b654) <= 0) m.c613 = Constraint(expr=(m.x466/(0.001 + 0.999*m.b655) -", "m.c1433 = Constraint(expr= m.b614 - m.b632 >= 0) m.c1434 =", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b703 = Var(within=Binary,bounds=(0,1),initialize=0) m.b704 = Var(within=Binary,bounds=(0,1),initialize=0) m.b705 =", "= Constraint(expr= - m.b629 - m.b630 + m.b631 - m.b721", "- m.b631 <= 0) m.c1049 = Constraint(expr= m.b632 - m.b633", "m.b683 - m.b684 <= 0) m.c1101 = Constraint(expr= m.b683 -", "== 0) m.c507 = Constraint(expr= m.x390 == 0) m.c508 =", "<= 1) m.c1156 = Constraint(expr= m.b711 + m.b712 <= 1)", "0) m.c212 = Constraint(expr= m.x47 - m.x284 - m.x287 ==", "m.c1049 = Constraint(expr= m.b632 - m.b633 <= 0) m.c1050 =", "<= 0) m.c1297 = Constraint(expr= - m.b608 - m.b609 +", "<= 0) m.c260 = Constraint(expr= m.x275 == 0) m.c261 =", "Constraint(expr=(m.x326/(0.001 + 0.999*m.b614) - 0.9*log(1 + m.x290/(0.001 + 0.999*m.b614)))*(0.001 +", "1.26558121681553*m.b638 <= 0) m.c486 = Constraint(expr= m.x369 - 1.26558121681553*m.b639 <=", "- m.b735 <= 0) m.c1333 = Constraint(expr= - m.b644 -", "m.c1306 = Constraint(expr= - m.b617 - m.b618 + m.b619 -", "15) m.c357 = Constraint(expr= m.x312 + 15*m.b627 <= 15) m.c358", "m.x509 + 0.940066550763924*m.b668 <= 0.940066550763924) m.c774 = Constraint(expr= m.x510 +", "- m.x349 == 0) m.c299 = Constraint(expr= m.x296 - 15*m.b620", "+ m.x804 == 0) m.c952 = Constraint(expr= 4*m.b715 + m.x805", "= Constraint(expr= m.x257 + 30*m.b608 <= 30) m.c195 = Constraint(expr=", "== 0) m.c699 = Constraint(expr= m.x522 == 0) m.c700 =", "<= 0) m.c1078 = Constraint(expr= m.b660 - m.b661 <= 0)", "m.x417 = Var(within=Reals,bounds=(0,None),initialize=0) m.x418 = Var(within=Reals,bounds=(0,None),initialize=0) m.x419 = Var(within=Reals,bounds=(0,None),initialize=0) m.x420", "m.c577 = Constraint(expr= m.x403 + 3.04984759446376*m.b649 <= 3.04984759446376) m.c578 =", "+ m.x794 == 0) m.c942 = Constraint(expr= 3*m.b705 + m.x795", "m.b685 <= 0) m.c1102 = 
Constraint(expr= m.b684 - m.b685 <=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x514 = Var(within=Reals,bounds=(0,None),initialize=0) m.x515 = Var(within=Reals,bounds=(0,None),initialize=0) m.x516 =", "<= 0) m.c1343 = Constraint(expr= m.b656 - m.b746 <= 0)", "0.994083415506506*m.b667 <= 0) m.c743 = Constraint(expr= m.x530 + 0.994083415506506*m.b665 <=", "m.b687 <= 1) m.c1106 = Constraint(expr= m.b687 + m.b688 <=", "- 1.25*log(1 + m.x543/(0.001 + 0.999*m.b672)))*(0.001 + 0.999* m.b672) <=", "m.c1338 = Constraint(expr= - m.b650 + m.b651 - m.b741 <=", "0) m.c569 = Constraint(expr= m.x128 - m.x452 - m.x455 ==", "0) m.c324 = Constraint(expr= m.x75 - m.x351 - m.x354 ==", "Constraint(expr= m.x406 - 3.04984759446376*m.b652 <= 0) m.c602 = Constraint(expr= m.x407", "- 9*m.b696 - 5*m.b697 - 6*m.b698 - 10*m.b699 - 6*m.b700", "m.b653 + m.b654 - m.b744 <= 0) m.c1342 = Constraint(expr=", "= Constraint(expr= m.x293 == 0) m.c234 = Constraint(expr= m.x294 ==", "0) m.c986 = Constraint(expr= 9*m.b749 + m.x839 == 0) m.c987", "- m.x443 == 0) m.c516 = Constraint(expr= m.x123 - m.x441", "0.999*m.b632)))*(0.001 + 0.999*m.b632) <= 0) m.c408 = Constraint(expr=(m.x423/(0.001 + 0.999*m.b633)", "== 0) m.c857 = Constraint(expr= m.x527 - 0.994083415506506*m.b677 <= 0)", "Constraint(expr= m.b596 - m.b598 <= 0) m.c1015 = Constraint(expr= m.b597", "m.x151 + m.x154 == 0) m.c41 = Constraint(expr= m.x152 -", "= Constraint(expr= m.x243 - 4.45628648004517*m.b606 <= 0) m.c151 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x412 = Var(within=Reals,bounds=(0,None),initialize=0) m.x413 = Var(within=Reals,bounds=(0,None),initialize=0) m.x414 =", "m.x21 = Var(within=Reals,bounds=(0,None),initialize=0) m.x22 = Var(within=Reals,bounds=(0,None),initialize=0) m.x23 = Var(within=Reals,bounds=(0,None),initialize=0) m.x24", "0) m.c919 = Constraint(expr= m.x592 - 9*m.b685 <= 0) m.c920", "# Total E G L N X C B #", "m.c443 = Constraint(expr= m.x80 - m.x362 - m.x365 == 0)", "== 0) 
m.c875 = Constraint(expr= m.x587 == 0) m.c876 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x394 = Var(within=Reals,bounds=(0,None),initialize=0) m.x395 = Var(within=Reals,bounds=(0,None),initialize=0) m.x396 = Var(within=Reals,bounds=(0,None),initialize=0)", "<= 1) m.c1135 = Constraint(expr= m.b701 + m.b702 <= 1)", "m.x549 = Var(within=Reals,bounds=(0,None),initialize=0) m.x550 = Var(within=Reals,bounds=(0,None),initialize=0) m.x551 = Var(within=Reals,bounds=(0,None),initialize=0) m.x552", "m.b760 <= 1) m.c1251 = Constraint(expr= m.b758 + m.b760 <=", "Constraint(expr= m.b722 + m.b723 <= 1) m.c1176 = Constraint(expr= m.b722", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b755 = Var(within=Binary,bounds=(0,1),initialize=0) m.b756 = Var(within=Binary,bounds=(0,1),initialize=0) m.b757 =", "m.c1221 = Constraint(expr= m.b743 + m.b745 <= 1) m.c1222 =", "0) m.c1449 = Constraint(expr= m.b627 - m.b648 >= 0) m.c1450", "m.x497 == 0) m.c702 = Constraint(expr= m.x162 - m.x495 -", "= Constraint(expr= m.x483 - 0.78338879230327*m.b657 <= 0) m.c661 = Constraint(expr=", "= Constraint(expr= m.b610 - m.b625 >= 0) m.c1427 = Constraint(expr=", "m.x295 = Var(within=Reals,bounds=(0,None),initialize=0) m.x296 = Var(within=Reals,bounds=(0,None),initialize=0) m.x297 = Var(within=Reals,bounds=(0,None),initialize=0) m.x298", "- m.b684 >= 0) m.c1486 = Constraint(expr= m.b670 - m.b685", "1.32154609891348) m.c256 = Constraint(expr= m.x334 + 1.32154609891348*m.b616 <= 1.32154609891348) m.c257", "- m.x513 - m.x516 == 0) m.c766 = Constraint(expr= m.x172", "+ 0.999*m.b677) - log(1 + m.x527/(0.001 + 0.999*m.b677)))*(0.001 + 0.999*m.b677)", "Constraint(expr= m.x449 == 0) m.c537 = Constraint(expr= m.x450 == 0)", "= Constraint(expr= m.x569 == 0) m.c795 = Constraint(expr= m.x570 ==", "m.c872 = Constraint(expr= m.x557 == 0) m.c873 = Constraint(expr= m.x558", "Constraint(expr= m.b647 - m.b649 <= 0) m.c1066 = Constraint(expr= m.b648", "m.c292 = Constraint(expr= m.x349 == 0) m.c293 = 
Constraint(expr= m.x53", "m.c317 = Constraint(expr= m.x353 == 0) m.c318 = Constraint(expr= m.x354", "= Constraint(expr= m.x218 - 40*m.b599 <= 0) m.c96 = Constraint(expr=", "== 0) m.c87 = Constraint(expr= m.x234 == 0) m.c88 =", "1) m.c1164 = Constraint(expr= m.b716 + m.b718 <= 1) m.c1165", "m.x360 = Var(within=Reals,bounds=(0,None),initialize=0) m.x361 = Var(within=Reals,bounds=(0,None),initialize=0) m.x362 = Var(within=Reals,bounds=(0,None),initialize=0) m.x363", "m.x492 + 0.940066550763924*m.b660 <= 0.940066550763924) m.c691 = Constraint(expr= m.x493 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x463 = Var(within=Reals,bounds=(0,None),initialize=0) m.x464 = Var(within=Reals,bounds=(0,None),initialize=0) m.x465 =", "= Constraint(expr= m.x159 - m.x162 - m.x165 - m.x168 ==", "0) m.c290 = Constraint(expr= m.x347 == 0) m.c291 = Constraint(expr=", "+ m.b720 <= 1) m.c1172 = Constraint(expr= m.b720 + m.b721", "m.b746 + m.b748 <= 1) m.c1228 = Constraint(expr= m.b747 +", "m.b762 + m.b763 <= 1) m.c1259 = Constraint(expr= m.b764 +", "Constraint(expr= m.b768 + m.b769 <= 1) m.c1269 = Constraint(expr= m.b767", "<= 0) m.c656 = Constraint(expr= m.x473 + 1.18887736200171*m.b656 <= 1.18887736200171)", "= Constraint(expr= m.b708 + m.b709 <= 1) m.c1151 = Constraint(expr=", "0) m.c1073 = Constraint(expr= m.b656 - m.b657 <= 0) m.c1074", "m.b772 <= 1) m.c1277 = Constraint(expr= m.b773 + m.b774 <=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x86 = Var(within=Reals,bounds=(0,20),initialize=0) m.x87 = Var(within=Reals,bounds=(0,20),initialize=0) m.x88 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x410 = Var(within=Reals,bounds=(0,None),initialize=0) m.x411 = Var(within=Reals,bounds=(0,None),initialize=0) m.x412 = Var(within=Reals,bounds=(0,None),initialize=0)", "+ m.x837 == 0) m.c985 = Constraint(expr= 2*m.b748 + m.x838", "Constraint(expr=(m.x573/(0.001 + 0.999*m.b675) - 0.9*log(1 + m.x549/(0.001 + 0.999*m.b675)))*(0.001 +", "0) m.c135 = 
Constraint(expr=(m.x267/(0.001 + 0.999*m.b606) - 1.5*log(1 + m.x243/(0.001", "<= 15) m.c201 = Constraint(expr= m.x282 + 15*m.b609 <= 15)", "Var(within=Binary,bounds=(0,1),initialize=0) m.b688 = Var(within=Binary,bounds=(0,1),initialize=0) m.b689 = Var(within=Binary,bounds=(0,1),initialize=0) m.b690 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.b607 <= 0) m.c1025 = Constraint(expr= m.b608 - m.b609 <=", "0) m.c734 = Constraint(expr= m.x500 - 0.940066550763924*m.b665 <= 0) m.c735", "m.x422 - m.x425 == 0) m.c420 = Constraint(expr= m.x114 -", "- m.b653 >= 0) m.c1455 = Constraint(expr= m.b627 - m.b654", "m.b753 = Var(within=Binary,bounds=(0,1),initialize=0) m.b754 = Var(within=Binary,bounds=(0,1),initialize=0) m.b755 = Var(within=Binary,bounds=(0,1),initialize=0) m.b756", "<= 1) m.c1216 = Constraint(expr= m.b741 + m.b742 <= 1)", "<= 0) m.c1312 = Constraint(expr= - m.b623 - m.b624 +", "m.x266 = Var(within=Reals,bounds=(0,None),initialize=0) m.x267 = Var(within=Reals,bounds=(0,None),initialize=0) m.x268 = Var(within=Reals,bounds=(0,None),initialize=0) m.x269", "E G L N X C B # 1486 571", "= Constraint(expr= m.x291 - 3.34221486003388*m.b615 <= 0) m.c247 = Constraint(expr=", "<= 15) m.c787 = Constraint(expr= m.x541 + 15*m.b670 <= 15)", "- m.b603 <= 0) m.c1020 = Constraint(expr= m.b602 - m.b604", "0) m.c767 = Constraint(expr= m.x179 - m.x536 - m.x539 ==", "Constraint(expr= m.x272 + 2.54515263975353*m.b605 <= 2.54515263975353) m.c159 = Constraint(expr= m.x273", "Constraint(expr= m.x454 - 1.04900943706034*m.b649 <= 0) m.c581 = Constraint(expr= m.x455", "= Constraint(expr= m.b750 + m.b751 <= 1) m.c1235 = Constraint(expr=", "- 0.940066550763924*m.b660 <= 0) m.c688 = Constraint(expr= m.x490 - 0.940066550763924*m.b661", "<= 0) m.c1285 = Constraint(expr= - m.b596 - m.b597 +", "- 3.04984759446376*m.b653 <= 0) m.c627 = Constraint(expr= m.x411 - 3.04984759446376*m.b654", "m.b731 + m.b733 <= 1) m.c1198 = Constraint(expr= m.b732 +", "0) m.c1315 = Constraint(expr= - m.b626 - m.b627 
+ m.b628", "Constraint(expr= m.x94 - m.x382 - m.x385 == 0) m.c482 =", "- m.b769 <= 0) m.c1367 = Constraint(expr= m.b680 - m.b770", "= Constraint(expr= m.b713 + m.b715 <= 1) m.c1162 = Constraint(expr=", "Constraint(expr= 2*m.b731 + m.x821 == 0) m.c969 = Constraint(expr= 5*m.b732", "== 0) m.c484 = Constraint(expr= m.x121 - m.x436 - m.x439", "= Constraint(expr= m.x459 - 1.11894339953103*m.b651 <= 0) m.c607 = Constraint(expr=", "Constraint(expr= m.b638 - m.b639 <= 0) m.c1056 = Constraint(expr= m.b638", "= Constraint(expr= m.x534 == 0) m.c847 = Constraint(expr= m.x535 ==", "0) m.c102 = Constraint(expr= m.x231 - 4.45628648004517*m.b600 <= 0) m.c103", "0) m.c633 = Constraint(expr= m.x465 - 1.18887736200171*m.b654 <= 0) m.c634", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x158 = Var(within=Reals,bounds=(0,None),initialize=0) m.x159 = Var(within=Reals,bounds=(0,None),initialize=0) m.x160 =", "- 0.9*m.x319 + m.x418 == 0) m.c368 = Constraint(expr= -", "Constraint(expr= m.b711 + m.b712 <= 1) m.c1157 = Constraint(expr= m.b713", "= Constraint(expr= m.b764 + m.b766 <= 1) m.c1261 = Constraint(expr=", "= Constraint(expr= m.b603 - m.b615 >= 0) m.c1417 = Constraint(expr=", "= Constraint(expr= m.x107 - m.x410 - m.x413 == 0) m.c621", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x107 = Var(within=Reals,bounds=(0,None),initialize=0) m.x108 = Var(within=Reals,bounds=(0,None),initialize=0) m.x109 =", "m.c309 = Constraint(expr= m.x348 + 13.5*m.b621 <= 13.5) m.c310 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x143 = Var(within=Reals,bounds=(0,None),initialize=0) m.x144 = Var(within=Reals,bounds=(0,None),initialize=0) m.x145 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b693 <= 1) m.c1118 = Constraint(expr= m.b693 + m.b694 <=", "m.x373 = Var(within=Reals,bounds=(0,None),initialize=0) m.x374 = Var(within=Reals,bounds=(0,None),initialize=0) m.x375 = Var(within=Reals,bounds=(0,None),initialize=0) m.x376", "= Constraint(expr= m.x237 - 4.45628648004517*m.b603 <= 0) 
m.c124 = Constraint(expr=", "m.c1437 = Constraint(expr= m.b618 - m.b636 >= 0) m.c1438 =", "1) m.c1259 = Constraint(expr= m.b764 + m.b765 <= 1) m.c1260", "= Var(within=Reals,bounds=(0,40),initialize=0) m.x3 = Var(within=Reals,bounds=(0,40),initialize=0) m.x4 = Var(within=Reals,bounds=(0,40),initialize=0) m.x5 =", "Constraint(expr= 5*m.b711 + m.x801 == 0) m.c949 = Constraint(expr= 2*m.b712", "m.c568 = Constraint(expr= m.x103 - m.x400 - m.x403 == 0)", "= Constraint(expr= m.x292 - 3.34221486003388*m.b616 <= 0) m.c248 = Constraint(expr=", "0) m.c111 = Constraint(expr= m.x240 == 0) m.c112 = Constraint(expr=", "0) m.c289 = Constraint(expr= m.x301 == 0) m.c290 = Constraint(expr=", "<= 9) m.c524 = Constraint(expr= m.x440 - 9*m.b641 <= 0)", "= Constraint(expr= - m.b668 + m.b669 - m.b759 <= 0)", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x797 = Var(within=Reals,bounds=(None,None),initialize=0) m.x798 = Var(within=Reals,bounds=(None,None),initialize=0) m.x799 =", "<= 0) m.c1032 = Constraint(expr= m.b614 - m.b616 <= 0)", "m.x520 - m.x523 == 0) m.c707 = Constraint(expr= m.x494 -", "m.b758 + m.b760 <= 1) m.c1249 = Constraint(expr= m.b758 +", "m.x140 = Var(within=Reals,bounds=(0,None),initialize=0) m.x141 = Var(within=Reals,bounds=(0,None),initialize=0) m.x142 = Var(within=Reals,bounds=(0,None),initialize=0) m.x143", "m.x303 = Var(within=Reals,bounds=(0,None),initialize=0) m.x304 = Var(within=Reals,bounds=(0,None),initialize=0) m.x305 = Var(within=Reals,bounds=(0,None),initialize=0) m.x306", "Var(within=Binary,bounds=(0,1),initialize=0) m.b758 = Var(within=Binary,bounds=(0,1),initialize=0) m.b759 = Var(within=Binary,bounds=(0,1),initialize=0) m.b760 = Var(within=Binary,bounds=(0,1),initialize=0)", "== 0) m.c969 = Constraint(expr= 5*m.b732 + m.x822 == 0)", "0) m.c55 = Constraint(expr=(m.x226/(0.001 + 0.999*m.b598) - log(1 + m.x214/(0.001", "m.x333 == 0) m.c238 = Constraint(expr= m.x334 == 0) m.c239", "== 0) m.c120 = Constraint(expr= m.x39 - m.x261 - m.x264", 
"Constraint(expr= m.x188 - m.x554 - m.x557 == 0) m.c879 =", "m.b596 - m.b686 <= 0) m.c1284 = Constraint(expr= - m.b596", "m.x246 + 4.45628648004517*m.b606 <= 4.45628648004517) m.c154 = Constraint(expr= m.x247 +", "- m.b614 >= 0) m.c1416 = Constraint(expr= m.b603 - m.b615", "Constraint(expr= m.x376 - 20*m.b631 <= 0) m.c398 = Constraint(expr= m.x377", "= Constraint(expr= m.x508 - 0.940066550763924*m.b670 <= 0) m.c773 = Constraint(expr=", "<= 0) m.c1325 = Constraint(expr= m.b638 - m.b728 <= 0)", "m.b596 + m.b599 - m.b602 >= 0) m.c1404 = Constraint(expr=", "m.b653 - m.b655 <= 0) m.c1072 = Constraint(expr= m.b654 -", "m.b715 = Var(within=Binary,bounds=(0,1),initialize=0) m.b716 = Var(within=Binary,bounds=(0,1),initialize=0) m.b717 = Var(within=Binary,bounds=(0,1),initialize=0) m.b718", "- m.x33 == 0) m.c13 = Constraint(expr= m.x25 - m.x28", "0) m.c456 = Constraint(expr= m.x429 - 0.572481933717686*m.b636 <= 0) m.c457", "0) m.c600 = Constraint(expr= m.x405 - 3.04984759446376*m.b651 <= 0) m.c601", "= Constraint(expr= m.b662 - m.b674 >= 0) m.c1476 = Constraint(expr=", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b728 = Var(within=Binary,bounds=(0,1),initialize=0) m.b729 = Var(within=Binary,bounds=(0,1),initialize=0) m.b730 =", "Constraint(expr= m.b714 + m.b715 <= 1) m.c1161 = Constraint(expr= m.b713", "m.c689 = Constraint(expr= m.x491 + 0.940066550763924*m.b659 <= 0.940066550763924) m.c690 =", "<= 0) m.c1361 = Constraint(expr= m.b674 - m.b764 <= 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x379 = Var(within=Reals,bounds=(0,None),initialize=0) m.x380 = Var(within=Reals,bounds=(0,None),initialize=0) m.x381 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x532 = Var(within=Reals,bounds=(0,None),initialize=0) m.x533 = Var(within=Reals,bounds=(0,None),initialize=0) m.x534 =", "0) m.c571 = Constraint(expr= m.x130 - m.x454 - m.x457 ==", "== 0) m.c874 = Constraint(expr= m.x559 == 0) m.c875 =", "- m.b658 <= 0) m.c1075 = Constraint(expr= m.b657 - m.b658", "- 
m.b632 + m.b633 - m.b723 <= 0) m.c1321 =", "m.b624 + m.b627 >= 0) m.c1393 = Constraint(expr= - m.b610", "m.x248 - m.x251 == 0) m.c177 = Constraint(expr= m.x33 -", "+ m.x819 == 0) m.c967 = Constraint(expr= m.b730 + m.x820", "+ 0.999*m.b634) <= 0) m.c410 = Constraint(expr= m.x335 == 0)", "Constraint(expr= m.x235 == 0) m.c89 = Constraint(expr= m.x8 - m.x218", "= Constraint(expr= m.x497 == 0) m.c696 = Constraint(expr= m.x498 ==", "0) m.c751 = Constraint(expr= - 0.5*m.x514 + m.x538 == 0)", "m.c1223 = Constraint(expr= m.b746 + m.b747 <= 1) m.c1224 =", "m.b625 >= 0) m.c1427 = Constraint(expr= m.b608 - m.b626 >=", ">= 0) m.c1411 = Constraint(expr= m.b598 + m.b601 - m.b610", "m.x344 = Var(within=Reals,bounds=(0,None),initialize=0) m.x345 = Var(within=Reals,bounds=(0,None),initialize=0) m.x346 = Var(within=Reals,bounds=(0,None),initialize=0) m.x347", "0) m.c1328 = Constraint(expr= m.b641 - m.b731 <= 0) m.c1329", "Var(within=Reals,bounds=(0,None),initialize=0) m.x430 = Var(within=Reals,bounds=(0,None),initialize=0) m.x431 = Var(within=Reals,bounds=(0,None),initialize=0) m.x432 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x309 - 15*m.b627 <= 0) m.c355 = Constraint(expr= m.x310", "m.x543 = Var(within=Reals,bounds=(0,None),initialize=0) m.x544 = Var(within=Reals,bounds=(0,None),initialize=0) m.x545 = Var(within=Reals,bounds=(0,None),initialize=0) m.x546", "= Constraint(expr= m.x391 == 0) m.c509 = Constraint(expr= m.x443 ==", "1.26558121681553) m.c283 = Constraint(expr= m.x343 + 1.26558121681553*m.b619 <= 1.26558121681553) m.c284", "0) m.c525 = Constraint(expr= m.x441 - 9*m.b642 <= 0) m.c526", "m.c615 = Constraint(expr= m.x414 == 0) m.c616 = Constraint(expr= m.x415", "m.x485 == 0) m.c645 = Constraint(expr= m.x486 == 0) m.c646", "m.c1149 = Constraint(expr= m.b707 + m.b709 <= 1) m.c1150 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x508 = Var(within=Reals,bounds=(0,None),initialize=0) m.x509 = Var(within=Reals,bounds=(0,None),initialize=0) m.x510 = 
Var(within=Reals,bounds=(0,None),initialize=0)", "== 0) m.c671 = Constraint(expr= m.x491 == 0) m.c672 =", "m.x593 == 0) m.c909 = Constraint(expr= m.x210 - m.x591 -", "0) m.c1065 = Constraint(expr= m.b647 - m.b649 <= 0) m.c1066", "m.b754 <= 1) m.c1237 = Constraint(expr= m.b752 + m.b753 <=", "0) m.c341 = Constraint(expr= m.x311 == 0) m.c342 = Constraint(expr=", "m.c410 = Constraint(expr= m.x335 == 0) m.c411 = Constraint(expr= m.x336", "= Constraint(expr= m.b620 - m.b621 <= 0) m.c1038 = Constraint(expr=", "m.x546 == 0) m.c793 = Constraint(expr= m.x547 == 0) m.c794", "2*m.b755 - 3*m.b756 - 8*m.b757 - 10*m.b758 - 6*m.b759 -", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b625 = Var(within=Binary,bounds=(0,1),initialize=0) m.b626 = Var(within=Binary,bounds=(0,1),initialize=0) m.b627 =", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b641 = Var(within=Binary,bounds=(0,1),initialize=0) m.b642 = Var(within=Binary,bounds=(0,1),initialize=0) m.b643 =", "0) m.c162 = Constraint(expr= - m.x249 + m.x279 == 0)", "m.c535 = Constraint(expr= m.x397 == 0) m.c536 = Constraint(expr= m.x449", "Constraint(expr= m.x399 - 3.04984759446376*m.b648 <= 0) m.c574 = Constraint(expr= m.x400", "<= 0) m.c465 = Constraint(expr=(m.x435/(0.001 + 0.999*m.b639) - 0.65*log(1 +", "m.c948 = Constraint(expr= 5*m.b711 + m.x801 == 0) m.c949 =", "Constraint(expr= m.x503 == 0) m.c723 = Constraint(expr= m.x504 == 0)", "- m.x516 == 0) m.c766 = Constraint(expr= m.x172 - m.x514", "<= 0.78338879230327) m.c663 = Constraint(expr= m.x486 + 0.78338879230327*m.b657 <= 0.78338879230327)", "Constraint(expr= m.x159 - m.x162 - m.x165 - m.x168 == 0)", "= Constraint(expr= m.x408 == 0) m.c589 = Constraint(expr= m.x409 ==", "m.x240 == 0) m.c118 = Constraint(expr= m.x28 - m.x238 -", "- m.x505 == 0) m.c731 = Constraint(expr= m.x176 - m.x524", "m.b770 + m.b771 <= 1) m.c1274 = Constraint(expr= m.b771 +", "Var(within=Reals,bounds=(0,None),initialize=0) m.x345 = Var(within=Reals,bounds=(0,None),initialize=0) m.x346 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x347 = Var(within=Reals,bounds=(0,None),initialize=0)", "- m.x99 == 0) m.c28 = Constraint(expr= m.x76 - m.x97", "m.x581 == 0) m.c849 = Constraint(expr= m.x582 == 0) m.c850", "m.b621 + m.b639 >= 0) m.c1396 = Constraint(expr= - m.b622", "m.x107 == 0) m.c30 = Constraint(expr= m.x78 - m.x102 -", "Constraint(expr= m.x359 == 0) m.c345 = Constraint(expr= m.x360 == 0)", "<= 1) m.c1110 = Constraint(expr= m.b689 + m.b691 <= 1)", "m.x182 - m.x542 - m.x545 == 0) m.c798 = Constraint(expr=", "m.b604 >= 0) m.c1406 = Constraint(expr= m.b596 + m.b599 -", "0) m.c76 = Constraint(expr= m.x226 - 3.71357206670431*m.b598 <= 0) m.c77", "m.c446 = Constraint(expr= m.x116 - m.x428 - m.x431 == 0)", "0.994083415506506*m.b678 <= 0.994083415506506) m.c862 = Constraint(expr= m.x535 + 0.994083415506506*m.b679 <=", "0) m.c1055 = Constraint(expr= m.b638 - m.b639 <= 0) m.c1056", "m.c1447 = Constraint(expr= m.b625 - m.b646 >= 0) m.c1448 =", "m.x391 + 9*m.b643 <= 9) m.c524 = Constraint(expr= m.x440 -", "= Constraint(expr= m.b692 + m.b694 <= 1) m.c1120 = Constraint(expr=", "m.x307 = Var(within=Reals,bounds=(0,None),initialize=0) m.x308 = Var(within=Reals,bounds=(0,None),initialize=0) m.x309 = Var(within=Reals,bounds=(0,None),initialize=0) m.x310", "= Constraint(expr= m.x50 - m.x290 - m.x293 == 0) m.c240", "m.x258 == 0) m.c181 = Constraint(expr= m.x37 - m.x256 -", "- m.x571 == 0) m.c803 = Constraint(expr= m.x542 - 0.705049913072943*m.b671", "m.c1366 = Constraint(expr= - m.b677 - m.b678 + m.b679 -", "0) m.c759 = Constraint(expr= m.x540 == 0) m.c760 = Constraint(expr=", "8*m.b757 - 10*m.b758 - 6*m.b759 - 3*m.b760 - 4*m.b761 -", "Constraint(expr= - m.b608 - m.b609 + m.b610 - m.b700 <=", "m.x113 - m.x422 - m.x425 == 0) m.c420 = Constraint(expr=", "<= 9) m.c556 = Constraint(expr= m.x451 + 9*m.b646 <= 9)", "<= 0) m.c710 = Constraint(expr= m.x497 + 0.940066550763924*m.b662 <= 0.940066550763924)", "= Constraint(expr= 7*m.b702 + m.x792 == 0) m.c940 = 
Constraint(expr=", "<= 0) m.c715 = Constraint(expr= m.x520 - 0.705049913072943*m.b664 <= 0)", "m.b673 <= 0) m.c1091 = Constraint(expr= m.b674 - m.b675 <=", "m.x38 - m.x47 - m.x50 == 0) m.c15 = Constraint(expr=", "m.x337 == 0) m.c413 = Constraint(expr= m.x425 == 0) m.c414", "+ 120*m.x196 + 285*m.x197 + 390*m.x198 + 350*m.x199 + 290*m.x200", "0) m.c575 = Constraint(expr= m.x401 + 3.04984759446376*m.b647 <= 3.04984759446376) m.c576", "<= 0) m.c1072 = Constraint(expr= m.b654 - m.b655 <= 0)", "9) m.c528 = Constraint(expr= m.x444 + 9*m.b642 <= 9) m.c529", "<= 0) m.c790 = Constraint(expr=(m.x568/(0.001 + 0.999*m.b673) - 1.25*log(1 +", "0) m.c625 = Constraint(expr= m.x136 - m.x466 - m.x469 ==", "= Constraint(expr= m.x85 - m.x370 - m.x373 == 0) m.c479", "0.999*m.b674)))*(0.001 + 0.999* m.b674) <= 0) m.c816 = Constraint(expr=(m.x573/(0.001 +", "== 0) m.c39 = Constraint(expr= - m.x147 - m.x150 +", "m.c1367 = Constraint(expr= m.b680 - m.b770 <= 0) m.c1368 =", "<= 1.26558121681553) m.c284 = Constraint(expr= - 0.9*m.x296 + m.x344 ==", "m.c70 = Constraint(expr= m.x214 - 40*m.b598 <= 0) m.c71 =", "= Constraint(expr= m.x281 == 0) m.c174 = Constraint(expr= m.x282 ==", "- m.x556 - m.x559 == 0) m.c881 = Constraint(expr= m.x206", "m.b722 + m.b724 <= 1) m.c1180 = Constraint(expr= m.b723 +", "m.b743 = Var(within=Binary,bounds=(0,1),initialize=0) m.b744 = Var(within=Binary,bounds=(0,1),initialize=0) m.b745 = Var(within=Binary,bounds=(0,1),initialize=0) m.b746", "<= 1) m.c1243 = Constraint(expr= m.b755 + m.b756 <= 1)", "0 0 0 # # Nonzero counts # Total const", "= Constraint(expr= m.x216 == 0) m.c58 = Constraint(expr= m.x217 ==", "3*m.b705 + m.x795 == 0) m.c943 = Constraint(expr= 2*m.b706 +", "m.c19 = Constraint(expr= m.x46 - m.x55 - m.x58 - m.x61", "m.b735 <= 1) m.c1200 = Constraint(expr= m.b734 + m.b736 <=", "- m.b677 >= 0) m.c1479 = Constraint(expr= m.b666 - m.b678", "m.b689 + m.b691 <= 1) m.c1111 = Constraint(expr= m.b689 +", "m.x482 = Var(within=Reals,bounds=(0,None),initialize=0) 
m.x483 = Var(within=Reals,bounds=(0,None),initialize=0) m.x484 = Var(within=Reals,bounds=(0,None),initialize=0) m.x485", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x794 = Var(within=Reals,bounds=(None,None),initialize=0) m.x795 = Var(within=Reals,bounds=(None,None),initialize=0) m.x796 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x395 = Var(within=Reals,bounds=(0,None),initialize=0) m.x396 = Var(within=Reals,bounds=(0,None),initialize=0) m.x397 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x782 = Var(within=Reals,bounds=(None,None),initialize=0) m.x783 = Var(within=Reals,bounds=(None,None),initialize=0) m.x784 = Var(within=Reals,bounds=(None,None),initialize=0) m.x785", "1) m.c1228 = Constraint(expr= m.b747 + m.b748 <= 1) m.c1229", "<= 9) m.c523 = Constraint(expr= m.x391 + 9*m.b643 <= 9)", "= Constraint(expr= m.x517 == 0) m.c758 = Constraint(expr= m.x539 ==", "m.x270/(0.001 + 0.999*m.b618)))*(0.001 + 0.999*m.b618) <= 0) m.c259 = Constraint(expr=(m.x340/(0.001", "m.x200 = Var(within=Reals,bounds=(0,None),initialize=0) m.x201 = Var(within=Reals,bounds=(0,None),initialize=0) m.x202 = Var(within=Reals,bounds=(0,None),initialize=0) m.x203", "Var(within=Reals,bounds=(0,None),initialize=0) m.x150 = Var(within=Reals,bounds=(0,None),initialize=0) m.x151 = Var(within=Reals,bounds=(0,None),initialize=0) m.x152 = Var(within=Reals,bounds=(0,None),initialize=0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x209 = Var(within=Reals,bounds=(0,None),initialize=0) m.x210 = Var(within=Reals,bounds=(0,None),initialize=0) m.x211 = Var(within=Reals,bounds=(0,None),initialize=0)", "- m.x514 - m.x517 == 0) m.c767 = Constraint(expr= m.x179", "= Constraint(expr= m.x242 - 4.45628648004517*m.b605 <= 0) m.c150 = Constraint(expr=", "= Constraint(expr= m.b704 + m.b706 <= 1) m.c1141 = Constraint(expr=", "m.b608 + m.b609 - m.b699 <= 0) m.c1297 = Constraint(expr=", "Var(within=Binary,bounds=(0,1),initialize=0) m.b647 = Var(within=Binary,bounds=(0,1),initialize=0) 
m.b648 = Var(within=Binary,bounds=(0,1),initialize=0) m.b649 = Var(within=Binary,bounds=(0,1),initialize=0)", "0) m.c608 = Constraint(expr= m.x461 + 1.11894339953103*m.b650 <= 1.11894339953103) m.c609", "Constraint(expr= - m.b605 + m.b617 >= 0) m.c1386 = Constraint(expr=", "= Constraint(expr= - m.b617 + m.b635 + m.b638 >= 0)", "m.c1079 = Constraint(expr= m.b662 - m.b663 <= 0) m.c1080 =", "m.x34 - m.x250 - m.x253 == 0) m.c179 = Constraint(expr=", "+ 0.999* m.b647) <= 0) m.c558 = Constraint(expr=(m.x453/(0.001 + 0.999*m.b648)", "Constraint(expr= m.x308 - 15*m.b626 <= 0) m.c354 = Constraint(expr= m.x309", "= Constraint(expr= m.x189 - m.x555 - m.x558 == 0) m.c880", "+ m.x280 == 0) m.c164 = Constraint(expr= - 0.5*m.x254 +", "m.b633 >= 0) m.c1384 = Constraint(expr= - m.b616 + m.b634", "= Constraint(expr= - m.x72 - m.x90 + m.x93 == 0)", "m.b693 + m.b694 <= 1) m.c1121 = Constraint(expr= m.b695 +", "0) m.c279 = Constraint(expr= m.x339 - 1.26558121681553*m.b618 <= 0) m.c280", "m.x116 - m.x428 - m.x431 == 0) m.c447 = Constraint(expr=", "== 0) m.c445 = Constraint(expr= m.x82 - m.x364 - m.x367", "m.c144 = Constraint(expr= m.x30 - m.x243 - m.x246 == 0)", "- m.x553 == 0) m.c827 = Constraint(expr= m.x200 - m.x572", "Var(within=Reals,bounds=(None,None),initialize=0) m.x792 = Var(within=Reals,bounds=(None,None),initialize=0) m.x793 = Var(within=Reals,bounds=(None,None),initialize=0) m.x794 = Var(within=Reals,bounds=(None,None),initialize=0)", "m.c768 = Constraint(expr= m.x180 - m.x537 - m.x540 == 0)", "m.b607) <= 0) m.c137 = Constraint(expr= m.x245 == 0) m.c138", "m.x391 == 0) m.c515 = Constraint(expr= m.x122 - m.x440 -", "m.x333 == 0) m.c244 = Constraint(expr= m.x67 - m.x328 -", "+ m.b688 <= 1) m.c1105 = Constraint(expr= m.b686 + m.b687", "+ m.x591 == 0) m.c898 = Constraint(expr= - 0.6*m.x562 +", "+ m.b751 <= 1) m.c1233 = Constraint(expr= m.b749 + m.b751", "= Constraint(expr= m.x288 + 3.34221486003388*m.b612 <= 3.34221486003388) m.c223 = Constraint(expr=", "m.b690 + m.b691 <= 1) 
m.c1113 = Constraint(expr= m.b689 +", "m.x407 = Var(within=Reals,bounds=(0,None),initialize=0) m.x408 = Var(within=Reals,bounds=(0,None),initialize=0) m.x409 = Var(within=Reals,bounds=(0,None),initialize=0) m.x410", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x33 = Var(within=Reals,bounds=(0,None),initialize=0) m.x34 = Var(within=Reals,bounds=(0,None),initialize=0) m.x35 =", "m.x816 = Var(within=Reals,bounds=(None,None),initialize=0) m.x817 = Var(within=Reals,bounds=(None,None),initialize=0) m.x818 = Var(within=Reals,bounds=(None,None),initialize=0) m.x819", "== 0) m.c367 = Constraint(expr= - 0.9*m.x319 + m.x418 ==", "0) m.c458 = Constraint(expr= m.x431 + 0.572481933717686*m.b635 <= 0.572481933717686) m.c459", "m.x167 = Var(within=Reals,bounds=(0,None),initialize=0) m.x168 = Var(within=Reals,bounds=(0,None),initialize=0) m.x169 = Var(within=Reals,bounds=(0,None),initialize=0) m.x170", "= Constraint(expr= m.x176 - m.x527 - m.x533 == 0) m.c852", "m.x855 == 0) m.c1003 = Constraint(expr= 9*m.b766 + m.x856 ==", "Constraint(expr= - 0.75*m.x496 + m.x520 == 0) m.c695 = Constraint(expr=", "0.999*m.b652) - 0.8*log(1 + m.x406/(0.001 + 0.999*m.b652)))*(0.001 + 0.999* m.b652)", "<= 0) m.c547 = Constraint(expr= m.x394 - 9*m.b646 <= 0)", "m.x18 == 0) m.c7 = Constraint(expr= - m.x13 - m.x16", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x124 = Var(within=Reals,bounds=(0,None),initialize=0) m.x125 = Var(within=Reals,bounds=(0,None),initialize=0) m.x126 =", "0) m.c820 = Constraint(expr= m.x553 == 0) m.c821 = Constraint(expr=", "m.c1077 = Constraint(expr= m.b659 - m.b661 <= 0) m.c1078 =", "- m.x519 - m.x522 == 0) m.c706 = Constraint(expr= m.x175", "== 0) m.c317 = Constraint(expr= m.x353 == 0) m.c318 =", "- m.b713 <= 0) m.c1311 = Constraint(expr= - m.b623 +", "Constraint(expr= m.x45 - m.x279 - m.x282 == 0) m.c184 =", "m.c118 = Constraint(expr= m.x28 - m.x238 - m.x241 == 0)", "0.999*m.b653) - 0.85*log(1 + m.x410/(0.001 + 0.999*m.b653)))*(0.001 + 0.999* m.b653)", 
"Constraint(expr= m.x381 - 33.5*m.b639 <= 0) m.c493 = Constraint(expr= m.x382", "Var(within=Binary,bounds=(0,1),initialize=0) m.b766 = Var(within=Binary,bounds=(0,1),initialize=0) m.b767 = Var(within=Binary,bounds=(0,1),initialize=0) m.b768 = Var(within=Binary,bounds=(0,1),initialize=0)", "- 4*m.b687 - 6*m.b688 - 8*m.b689 - 7*m.b690 - 6*m.b691", "m.b597 - m.b687 <= 0) m.c1285 = Constraint(expr= - m.b596", "- m.x314 - m.x320 == 0) m.c216 = Constraint(expr= m.x63", "Constraint(expr= m.x495 - 0.940066550763924*m.b663 <= 0) m.c709 = Constraint(expr= m.x496", "m.b741 = Var(within=Binary,bounds=(0,1),initialize=0) m.b742 = Var(within=Binary,bounds=(0,1),initialize=0) m.b743 = Var(within=Binary,bounds=(0,1),initialize=0) m.b744", "m.b725 + m.b727 <= 1) m.c1183 = Constraint(expr= m.b725 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x454 = Var(within=Reals,bounds=(0,None),initialize=0) m.x455 = Var(within=Reals,bounds=(0,None),initialize=0) m.x456 =", "m.c266 = Constraint(expr= m.x41 - m.x269 - m.x275 == 0)", "m.x175 - m.x520 - m.x523 == 0) m.c707 = Constraint(expr=", "= Constraint(expr= m.b641 - m.b642 <= 0) m.c1059 = Constraint(expr=", "<= 0) m.c580 = Constraint(expr= m.x454 - 1.04900943706034*m.b649 <= 0)", "m.c787 = Constraint(expr= m.x541 + 15*m.b670 <= 15) m.c788 =", "- m.b665 + m.b677 >= 0) m.c1464 = Constraint(expr= -", "m.x372 == 0) m.c478 = Constraint(expr= m.x85 - m.x370 -", "m.c633 = Constraint(expr= m.x465 - 1.18887736200171*m.b654 <= 0) m.c634 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x556 = Var(within=Reals,bounds=(0,None),initialize=0) m.x557 = Var(within=Reals,bounds=(0,None),initialize=0) m.x558 =", "+ 35*m.x122 + 50*m.x123 + 20*m.x124 + 20*m.x125 + 30*m.x126", "== 0) m.c879 = Constraint(expr= m.x189 - m.x555 - m.x558", "- m.b632 - m.b633 + m.b634 - m.b724 <= 0)", "Var(within=Binary,bounds=(0,1),initialize=0) m.b732 = Var(within=Binary,bounds=(0,1),initialize=0) m.b733 = Var(within=Binary,bounds=(0,1),initialize=0) m.b734 = 
Var(within=Binary,bounds=(0,1),initialize=0)", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b651 = Var(within=Binary,bounds=(0,1),initialize=0) m.b652 = Var(within=Binary,bounds=(0,1),initialize=0) m.b653 =", "0) m.c354 = Constraint(expr= m.x309 - 15*m.b627 <= 0) m.c355", "m.x478/(0.001 + 0.999*m.b661)))*(0.001 + 0.999* m.b661) <= 0) m.c668 =", "- m.x531 == 0) m.c733 = Constraint(expr= m.x178 - m.x526", "+ m.b630 - m.b720 <= 0) m.c1318 = Constraint(expr= -", "0) m.c1430 = Constraint(expr= m.b611 - m.b629 >= 0) m.c1431", "<= 40) m.c101 = Constraint(expr= m.x230 - 4.45628648004517*m.b599 <= 0)", "9*m.b644 <= 0) m.c552 = Constraint(expr= m.x447 - 9*m.b645 <=", "0) m.c541 = Constraint(expr= m.x100 - m.x394 - m.x397 ==", "== 0) m.c881 = Constraint(expr= m.x206 - m.x584 - m.x587", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b721 = Var(within=Binary,bounds=(0,1),initialize=0) m.b722 = Var(within=Binary,bounds=(0,1),initialize=0) m.b723 =", "0) m.c186 = Constraint(expr= m.x249 - 4.45628648004517*m.b609 <= 0) m.c187", "- m.b631 >= 0) m.c1433 = Constraint(expr= m.b614 - m.b632", "m.b627 - m.b651 >= 0) m.c1453 = Constraint(expr= m.b628 -", "m.c818 = Constraint(expr= m.x551 == 0) m.c819 = Constraint(expr= m.x552", "6*m.b726 + m.x816 == 0) m.c964 = Constraint(expr= 3*m.b727 +", "m.x778 == 0) m.c926 = Constraint(expr= 8*m.b689 + m.x779 ==", ">= 0) m.c1455 = Constraint(expr= m.b627 - m.b654 >= 0)", "+ 30*m.x111 + 15*m.x112 + 15*m.x113 + 20*m.x114 + 25*m.x115", "m.b766 <= 1) m.c1265 = Constraint(expr= m.b767 + m.b768 <=", "m.c998 = Constraint(expr= 4*m.b761 + m.x851 == 0) m.c999 =", "= Constraint(expr= m.x571 + 0.666992981045719*m.b673 <= 0.666992981045719) m.c815 = Constraint(expr=(m.x572/(0.001", "m.x574 - m.x577 == 0) m.c830 = Constraint(expr= m.x548 -", "0.999* m.b640) <= 0) m.c464 = Constraint(expr=(m.x434/(0.001 + 0.999*m.b638) -", "1.26558121681553*m.b619 <= 0) m.c281 = Constraint(expr= m.x341 + 1.26558121681553*m.b617 <=", "m.b677 - m.b678 <= 0) m.c1095 = 
Constraint(expr= m.b677 -", "9*m.b749 - 2*m.b750 - 9*m.b751 - 5*m.b752 - 8*m.b753 -", "0) m.c1467 = Constraint(expr= m.b654 - m.b657 >= 0) m.c1468", "m.x164 - m.x167 == 0) m.c45 = Constraint(expr= m.x159 -", "1.04900943706034*m.b649 <= 1.04900943706034) m.c584 = Constraint(expr=(m.x458/(0.001 + 0.999*m.b650) - 0.8*log(1", "0) m.c521 = Constraint(expr= m.x389 + 9*m.b641 <= 9) m.c522", "1.2*log(1 + m.x220/(0.001 + 0.999*m.b601)))*(0.001 + 0.999* m.b601) <= 0)", "= Constraint(expr=(m.x266/(0.001 + 0.999*m.b605) - 1.5*log(1 + m.x242/(0.001 + 0.999*m.b605)))*(0.001", "m.c507 = Constraint(expr= m.x390 == 0) m.c508 = Constraint(expr= m.x391", "== 0) m.c618 = Constraint(expr= m.x468 == 0) m.c619 =", "m.x342 = Var(within=Reals,bounds=(0,None),initialize=0) m.x343 = Var(within=Reals,bounds=(0,None),initialize=0) m.x344 = Var(within=Reals,bounds=(0,None),initialize=0) m.x345", "m.b658 <= 0) m.c1075 = Constraint(expr= m.b657 - m.b658 <=", "- m.b737 <= 0) m.c1335 = Constraint(expr= - m.b647 +", "0 0 # FX 0 0 0 0 0 0", "== 0) m.c385 = Constraint(expr= m.x88 - m.x376 - m.x379", "== 0) m.c593 = Constraint(expr= m.x104 - m.x404 - m.x407", "m.x257 == 0) m.c180 = Constraint(expr= m.x36 - m.x255 -", "m.b665 - m.b667 <= 0) m.c1084 = Constraint(expr= m.b666 -", "= Constraint(expr= m.x545 == 0) m.c792 = Constraint(expr= m.x546 ==", "- m.x507 - m.x510 == 0) m.c763 = Constraint(expr= m.x169", "m.x363 - m.x366 == 0) m.c445 = Constraint(expr= m.x82 -", "= Constraint(expr= - m.x147 - m.x150 + m.x153 == 0)", "= Constraint(expr= m.x239 + 4.45628648004517*m.b602 <= 4.45628648004517) m.c126 = Constraint(expr=", "- m.x317 - m.x323 == 0) m.c381 = Constraint(expr= m.x63", "== 0) m.c67 = Constraint(expr= m.x13 - m.x226 - m.x229", "<= 33.5) m.c496 = Constraint(expr= m.x385 + 33.5*m.b640 <= 33.5)", "0) m.c790 = Constraint(expr=(m.x568/(0.001 + 0.999*m.b673) - 1.25*log(1 + m.x544/(0.001", "13.5*m.b680 <= 13.5) m.c894 = Constraint(expr= m.x588 + 13.5*m.b681 <=", 
"Var(within=Reals,bounds=(None,None),initialize=0) m.x850 = Var(within=Reals,bounds=(None,None),initialize=0) m.x851 = Var(within=Reals,bounds=(None,None),initialize=0) m.x852 = Var(within=Reals,bounds=(None,None),initialize=0)", "Constraint(expr= m.x515 == 0) m.c756 = Constraint(expr= m.x516 == 0)", "1.26558121681553*m.b639 <= 0) m.c487 = Constraint(expr= m.x370 - 1.26558121681553*m.b640 <=", "Constraint(expr= m.x401 + 3.04984759446376*m.b647 <= 3.04984759446376) m.c576 = Constraint(expr= m.x402", "m.x321 == 0) m.c217 = Constraint(expr= m.x64 - m.x316 -", "m.c126 = Constraint(expr= m.x240 + 4.45628648004517*m.b603 <= 4.45628648004517) m.c127 =", "- m.b621 >= 0) m.c1423 = Constraint(expr= m.b610 - m.b622", "m.x800 = Var(within=Reals,bounds=(None,None),initialize=0) m.x801 = Var(within=Reals,bounds=(None,None),initialize=0) m.x802 = Var(within=Reals,bounds=(None,None),initialize=0) m.x803", "<= 0) m.c1336 = Constraint(expr= - m.b647 - m.b648 +", "Constraint(expr= - 0.75*m.x238 + m.x262 == 0) m.c110 = Constraint(expr=", "m.x119 = Var(within=Reals,bounds=(0,None),initialize=0) m.x120 = Var(within=Reals,bounds=(0,None),initialize=0) m.x121 = Var(within=Reals,bounds=(0,None),initialize=0) m.x122", "<= 3.71357206670431) m.c78 = Constraint(expr= m.x228 + 3.71357206670431*m.b597 <= 3.71357206670431)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x142 = Var(within=Reals,bounds=(0,None),initialize=0) m.x143 = Var(within=Reals,bounds=(0,None),initialize=0) m.x144 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x368 - 1.26558121681553*m.b638 <= 0) m.c486 = Constraint(expr= m.x369", "m.b601 == 1) m.c1376 = Constraint(expr= - m.b602 + m.b611", "m.x242/(0.001 + 0.999*m.b605)))*(0.001 + 0.999* m.b605) <= 0) m.c135 =", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b697 = Var(within=Binary,bounds=(0,1),initialize=0) m.b698 = Var(within=Binary,bounds=(0,1),initialize=0) m.b699 =", "0) m.c654 = Constraint(expr= m.x471 - 1.18887736200171*m.b657 <= 0) 
m.c655", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x509 = Var(within=Reals,bounds=(0,None),initialize=0) m.x510 = Var(within=Reals,bounds=(0,None),initialize=0) m.x511 =", "0) m.c396 = Constraint(expr= m.x375 - 20*m.b630 <= 0) m.c397", "+ m.b657 + m.b660 >= 0) m.c1459 = Constraint(expr= -", "m.c378 = Constraint(expr= m.x420 == 0) m.c379 = Constraint(expr= m.x421", "== 0) m.c797 = Constraint(expr= m.x182 - m.x542 - m.x545", "+ m.x826 == 0) m.c974 = Constraint(expr= 5*m.b737 + m.x827", "1.5*log(1 + m.x502/(0.001 + 0.999*m.b667)))*(0.001 + 0.999* m.b667) <= 0)", "m.x784 == 0) m.c932 = Constraint(expr= 10*m.b695 + m.x785 ==", "m.c1241 = Constraint(expr= m.b755 + m.b756 <= 1) m.c1242 =", "2.54515263975353*m.b619 <= 2.54515263975353) m.c278 = Constraint(expr= m.x338 - 1.26558121681553*m.b617 <=", "- m.x288 == 0) m.c214 = Constraint(expr= m.x49 - m.x286", "= Constraint(expr= m.x312 + 15*m.b627 <= 15) m.c358 = Constraint(expr=", "20*m.b631 <= 0) m.c404 = Constraint(expr= m.x419 + 20*m.b629 <=", "m.x540 == 0) m.c760 = Constraint(expr= m.x541 == 0) m.c761", "- m.x387 + m.x441 == 0) m.c505 = Constraint(expr= -", "= Constraint(expr= m.x361 == 0) m.c347 = Constraint(expr= m.x59 -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x202 = Var(within=Reals,bounds=(0,None),initialize=0) m.x203 = Var(within=Reals,bounds=(0,None),initialize=0) m.x204 =", "m.x235 == 0) m.c89 = Constraint(expr= m.x8 - m.x218 -", "Constraint(expr= m.x449 + 9*m.b644 <= 9) m.c555 = Constraint(expr= m.x450", "== 0) m.c48 = Constraint(expr= m.x174 - m.x183 - m.x186", "m.c632 = Constraint(expr= m.x464 - 1.18887736200171*m.b653 <= 0) m.c633 =", "- 0.940066550763924*m.b665 <= 0) m.c735 = Constraint(expr= m.x501 - 0.940066550763924*m.b666", "m.x320 + 1.83548069293539*m.b611 <= 1.83548069293539) m.c228 = Constraint(expr= m.x321 +", "m.c770 = Constraint(expr= m.x506 - 0.940066550763924*m.b668 <= 0) m.c771 =", "= Constraint(expr= m.x551 == 0) m.c819 = Constraint(expr= m.x552 ==", "0.999* m.b639) <= 0) 
m.c463 = Constraint(expr=(m.x436/(0.001 + 0.999*m.b640) -", "= Constraint(expr= 7*m.b701 + m.x791 == 0) m.c939 = Constraint(expr=", "Constraint(expr= m.b668 - m.b680 >= 0) m.c1482 = Constraint(expr= m.b669", "m.c772 = Constraint(expr= m.x508 - 0.940066550763924*m.b670 <= 0) m.c773 =", "0) m.c41 = Constraint(expr= m.x152 - m.x155 - m.x158 ==", "Constraint(expr= m.x252 + 4.45628648004517*m.b609 <= 4.45628648004517) m.c190 = Constraint(expr= m.x253", "Constraint(expr= m.x533 + 0.994083415506506*m.b677 <= 0.994083415506506) m.c861 = Constraint(expr= m.x534", "= Constraint(expr= m.x438 == 0) m.c475 = Constraint(expr= m.x439 ==", "m.b598 + m.b601 - m.b604 >= 0) m.c1406 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x507 = Var(within=Reals,bounds=(0,None),initialize=0) m.x508 = Var(within=Reals,bounds=(0,None),initialize=0) m.x509 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x126 = Var(within=Reals,bounds=(0,None),initialize=0) m.x127 = Var(within=Reals,bounds=(0,None),initialize=0) m.x128 = Var(within=Reals,bounds=(0,None),initialize=0) m.x129", "+ 3.34221486003388*m.b603 <= 3.34221486003388) m.c133 = Constraint(expr= m.x265 + 3.34221486003388*m.b604", "0) m.c1383 = Constraint(expr= - m.b615 + m.b633 >= 0)", "m.x400 = Var(within=Reals,bounds=(0,None),initialize=0) m.x401 = Var(within=Reals,bounds=(0,None),initialize=0) m.x402 = Var(within=Reals,bounds=(0,None),initialize=0) m.x403", "m.b732 = Var(within=Binary,bounds=(0,1),initialize=0) m.b733 = Var(within=Binary,bounds=(0,1),initialize=0) m.b734 = Var(within=Binary,bounds=(0,1),initialize=0) m.b735", "m.x42 = Var(within=Reals,bounds=(0,None),initialize=0) m.x43 = Var(within=Reals,bounds=(0,None),initialize=0) m.x44 = Var(within=Reals,bounds=(0,None),initialize=0) m.x45", "m.c143 = Constraint(expr= m.x29 - m.x242 - m.x245 == 0)", "300*m.x211 - 5*m.b686 - 4*m.b687 - 6*m.b688 - 8*m.b689 -", "0) m.c384 = Constraint(expr= m.x87 - m.x375 - m.x378 ==", "- m.x481 == 0) m.c677 = Constraint(expr= 
m.x149 - m.x488", "m.x468 = Var(within=Reals,bounds=(0,None),initialize=0) m.x469 = Var(within=Reals,bounds=(0,None),initialize=0) m.x470 = Var(within=Reals,bounds=(0,None),initialize=0) m.x471", "m.x801 == 0) m.c949 = Constraint(expr= 2*m.b712 + m.x802 ==", "- m.b658 <= 0) m.c1076 = Constraint(expr= m.b659 - m.b660", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x449 = Var(within=Reals,bounds=(0,None),initialize=0) m.x450 = Var(within=Reals,bounds=(0,None),initialize=0) m.x451 =", "<= 9) m.c529 = Constraint(expr= m.x445 + 9*m.b643 <= 9)", "m.x146 - m.x149 + m.x152 == 0) m.c39 = Constraint(expr=", "= Constraint(expr= m.x125 - m.x446 - m.x449 == 0) m.c543", "Constraint(expr= m.x559 + 15*m.b682 <= 15) m.c890 = Constraint(expr= m.x584", "+ m.b688 <= 1) m.c1108 = Constraint(expr= m.b687 + m.b688", "m.b768 = Var(within=Binary,bounds=(0,1),initialize=0) m.b769 = Var(within=Binary,bounds=(0,1),initialize=0) m.b770 = Var(within=Binary,bounds=(0,1),initialize=0) m.b771", "- 0.5*m.x512 + m.x536 == 0) m.c750 = Constraint(expr= -", ">= 0) m.c1478 = Constraint(expr= m.b665 - m.b677 >= 0)", "<= 1) m.c1214 = Constraint(expr= m.b741 + m.b742 <= 1)", "= Constraint(expr= m.x40 - m.x49 - m.x52 == 0) m.c17", "m.c1115 = Constraint(expr= m.b692 + m.b693 <= 1) m.c1116 =", "m.c435 = Constraint(expr=(m.x429/(0.001 + 0.999*m.b636) - 0.7*log(1 + m.x363/(0.001 +", "m.b679 = Var(within=Binary,bounds=(0,1),initialize=0) m.b680 = Var(within=Binary,bounds=(0,1),initialize=0) m.b681 = Var(within=Binary,bounds=(0,1),initialize=0) m.b682", "m.x202 - m.x574 - m.x577 == 0) m.c830 = Constraint(expr=", "m.c160 = Constraint(expr= m.x274 + 2.54515263975353*m.b607 <= 2.54515263975353) m.c161 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x551 = Var(within=Reals,bounds=(0,None),initialize=0) m.x552 = Var(within=Reals,bounds=(0,None),initialize=0) m.x553 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c4 = Constraint(expr= m.x4 - m.x7 - m.x10 ==", "- m.x541 == 0) m.c770 = 
Constraint(expr= m.x506 - 0.940066550763924*m.b668", "<= 33.5) m.c497 = Constraint(expr= m.x434 - 2.30162356062425*m.b638 <= 0)", "+ m.b649 - m.b739 <= 0) m.c1337 = Constraint(expr= m.b650", "m.b626 + m.b627 - m.b717 <= 0) m.c1315 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x163 = Var(within=Reals,bounds=(0,None),initialize=0) m.x164 = Var(within=Reals,bounds=(0,None),initialize=0) m.x165 =", "0) m.c928 = Constraint(expr= 6*m.b691 + m.x781 == 0) m.c929", "- m.x356 - m.x359 == 0) m.c351 = Constraint(expr= m.x78", "= Constraint(expr= m.x425 + 0.842233385663186*m.b632 <= 0.842233385663186) m.c432 = Constraint(expr=", "m.b692 = Var(within=Binary,bounds=(0,1),initialize=0) m.b693 = Var(within=Binary,bounds=(0,1),initialize=0) m.b694 = Var(within=Binary,bounds=(0,1),initialize=0) m.b695", "m.x381/(0.001 + 0.999*m.b639)))*(0.001 + 0.999* m.b639) <= 0) m.c466 =", "2.54515263975353) m.c160 = Constraint(expr= m.x274 + 2.54515263975353*m.b607 <= 2.54515263975353) m.c161", "= Constraint(expr= m.x313 == 0) m.c344 = Constraint(expr= m.x359 ==", "Constraint(expr= m.x306 == 0) m.c316 = Constraint(expr= m.x307 == 0)", "<= 1.11894339953103) m.c609 = Constraint(expr= m.x462 + 1.11894339953103*m.b651 <= 1.11894339953103)", "== 0) m.c793 = Constraint(expr= m.x547 == 0) m.c794 =", "m.c892 = Constraint(expr= m.x586 - 13.5*m.b682 <= 0) m.c893 =", "0) m.c902 = Constraint(expr= m.x593 == 0) m.c903 = Constraint(expr=", "m.x270 - m.x276 == 0) m.c268 = Constraint(expr= m.x43 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x239 = Var(within=Reals,bounds=(0,None),initialize=0) m.x240 = Var(within=Reals,bounds=(0,None),initialize=0) m.x241 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= m.b749 + m.b751 <= 1) m.c1231 = Constraint(expr=", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b742 = Var(within=Binary,bounds=(0,1),initialize=0) m.b743 = Var(within=Binary,bounds=(0,1),initialize=0) m.b744 =", "counts # Total E G L N X C B", "m.c696 
= Constraint(expr= m.x498 == 0) m.c697 = Constraint(expr= m.x499", "40*m.x157 - m.x170 - m.x171 - m.x172 + 80*m.x194 +", "Var(within=Reals,bounds=(0,None),initialize=0) m.x468 = Var(within=Reals,bounds=(0,None),initialize=0) m.x469 = Var(within=Reals,bounds=(0,None),initialize=0) m.x470 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x386 - 9*m.b641 <= 0) m.c519 = Constraint(expr= m.x387 -", "m.x494 - m.x497 == 0) m.c702 = Constraint(expr= m.x162 -", "- 0.7*log(1 + m.x363/(0.001 + 0.999*m.b636)))*(0.001 + 0.999* m.b636) <=", "= Constraint(expr= m.x397 == 0) m.c536 = Constraint(expr= m.x449 ==", "m.c831 = Constraint(expr= m.x549 - 0.705049913072943*m.b675 <= 0) m.c832 =", "0) m.c941 = Constraint(expr= 4*m.b704 + m.x794 == 0) m.c942", "Constraint(expr= m.x19 - m.x22 - m.x25 == 0) m.c11 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x325 = Var(within=Reals,bounds=(0,None),initialize=0) m.x326 = Var(within=Reals,bounds=(0,None),initialize=0) m.x327 =", "+ m.x536 == 0) m.c747 = Constraint(expr= - m.x507 +", "Var(within=Binary,bounds=(0,1),initialize=0) m.b678 = Var(within=Binary,bounds=(0,1),initialize=0) m.b679 = Var(within=Binary,bounds=(0,1),initialize=0) m.b680 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.b614) <= 0) m.c231 = Constraint(expr=(m.x327/(0.001 + 0.999*m.b615) - 0.9*log(1", "m.b711 <= 0) m.c1309 = Constraint(expr= - m.b620 - m.b621", "Constraint(expr= m.x536 - 15*m.b668 <= 0) m.c783 = Constraint(expr= m.x537", "0) m.c1296 = Constraint(expr= - m.b608 + m.b609 - m.b699", "+ m.b643 + m.b646 >= 0) m.c1400 = Constraint(expr= -", "0) m.c23 = Constraint(expr= - m.x71 - m.x89 + m.x92", "m.x447 = Var(within=Reals,bounds=(0,None),initialize=0) m.x448 = Var(within=Reals,bounds=(0,None),initialize=0) m.x449 = Var(within=Reals,bounds=(0,None),initialize=0) m.x450", "= Constraint(expr= m.x32 - m.x248 - m.x251 == 0) m.c177", "Constraint(expr= m.x192 - m.x561 - m.x564 == 0) m.c907 =", "m.x561 - 15*m.b684 <= 0) m.c913 = Constraint(expr= m.x562 -", 
"m.b599 + m.b600 - m.b690 <= 0) m.c1288 = Constraint(expr=", "- m.x16 + m.x19 == 0) m.c8 = Constraint(expr= m.x17", "m.x149 + m.x152 == 0) m.c39 = Constraint(expr= - m.x147", "== 0) m.c244 = Constraint(expr= m.x67 - m.x328 - m.x334", "# # Equation counts # Total E G L N", "m.x137 = Var(within=Reals,bounds=(0,None),initialize=0) m.x138 = Var(within=Reals,bounds=(0,None),initialize=0) m.x139 = Var(within=Reals,bounds=(0,None),initialize=0) m.x140", "m.x793 = Var(within=Reals,bounds=(None,None),initialize=0) m.x794 = Var(within=Reals,bounds=(None,None),initialize=0) m.x795 = Var(within=Reals,bounds=(None,None),initialize=0) m.x796", "m.c747 = Constraint(expr= - m.x507 + m.x537 == 0) m.c748", "+ m.b721 <= 1) m.c1174 = Constraint(expr= m.b720 + m.b721", "m.b748 <= 1) m.c1227 = Constraint(expr= m.b746 + m.b748 <=", "m.x507 = Var(within=Reals,bounds=(0,None),initialize=0) m.x508 = Var(within=Reals,bounds=(0,None),initialize=0) m.x509 = Var(within=Reals,bounds=(0,None),initialize=0) m.x510", "Var(within=Binary,bounds=(0,1),initialize=0) m.b655 = Var(within=Binary,bounds=(0,1),initialize=0) m.b656 = Var(within=Binary,bounds=(0,1),initialize=0) m.b657 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.b672 - m.b762 <= 0) m.c1360 = Constraint(expr= - m.b671", "Constraint(expr= m.b663 - m.b675 >= 0) m.c1477 = Constraint(expr= m.b664", "Constraint(expr= 2*m.b720 + m.x810 == 0) m.c958 = Constraint(expr= 9*m.b721", "m.c1036 = Constraint(expr= m.b618 - m.b619 <= 0) m.c1037 =", "Constraint(expr= m.x209 - m.x590 - m.x593 == 0) m.c909 =", "m.c1276 = Constraint(expr= m.b771 + m.b772 <= 1) m.c1277 =", "+ m.x213/(0.001 + 0.999*m.b597)))*(0.001 + 0.999*m.b597) <= 0) m.c55 =", "0) m.c1023 = Constraint(expr= m.b605 - m.b607 <= 0) m.c1024", "= Constraint(expr= - m.b618 + m.b636 + m.b639 >= 0)", "5*m.b697 - 6*m.b698 - 10*m.b699 - 6*m.b700 - 7*m.b701 -", "m.x445 == 0) m.c518 = Constraint(expr= m.x386 - 9*m.b641 <=", "m.c431 = Constraint(expr= m.x425 + 0.842233385663186*m.b632 <= 
0.842233385663186) m.c432 =", "== 0) m.c508 = Constraint(expr= m.x391 == 0) m.c509 =", "8*m.b768 + m.x858 == 0) m.c1006 = Constraint(expr= 6*m.b769 +", "- 3.04984759446376*m.b648 <= 0) m.c574 = Constraint(expr= m.x400 - 3.04984759446376*m.b649", "0) m.c329 = Constraint(expr= m.x305 + 15*m.b623 <= 15) m.c330", "Constraint(expr= m.x106 - m.x406 - m.x409 == 0) m.c596 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x369 = Var(within=Reals,bounds=(0,None),initialize=0) m.x370 = Var(within=Reals,bounds=(0,None),initialize=0) m.x371 =", "0.705049913072943*m.b663 <= 0.705049913072943) m.c718 = Constraint(expr= m.x523 + 0.705049913072943*m.b664 <=", "m.c916 = Constraint(expr= m.x565 + 15*m.b685 <= 15) m.c917 =", "- m.x397 == 0) m.c542 = Constraint(expr= m.x125 - m.x446", "= Constraint(expr= - m.b680 + m.b681 - m.b771 <= 0)", "0) m.c321 = Constraint(expr= m.x57 - m.x303 - m.x306 ==", "- m.b651 + m.b652 - m.b742 <= 0) m.c1340 =", "<= 0.690184503917672) m.c869 = Constraint(expr= - 0.9*m.x554 + m.x584 ==", "+ 30*m.b669 <= 30) m.c781 = Constraint(expr= m.x517 + 30*m.b670", "m.c773 = Constraint(expr= m.x509 + 0.940066550763924*m.b668 <= 0.940066550763924) m.c774 =", "Constraint(expr= m.b605 - m.b606 <= 0) m.c1023 = Constraint(expr= m.b605", "Constraint(expr= m.x67 - m.x331 - m.x337 == 0) m.c419 =", "- m.x305 == 0) m.c321 = Constraint(expr= m.x57 - m.x303", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x527 = Var(within=Reals,bounds=(0,None),initialize=0) m.x528 = Var(within=Reals,bounds=(0,None),initialize=0) m.x529 =", ">= 0) m.c1441 = Constraint(expr= m.b619 - m.b640 >= 0)", "= Constraint(expr= m.x305 == 0) m.c315 = Constraint(expr= m.x306 ==", "= Constraint(expr= m.x488 - 0.940066550763924*m.b659 <= 0) m.c687 = Constraint(expr=", "- m.x540 == 0) m.c769 = Constraint(expr= m.x181 - m.x538", "== 0) m.c312 = Constraint(expr= - 0.6*m.x303 + m.x351 ==", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x69 = Var(within=Reals,bounds=(0,None),initialize=0) m.x70 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x71 =", "m.x194 = Var(within=Reals,bounds=(0,None),initialize=0) m.x195 = Var(within=Reals,bounds=(0,None),initialize=0) m.x196 = Var(within=Reals,bounds=(0,None),initialize=0) m.x197", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b610 = Var(within=Binary,bounds=(0,1),initialize=0) m.b611 = Var(within=Binary,bounds=(0,1),initialize=0) m.b612 =", "m.x390 == 0) m.c508 = Constraint(expr= m.x391 == 0) m.c509", "== 0) m.c996 = Constraint(expr= 6*m.b759 + m.x849 == 0)", "Constraint(expr= m.x470 - 1.18887736200171*m.b656 <= 0) m.c654 = Constraint(expr= m.x471", "= Constraint(expr= m.x541 == 0) m.c761 = Constraint(expr= m.x167 -", "Var(within=Binary,bounds=(0,1),initialize=0) m.b637 = Var(within=Binary,bounds=(0,1),initialize=0) m.b638 = Var(within=Binary,bounds=(0,1),initialize=0) m.b639 = Var(within=Binary,bounds=(0,1),initialize=0)", "0) m.c272 = Constraint(expr= m.x269 - 2.54515263975353*m.b617 <= 0) m.c273", "0) m.c1407 = Constraint(expr= m.b597 + m.b600 - m.b606 >=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x359 = Var(within=Reals,bounds=(0,None),initialize=0) m.x360 = Var(within=Reals,bounds=(0,None),initialize=0) m.x361 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b674 - m.b676 <= 0) m.c1093 = Constraint(expr= m.b675 -", "3.34221486003388*m.b616 <= 3.34221486003388) m.c251 = Constraint(expr= m.x326 - 1.32154609891348*m.b614 <=", "m.x524 = Var(within=Reals,bounds=(0,None),initialize=0) m.x525 = Var(within=Reals,bounds=(0,None),initialize=0) m.x526 = Var(within=Reals,bounds=(0,None),initialize=0) m.x527", "m.c719 = Constraint(expr=(m.x524/(0.001 + 0.999*m.b665) - 1.5*log(1 + m.x500/(0.001 +", "Var(within=Reals,bounds=(0,None),initialize=0) m.x43 = Var(within=Reals,bounds=(0,None),initialize=0) m.x44 = Var(within=Reals,bounds=(0,None),initialize=0) m.x45 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c193 = Constraint(expr= m.x256 - 30*m.b610 <= 0) m.c194 =", "m.c457 = Constraint(expr= 
m.x430 - 0.572481933717686*m.b637 <= 0) m.c458 =", "- m.x228 == 0) m.c67 = Constraint(expr= m.x13 - m.x226", "0.78338879230327) m.c665 = Constraint(expr=(m.x488/(0.001 + 0.999*m.b659) - 1.2*log(1 + m.x476/(0.001", "Constraint(expr= m.b695 + m.b697 <= 1) m.c1126 = Constraint(expr= m.b696", "m.c30 = Constraint(expr= m.x78 - m.x102 - m.x105 - m.x108", "m.c1084 = Constraint(expr= m.b666 - m.b667 <= 0) m.c1085 =", "= Constraint(expr= m.x359 == 0) m.c345 = Constraint(expr= m.x360 ==", "9*m.b717 + m.x807 == 0) m.c955 = Constraint(expr= 3*m.b718 +", "<= 0) m.c1290 = Constraint(expr= - m.b602 + m.b603 -", "<= 0) m.c1300 = Constraint(expr= - m.b611 - m.b612 +", "- m.b680 + m.b681 - m.b771 <= 0) m.c1369 =", "Constraint(expr= m.b692 + m.b693 <= 1) m.c1118 = Constraint(expr= m.b693", "Constraint(expr= m.b669 - m.b684 >= 0) m.c1486 = Constraint(expr= m.b670", "Constraint(expr= m.b605 - m.b617 >= 0) m.c1419 = Constraint(expr= m.b606", "0) m.c1460 = Constraint(expr= - m.b662 + m.b671 + m.b674", "m.x554 = Var(within=Reals,bounds=(0,None),initialize=0) m.x555 = Var(within=Reals,bounds=(0,None),initialize=0) m.x556 = Var(within=Reals,bounds=(0,None),initialize=0) m.x557", "0.999*m.b651)))*(0.001 + 0.999* m.b651) <= 0) m.c586 = Constraint(expr=(m.x460/(0.001 +", "+ m.x418 == 0) m.c368 = Constraint(expr= - m.x374 +", "= Var(within=Binary,bounds=(0,1),initialize=0) m.x776 = Var(within=Reals,bounds=(None,None),initialize=0) m.x777 = Var(within=Reals,bounds=(None,None),initialize=0) m.x778 =", "= Constraint(expr= m.x530 + 0.994083415506506*m.b665 <= 0.994083415506506) m.c744 = Constraint(expr=", "m.x44 - m.x53 - m.x56 - m.x59 == 0) m.c18", "= Constraint(expr= m.x573 - 0.480234946352917*m.b675 <= 0) m.c838 = Constraint(expr=", "m.x63 = Var(within=Reals,bounds=(0,None),initialize=0) m.x64 = Var(within=Reals,bounds=(0,None),initialize=0) m.x65 = Var(within=Reals,bounds=(0,None),initialize=0) m.x66", "m.c437 = Constraint(expr= m.x365 == 0) m.c438 = Constraint(expr= m.x366", "<= 1) m.c1114 = 
Constraint(expr= m.b690 + m.b691 <= 1)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x343 = Var(within=Reals,bounds=(0,None),initialize=0) m.x344 = Var(within=Reals,bounds=(0,None),initialize=0) m.x345 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x253 = Var(within=Reals,bounds=(0,None),initialize=0) m.x254 = Var(within=Reals,bounds=(0,None),initialize=0) m.x255 =", "m.x317 - m.x323 == 0) m.c381 = Constraint(expr= m.x63 -", "+ m.b753 <= 1) m.c1238 = Constraint(expr= m.b753 + m.b754", "Var(within=Binary,bounds=(0,1),initialize=0) m.b679 = Var(within=Binary,bounds=(0,1),initialize=0) m.b680 = Var(within=Binary,bounds=(0,1),initialize=0) m.b681 = Var(within=Binary,bounds=(0,1),initialize=0)", "- m.x143 == 0) m.c36 = Constraint(expr= m.x138 - m.x141", "m.x493 == 0) m.c674 = Constraint(expr= m.x143 - m.x476 -", "m.c1064 = Constraint(expr= m.b647 - m.b648 <= 0) m.c1065 =", "m.b636 - m.b726 <= 0) m.c1324 = Constraint(expr= - m.b635", "m.x396 + 9*m.b645 <= 9) m.c550 = Constraint(expr= m.x397 +", "Constraint(expr= - m.b665 - m.b666 + m.b667 - m.b757 <=", "Constraint(expr= m.x523 == 0) m.c701 = Constraint(expr= m.x161 - m.x494", "- m.x219 - m.x222 == 0) m.c91 = Constraint(expr= m.x10", "m.c1309 = Constraint(expr= - m.b620 - m.b621 + m.b622 -", "Constraint(expr= m.x392 - 9*m.b644 <= 0) m.c546 = Constraint(expr= m.x393", "m.c731 = Constraint(expr= m.x176 - m.x524 - m.x530 == 0)", "= Constraint(expr= m.x564 + 15*m.b684 <= 15) m.c916 = Constraint(expr=", "+ m.x830 == 0) m.c978 = Constraint(expr= 8*m.b741 + m.x831", "Var(within=Reals,bounds=(None,None),initialize=0) m.x817 = Var(within=Reals,bounds=(None,None),initialize=0) m.x818 = Var(within=Reals,bounds=(None,None),initialize=0) m.x819 = Var(within=Reals,bounds=(None,None),initialize=0)", "9*m.b623 <= 9) m.c336 = Constraint(expr= m.x354 + 9*m.b624 <=", "Constraint(expr= m.x324 + 1.83548069293539*m.b630 <= 1.83548069293539) m.c394 = Constraint(expr= m.x325", "m.b662 
+ m.b671 + m.b674 >= 0) m.c1461 = Constraint(expr=", "3*m.b734 - 4*m.b735 - 3*m.b736 - 5*m.b737 - 7*m.b738 -", "m.x372 + 1.26558121681553*m.b639 <= 1.26558121681553) m.c490 = Constraint(expr= m.x373 +", "m.c808 = Constraint(expr= m.x547 + 0.705049913072943*m.b673 <= 0.705049913072943) m.c809 =", "m.b775 <= 1) m.c1281 = Constraint(expr= m.b773 + m.b775 <=", "= Constraint(expr= m.b662 - m.b671 >= 0) m.c1473 = Constraint(expr=", "= Constraint(expr= - m.b663 + m.b672 + m.b675 >= 0)", "+ m.b756 <= 1) m.c1242 = Constraint(expr= m.b755 + m.b757", "m.b663 - m.b672 >= 0) m.c1474 = Constraint(expr= m.b664 -", "m.c248 = Constraint(expr= m.x293 + 3.34221486003388*m.b614 <= 3.34221486003388) m.c249 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x82 = Var(within=Reals,bounds=(0,None),initialize=0) m.x83 = Var(within=Reals,bounds=(0,None),initialize=0) m.x84 =", "m.x410 - 3.04984759446376*m.b653 <= 0) m.c627 = Constraint(expr= m.x411 -", "<= 30) m.c781 = Constraint(expr= m.x517 + 30*m.b670 <= 30)", "= Constraint(expr= m.x240 + 4.45628648004517*m.b603 <= 4.45628648004517) m.c127 = Constraint(expr=", "Var(within=Reals,bounds=(None,None),initialize=0) m.x778 = Var(within=Reals,bounds=(None,None),initialize=0) m.x779 = Var(within=Reals,bounds=(None,None),initialize=0) m.x780 = Var(within=Reals,bounds=(None,None),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x342 = Var(within=Reals,bounds=(0,None),initialize=0) m.x343 = Var(within=Reals,bounds=(0,None),initialize=0) m.x344 =", "m.b632 - m.b634 <= 0) m.c1051 = Constraint(expr= m.b633 -", "= Constraint(expr= m.x223 + 40*m.b601 <= 40) m.c101 = Constraint(expr=", "m.x105 - m.x405 - m.x408 == 0) m.c595 = Constraint(expr=", "= Constraint(expr= m.x94 - m.x382 - m.x385 == 0) m.c482", "0) m.c1103 = Constraint(expr= m.b686 + m.b687 <= 1) m.c1104", "m.b596 = Var(within=Binary,bounds=(0,1),initialize=0) m.b597 = Var(within=Binary,bounds=(0,1),initialize=0) m.b598 = Var(within=Binary,bounds=(0,1),initialize=0) 
m.b599", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x781 = Var(within=Reals,bounds=(None,None),initialize=0) m.x782 = Var(within=Reals,bounds=(None,None),initialize=0) m.x783 =", "m.c1443 = Constraint(expr= m.b624 - m.b642 >= 0) m.c1444 =", "40*m.b600 <= 40) m.c100 = Constraint(expr= m.x223 + 40*m.b601 <=", "m.x548 - m.x551 == 0) m.c825 = Constraint(expr= m.x186 -", "m.c553 = Constraint(expr= m.x448 - 9*m.b646 <= 0) m.c554 =", "0) m.c241 = Constraint(expr= m.x52 - m.x292 - m.x295 ==", "+ m.b757 <= 1) m.c1245 = Constraint(expr= m.b755 + m.b757", "m.b661 = Var(within=Binary,bounds=(0,1),initialize=0) m.b662 = Var(within=Binary,bounds=(0,1),initialize=0) m.b663 = Var(within=Binary,bounds=(0,1),initialize=0) m.b664", "m.c135 = Constraint(expr=(m.x267/(0.001 + 0.999*m.b606) - 1.5*log(1 + m.x243/(0.001 +", "m.c870 = Constraint(expr= - 0.9*m.x555 + m.x585 == 0) m.c871", "Constraint(expr= m.b597 + m.b600 - m.b606 >= 0) m.c1408 =", ">= 0) m.c1439 = Constraint(expr= m.b617 - m.b638 >= 0)", "log(1 + m.x212/(0.001 + 0.999*m.b596)))*(0.001 + 0.999*m.b596) <= 0) m.c54", "Constraint(expr= m.x64 - m.x319 - m.x325 == 0) m.c383 =", "m.b606 >= 0) m.c1408 = Constraint(expr= m.b598 + m.b601 -", "+ 3.71357206670431*m.b596 <= 3.71357206670431) m.c78 = Constraint(expr= m.x228 + 3.71357206670431*m.b597", "= Constraint(expr= - m.x394 + m.x448 == 0) m.c533 =", "- 7*m.b719 - 2*m.b720 - 9*m.b721 - 3*m.b722 - m.b723", "m.c1326 = Constraint(expr= - m.b638 + m.b639 - m.b729 <=", "Constraint(expr= 6*m.b688 + m.x778 == 0) m.c926 = Constraint(expr= 8*m.b689", "0) m.c855 = Constraint(expr= m.x204 - m.x579 - m.x582 ==", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x486 = Var(within=Reals,bounds=(0,None),initialize=0) m.x487 = Var(within=Reals,bounds=(0,None),initialize=0) m.x488 =", "m.c999 = Constraint(expr= 8*m.b762 + m.x852 == 0) m.c1000 =", "m.x198 - m.x567 - m.x570 == 0) m.c802 = Constraint(expr=", "m.c42 = Constraint(expr= m.x153 - m.x156 - m.x159 == 0)", "m.b680 - m.b770 <= 
0) m.c1368 = Constraint(expr= - m.b680", "Constraint(expr=(m.x340/(0.001 + 0.999*m.b619) - log(1 + m.x271/(0.001 + 0.999*m.b619)))*(0.001 +", "+ m.b638 >= 0) m.c1389 = Constraint(expr= - m.b618 +", "+ m.b652 + m.b655 >= 0) m.c1403 = Constraint(expr= m.b596", "m.x185 - m.x548 - m.x551 == 0) m.c825 = Constraint(expr=", "= Constraint(expr= m.x425 == 0) m.c414 = Constraint(expr= m.x426 ==", "Constraint(expr= m.x181 - m.x538 - m.x541 == 0) m.c770 =", "= Constraint(expr= - m.b653 + m.b656 + m.b659 >= 0)", "- m.x475 == 0) m.c650 = Constraint(expr= m.x146 - m.x482", "Constraint(expr= m.x513 - 30*m.b669 <= 0) m.c778 = Constraint(expr= m.x514", "- 30*m.b608 <= 0) m.c192 = Constraint(expr= m.x255 - 30*m.b609", "= Constraint(expr= m.b767 + m.b768 <= 1) m.c1268 = Constraint(expr=", "- m.x479 == 0) m.c675 = Constraint(expr= m.x144 - m.x477", "= Constraint(expr= m.x122 - m.x440 - m.x443 == 0) m.c516", "m.c1008 = Constraint(expr= m.b771 + m.x861 == 0) m.c1009 =", "m.x575 = Var(within=Reals,bounds=(0,None),initialize=0) m.x576 = Var(within=Reals,bounds=(0,None),initialize=0) m.x577 = Var(within=Reals,bounds=(0,None),initialize=0) m.x578", "Constraint(expr= m.x274 == 0) m.c143 = Constraint(expr= m.x29 - m.x242", "= Constraint(expr= m.x241 + 4.45628648004517*m.b604 <= 4.45628648004517) m.c128 = Constraint(expr=", "m.b768 + m.b769 <= 1) m.c1269 = Constraint(expr= m.b767 +", "Constraint(expr= m.x477 - 1.18887736200171*m.b660 <= 0) m.c682 = Constraint(expr= m.x478", "m.c109 = Constraint(expr= - 0.75*m.x238 + m.x262 == 0) m.c110", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x198 = Var(within=Reals,bounds=(0,None),initialize=0) m.x199 = Var(within=Reals,bounds=(0,None),initialize=0) m.x200 =", "0) m.c612 = Constraint(expr=(m.x465/(0.001 + 0.999*m.b654) - 0.85*log(1 + m.x411/(0.001", "<= 0) m.c581 = Constraint(expr= m.x455 + 1.04900943706034*m.b647 <= 1.04900943706034)", "Constraint(expr= m.x128 - m.x452 - m.x455 == 0) m.c570 =", "- m.b775 <= 0) m.c1373 = Constraint(expr= m.b596 + 
m.b599", "m.x566 - m.x569 == 0) m.c801 = Constraint(expr= m.x198 -", "m.b712 <= 1) m.c1153 = Constraint(expr= m.b710 + m.b711 <=", "m.c1431 = Constraint(expr= m.b612 - m.b630 >= 0) m.c1432 =", "Constraint(expr= m.b648 - m.b649 <= 0) m.c1067 = Constraint(expr= m.b650", "m.x446 - m.x449 == 0) m.c543 = Constraint(expr= m.x126 -", "- m.b662 + m.b663 - m.b753 <= 0) m.c1351 =", "Constraint(expr= m.x305 + 15*m.b623 <= 15) m.c330 = Constraint(expr= m.x306", "= Constraint(expr= m.x588 + 13.5*m.b681 <= 13.5) m.c895 = Constraint(expr=", "Var(within=Binary,bounds=(0,1),initialize=0) m.b713 = Var(within=Binary,bounds=(0,1),initialize=0) m.b714 = Var(within=Binary,bounds=(0,1),initialize=0) m.b715 = Var(within=Binary,bounds=(0,1),initialize=0)", "= Constraint(expr= m.b719 + m.b721 <= 1) m.c1174 = Constraint(expr=", "Constraint(expr=(m.x429/(0.001 + 0.999*m.b636) - 0.7*log(1 + m.x363/(0.001 + 0.999*m.b636)))*(0.001 +", "= Constraint(expr= m.x570 == 0) m.c796 = Constraint(expr= m.x571 ==", "- m.b685 <= 0) m.c1102 = Constraint(expr= m.b684 - m.b685", "m.b760 = Var(within=Binary,bounds=(0,1),initialize=0) m.b761 = Var(within=Binary,bounds=(0,1),initialize=0) m.b762 = Var(within=Binary,bounds=(0,1),initialize=0) m.b763", "m.b637 - m.b727 <= 0) m.c1325 = Constraint(expr= m.b638 -", "= Constraint(expr= m.x5 - m.x212 - m.x215 == 0) m.c63", "= Constraint(expr= m.b619 - m.b637 >= 0) m.c1439 = Constraint(expr=", "m.b666 <= 0) m.c1083 = Constraint(expr= m.b665 - m.b667 <=", "== 0) m.c89 = Constraint(expr= m.x8 - m.x218 - m.x221", "Var(within=Reals,bounds=(0,None),initialize=0) m.x7 = Var(within=Reals,bounds=(0,None),initialize=0) m.x8 = Var(within=Reals,bounds=(0,None),initialize=0) m.x9 = Var(within=Reals,bounds=(0,None),initialize=0)", "<= 0) m.c1049 = Constraint(expr= m.b632 - m.b633 <= 0)", "+ 0.999*m.b665)))*(0.001 + 0.999* m.b665) <= 0) m.c720 = Constraint(expr=(m.x525/(0.001", "m.c68 = Constraint(expr= m.x212 - 40*m.b596 <= 0) m.c69 =", "m.c1260 = Constraint(expr= m.b764 + m.b766 <= 
1) m.c1261 =", "m.x304 = Var(within=Reals,bounds=(0,None),initialize=0) m.x305 = Var(within=Reals,bounds=(0,None),initialize=0) m.x306 = Var(within=Reals,bounds=(0,None),initialize=0) m.x307", "<= 0) m.c329 = Constraint(expr= m.x305 + 15*m.b623 <= 15)", "m.c1458 = Constraint(expr= - m.b654 + m.b657 + m.b660 >=", "- 3.04984759446376*m.b627 <= 0) m.c361 = Constraint(expr= m.x358 - 3.04984759446376*m.b628", "m.x824 == 0) m.c972 = Constraint(expr= 4*m.b735 + m.x825 ==", "= Constraint(expr= m.x437 == 0) m.c474 = Constraint(expr= m.x438 ==", "m.c1327 = Constraint(expr= - m.b638 - m.b639 + m.b640 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x476 = Var(within=Reals,bounds=(0,None),initialize=0) m.x477 = Var(within=Reals,bounds=(0,None),initialize=0) m.x478 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b740 = Var(within=Binary,bounds=(0,1),initialize=0) m.b741 = Var(within=Binary,bounds=(0,1),initialize=0) m.b742 = Var(within=Binary,bounds=(0,1),initialize=0) m.b743", "Constraint(expr= m.x166 - m.x502 - m.x505 == 0) m.c731 =", "= Constraint(expr= - 0.9*m.x296 + m.x344 == 0) m.c285 =", "Constraint(expr= m.x409 == 0) m.c590 = Constraint(expr= m.x461 == 0)", "= Constraint(expr= m.x23 - m.x26 - m.x29 - m.x32 ==", "m.b625 = Var(within=Binary,bounds=(0,1),initialize=0) m.b626 = Var(within=Binary,bounds=(0,1),initialize=0) m.b627 = Var(within=Binary,bounds=(0,1),initialize=0) m.b628", "+ 0.940066550763924*m.b663 <= 0.940066550763924) m.c712 = Constraint(expr= m.x499 + 0.940066550763924*m.b664", "== 0) m.c759 = Constraint(expr= m.x540 == 0) m.c760 =", "- m.x536 - m.x539 == 0) m.c768 = Constraint(expr= m.x180", "m.b673) <= 0) m.c791 = Constraint(expr= m.x545 == 0) m.c792", "m.x544 - 0.705049913072943*m.b673 <= 0) m.c806 = Constraint(expr= m.x545 +", "m.x60 == 0) m.c19 = Constraint(expr= m.x46 - m.x55 -", "Var(within=Reals,bounds=(0,20),initialize=0) m.x91 = Var(within=Reals,bounds=(0,20),initialize=0) m.x92 = Var(within=Reals,bounds=(0,None),initialize=0) m.x93 = 
Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x561 = Var(within=Reals,bounds=(0,None),initialize=0) m.x562 = Var(within=Reals,bounds=(0,None),initialize=0) m.x563 =", "0) m.c149 = Constraint(expr= m.x242 - 4.45628648004517*m.b605 <= 0) m.c150", "m.c372 = Constraint(expr= m.x324 == 0) m.c373 = Constraint(expr= m.x325", "m.x435 - 2.30162356062425*m.b639 <= 0) m.c499 = Constraint(expr= m.x436 -", "== 0) m.c1011 = Constraint(expr= 3*m.b774 + m.x864 == 0)", "<= 1) m.c1173 = Constraint(expr= m.b719 + m.b721 <= 1)", "- m.x466 - m.x469 == 0) m.c626 = Constraint(expr= m.x410", "m.x443 = Var(within=Reals,bounds=(0,None),initialize=0) m.x444 = Var(within=Reals,bounds=(0,None),initialize=0) m.x445 = Var(within=Reals,bounds=(0,None),initialize=0) m.x446", "- 0.9*m.x318 + m.x417 == 0) m.c367 = Constraint(expr= -", "m.c461 = Constraint(expr=(m.x434/(0.001 + 0.999*m.b638) - 0.65*log(1 + m.x368/(0.001 +", "m.x464 - 1.18887736200171*m.b653 <= 0) m.c633 = Constraint(expr= m.x465 -", "m.c170 = Constraint(expr= m.x257 == 0) m.c171 = Constraint(expr= m.x258", "m.c321 = Constraint(expr= m.x57 - m.x303 - m.x306 == 0)", "m.b771 + m.x861 == 0) m.c1009 = Constraint(expr= 3*m.b772 +", "m.x191 - m.x560 - m.x563 == 0) m.c906 = Constraint(expr=", "1) m.c1162 = Constraint(expr= m.b714 + m.b715 <= 1) m.c1163", "Constraint(expr= m.x179 - m.x188 - m.x191 - m.x194 == 0)", "- 5*m.b707 - 6*m.b708 - 7*m.b709 - 2*m.b710 - 5*m.b711", "Constraint(expr= m.b740 + m.b741 <= 1) m.c1212 = Constraint(expr= m.b740", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x24 = Var(within=Reals,bounds=(0,None),initialize=0) m.x25 = Var(within=Reals,bounds=(0,None),initialize=0) m.x26 =", "0) m.c457 = Constraint(expr= m.x430 - 0.572481933717686*m.b637 <= 0) m.c458", "1) m.c1129 = Constraint(expr= m.b698 + m.b699 <= 1) m.c1130", "m.c278 = Constraint(expr= m.x338 - 1.26558121681553*m.b617 <= 0) m.c279 =", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b710 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b711 = Var(within=Binary,bounds=(0,1),initialize=0) m.b712 =", "== 0) m.c509 = Constraint(expr= m.x443 == 0) m.c510 =", "== 0) m.c826 = Constraint(expr= m.x187 - m.x550 - m.x553", "- 0.75*log(1 + m.x400/(0.001 + 0.999*m.b649)))*(0.001 + 0.999* m.b649) <=", "<= 0) m.c1318 = Constraint(expr= - m.b629 - m.b630 +", "- m.x237 - m.x240 == 0) m.c118 = Constraint(expr= m.x28", "- 2.54515263975353*m.b607 <= 0) m.c158 = Constraint(expr= m.x272 + 2.54515263975353*m.b605", "+ m.x278 == 0) m.c162 = Constraint(expr= - m.x249 +", "m.x329 - m.x335 == 0) m.c417 = Constraint(expr= m.x66 -", "- m.x504 == 0) m.c730 = Constraint(expr= m.x166 - m.x502", "+ 10*m.x130 + 15*m.x131 + 20*m.x132 + 20*m.x133 + 30*m.x155", "m.x459 - m.x462 == 0) m.c598 = Constraint(expr= m.x133 -", "= Constraint(expr= m.x274 == 0) m.c143 = Constraint(expr= m.x29 -", "m.c190 = Constraint(expr= m.x253 + 4.45628648004517*m.b610 <= 4.45628648004517) m.c191 =", "<= 0) m.c837 = Constraint(expr= m.x573 - 0.480234946352917*m.b675 <= 0)", "- m.b624 <= 0) m.c1041 = Constraint(expr= m.b623 - m.b625", "Constraint(expr= m.x24 - m.x27 - m.x30 - m.x33 == 0)", "Constraint(expr= m.x239 == 0) m.c111 = Constraint(expr= m.x240 == 0)", "- 3.34221486003388*m.b614 <= 0) m.c246 = Constraint(expr= m.x291 - 3.34221486003388*m.b615", "m.c282 = Constraint(expr= m.x342 + 1.26558121681553*m.b618 <= 1.26558121681553) m.c283 =", "<= 1) m.c1209 = Constraint(expr= m.b737 + m.b739 <= 1)", "Constraint(expr= m.x10 - m.x220 - m.x223 == 0) m.c92 =", "m.c1037 = Constraint(expr= m.b620 - m.b621 <= 0) m.c1038 =", "m.b686 + m.b687 <= 1) m.c1104 = Constraint(expr= m.b686 +", "0) m.c777 = Constraint(expr= m.x513 - 30*m.b669 <= 0) m.c778", "+ m.x778 == 0) m.c926 = Constraint(expr= 8*m.b689 + m.x779", "<= 0) m.c601 = Constraint(expr= m.x406 - 3.04984759446376*m.b652 <= 0)", "m.b746 + m.b747 <= 1) m.c1226 = Constraint(expr= m.b747 +", "+ m.b739 <= 1) m.c1210 = Constraint(expr= m.b738 + m.b739", "m.b733 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b734 = Var(within=Binary,bounds=(0,1),initialize=0) m.b735 = Var(within=Binary,bounds=(0,1),initialize=0) m.b736", "m.x261 - 3.34221486003388*m.b603 <= 0) m.c130 = Constraint(expr= m.x262 -", "0.999*m.b634)))*(0.001 + 0.999*m.b634) <= 0) m.c410 = Constraint(expr= m.x335 ==", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x296 = Var(within=Reals,bounds=(0,None),initialize=0) m.x297 = Var(within=Reals,bounds=(0,None),initialize=0) m.x298 =", "# 1486 571 111 804 0 0 0 0 #", "Constraint(expr=(m.x314/(0.001 + 0.999*m.b611) - 1.25*log(1 + m.x284/(0.001 + 0.999*m.b611)))*(0.001 +", "m.x237 - 4.45628648004517*m.b603 <= 0) m.c124 = Constraint(expr= m.x238 -", "+ m.x410/(0.001 + 0.999*m.b653)))*(0.001 + 0.999* m.b653) <= 0) m.c612", "== 0) m.c598 = Constraint(expr= m.x133 - m.x460 - m.x463", "Constraint(expr= 7*m.b701 + m.x791 == 0) m.c939 = Constraint(expr= 7*m.b702", "m.c250 = Constraint(expr= m.x295 + 3.34221486003388*m.b616 <= 3.34221486003388) m.c251 =", "m.x12 - m.x225 - m.x228 == 0) m.c67 = Constraint(expr=", "m.x324 == 0) m.c382 = Constraint(expr= m.x64 - m.x319 -", "10*m.x116 + 30*m.x117 + 40*m.x118 + 30*m.x119 + 20*m.x120 +", "Constraint(expr= m.x318 - 1.83548069293539*m.b630 <= 0) m.c391 = Constraint(expr= m.x319", "1) m.c1275 = Constraint(expr= m.b770 + m.b772 <= 1) m.c1276", "+ m.b600 - m.b606 >= 0) m.c1408 = Constraint(expr= m.b598", "== 0) m.c421 = Constraint(expr= m.x115 - m.x424 - m.x427", "m.c962 = Constraint(expr= 2*m.b725 + m.x815 == 0) m.c963 =", "== 0) m.c64 = Constraint(expr= m.x7 - m.x214 - m.x217", ">= 0) m.c1410 = Constraint(expr= m.b597 + m.b600 - m.b609", "m.x236 = Var(within=Reals,bounds=(0,None),initialize=0) m.x237 = Var(within=Reals,bounds=(0,None),initialize=0) m.x238 = Var(within=Reals,bounds=(0,None),initialize=0) m.x239", "m.x508 = Var(within=Reals,bounds=(0,None),initialize=0) m.x509 = Var(within=Reals,bounds=(0,None),initialize=0) m.x510 = Var(within=Reals,bounds=(0,None),initialize=0) 
m.x511", "<= 0) m.c1057 = Constraint(expr= m.b639 - m.b640 <= 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x314 = Var(within=Reals,bounds=(0,None),initialize=0) m.x315 = Var(within=Reals,bounds=(0,None),initialize=0) m.x316 = Var(within=Reals,bounds=(0,None),initialize=0)", "- m.b639 >= 0) m.c1441 = Constraint(expr= m.b619 - m.b640", "m.x854 == 0) m.c1002 = Constraint(expr= 3*m.b765 + m.x855 ==", "<= 0) m.c1080 = Constraint(expr= m.b662 - m.b664 <= 0)", "Constraint(expr= - m.x507 + m.x537 == 0) m.c748 = Constraint(expr=", "m.x825 = Var(within=Reals,bounds=(None,None),initialize=0) m.x826 = Var(within=Reals,bounds=(None,None),initialize=0) m.x827 = Var(within=Reals,bounds=(None,None),initialize=0) m.x828", "m.x352 = Var(within=Reals,bounds=(0,None),initialize=0) m.x353 = Var(within=Reals,bounds=(0,None),initialize=0) m.x354 = Var(within=Reals,bounds=(0,None),initialize=0) m.x355", "m.x586 = Var(within=Reals,bounds=(0,None),initialize=0) m.x587 = Var(within=Reals,bounds=(0,None),initialize=0) m.x588 = Var(within=Reals,bounds=(0,None),initialize=0) m.x589", "2.54515263975353) m.c277 = Constraint(expr= m.x277 + 2.54515263975353*m.b619 <= 2.54515263975353) m.c278", "m.b638 - m.b639 <= 0) m.c1056 = Constraint(expr= m.b638 -", "m.c519 = Constraint(expr= m.x387 - 9*m.b642 <= 0) m.c520 =", "m.c1224 = Constraint(expr= m.b746 + m.b748 <= 1) m.c1225 =", "m.c217 = Constraint(expr= m.x64 - m.x316 - m.x322 == 0)", "== 0) m.c238 = Constraint(expr= m.x334 == 0) m.c239 =", "+ m.x844 == 0) m.c992 = Constraint(expr= 2*m.b755 + m.x845", "= Constraint(expr=(m.x232/(0.001 + 0.999*m.b601) - 1.2*log(1 + m.x220/(0.001 + 0.999*m.b601)))*(0.001", "m.b684 = Var(within=Binary,bounds=(0,1),initialize=0) m.b685 = Var(within=Binary,bounds=(0,1),initialize=0) m.b686 = Var(within=Binary,bounds=(0,1),initialize=0) m.b687", "= Constraint(expr= m.x178 - m.x529 - m.x535 == 0) m.c854", "= Constraint(expr= m.x372 + 1.26558121681553*m.b639 <= 1.26558121681553) m.c490 = Constraint(expr=", "== 0) 
m.c939 = Constraint(expr= 7*m.b702 + m.x792 == 0)", "== 0) m.c988 = Constraint(expr= 9*m.b751 + m.x841 == 0)", "+ 0.999*m.b671) - 1.25*log(1 + m.x542/(0.001 + 0.999*m.b671)))*(0.001 + 0.999*", "m.x352 == 0) m.c314 = Constraint(expr= m.x305 == 0) m.c315", "0) m.c1483 = Constraint(expr= m.b670 - m.b682 >= 0) m.c1484", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x65 = Var(within=Reals,bounds=(0,None),initialize=0) m.x66 = Var(within=Reals,bounds=(0,None),initialize=0) m.x67 =", "m.c1388 = Constraint(expr= - m.b617 + m.b635 + m.b638 >=", "- 1.2*log(1 + m.x220/(0.001 + 0.999*m.b601)))*(0.001 + 0.999* m.b601) <=", "+ 0.999*m.b677) <= 0) m.c843 = Constraint(expr=(m.x579/(0.001 + 0.999*m.b678) -", "Constraint(expr= m.x357 - 3.04984759446376*m.b627 <= 0) m.c361 = Constraint(expr= m.x358", "+ m.b751 <= 1) m.c1235 = Constraint(expr= m.b752 + m.b753", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x553 = Var(within=Reals,bounds=(0,None),initialize=0) m.x554 = Var(within=Reals,bounds=(0,None),initialize=0) m.x555 =", "0) m.c539 = Constraint(expr= m.x98 - m.x392 - m.x395 ==", "<= 0) m.c919 = Constraint(expr= m.x592 - 9*m.b685 <= 0)", ">= 0) m.c1408 = Constraint(expr= m.b598 + m.b601 - m.b607", "m.x406 - m.x409 == 0) m.c596 = Constraint(expr= m.x131 -", "3.34221486003388*m.b614 <= 0) m.c246 = Constraint(expr= m.x291 - 3.34221486003388*m.b615 <=", "4*m.b703 + m.x793 == 0) m.c941 = Constraint(expr= 4*m.b704 +", "m.x549 - 0.705049913072943*m.b675 <= 0) m.c832 = Constraint(expr= m.x550 -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x415 = Var(within=Reals,bounds=(0,None),initialize=0) m.x416 = Var(within=Reals,bounds=(0,None),initialize=0) m.x417 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x494 = Var(within=Reals,bounds=(0,None),initialize=0) m.x495 = Var(within=Reals,bounds=(0,None),initialize=0) m.x496 =", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b627 = Var(within=Binary,bounds=(0,1),initialize=0) m.b628 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b629 =", "m.c572 = Constraint(expr= m.x398 - 3.04984759446376*m.b647 <= 0) m.c573 =", "<= 1) m.c1122 = Constraint(expr= m.b695 + m.b697 <= 1)", "6*m.b691 + m.x781 == 0) m.c929 = Constraint(expr= 6*m.b692 +", "- m.x592 - m.x595 == 0) m.c911 = Constraint(expr= m.x560", "Constraint(expr= m.x427 == 0) m.c416 = Constraint(expr= m.x65 - m.x329", "== 0) m.c748 = Constraint(expr= - m.x508 + m.x538 ==", "= Constraint(expr= m.x473 == 0) m.c642 = Constraint(expr= m.x474 ==", "0) m.c596 = Constraint(expr= m.x131 - m.x458 - m.x461 ==", "m.b676 >= 0) m.c1463 = Constraint(expr= - m.b665 + m.b677", "= Constraint(expr=(m.x490/(0.001 + 0.999*m.b661) - 1.2*log(1 + m.x478/(0.001 + 0.999*m.b661)))*(0.001", "== 0) m.c477 = Constraint(expr= m.x84 - m.x369 - m.x372", "m.x283 == 0) m.c176 = Constraint(expr= m.x32 - m.x248 -", "m.x394 - 9*m.b646 <= 0) m.c548 = Constraint(expr= m.x395 +", "Var(within=Reals,bounds=(0,None),initialize=0) m.x59 = Var(within=Reals,bounds=(0,None),initialize=0) m.x60 = Var(within=Reals,bounds=(0,None),initialize=0) m.x61 = Var(within=Reals,bounds=(0,None),initialize=0)", "- m.x430 - m.x433 == 0) m.c449 = Constraint(expr= m.x362", "m.b710 + m.b712 <= 1) m.c1156 = Constraint(expr= m.b711 +", "== 0) m.c623 = Constraint(expr= m.x134 - m.x464 - m.x467", "+ m.b766 <= 1) m.c1261 = Constraint(expr= m.b764 + m.b765", "0) m.c410 = Constraint(expr= m.x335 == 0) m.c411 = Constraint(expr=", "m.x129 - m.x453 - m.x456 == 0) m.c571 = Constraint(expr=", "m.b649 <= 0) m.c1066 = Constraint(expr= m.b648 - m.b649 <=", "= Constraint(expr= m.b611 - m.b612 <= 0) m.c1029 = Constraint(expr=", "0.999*m.b657)))*(0.001 + 0.999*m.b657) <= 0) m.c640 = Constraint(expr=(m.x484/(0.001 + 0.999*m.b658)", "- m.x575 == 0) m.c828 = Constraint(expr= m.x201 - m.x573", "0) m.c294 = Constraint(expr= m.x54 - m.x297 - m.x300 ==", "Constraint(expr= m.x564 + 15*m.b684 <= 15) m.c916 = Constraint(expr= m.x565", "m.b696 + m.b697 <= 1) m.c1127 = Constraint(expr= 
m.b698 +", "m.x344 - m.x347 == 0) m.c297 = Constraint(expr= m.x72 -", "Constraint(expr= m.x253 == 0) m.c170 = Constraint(expr= m.x257 == 0)", "== 0) m.c52 = Constraint(expr= m.x181 - m.x190 - m.x193", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x503 = Var(within=Reals,bounds=(0,None),initialize=0) m.x504 = Var(within=Reals,bounds=(0,None),initialize=0) m.x505 =", "Objective(expr= - m.x2 - m.x3 - m.x4 + 5*m.x20 +", "Constraint(expr= m.x214 - 40*m.b598 <= 0) m.c71 = Constraint(expr= m.x215", "- m.b618 + m.b636 + m.b639 >= 0) m.c1390 =", "1.18887736200171*m.b658 <= 0) m.c656 = Constraint(expr= m.x473 + 1.18887736200171*m.b656 <=", "m.b626 >= 0) m.c1428 = Constraint(expr= m.b609 - m.b627 >=", "m.x212/(0.001 + 0.999*m.b596)))*(0.001 + 0.999*m.b596) <= 0) m.c54 = Constraint(expr=(m.x225/(0.001", "<= 0) m.c1334 = Constraint(expr= m.b647 - m.b737 <= 0)", "m.x577 == 0) m.c824 = Constraint(expr= m.x185 - m.x548 -", "= Constraint(expr= m.x408 + 3.04984759446376*m.b651 <= 3.04984759446376) m.c604 = Constraint(expr=", "= Constraint(expr= m.b602 - m.b604 <= 0) m.c1021 = Constraint(expr=", "m.c584 = Constraint(expr=(m.x458/(0.001 + 0.999*m.b650) - 0.8*log(1 + m.x404/(0.001 +", "0) m.c732 = Constraint(expr= m.x177 - m.x525 - m.x531 ==", "Constraint(expr= m.x548 - 0.705049913072943*m.b674 <= 0) m.c831 = Constraint(expr= m.x549", "== 0) m.c326 = Constraint(expr= m.x302 - 15*m.b623 <= 0)", "+ 2.30162356062425*m.b639 <= 2.30162356062425) m.c502 = Constraint(expr= m.x439 + 2.30162356062425*m.b640", "2*m.b740 + m.x830 == 0) m.c978 = Constraint(expr= 8*m.b741 +", "1) m.c1109 = Constraint(expr= m.b689 + m.b690 <= 1) m.c1110", "= Constraint(expr= m.b726 + m.b727 <= 1) m.c1187 = Constraint(expr=", "m.x525 - m.x531 == 0) m.c733 = Constraint(expr= m.x178 -", "m.c737 = Constraint(expr= m.x503 + 0.940066550763924*m.b665 <= 0.940066550763924) m.c738 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x540 = Var(within=Reals,bounds=(0,None),initialize=0) m.x541 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x542 =", "0) m.c152 = Constraint(expr= m.x245 + 4.45628648004517*m.b605 <= 4.45628648004517) m.c153", "0) m.c796 = Constraint(expr= m.x571 == 0) m.c797 = Constraint(expr=", "Constraint(expr= m.b728 + m.b729 <= 1) m.c1188 = Constraint(expr= m.b728", "= Constraint(expr= m.x583 == 0) m.c851 = Constraint(expr= m.x176 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x115 = Var(within=Reals,bounds=(0,None),initialize=0) m.x116 = Var(within=Reals,bounds=(0,None),initialize=0) m.x117 = Var(within=Reals,bounds=(0,None),initialize=0)", "<= 1) m.c1225 = Constraint(expr= m.b746 + m.b747 <= 1)", "m.c616 = Constraint(expr= m.x415 == 0) m.c617 = Constraint(expr= m.x467", "m.c957 = Constraint(expr= 2*m.b720 + m.x810 == 0) m.c958 =", "m.c462 = Constraint(expr=(m.x435/(0.001 + 0.999*m.b639) - 0.65*log(1 + m.x369/(0.001 +", "== 0) m.c381 = Constraint(expr= m.x63 - m.x318 - m.x324", "m.c561 = Constraint(expr= m.x402 == 0) m.c562 = Constraint(expr= m.x403", "+ m.b644 >= 0) m.c1398 = Constraint(expr= - m.b624 +", "m.c1334 = Constraint(expr= m.b647 - m.b737 <= 0) m.c1335 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x328 = Var(within=Reals,bounds=(0,None),initialize=0) m.x329 = Var(within=Reals,bounds=(0,None),initialize=0) m.x330 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x222 + 40*m.b600 <= 40) m.c100 = Constraint(expr= m.x223 +", "Constraint(expr= - 0.9*m.x318 + m.x417 == 0) m.c367 = Constraint(expr=", "+ 0.940066550763924*m.b669 <= 0.940066550763924) m.c775 = Constraint(expr= m.x511 + 0.940066550763924*m.b670", "m.c639 = Constraint(expr=(m.x483/(0.001 + 0.999*m.b657) - log(1 + m.x471/(0.001 +", "- 0.666992981045719*m.b671 <= 0) m.c810 = Constraint(expr= m.x567 - 0.666992981045719*m.b672", "m.b628 = Var(within=Binary,bounds=(0,1),initialize=0) m.b629 = Var(within=Binary,bounds=(0,1),initialize=0) m.b630 = Var(within=Binary,bounds=(0,1),initialize=0) m.b631", "= Constraint(expr= m.x312 == 0) m.c343 = 
Constraint(expr= m.x313 ==", "m.x314 - 1.83548069293539*m.b611 <= 0) m.c225 = Constraint(expr= m.x315 -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x235 = Var(within=Reals,bounds=(0,None),initialize=0) m.x236 = Var(within=Reals,bounds=(0,None),initialize=0) m.x237 =", "m.x289 = Var(within=Reals,bounds=(0,None),initialize=0) m.x290 = Var(within=Reals,bounds=(0,None),initialize=0) m.x291 = Var(within=Reals,bounds=(0,None),initialize=0) m.x292", "m.b624 - m.b642 >= 0) m.c1444 = Constraint(expr= m.b625 -", "m.x181 - m.x538 - m.x541 == 0) m.c770 = Constraint(expr=", "4.45628648004517) m.c128 = Constraint(expr= m.x260 - 3.34221486003388*m.b602 <= 0) m.c129", "= Constraint(expr= m.x349 + 13.5*m.b622 <= 13.5) m.c311 = Constraint(expr=", "0) m.c492 = Constraint(expr= m.x381 - 33.5*m.b639 <= 0) m.c493", "= Constraint(expr= m.b614 - m.b615 <= 0) m.c1032 = Constraint(expr=", "Var(within=Binary,bounds=(0,1),initialize=0) m.b615 = Var(within=Binary,bounds=(0,1),initialize=0) m.b616 = Var(within=Binary,bounds=(0,1),initialize=0) m.b617 = Var(within=Binary,bounds=(0,1),initialize=0)", "- m.b641 + m.b642 - m.b732 <= 0) m.c1330 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x520 = Var(within=Reals,bounds=(0,None),initialize=0) m.x521 = Var(within=Reals,bounds=(0,None),initialize=0) m.x522 = Var(within=Reals,bounds=(0,None),initialize=0)", "285*m.x197 + 390*m.x198 + 350*m.x199 + 290*m.x200 + 405*m.x201 +", "Constraint(expr= m.b620 - m.b710 <= 0) m.c1308 = Constraint(expr= -", "= Constraint(expr= m.b663 - m.b675 >= 0) m.c1477 = Constraint(expr=", "- 3.04984759446376*m.b647 <= 0) m.c573 = Constraint(expr= m.x399 - 3.04984759446376*m.b648", "1) m.c1195 = Constraint(expr= m.b731 + m.b732 <= 1) m.c1196", "<= 0) m.c1366 = Constraint(expr= - m.b677 - m.b678 +", "Constraint(expr= m.b704 + m.b705 <= 1) m.c1142 = Constraint(expr= m.b705", "= Constraint(expr= m.x282 + 15*m.b609 <= 15) m.c202 = Constraint(expr=", "m.c1436 = Constraint(expr= m.b617 - m.b635 >= 0) m.c1437 =", "= 
Var(within=Reals,bounds=(0,None),initialize=0) m.x422 = Var(within=Reals,bounds=(0,None),initialize=0) m.x423 = Var(within=Reals,bounds=(0,None),initialize=0) m.x424 =", "0) m.c593 = Constraint(expr= m.x104 - m.x404 - m.x407 ==", "m.x108 == 0) m.c31 = Constraint(expr= m.x79 - m.x103 -", "m.x149 = Var(within=Reals,bounds=(0,None),initialize=0) m.x150 = Var(within=Reals,bounds=(0,None),initialize=0) m.x151 = Var(within=Reals,bounds=(0,None),initialize=0) m.x152", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x288 = Var(within=Reals,bounds=(0,None),initialize=0) m.x289 = Var(within=Reals,bounds=(0,None),initialize=0) m.x290 =", "m.x227 + 3.71357206670431*m.b596 <= 3.71357206670431) m.c78 = Constraint(expr= m.x228 +", "m.c660 = Constraint(expr= m.x483 - 0.78338879230327*m.b657 <= 0) m.c661 =", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b653 = Var(within=Binary,bounds=(0,1),initialize=0) m.b654 = Var(within=Binary,bounds=(0,1),initialize=0) m.b655 =", "== 0) m.c572 = Constraint(expr= m.x398 - 3.04984759446376*m.b647 <= 0)", "- 4*m.b767 - 8*m.b768 - 6*m.b769 - 2*m.b770 - m.b771", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x336 = Var(within=Reals,bounds=(0,None),initialize=0) m.x337 = Var(within=Reals,bounds=(0,None),initialize=0) m.x338 =", "- m.x375 - m.x378 == 0) m.c385 = Constraint(expr= m.x88", "9) m.c529 = Constraint(expr= m.x445 + 9*m.b643 <= 9) m.c530", "m.c539 = Constraint(expr= m.x98 - m.x392 - m.x395 == 0)", "= Constraint(expr= m.x493 + 0.940066550763924*m.b661 <= 0.940066550763924) m.c692 = Constraint(expr=", "m.x257 = Var(within=Reals,bounds=(0,None),initialize=0) m.x258 = Var(within=Reals,bounds=(0,None),initialize=0) m.x259 = Var(within=Reals,bounds=(0,None),initialize=0) m.x260", "m.x579 - m.x582 == 0) m.c856 = Constraint(expr= m.x205 -", "m.x787 = Var(within=Reals,bounds=(None,None),initialize=0) m.x788 = Var(within=Reals,bounds=(None,None),initialize=0) m.x789 = Var(within=Reals,bounds=(None,None),initialize=0) m.x790", "<= 0) 
m.c832 = Constraint(expr= m.x550 - 0.705049913072943*m.b676 <= 0)", "= Constraint(expr= m.x86 - m.x374 - m.x377 == 0) m.c384", "<= 0.690184503917672) m.c867 = Constraint(expr= m.x582 + 0.690184503917672*m.b678 <= 0.690184503917672)", "m.x527 = Var(within=Reals,bounds=(0,None),initialize=0) m.x528 = Var(within=Reals,bounds=(0,None),initialize=0) m.x529 = Var(within=Reals,bounds=(0,None),initialize=0) m.x530", "m.c206 = Constraint(expr= m.x287 == 0) m.c207 = Constraint(expr= m.x288", "- m.x274 == 0) m.c149 = Constraint(expr= m.x242 - 4.45628648004517*m.b605", "== 0) m.c264 = Constraint(expr= m.x342 == 0) m.c265 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x315 = Var(within=Reals,bounds=(0,None),initialize=0) m.x316 = Var(within=Reals,bounds=(0,None),initialize=0) m.x317 =", "= Constraint(expr= m.x315 - 1.83548069293539*m.b612 <= 0) m.c226 = Constraint(expr=", "m.b630 <= 0) m.c1047 = Constraint(expr= m.b629 - m.b631 <=", "m.c1408 = Constraint(expr= m.b598 + m.b601 - m.b607 >= 0)", "= Constraint(expr= m.b638 - m.b728 <= 0) m.c1326 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x234 = Var(within=Reals,bounds=(0,None),initialize=0) m.x235 = Var(within=Reals,bounds=(0,None),initialize=0) m.x236 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= - m.b622 + m.b640 >= 0) m.c1397 = Constraint(expr=", "- m.x83 == 0) m.c21 = Constraint(expr= m.x69 - m.x81", "<= 0) m.c737 = Constraint(expr= m.x503 + 0.940066550763924*m.b665 <= 0.940066550763924)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x340 = Var(within=Reals,bounds=(0,None),initialize=0) m.x341 = Var(within=Reals,bounds=(0,None),initialize=0) m.x342 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b763 = Var(within=Binary,bounds=(0,1),initialize=0) m.b764 = Var(within=Binary,bounds=(0,1),initialize=0) m.b765 =", "== 0) m.c68 = Constraint(expr= m.x212 - 40*m.b596 <= 0)", "+ m.x801 == 0) m.c949 = Constraint(expr= 
2*m.b712 + m.x802", "+ m.x852 == 0) m.c1000 = Constraint(expr= 7*m.b763 + m.x853", "0) m.c447 = Constraint(expr= m.x117 - m.x429 - m.x432 ==", "= Constraint(expr= m.x572 - 0.480234946352917*m.b674 <= 0) m.c837 = Constraint(expr=", "0) m.c1088 = Constraint(expr= m.b671 - m.b672 <= 0) m.c1089", "+ 0.999*m.b651)))*(0.001 + 0.999* m.b651) <= 0) m.c586 = Constraint(expr=(m.x460/(0.001", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x370 = Var(within=Reals,bounds=(0,None),initialize=0) m.x371 = Var(within=Reals,bounds=(0,None),initialize=0) m.x372 =", "= Constraint(expr= m.x222 + 40*m.b600 <= 40) m.c100 = Constraint(expr=", "0.940066550763924*m.b670 <= 0) m.c773 = Constraint(expr= m.x509 + 0.940066550763924*m.b668 <=", "== 0) m.c29 = Constraint(expr= m.x77 - m.x101 - m.x104", "<= 1) m.c1274 = Constraint(expr= m.b771 + m.b772 <= 1)", "m.x265 + 3.34221486003388*m.b604 <= 3.34221486003388) m.c134 = Constraint(expr=(m.x266/(0.001 + 0.999*m.b605)", "m.b674 - m.b675 <= 0) m.c1092 = Constraint(expr= m.b674 -", "Constraint(expr= - m.b596 + m.b597 - m.b687 <= 0) m.c1285", "- m.x457 == 0) m.c572 = Constraint(expr= m.x398 - 3.04984759446376*m.b647", ">= 0) m.c1444 = Constraint(expr= m.b625 - m.b643 >= 0)", "m.c1162 = Constraint(expr= m.b714 + m.b715 <= 1) m.c1163 =", "- m.x362 - m.x365 == 0) m.c444 = Constraint(expr= m.x81", "m.c905 = Constraint(expr= m.x191 - m.x560 - m.x563 == 0)", "+ m.b693 <= 1) m.c1116 = Constraint(expr= m.b692 + m.b694", "20*m.b631 <= 20) m.c407 = Constraint(expr=(m.x422/(0.001 + 0.999*m.b632) - log(1", "Constraint(expr= - m.b623 + m.b641 + m.b644 >= 0) m.c1398", "Constraint(expr= m.x551 == 0) m.c819 = Constraint(expr= m.x552 == 0)", "m.c969 = Constraint(expr= 5*m.b732 + m.x822 == 0) m.c970 =", "0.75*m.x494 + m.x518 == 0) m.c693 = Constraint(expr= - 0.75*m.x495", "= Constraint(expr= - m.b654 + m.b657 + m.b660 >= 0)", "m.c1184 = Constraint(expr= m.b726 + m.b727 <= 1) m.c1185 =", "Constraint(expr= m.x162 - m.x495 - m.x498 == 0) m.c703 =", 
"Constraint(expr= m.x485 == 0) m.c645 = Constraint(expr= m.x486 == 0)", "Constraint(expr= m.x180 - m.x189 - m.x192 - m.x195 == 0)", "m.x140 - m.x470 - m.x473 == 0) m.c648 = Constraint(expr=", "m.b620 = Var(within=Binary,bounds=(0,1),initialize=0) m.b621 = Var(within=Binary,bounds=(0,1),initialize=0) m.b622 = Var(within=Binary,bounds=(0,1),initialize=0) m.b623", "<= 1) m.c1153 = Constraint(expr= m.b710 + m.b711 <= 1)", "= Constraint(expr= m.x138 - m.x141 - m.x144 == 0) m.c37", "== 0) m.c800 = Constraint(expr= m.x197 - m.x566 - m.x569", "- 15*m.b627 <= 0) m.c355 = Constraint(expr= m.x310 - 15*m.b628", "Constraint(expr= m.b644 - m.b646 <= 0) m.c1063 = Constraint(expr= m.b645", "m.b639) <= 0) m.c463 = Constraint(expr=(m.x436/(0.001 + 0.999*m.b640) - 0.65*log(1", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x19 = Var(within=Reals,bounds=(0,None),initialize=0) m.x20 = Var(within=Reals,bounds=(0,None),initialize=0) m.x21 =", "Var(within=Binary,bounds=(0,1),initialize=0) m.b712 = Var(within=Binary,bounds=(0,1),initialize=0) m.b713 = Var(within=Binary,bounds=(0,1),initialize=0) m.b714 = Var(within=Binary,bounds=(0,1),initialize=0)", "Constraint(expr= m.x400 - 3.04984759446376*m.b649 <= 0) m.c575 = Constraint(expr= m.x401", "m.b742 <= 0) m.c1340 = Constraint(expr= m.b653 - m.b743 <=", "- 1.04900943706034*m.b647 <= 0) m.c579 = Constraint(expr= m.x453 - 1.04900943706034*m.b648", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x405 = Var(within=Reals,bounds=(0,None),initialize=0) m.x406 = Var(within=Reals,bounds=(0,None),initialize=0) m.x407 =", "1.18887736200171) m.c636 = Constraint(expr= m.x468 + 1.18887736200171*m.b654 <= 1.18887736200171) m.c637", "m.x74 - m.x350 - m.x353 == 0) m.c324 = Constraint(expr=", "m.x220 - 40*m.b601 <= 0) m.c98 = Constraint(expr= m.x221 +", "= Constraint(expr= m.x485 == 0) m.c645 = Constraint(expr= m.x486 ==", "0) m.c662 = Constraint(expr= m.x485 + 0.78338879230327*m.b656 <= 0.78338879230327) m.c663", "m.x481 + 1.18887736200171*m.b661 <= 
1.18887736200171) m.c686 = Constraint(expr= m.x488 -", "- m.x266 - m.x272 == 0) m.c147 = Constraint(expr= m.x42", "m.x449 == 0) m.c543 = Constraint(expr= m.x126 - m.x447 -", "0) m.c769 = Constraint(expr= m.x181 - m.x538 - m.x541 ==", "Var(within=Reals,bounds=(0,None),initialize=0) m.x516 = Var(within=Reals,bounds=(0,None),initialize=0) m.x517 = Var(within=Reals,bounds=(0,None),initialize=0) m.x518 = Var(within=Reals,bounds=(0,None),initialize=0)", "1.26558121681553*m.b636 <= 1.26558121681553) m.c454 = Constraint(expr= m.x367 + 1.26558121681553*m.b637 <=", "m.b605 >= 0) m.c1407 = Constraint(expr= m.b597 + m.b600 -", "0.5*m.x514 + m.x538 == 0) m.c752 = Constraint(expr= m.x509 ==", "Var(within=Reals,bounds=(None,None),initialize=0) m.x814 = Var(within=Reals,bounds=(None,None),initialize=0) m.x815 = Var(within=Reals,bounds=(None,None),initialize=0) m.x816 = Var(within=Reals,bounds=(None,None),initialize=0)", "+ 1.26558121681553*m.b619 <= 1.26558121681553) m.c284 = Constraint(expr= - 0.9*m.x296 +", "<= 0) m.c252 = Constraint(expr= m.x327 - 1.32154609891348*m.b615 <= 0)", "m.c452 = Constraint(expr= m.x365 + 1.26558121681553*m.b635 <= 1.26558121681553) m.c453 =", "m.b733 <= 1) m.c1197 = Constraint(expr= m.b731 + m.b733 <=", "= Constraint(expr= m.x284 - 3.34221486003388*m.b611 <= 0) m.c219 = Constraint(expr=", "m.c485 = Constraint(expr= m.x368 - 1.26558121681553*m.b638 <= 0) m.c486 =", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b603 = Var(within=Binary,bounds=(0,1),initialize=0) m.b604 = Var(within=Binary,bounds=(0,1),initialize=0) m.b605 =", "m.b638 - m.b728 <= 0) m.c1326 = Constraint(expr= - m.b638", "m.x262 = Var(within=Reals,bounds=(0,None),initialize=0) m.x263 = Var(within=Reals,bounds=(0,None),initialize=0) m.x264 = Var(within=Reals,bounds=(0,None),initialize=0) m.x265", "m.x860 = Var(within=Reals,bounds=(None,None),initialize=0) m.x861 = Var(within=Reals,bounds=(None,None),initialize=0) m.x862 = Var(within=Reals,bounds=(None,None),initialize=0) m.x863", 
"m.c502 = Constraint(expr= m.x439 + 2.30162356062425*m.b640 <= 2.30162356062425) m.c503 =", "+ m.x501/(0.001 + 0.999*m.b666)))*(0.001 + 0.999* m.b666) <= 0) m.c721", "Var(within=Reals,bounds=(0,None),initialize=0) m.x219 = Var(within=Reals,bounds=(0,None),initialize=0) m.x220 = Var(within=Reals,bounds=(0,None),initialize=0) m.x221 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x397 == 0) m.c542 = Constraint(expr= m.x125 - m.x446 -", "<= 0) m.c654 = Constraint(expr= m.x471 - 1.18887736200171*m.b657 <= 0)", "Constraint(expr= m.b614 - m.b616 <= 0) m.c1033 = Constraint(expr= m.b615", "m.x526 - m.x532 == 0) m.c734 = Constraint(expr= m.x500 -", "m.c1264 = Constraint(expr= m.b765 + m.b766 <= 1) m.c1265 =", "- m.b757 <= 0) m.c1355 = Constraint(expr= m.b668 - m.b758", "Constraint(expr= m.x347 == 0) m.c291 = Constraint(expr= m.x348 == 0)", "m.c973 = Constraint(expr= 3*m.b736 + m.x826 == 0) m.c974 =", "Constraint(expr= m.x560 - 15*m.b683 <= 0) m.c912 = Constraint(expr= m.x561", ">= 0) m.c1394 = Constraint(expr= - m.b620 + m.b638 >=", "- m.b602 - m.b603 + m.b604 - m.b694 <= 0)", "Constraint(expr= m.x93 - m.x381 - m.x384 == 0) m.c481 =", "= Constraint(expr= m.b656 - m.b746 <= 0) m.c1344 = Constraint(expr=", "m.x383 == 0) m.c480 = Constraint(expr= m.x93 - m.x381 -", "1) m.c1146 = Constraint(expr= m.b707 + m.b709 <= 1) m.c1147", "== 0) m.c723 = Constraint(expr= m.x504 == 0) m.c724 =", "m.b758 + m.b760 <= 1) m.c1252 = Constraint(expr= m.b759 +", "- m.x409 == 0) m.c596 = Constraint(expr= m.x131 - m.x458", "0) m.c752 = Constraint(expr= m.x509 == 0) m.c753 = Constraint(expr=", "0) m.c1481 = Constraint(expr= m.b668 - m.b680 >= 0) m.c1482", "= Constraint(expr= m.x303 - 15*m.b624 <= 0) m.c328 = Constraint(expr=", "m.b652 >= 0) m.c1454 = Constraint(expr= m.b626 - m.b653 >=", "3.71357206670431*m.b596 <= 3.71357206670431) m.c78 = Constraint(expr= m.x228 + 3.71357206670431*m.b597 <=", "m.c181 = Constraint(expr= m.x37 - m.x256 - m.x259 == 0)", "= Constraint(expr= m.b678 - m.b679 <= 0) 
m.c1097 = Constraint(expr=", "0) m.c804 = Constraint(expr= m.x543 - 0.705049913072943*m.b672 <= 0) m.c805", "m.x417 - m.x420 == 0) m.c388 = Constraint(expr= m.x112 -", "0) m.c968 = Constraint(expr= 2*m.b731 + m.x821 == 0) m.c969", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x419 = Var(within=Reals,bounds=(0,None),initialize=0) m.x420 = Var(within=Reals,bounds=(0,None),initialize=0) m.x421 =", "m.c746 = Constraint(expr= - m.x506 + m.x536 == 0) m.c747", "0) m.c296 = Constraint(expr= m.x71 - m.x344 - m.x347 ==", "= Constraint(expr= m.b623 - m.b625 <= 0) m.c1042 = Constraint(expr=", "m.c1317 = Constraint(expr= - m.b629 + m.b630 - m.b720 <=", "- m.x260 - m.x263 == 0) m.c120 = Constraint(expr= m.x39", "Var(within=Binary,bounds=(0,1),initialize=0) m.b636 = Var(within=Binary,bounds=(0,1),initialize=0) m.b637 = Var(within=Binary,bounds=(0,1),initialize=0) m.b638 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.c224 = Constraint(expr= m.x314 - 1.83548069293539*m.b611 <= 0) m.c225 =", "m.x588 = Var(within=Reals,bounds=(0,None),initialize=0) m.x589 = Var(within=Reals,bounds=(0,None),initialize=0) m.x590 = Var(within=Reals,bounds=(0,None),initialize=0) m.x591", "m.c256 = Constraint(expr= m.x334 + 1.32154609891348*m.b616 <= 1.32154609891348) m.c257 =", "Constraint(expr= m.x347 + 13.5*m.b620 <= 13.5) m.c309 = Constraint(expr= m.x348", "m.x414 + 3.04984759446376*m.b654 <= 3.04984759446376) m.c631 = Constraint(expr= m.x415 +", "0) m.c1322 = Constraint(expr= m.b635 - m.b725 <= 0) m.c1323", "m.x561 = Var(within=Reals,bounds=(0,None),initialize=0) m.x562 = Var(within=Reals,bounds=(0,None),initialize=0) m.x563 = Var(within=Reals,bounds=(0,None),initialize=0) m.x564", "Constraint(expr= m.x222 == 0) m.c85 = Constraint(expr= m.x223 == 0)", "m.b656 - m.b657 + m.b658 - m.b748 <= 0) m.c1346", "- 0.666992981045719*m.b672 <= 0) m.c811 = Constraint(expr= m.x568 - 0.666992981045719*m.b673", "<= 0) m.c886 = Constraint(expr= m.x556 - 15*m.b682 <= 0)", "m.b743 + m.x833 == 0) m.c981 = 
Constraint(expr= 4*m.b744 +", "m.x401 == 0) m.c561 = Constraint(expr= m.x402 == 0) m.c562", "m.x281 = Var(within=Reals,bounds=(0,None),initialize=0) m.x282 = Var(within=Reals,bounds=(0,None),initialize=0) m.x283 = Var(within=Reals,bounds=(0,None),initialize=0) m.x284", "== 0) m.c567 = Constraint(expr= m.x102 - m.x399 - m.x402", "Constraint(expr=(m.x356/(0.001 + 0.999*m.b626) - 1.1*log(1 + m.x308/(0.001 + 0.999*m.b626)))*(0.001 +", "Constraint(expr= m.b749 + m.b751 <= 1) m.c1234 = Constraint(expr= m.b750", "m.b695 + m.b697 <= 1) m.c1123 = Constraint(expr= m.b695 +", "m.b700 = Var(within=Binary,bounds=(0,1),initialize=0) m.b701 = Var(within=Binary,bounds=(0,1),initialize=0) m.b702 = Var(within=Binary,bounds=(0,1),initialize=0) m.b703", "+ m.b661 - m.b751 <= 0) m.c1349 = Constraint(expr= m.b662", "m.x315 - m.x321 == 0) m.c217 = Constraint(expr= m.x64 -", "0.999* m.b655) <= 0) m.c614 = Constraint(expr= m.x413 == 0)", "0) m.c1072 = Constraint(expr= m.b654 - m.b655 <= 0) m.c1073", "3.34221486003388*m.b613 <= 3.34221486003388) m.c224 = Constraint(expr= m.x314 - 1.83548069293539*m.b611 <=", "0) m.c16 = Constraint(expr= m.x40 - m.x49 - m.x52 ==", "Constraint(expr= - m.b680 + m.b681 - m.b771 <= 0) m.c1369", "m.x222 == 0) m.c91 = Constraint(expr= m.x10 - m.x220 -", "- m.x532 == 0) m.c734 = Constraint(expr= m.x500 - 0.940066550763924*m.b665", "m.c322 = Constraint(expr= m.x58 - m.x304 - m.x307 == 0)", "Constraint(expr= m.b629 - m.b630 <= 0) m.c1047 = Constraint(expr= m.b629", "Constraint(expr= 8*m.b729 + m.x819 == 0) m.c967 = Constraint(expr= m.b730", "m.c1451 = Constraint(expr= m.b626 - m.b650 >= 0) m.c1452 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x552 = Var(within=Reals,bounds=(0,None),initialize=0) m.x553 = Var(within=Reals,bounds=(0,None),initialize=0) m.x554 =", "== 0) m.c31 = Constraint(expr= m.x79 - m.x103 - m.x106", "= Constraint(expr= m.x29 - m.x242 - m.x245 == 0) m.c144", "= Constraint(expr= m.x134 - m.x137 == 0) m.c33 = Constraint(expr=", "m.b653 >= 0) 
m.c1455 = Constraint(expr= m.b627 - m.b654 >=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x80 = Var(within=Reals,bounds=(0,None),initialize=0) m.x81 = Var(within=Reals,bounds=(0,None),initialize=0) m.x82 =", "= Constraint(expr= m.b623 - m.b644 >= 0) m.c1446 = Constraint(expr=", "Constraint(expr= m.x343 == 0) m.c266 = Constraint(expr= m.x41 - m.x269", "m.c743 = Constraint(expr= m.x530 + 0.994083415506506*m.b665 <= 0.994083415506506) m.c744 =", "Var(within=Binary,bounds=(0,1),initialize=0) m.b738 = Var(within=Binary,bounds=(0,1),initialize=0) m.b739 = Var(within=Binary,bounds=(0,1),initialize=0) m.b740 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.x533 == 0) m.c846 = Constraint(expr= m.x534 == 0) m.c847", "3.04984759446376*m.b650 <= 0) m.c600 = Constraint(expr= m.x405 - 3.04984759446376*m.b651 <=", "Constraint(expr= m.x338 - 1.26558121681553*m.b617 <= 0) m.c279 = Constraint(expr= m.x339", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x485 = Var(within=Reals,bounds=(0,None),initialize=0) m.x486 = Var(within=Reals,bounds=(0,None),initialize=0) m.x487 =", "m.x326 = Var(within=Reals,bounds=(0,None),initialize=0) m.x327 = Var(within=Reals,bounds=(0,None),initialize=0) m.x328 = Var(within=Reals,bounds=(0,None),initialize=0) m.x329", "m.x229 = Var(within=Reals,bounds=(0,None),initialize=0) m.x230 = Var(within=Reals,bounds=(0,None),initialize=0) m.x231 = Var(within=Reals,bounds=(0,None),initialize=0) m.x232", "<= 0.690184503917672) m.c868 = Constraint(expr= m.x583 + 0.690184503917672*m.b679 <= 0.690184503917672)", "+ m.b607 - m.b697 <= 0) m.c1295 = Constraint(expr= m.b608", "m.b720 = Var(within=Binary,bounds=(0,1),initialize=0) m.b721 = Var(within=Binary,bounds=(0,1),initialize=0) m.b722 = Var(within=Binary,bounds=(0,1),initialize=0) m.b723", "Var(within=Reals,bounds=(0,None),initialize=0) m.x269 = Var(within=Reals,bounds=(0,None),initialize=0) m.x270 = Var(within=Reals,bounds=(0,None),initialize=0) m.x271 = 
Var(within=Reals,bounds=(0,None),initialize=0)", "<= 1) m.c1179 = Constraint(expr= m.b722 + m.b724 <= 1)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x508 = Var(within=Reals,bounds=(0,None),initialize=0) m.x509 = Var(within=Reals,bounds=(0,None),initialize=0) m.x510 =", "m.c882 = Constraint(expr= m.x207 - m.x585 - m.x588 == 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x477 = Var(within=Reals,bounds=(0,None),initialize=0) m.x478 = Var(within=Reals,bounds=(0,None),initialize=0) m.x479 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x442 = Var(within=Reals,bounds=(0,None),initialize=0) m.x443 = Var(within=Reals,bounds=(0,None),initialize=0) m.x444 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= m.x439 == 0) m.c476 = Constraint(expr= m.x83 -", "- 13.5*m.b681 <= 0) m.c892 = Constraint(expr= m.x586 - 13.5*m.b682", "m.b733 <= 1) m.c1199 = Constraint(expr= m.b734 + m.b735 <=", "m.x403 + 3.04984759446376*m.b649 <= 3.04984759446376) m.c578 = Constraint(expr= m.x452 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x196 = Var(within=Reals,bounds=(0,None),initialize=0) m.x197 = Var(within=Reals,bounds=(0,None),initialize=0) m.x198 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c104 = Constraint(expr= m.x233 + 4.45628648004517*m.b599 <= 4.45628648004517) m.c105", "- m.x524 - m.x530 == 0) m.c732 = Constraint(expr= m.x177", "m.b738 = Var(within=Binary,bounds=(0,1),initialize=0) m.b739 = Var(within=Binary,bounds=(0,1),initialize=0) m.b740 = Var(within=Binary,bounds=(0,1),initialize=0) m.b741", "m.c304 = Constraint(expr= m.x301 + 15*m.b622 <= 15) m.c305 =", "m.x182 = Var(within=Reals,bounds=(0,None),initialize=0) m.x183 = Var(within=Reals,bounds=(0,None),initialize=0) m.x184 = Var(within=Reals,bounds=(0,None),initialize=0) m.x185", "m.x588 + 13.5*m.b681 <= 13.5) m.c895 = Constraint(expr= m.x589 +", "+ m.b703 <= 1) m.c1137 = Constraint(expr= m.b701 + m.b703", "<= 1) m.c1199 = Constraint(expr= m.b734 + m.b735 <= 1)", "0) 
m.c837 = Constraint(expr= m.x573 - 0.480234946352917*m.b675 <= 0) m.c838", "m.b720 + m.b721 <= 1) m.c1175 = Constraint(expr= m.b722 +", "# Equation counts # Total E G L N X", "m.x118 - m.x430 - m.x433 == 0) m.c449 = Constraint(expr=", "- m.b599 + m.b600 - m.b690 <= 0) m.c1288 =", "m.c467 = Constraint(expr= m.x371 == 0) m.c468 = Constraint(expr= m.x372", "0) m.c798 = Constraint(expr= m.x183 - m.x543 - m.x546 ==", "- m.x374 - m.x377 == 0) m.c384 = Constraint(expr= m.x87", "430*m.x205 + 290*m.x206 + 300*m.x207 + 240*m.x208 + 350*m.x209 +", "+ 0.999*m.b661)))*(0.001 + 0.999* m.b661) <= 0) m.c668 = Constraint(expr=", "m.c734 = Constraint(expr= m.x500 - 0.940066550763924*m.b665 <= 0) m.c735 =", "<= 0) m.c1070 = Constraint(expr= m.b653 - m.b654 <= 0)", "+ 0.940066550763924*m.b666 <= 0.940066550763924) m.c739 = Constraint(expr= m.x505 + 0.940066550763924*m.b667", "= Constraint(expr= m.x152 - m.x155 - m.x158 == 0) m.c42", "- m.x246 == 0) m.c145 = Constraint(expr= m.x31 - m.x244", "- 1.83548069293539*m.b612 <= 0) m.c226 = Constraint(expr= m.x316 - 1.83548069293539*m.b613", "m.c1089 = Constraint(expr= m.b671 - m.b673 <= 0) m.c1090 =", "m.b603 >= 0) m.c1405 = Constraint(expr= m.b598 + m.b601 -", "m.x384 == 0) m.c472 = Constraint(expr= m.x385 == 0) m.c473", "<= 9) m.c528 = Constraint(expr= m.x444 + 9*m.b642 <= 9)", "m.c138 = Constraint(expr= m.x246 == 0) m.c139 = Constraint(expr= m.x247", "= Constraint(expr= m.x301 == 0) m.c290 = Constraint(expr= m.x347 ==", "m.b752 + m.b754 <= 1) m.c1240 = Constraint(expr= m.b753 +", "m.x788 = Var(within=Reals,bounds=(None,None),initialize=0) m.x789 = Var(within=Reals,bounds=(None,None),initialize=0) m.x790 = Var(within=Reals,bounds=(None,None),initialize=0) m.x791", "Constraint(expr= m.b761 + m.b762 <= 1) m.c1256 = Constraint(expr= m.b762", "- m.b678 >= 0) m.c1480 = Constraint(expr= m.b667 - m.b679", "m.x566 = Var(within=Reals,bounds=(0,None),initialize=0) m.x567 = Var(within=Reals,bounds=(0,None),initialize=0) m.x568 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x569", "Constraint(expr= m.x553 == 0) m.c821 = Constraint(expr= m.x575 == 0)", "= Constraint(expr= m.x550 - 0.705049913072943*m.b676 <= 0) m.c833 = Constraint(expr=", "m.b723 <= 1) m.c1178 = Constraint(expr= m.b723 + m.b724 <=", "m.b739 <= 1) m.c1207 = Constraint(expr= m.b737 + m.b738 <=", "- m.b607 + m.b619 >= 0) m.c1388 = Constraint(expr= -", "m.x349 + 13.5*m.b622 <= 13.5) m.c311 = Constraint(expr= - 0.6*m.x302", "0) m.c30 = Constraint(expr= m.x78 - m.x102 - m.x105 -", "<= 0) m.c687 = Constraint(expr= m.x489 - 0.940066550763924*m.b660 <= 0)", "0) m.c1421 = Constraint(expr= m.b608 - m.b620 >= 0) m.c1422", "Constraint(expr= m.x483 - 0.78338879230327*m.b657 <= 0) m.c661 = Constraint(expr= m.x484", "Constraint(expr= - m.b683 + m.b684 - m.b774 <= 0) m.c1372", "Constraint(expr= 4*m.b687 + m.x777 == 0) m.c925 = Constraint(expr= 6*m.b688", "Constraint(expr= m.x168 - m.x507 - m.x510 == 0) m.c763 =", "+ 15*m.b668 <= 15) m.c786 = Constraint(expr= m.x540 + 15*m.b669", "m.x175 - m.x184 - m.x187 == 0) m.c50 = Constraint(expr=", "0.999*m.b665) - 1.5*log(1 + m.x500/(0.001 + 0.999*m.b665)))*(0.001 + 0.999* m.b665)", "m.b642 <= 0) m.c1059 = Constraint(expr= m.b641 - m.b643 <=", "= Constraint(expr= m.b666 - m.b678 >= 0) m.c1480 = Constraint(expr=", "+ 0.999*m.b596) - log(1 + m.x212/(0.001 + 0.999*m.b596)))*(0.001 + 0.999*m.b596)", "== 0) m.c544 = Constraint(expr= m.x127 - m.x448 - m.x451", "m.b722 + m.b723 <= 1) m.c1178 = Constraint(expr= m.b723 +", "== 0) m.c944 = Constraint(expr= 5*m.b707 + m.x797 == 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x341 = Var(within=Reals,bounds=(0,None),initialize=0) m.x342 = Var(within=Reals,bounds=(0,None),initialize=0) m.x343 =", "+ m.b646 - m.b736 <= 0) m.c1334 = Constraint(expr= m.b647", "m.x466 = Var(within=Reals,bounds=(0,None),initialize=0) m.x467 = Var(within=Reals,bounds=(0,None),initialize=0) m.x468 = Var(within=Reals,bounds=(0,None),initialize=0) m.x469", "- m.b611 + m.b629 >= 
0) m.c1380 = Constraint(expr= -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x53 = Var(within=Reals,bounds=(0,None),initialize=0) m.x54 = Var(within=Reals,bounds=(0,None),initialize=0) m.x55 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c1051 = Constraint(expr= m.b633 - m.b634 <= 0) m.c1052", "0.999* m.b611) <= 0) m.c204 = Constraint(expr=(m.x315/(0.001 + 0.999*m.b612) -", "Var(within=Binary,bounds=(0,1),initialize=0) m.b720 = Var(within=Binary,bounds=(0,1),initialize=0) m.b721 = Var(within=Binary,bounds=(0,1),initialize=0) m.b722 = Var(within=Binary,bounds=(0,1),initialize=0)", "Constraint(expr= m.x512 - 30*m.b668 <= 0) m.c777 = Constraint(expr= m.x513", "+ 2.30162356062425*m.b638 <= 2.30162356062425) m.c501 = Constraint(expr= m.x438 + 2.30162356062425*m.b639", "m.c613 = Constraint(expr=(m.x466/(0.001 + 0.999*m.b655) - 0.85*log(1 + m.x412/(0.001 +", "= Constraint(expr= 5*m.b697 + m.x787 == 0) m.c935 = Constraint(expr=", "m.b615 <= 0) m.c1032 = Constraint(expr= m.b614 - m.b616 <=", "m.x26 - m.x29 - m.x32 == 0) m.c12 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x138 = Var(within=Reals,bounds=(0,None),initialize=0) m.x139 = Var(within=Reals,bounds=(0,None),initialize=0) m.x140 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x185 = Var(within=Reals,bounds=(0,None),initialize=0) m.x186 = Var(within=Reals,bounds=(0,None),initialize=0) m.x187 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr=(m.x452/(0.001 + 0.999*m.b647) - 0.75*log(1 + m.x398/(0.001 + 0.999*m.b647)))*(0.001 +", "<= 3.34221486003388) m.c223 = Constraint(expr= m.x289 + 3.34221486003388*m.b613 <= 3.34221486003388)", "Constraint(expr= m.x355 == 0) m.c320 = Constraint(expr= m.x56 - m.x302", "m.b753 <= 1) m.c1238 = Constraint(expr= m.b753 + m.b754 <=", "m.b647 + m.b650 + m.b653 >= 0) m.c1401 = Constraint(expr=", "Constraint(expr= m.x547 + 0.705049913072943*m.b673 <= 0.705049913072943) m.c809 = Constraint(expr= m.x566", "= 
Var(within=Binary,bounds=(0,1),initialize=0) m.b741 = Var(within=Binary,bounds=(0,1),initialize=0) m.b742 = Var(within=Binary,bounds=(0,1),initialize=0) m.b743 =", "33.5*m.b639 <= 0) m.c493 = Constraint(expr= m.x382 - 33.5*m.b640 <=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x66 = Var(within=Reals,bounds=(0,None),initialize=0) m.x67 = Var(within=Reals,bounds=(0,None),initialize=0) m.x68 =", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b602 = Var(within=Binary,bounds=(0,1),initialize=0) m.b603 = Var(within=Binary,bounds=(0,1),initialize=0) m.b604 =", "m.c621 = Constraint(expr= m.x108 - m.x411 - m.x414 == 0)", "Constraint(expr= m.b737 + m.b738 <= 1) m.c1206 = Constraint(expr= m.b737", "== 0) m.c669 = Constraint(expr= m.x480 == 0) m.c670 =", "8*m.b762 + m.x852 == 0) m.c1000 = Constraint(expr= 7*m.b763 +", "m.c836 = Constraint(expr= m.x572 - 0.480234946352917*m.b674 <= 0) m.c837 =", "+ m.x815 == 0) m.c963 = Constraint(expr= 6*m.b726 + m.x816", "m.b676 <= 0) m.c1094 = Constraint(expr= m.b677 - m.b678 <=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x399 = Var(within=Reals,bounds=(0,None),initialize=0) m.x400 = Var(within=Reals,bounds=(0,None),initialize=0) m.x401 =", "- 5*m.b711 - 2*m.b712 - 4*m.b713 - 7*m.b714 - 4*m.b715", "m.c1402 = Constraint(expr= - m.b628 + m.b649 + m.b652 +", "Var(within=Reals,bounds=(0,None),initialize=0) m.x268 = Var(within=Reals,bounds=(0,None),initialize=0) m.x269 = Var(within=Reals,bounds=(0,None),initialize=0) m.x270 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b743 = Var(within=Binary,bounds=(0,1),initialize=0) m.b744 = Var(within=Binary,bounds=(0,1),initialize=0) m.b745 =", "- m.b639 <= 0) m.c1056 = Constraint(expr= m.b638 - m.b640", "m.c1444 = Constraint(expr= m.b625 - m.b643 >= 0) m.c1445 =", "= Constraint(expr= m.x184 - m.x544 - m.x547 == 0) m.c800", "== 0) m.c288 = Constraint(expr= m.x300 == 0) m.c289 =", "m.x388 - 9*m.b643 <= 0) m.c521 = Constraint(expr= 
m.x389 +", "m.c1096 = Constraint(expr= m.b678 - m.b679 <= 0) m.c1097 =", "m.c1106 = Constraint(expr= m.b687 + m.b688 <= 1) m.c1107 =", "Constraint(expr= - m.b609 + m.b621 + m.b624 + m.b627 >=", "- m.x280 - m.x283 == 0) m.c185 = Constraint(expr= m.x248", "0) m.c263 = Constraint(expr= m.x341 == 0) m.c264 = Constraint(expr=", "m.b631 >= 0) m.c1382 = Constraint(expr= - m.b614 + m.b632", "m.c375 = Constraint(expr= m.x378 == 0) m.c376 = Constraint(expr= m.x379", "- 3*m.b727 - 4*m.b728 - 8*m.b729 - m.b730 - 2*m.b731", "= Constraint(expr= m.x333 + 1.32154609891348*m.b615 <= 1.32154609891348) m.c256 = Constraint(expr=", "15*m.b669 <= 15) m.c787 = Constraint(expr= m.x541 + 15*m.b670 <=", "+ 30*m.x126 + 35*m.x127 + 25*m.x128 + 50*m.x129 + 10*m.x130", "m.b654 <= 0) m.c1071 = Constraint(expr= m.b653 - m.b655 <=", "m.x251 + 4.45628648004517*m.b608 <= 4.45628648004517) m.c189 = Constraint(expr= m.x252 +", "= Constraint(expr= m.b761 + m.b763 <= 1) m.c1258 = Constraint(expr=", "== 0) m.c698 = Constraint(expr= m.x521 == 0) m.c699 =", "m.b744 <= 1) m.c1218 = Constraint(expr= m.b743 + m.b745 <=", "Constraint(expr= m.x50 - m.x290 - m.x293 == 0) m.c240 =", "m.c421 = Constraint(expr= m.x115 - m.x424 - m.x427 == 0)", "+ 50*m.x123 + 20*m.x124 + 20*m.x125 + 30*m.x126 + 35*m.x127", "= Constraint(expr= m.x461 == 0) m.c591 = Constraint(expr= m.x462 ==", "- 1.18887736200171*m.b653 <= 0) m.c633 = Constraint(expr= m.x465 - 1.18887736200171*m.b654", "Constraint(expr= m.b764 + m.b766 <= 1) m.c1261 = Constraint(expr= m.b764", "4.45628648004517*m.b600 <= 0) m.c103 = Constraint(expr= m.x232 - 4.45628648004517*m.b601 <=", "m.b649 >= 0) m.c1451 = Constraint(expr= m.b626 - m.b650 >=", "m.x254 - m.x257 == 0) m.c180 = Constraint(expr= m.x36 -", "m.x456 = Var(within=Reals,bounds=(0,None),initialize=0) m.x457 = Var(within=Reals,bounds=(0,None),initialize=0) m.x458 = Var(within=Reals,bounds=(0,None),initialize=0) m.x459", "Constraint(expr= m.b663 - m.b672 >= 0) m.c1474 = Constraint(expr= m.b664", "m.c243 = 
Constraint(expr= m.x66 - m.x327 - m.x333 == 0)", "2*m.b740 - 8*m.b741 - 4*m.b742 - m.b743 - 4*m.b744 -", "m.x124 - m.x442 - m.x445 == 0) m.c518 = Constraint(expr=", "0) m.c175 = Constraint(expr= m.x283 == 0) m.c176 = Constraint(expr=", "0) m.c743 = Constraint(expr= m.x530 + 0.994083415506506*m.b665 <= 0.994083415506506) m.c744", "+ m.x270/(0.001 + 0.999*m.b618)))*(0.001 + 0.999*m.b618) <= 0) m.c259 =", "<= 1) m.c1126 = Constraint(expr= m.b696 + m.b697 <= 1)", "0) m.c1039 = Constraint(expr= m.b621 - m.b622 <= 0) m.c1040", "Var(within=Binary,bounds=(0,1),initialize=0) m.b672 = Var(within=Binary,bounds=(0,1),initialize=0) m.b673 = Var(within=Binary,bounds=(0,1),initialize=0) m.b674 = Var(within=Binary,bounds=(0,1),initialize=0)", "- m.b641 >= 0) m.c1443 = Constraint(expr= m.b624 - m.b642", "= Constraint(expr= m.b710 + m.b712 <= 1) m.c1153 = Constraint(expr=", "+ m.x856 == 0) m.c1004 = Constraint(expr= 4*m.b767 + m.x857", "Var(within=Binary,bounds=(0,1),initialize=0) m.b710 = Var(within=Binary,bounds=(0,1),initialize=0) m.b711 = Var(within=Binary,bounds=(0,1),initialize=0) m.b712 = Var(within=Binary,bounds=(0,1),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x223 = Var(within=Reals,bounds=(0,None),initialize=0) m.x224 = Var(within=Reals,bounds=(0,None),initialize=0) m.x225 =", "Constraint(expr= m.x310 - 15*m.b628 <= 0) m.c356 = Constraint(expr= m.x311", "+ 40*m.b597 <= 40) m.c73 = Constraint(expr= m.x217 + 40*m.b598", "Constraint(expr= m.x455 + 1.04900943706034*m.b647 <= 1.04900943706034) m.c582 = Constraint(expr= m.x456", "Constraint(expr= 3*m.b722 + m.x812 == 0) m.c960 = Constraint(expr= m.b723", "m.x65 - m.x326 - m.x332 == 0) m.c243 = Constraint(expr=", "1.26558121681553*m.b637 <= 1.26558121681553) m.c455 = Constraint(expr= m.x428 - 0.572481933717686*m.b635 <=", "m.b663 <= 0) m.c1080 = Constraint(expr= m.b662 - m.b664 <=", "0) m.c47 = Constraint(expr= m.x173 - m.x182 - m.x185 ==", "m.x248 + m.x278 == 0) m.c162 = Constraint(expr= - m.x249", "= 
Constraint(expr= m.x275 + 2.54515263975353*m.b617 <= 2.54515263975353) m.c276 = Constraint(expr=", "m.c369 = Constraint(expr= - m.x375 + m.x417 == 0) m.c370", "Constraint(expr= m.b731 + m.b732 <= 1) m.c1196 = Constraint(expr= m.b732", "- m.b674 >= 0) m.c1476 = Constraint(expr= m.b663 - m.b675", "m.c23 = Constraint(expr= - m.x71 - m.x89 + m.x92 ==", "<= 2.54515263975353) m.c159 = Constraint(expr= m.x273 + 2.54515263975353*m.b606 <= 2.54515263975353)", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x798 = Var(within=Reals,bounds=(None,None),initialize=0) m.x799 = Var(within=Reals,bounds=(None,None),initialize=0) m.x800 =", "= Constraint(expr= m.x33 - m.x249 - m.x252 == 0) m.c178", "m.x262 - m.x265 == 0) m.c122 = Constraint(expr= m.x236 -", "- m.x240 == 0) m.c118 = Constraint(expr= m.x28 - m.x238", "m.x62 - m.x317 - m.x323 == 0) m.c381 = Constraint(expr=", "m.x100 == 0) m.c29 = Constraint(expr= m.x77 - m.x101 -", "m.x187 - m.x550 - m.x553 == 0) m.c827 = Constraint(expr=", "== 0) m.c594 = Constraint(expr= m.x105 - m.x405 - m.x408", "- m.x311 == 0) m.c348 = Constraint(expr= m.x60 - m.x309", "Var(within=Reals,bounds=(0,None),initialize=0) m.x382 = Var(within=Reals,bounds=(0,None),initialize=0) m.x383 = Var(within=Reals,bounds=(0,None),initialize=0) m.x384 = Var(within=Reals,bounds=(0,None),initialize=0)", "<= 0) m.c655 = Constraint(expr= m.x472 - 1.18887736200171*m.b658 <= 0)", "m.x485 = Var(within=Reals,bounds=(0,None),initialize=0) m.x486 = Var(within=Reals,bounds=(0,None),initialize=0) m.x487 = Var(within=Reals,bounds=(0,None),initialize=0) m.x488", "m.c272 = Constraint(expr= m.x269 - 2.54515263975353*m.b617 <= 0) m.c273 =", "- m.x188 - m.x191 - m.x194 == 0) m.c51 =", "0.999*m.b648)))*(0.001 + 0.999* m.b648) <= 0) m.c559 = Constraint(expr=(m.x454/(0.001 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x97 = Var(within=Reals,bounds=(0,None),initialize=0) m.x98 = Var(within=Reals,bounds=(0,None),initialize=0) m.x99 =", "m.b772 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b773 = Var(within=Binary,bounds=(0,1),initialize=0) m.b774 = Var(within=Binary,bounds=(0,1),initialize=0) m.b775", "m.x289 == 0) m.c215 = Constraint(expr= m.x62 - m.x314 -", "Constraint(expr= m.x451 + 9*m.b646 <= 9) m.c557 = Constraint(expr=(m.x452/(0.001 +", "- 15*m.b684 <= 0) m.c913 = Constraint(expr= m.x562 - 15*m.b685", "m.b697 = Var(within=Binary,bounds=(0,1),initialize=0) m.b698 = Var(within=Binary,bounds=(0,1),initialize=0) m.b699 = Var(within=Binary,bounds=(0,1),initialize=0) m.b700", "0) m.c486 = Constraint(expr= m.x369 - 1.26558121681553*m.b639 <= 0) m.c487", "0) m.c505 = Constraint(expr= - m.x388 + m.x442 == 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x240 = Var(within=Reals,bounds=(0,None),initialize=0) m.x241 = Var(within=Reals,bounds=(0,None),initialize=0) m.x242 =", "m.x228 == 0) m.c67 = Constraint(expr= m.x13 - m.x226 -", "m.c641 = Constraint(expr= m.x473 == 0) m.c642 = Constraint(expr= m.x474", "0) m.c1412 = Constraint(expr= m.b602 - m.b611 >= 0) m.c1413", "- m.b638 >= 0) m.c1440 = Constraint(expr= m.b618 - m.b639", "m.x361 = Var(within=Reals,bounds=(0,None),initialize=0) m.x362 = Var(within=Reals,bounds=(0,None),initialize=0) m.x363 = Var(within=Reals,bounds=(0,None),initialize=0) m.x364", "m.c257 = Constraint(expr=(m.x338/(0.001 + 0.999*m.b617) - log(1 + m.x269/(0.001 +", "0) m.c851 = Constraint(expr= m.x176 - m.x527 - m.x533 ==", "m.b657 <= 0) m.c1074 = Constraint(expr= m.b656 - m.b658 <=", "+ m.b712 <= 1) m.c1155 = Constraint(expr= m.b710 + m.b712", "== 0) m.c349 = Constraint(expr= m.x61 - m.x310 - m.x313", "<= 40) m.c99 = Constraint(expr= m.x222 + 40*m.b600 <= 40)", "m.x385 = Var(within=Reals,bounds=(0,None),initialize=0) m.x386 = Var(within=Reals,bounds=(0,None),initialize=0) m.x387 = Var(within=Reals,bounds=(0,None),initialize=0) m.x388", "0) m.c115 = Constraint(expr= m.x265 == 0) m.c116 = Constraint(expr=", "<= 15) m.c915 = Constraint(expr= m.x564 + 15*m.b684 <= 15)", "<= 0) m.c783 
= Constraint(expr= m.x537 - 15*m.b669 <= 0)", "Constraint(expr= m.x215 == 0) m.c57 = Constraint(expr= m.x216 == 0)", "1) m.c1271 = Constraint(expr= m.b770 + m.b771 <= 1) m.c1272", "Var(within=Binary,bounds=(0,1),initialize=0) m.b601 = Var(within=Binary,bounds=(0,1),initialize=0) m.b602 = Var(within=Binary,bounds=(0,1),initialize=0) m.b603 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.b751 = Var(within=Binary,bounds=(0,1),initialize=0) m.b752 = Var(within=Binary,bounds=(0,1),initialize=0) m.b753 = Var(within=Binary,bounds=(0,1),initialize=0) m.b754", "- m.x449 == 0) m.c543 = Constraint(expr= m.x126 - m.x447", "- 8*m.b689 - 7*m.b690 - 6*m.b691 - 6*m.b692 - 9*m.b693", "Var(within=Reals,bounds=(None,None),initialize=0) m.x855 = Var(within=Reals,bounds=(None,None),initialize=0) m.x856 = Var(within=Reals,bounds=(None,None),initialize=0) m.x857 = Var(within=Reals,bounds=(None,None),initialize=0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x466 = Var(within=Reals,bounds=(0,None),initialize=0) m.x467 = Var(within=Reals,bounds=(0,None),initialize=0) m.x468 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x278 - 15*m.b608 <= 0) m.c198 = Constraint(expr= m.x279", "+ 3.34221486003388*m.b616 <= 3.34221486003388) m.c251 = Constraint(expr= m.x326 - 1.32154609891348*m.b614", "- m.x276 == 0) m.c268 = Constraint(expr= m.x43 - m.x271", "+ m.b747 <= 1) m.c1224 = Constraint(expr= m.b746 + m.b748", "<= 1.26558121681553) m.c454 = Constraint(expr= m.x367 + 1.26558121681553*m.b637 <= 1.26558121681553)", "0) m.c1041 = Constraint(expr= m.b623 - m.b625 <= 0) m.c1042", "= Constraint(expr= m.b596 + m.b599 == 1) m.c1374 = Constraint(expr=", "<= 4.45628648004517) m.c128 = Constraint(expr= m.x260 - 3.34221486003388*m.b602 <= 0)", "+ m.b720 <= 1) m.c1170 = Constraint(expr= m.b719 + m.b721", "<= 40) m.c72 = Constraint(expr= m.x216 + 40*m.b597 <= 40)", "- m.b653 + m.b656 + m.b659 >= 0) m.c1458 =", "m.x68 - m.x80 - m.x83 == 0) m.c21 = Constraint(expr=", 
"Constraint(expr= m.x576 == 0) m.c823 = Constraint(expr= m.x577 == 0)", "40*m.x110 + 30*m.x111 + 15*m.x112 + 15*m.x113 + 20*m.x114 +", "Constraint(expr= m.b619 - m.b640 >= 0) m.c1442 = Constraint(expr= m.b623", "Var(within=Binary,bounds=(0,1),initialize=0) m.b646 = Var(within=Binary,bounds=(0,1),initialize=0) m.b647 = Var(within=Binary,bounds=(0,1),initialize=0) m.b648 = Var(within=Binary,bounds=(0,1),initialize=0)", "0) m.c451 = Constraint(expr= m.x364 - 1.26558121681553*m.b637 <= 0) m.c452", "+ m.b742 <= 1) m.c1216 = Constraint(expr= m.b741 + m.b742", "Var(within=Reals,bounds=(0,None),initialize=0) m.x486 = Var(within=Reals,bounds=(0,None),initialize=0) m.x487 = Var(within=Reals,bounds=(0,None),initialize=0) m.x488 = Var(within=Reals,bounds=(0,None),initialize=0)", "<= 0) m.c1311 = Constraint(expr= - m.b623 + m.b624 -", "m.c133 = Constraint(expr= m.x265 + 3.34221486003388*m.b604 <= 3.34221486003388) m.c134 =", "== 0) m.c117 = Constraint(expr= m.x27 - m.x237 - m.x240", "Constraint(expr= 2*m.b740 + m.x830 == 0) m.c978 = Constraint(expr= 8*m.b741", "4*m.b742 + m.x832 == 0) m.c980 = Constraint(expr= m.b743 +", "Constraint(expr= m.x505 == 0) m.c725 = Constraint(expr= m.x530 == 0)", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x859 = Var(within=Reals,bounds=(None,None),initialize=0) m.x860 = Var(within=Reals,bounds=(None,None),initialize=0) m.x861 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x367 = Var(within=Reals,bounds=(0,None),initialize=0) m.x368 = Var(within=Reals,bounds=(0,None),initialize=0) m.x369 =", "= Constraint(expr= m.x227 + 3.71357206670431*m.b596 <= 3.71357206670431) m.c78 = Constraint(expr=", "= Constraint(expr= m.x581 == 0) m.c849 = Constraint(expr= m.x582 ==", "m.x305 + 15*m.b623 <= 15) m.c330 = Constraint(expr= m.x306 +", "15) m.c330 = Constraint(expr= m.x306 + 15*m.b624 <= 15) m.c331", "30) m.c780 = Constraint(expr= m.x516 + 30*m.b669 <= 30) m.c781", "- m.x584 - m.x587 == 0) m.c882 = Constraint(expr= m.x207", "m.x590 - 9*m.b683 
<= 0) m.c918 = Constraint(expr= m.x591 -", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b708 = Var(within=Binary,bounds=(0,1),initialize=0) m.b709 = Var(within=Binary,bounds=(0,1),initialize=0) m.b710 =", "Constraint(expr= m.x441 - 9*m.b642 <= 0) m.c526 = Constraint(expr= m.x442", "m.b688 = Var(within=Binary,bounds=(0,1),initialize=0) m.b689 = Var(within=Binary,bounds=(0,1),initialize=0) m.b690 = Var(within=Binary,bounds=(0,1),initialize=0) m.b691", "Var(within=Binary,bounds=(0,1),initialize=0) m.b602 = Var(within=Binary,bounds=(0,1),initialize=0) m.b603 = Var(within=Binary,bounds=(0,1),initialize=0) m.b604 = Var(within=Binary,bounds=(0,1),initialize=0)", "<= 0) m.c1314 = Constraint(expr= - m.b626 + m.b627 -", "Var(within=Binary,bounds=(0,1),initialize=0) m.b632 = Var(within=Binary,bounds=(0,1),initialize=0) m.b633 = Var(within=Binary,bounds=(0,1),initialize=0) m.b634 = Var(within=Binary,bounds=(0,1),initialize=0)", "Constraint(expr= m.x202 - m.x574 - m.x577 == 0) m.c830 =", "3*m.b718 - 7*m.b719 - 2*m.b720 - 9*m.b721 - 3*m.b722 -", "== 0) m.c830 = Constraint(expr= m.x548 - 0.705049913072943*m.b674 <= 0)", "m.c1459 = Constraint(expr= - m.b655 + m.b658 + m.b661 >=", "m.b719 + m.b720 <= 1) m.c1170 = Constraint(expr= m.b719 +", "1) m.c1209 = Constraint(expr= m.b737 + m.b739 <= 1) m.c1210", "+ 20*m.x114 + 25*m.x115 + 10*m.x116 + 30*m.x117 + 40*m.x118", "m.c420 = Constraint(expr= m.x114 - m.x423 - m.x426 == 0)", "+ 15*m.b620 <= 15) m.c303 = Constraint(expr= m.x300 + 15*m.b621", "- 15*m.b680 <= 0) m.c885 = Constraint(expr= m.x555 - 15*m.b681", "Constraint(expr= m.x110 - m.x416 - m.x419 == 0) m.c387 =", "== 0) m.c625 = Constraint(expr= m.x136 - m.x466 - m.x469", "- m.b673 <= 0) m.c1091 = Constraint(expr= m.b674 - m.b675", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b724 = Var(within=Binary,bounds=(0,1),initialize=0) m.b725 = Var(within=Binary,bounds=(0,1),initialize=0) m.b726 =", "Var(within=Binary,bounds=(0,1),initialize=0) m.b675 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b676 = Var(within=Binary,bounds=(0,1),initialize=0) m.b677 = Var(within=Binary,bounds=(0,1),initialize=0)", "= Constraint(expr= m.x246 + 4.45628648004517*m.b606 <= 4.45628648004517) m.c154 = Constraint(expr=", "m.x112 - m.x418 - m.x421 == 0) m.c389 = Constraint(expr=", "== 0) m.c377 = Constraint(expr= m.x419 == 0) m.c378 =", "Constraint(expr= m.b604 - m.b616 >= 0) m.c1418 = Constraint(expr= m.b605", "m.x141 = Var(within=Reals,bounds=(0,None),initialize=0) m.x142 = Var(within=Reals,bounds=(0,None),initialize=0) m.x143 = Var(within=Reals,bounds=(0,None),initialize=0) m.x144", "0) m.c1050 = Constraint(expr= m.b632 - m.b634 <= 0) m.c1051", "<= 1) m.c1112 = Constraint(expr= m.b690 + m.b691 <= 1)", "m.x51 == 0) m.c16 = Constraint(expr= m.x40 - m.x49 -", "<= 0) m.c714 = Constraint(expr= m.x519 - 0.705049913072943*m.b663 <= 0)", "m.c980 = Constraint(expr= m.b743 + m.x833 == 0) m.c981 =", "m.c1299 = Constraint(expr= - m.b611 + m.b612 - m.b702 <=", "= Constraint(expr= m.x532 + 0.994083415506506*m.b667 <= 0.994083415506506) m.c746 = Constraint(expr=", "0) m.c876 = Constraint(expr= m.x588 == 0) m.c877 = Constraint(expr=", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b673 = Var(within=Binary,bounds=(0,1),initialize=0) m.b674 = Var(within=Binary,bounds=(0,1),initialize=0) m.b675 =", "m.c85 = Constraint(expr= m.x223 == 0) m.c86 = Constraint(expr= m.x233", "== 0) m.c266 = Constraint(expr= m.x41 - m.x269 - m.x275", "- m.b611 >= 0) m.c1413 = Constraint(expr= m.b603 - m.b612", "0) m.c1367 = Constraint(expr= m.b680 - m.b770 <= 0) m.c1368", "= Constraint(expr= m.x231 - 4.45628648004517*m.b600 <= 0) m.c103 = Constraint(expr=", "= Constraint(expr= m.b732 + m.b733 <= 1) m.c1197 = Constraint(expr=", "m.x451 == 0) m.c539 = Constraint(expr= m.x98 - m.x392 -", "m.x412 - 3.04984759446376*m.b655 <= 0) m.c629 = Constraint(expr= m.x413 +", "2*m.b731 - 5*m.b732 - 2*m.b733 - 3*m.b734 - 4*m.b735 -", "= Var(within=Reals,bounds=(0,None),initialize=0) 
m.x401 = Var(within=Reals,bounds=(0,None),initialize=0) m.x402 = Var(within=Reals,bounds=(0,None),initialize=0) m.x403 =", "m.b604 <= 0) m.c1021 = Constraint(expr= m.b603 - m.b604 <=", "= Constraint(expr= m.b606 - m.b607 <= 0) m.c1025 = Constraint(expr=", "m.b757 <= 0) m.c1355 = Constraint(expr= m.b668 - m.b758 <=", "0) m.c1434 = Constraint(expr= m.b615 - m.b633 >= 0) m.c1435", "m.c26 = Constraint(expr= m.x74 - m.x95 - m.x98 == 0)", "pyomo.environ import * model = m = ConcreteModel() m.x2 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x154 = Var(within=Reals,bounds=(0,None),initialize=0) m.x155 = Var(within=Reals,bounds=(0,None),initialize=0) m.x156 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x146 - m.x482 - m.x485 == 0) m.c651 = Constraint(expr=", "Constraint(expr= m.b753 + m.b754 <= 1) m.c1241 = Constraint(expr= m.b755", "0) m.c765 = Constraint(expr= m.x171 - m.x513 - m.x516 ==", "== 0) m.c480 = Constraint(expr= m.x93 - m.x381 - m.x384", "<= 0) m.c1352 = Constraint(expr= m.b665 - m.b755 <= 0)", "0) m.c1413 = Constraint(expr= m.b603 - m.b612 >= 0) m.c1414", "m.x40 = Var(within=Reals,bounds=(0,None),initialize=0) m.x41 = Var(within=Reals,bounds=(0,None),initialize=0) m.x42 = Var(within=Reals,bounds=(0,None),initialize=0) m.x43", "Constraint(expr= m.x415 == 0) m.c617 = Constraint(expr= m.x467 == 0)", "- m.x567 - m.x570 == 0) m.c802 = Constraint(expr= m.x199", "0) m.c90 = Constraint(expr= m.x9 - m.x219 - m.x222 ==", "m.c324 = Constraint(expr= m.x75 - m.x351 - m.x354 == 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x170 = Var(within=Reals,bounds=(0,30),initialize=0) m.x171 = Var(within=Reals,bounds=(0,30),initialize=0) m.x172 =", "= Constraint(expr= m.x297 - 15*m.b621 <= 0) m.c301 = Constraint(expr=", "m.b703 <= 1) m.c1138 = Constraint(expr= m.b702 + m.b703 <=", "+ m.b762 <= 1) m.c1254 = Constraint(expr= m.b761 + m.b763", "m.c56 = Constraint(expr= m.x215 == 0) m.c57 = Constraint(expr= m.x216", "m.c253 = Constraint(expr= m.x328 - 
1.32154609891348*m.b616 <= 0) m.c254 =", "= Constraint(expr= - m.b674 + m.b675 - m.b765 <= 0)", "+ m.x362/(0.001 + 0.999*m.b635)))*(0.001 + 0.999* m.b635) <= 0) m.c435", "+ m.x260 == 0) m.c108 = Constraint(expr= - 0.75*m.x237 +", "= Constraint(expr= m.x420 + 20*m.b630 <= 20) m.c406 = Constraint(expr=", "= Constraint(expr= m.x445 + 9*m.b643 <= 9) m.c530 = Constraint(expr=", "3.04984759446376) m.c365 = Constraint(expr= - 0.9*m.x317 + m.x416 == 0)", "m.b719 + m.b720 <= 1) m.c1172 = Constraint(expr= m.b720 +", "m.c81 = Constraint(expr=(m.x231/(0.001 + 0.999*m.b600) - 1.2*log(1 + m.x219/(0.001 +", "m.x349 = Var(within=Reals,bounds=(0,None),initialize=0) m.x350 = Var(within=Reals,bounds=(0,None),initialize=0) m.x351 = Var(within=Reals,bounds=(0,None),initialize=0) m.x352", "- m.b615 + m.b616 - m.b706 <= 0) m.c1304 =", "<= 1) m.c1222 = Constraint(expr= m.b744 + m.b745 <= 1)", "m.b744 = Var(within=Binary,bounds=(0,1),initialize=0) m.b745 = Var(within=Binary,bounds=(0,1),initialize=0) m.b746 = Var(within=Binary,bounds=(0,1),initialize=0) m.b747", "- m.x507 + m.x537 == 0) m.c748 = Constraint(expr= -", "= Constraint(expr= m.x308 - 15*m.b626 <= 0) m.c354 = Constraint(expr=", "Var(within=Binary,bounds=(0,1),initialize=0) m.b728 = Var(within=Binary,bounds=(0,1),initialize=0) m.b729 = Var(within=Binary,bounds=(0,1),initialize=0) m.b730 = Var(within=Binary,bounds=(0,1),initialize=0)", "<= 9) m.c551 = Constraint(expr= m.x446 - 9*m.b644 <= 0)", "= Constraint(expr= m.x450 + 9*m.b645 <= 9) m.c556 = Constraint(expr=", "0) m.c847 = Constraint(expr= m.x535 == 0) m.c848 = Constraint(expr=", "m.x386 - m.x389 == 0) m.c513 = Constraint(expr= m.x96 -", "m.x231 = Var(within=Reals,bounds=(0,None),initialize=0) m.x232 = Var(within=Reals,bounds=(0,None),initialize=0) m.x233 = Var(within=Reals,bounds=(0,None),initialize=0) m.x234", "<= 1) m.c1220 = Constraint(expr= m.b744 + m.b745 <= 1)", "m.c1148 = Constraint(expr= m.b708 + m.b709 <= 1) m.c1149 =", "Constraint(expr= - m.x71 - m.x89 + m.x92 == 0) 
m.c24", "<= 0) m.c1017 = Constraint(expr= m.b599 - m.b601 <= 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x98 = Var(within=Reals,bounds=(0,None),initialize=0) m.x99 = Var(within=Reals,bounds=(0,None),initialize=0) m.x100 =", "Constraint(expr= m.x367 == 0) m.c440 = Constraint(expr= m.x431 == 0)", "<= 0) m.c307 = Constraint(expr= m.x346 - 13.5*m.b622 <= 0)", "7*m.b709 + m.x799 == 0) m.c947 = Constraint(expr= 2*m.b710 +", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b642 = Var(within=Binary,bounds=(0,1),initialize=0) m.b643 = Var(within=Binary,bounds=(0,1),initialize=0) m.b644 =", "0) m.c259 = Constraint(expr=(m.x340/(0.001 + 0.999*m.b619) - log(1 + m.x271/(0.001", "m.x41 - m.x266 - m.x272 == 0) m.c147 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x263 = Var(within=Reals,bounds=(0,None),initialize=0) m.x264 = Var(within=Reals,bounds=(0,None),initialize=0) m.x265 =", "+ m.x416 == 0) m.c369 = Constraint(expr= - m.x375 +", "m.c659 = Constraint(expr= m.x482 - 0.78338879230327*m.b656 <= 0) m.c660 =", "m.c62 = Constraint(expr= m.x5 - m.x212 - m.x215 == 0)", "m.c683 = Constraint(expr= m.x479 + 1.18887736200171*m.b659 <= 1.18887736200171) m.c684 =", "4.45628648004517*m.b604 <= 0) m.c125 = Constraint(expr= m.x239 + 4.45628648004517*m.b602 <=", "Var(within=Reals,bounds=(None,None),initialize=0) m.x813 = Var(within=Reals,bounds=(None,None),initialize=0) m.x814 = Var(within=Reals,bounds=(None,None),initialize=0) m.x815 = Var(within=Reals,bounds=(None,None),initialize=0)", "= Constraint(expr= m.x481 + 1.18887736200171*m.b661 <= 1.18887736200171) m.c686 = Constraint(expr=", "Constraint(expr= m.x377 + 20*m.b629 <= 20) m.c399 = Constraint(expr= m.x378", "= Constraint(expr= m.x154 - m.x157 - m.x160 == 0) m.c44", "0) m.c824 = Constraint(expr= m.x185 - m.x548 - m.x551 ==", "0) m.c233 = Constraint(expr= m.x293 == 0) m.c234 = Constraint(expr=", "0) m.c164 = Constraint(expr= - 0.5*m.x254 + m.x278 == 0)", "m.c1088 = Constraint(expr= m.b671 - 
m.b672 <= 0) m.c1089 =", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b601 = Var(within=Binary,bounds=(0,1),initialize=0) m.b602 = Var(within=Binary,bounds=(0,1),initialize=0) m.b603 =", "0) m.c1006 = Constraint(expr= 6*m.b769 + m.x859 == 0) m.c1007", "= Constraint(expr= m.x70 - m.x82 - m.x85 == 0) m.c23", "m.c303 = Constraint(expr= m.x300 + 15*m.b621 <= 15) m.c304 =", "m.x389 = Var(within=Reals,bounds=(0,None),initialize=0) m.x390 = Var(within=Reals,bounds=(0,None),initialize=0) m.x391 = Var(within=Reals,bounds=(0,None),initialize=0) m.x392", "m.b757 <= 1) m.c1243 = Constraint(expr= m.b755 + m.b756 <=", "== 0) m.c506 = Constraint(expr= m.x389 == 0) m.c507 =", ">= 0) m.c1477 = Constraint(expr= m.b664 - m.b676 >= 0)", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x835 = Var(within=Reals,bounds=(None,None),initialize=0) m.x836 = Var(within=Reals,bounds=(None,None),initialize=0) m.x837 =", "m.x480 == 0) m.c670 = Constraint(expr= m.x481 == 0) m.c671", "= Constraint(expr= 6*m.b688 + m.x778 == 0) m.c926 = Constraint(expr=", "= Constraint(expr=(m.x526/(0.001 + 0.999*m.b667) - 1.5*log(1 + m.x502/(0.001 + 0.999*m.b667)))*(0.001", "m.x792 = Var(within=Reals,bounds=(None,None),initialize=0) m.x793 = Var(within=Reals,bounds=(None,None),initialize=0) m.x794 = Var(within=Reals,bounds=(None,None),initialize=0) m.x795", "m.x518 == 0) m.c693 = Constraint(expr= - 0.75*m.x495 + m.x519", "m.b609 <= 0) m.c1026 = Constraint(expr= m.b608 - m.b610 <=", "0) m.c568 = Constraint(expr= m.x103 - m.x400 - m.x403 ==", ">= 0) m.c1385 = Constraint(expr= - m.b605 + m.b617 >=", "m.x143 = Var(within=Reals,bounds=(0,None),initialize=0) m.x144 = Var(within=Reals,bounds=(0,None),initialize=0) m.x145 = Var(within=Reals,bounds=(0,None),initialize=0) m.x146", "Constraint(expr= m.x517 == 0) m.c758 = Constraint(expr= m.x539 == 0)", "m.x529 - m.x535 == 0) m.c854 = Constraint(expr= m.x203 -", "+ 4.45628648004517*m.b600 <= 4.45628648004517) m.c106 = Constraint(expr= m.x235 + 
4.45628648004517*m.b601", "0) m.c1425 = Constraint(expr= m.b609 - m.b624 >= 0) m.c1426", "m.x263 == 0) m.c120 = Constraint(expr= m.x39 - m.x261 -", "m.c382 = Constraint(expr= m.x64 - m.x319 - m.x325 == 0)", "m.c18 = Constraint(expr= m.x45 - m.x54 - m.x57 - m.x60", "== 0) m.c728 = Constraint(expr= m.x164 - m.x500 - m.x503", "m.b601) <= 0) m.c83 = Constraint(expr= m.x221 == 0) m.c84", "15) m.c786 = Constraint(expr= m.x540 + 15*m.b669 <= 15) m.c787", "- 3.71357206670431*m.b597 <= 0) m.c76 = Constraint(expr= m.x226 - 3.71357206670431*m.b598", "== 0) m.c1004 = Constraint(expr= 4*m.b767 + m.x857 == 0)", "0) m.c601 = Constraint(expr= m.x406 - 3.04984759446376*m.b652 <= 0) m.c602", "m.x482 - 0.78338879230327*m.b656 <= 0) m.c660 = Constraint(expr= m.x483 -", "Constraint(expr= m.b702 + m.b703 <= 1) m.c1139 = Constraint(expr= m.b704", "+ 3.04984759446376*m.b647 <= 3.04984759446376) m.c576 = Constraint(expr= m.x402 + 3.04984759446376*m.b648", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x457 = Var(within=Reals,bounds=(0,None),initialize=0) m.x458 = Var(within=Reals,bounds=(0,None),initialize=0) m.x459 =", "Constraint(expr= - m.b604 + m.b613 + m.b616 >= 0) m.c1379", "= Constraint(expr= m.x144 - m.x477 - m.x480 == 0) m.c676", "Var(within=Reals,bounds=(None,None),initialize=0) m.x811 = Var(within=Reals,bounds=(None,None),initialize=0) m.x812 = Var(within=Reals,bounds=(None,None),initialize=0) m.x813 = Var(within=Reals,bounds=(None,None),initialize=0)", "15*m.b669 <= 0) m.c784 = Constraint(expr= m.x538 - 15*m.b670 <=", "m.x492 = Var(within=Reals,bounds=(0,None),initialize=0) m.x493 = Var(within=Reals,bounds=(0,None),initialize=0) m.x494 = Var(within=Reals,bounds=(0,None),initialize=0) m.x495", "0) m.c805 = Constraint(expr= m.x544 - 0.705049913072943*m.b673 <= 0) m.c806", "m.b740 + m.b742 <= 1) m.c1216 = Constraint(expr= m.b741 +", "m.x464 - m.x467 == 0) m.c624 = Constraint(expr= m.x135 -", "- m.x506 + m.x536 == 0) m.c747 = Constraint(expr= -", "- m.b651 <= 0) m.c1068 = 
Constraint(expr= m.b650 - m.b652", "= Constraint(expr= m.x149 - m.x488 - m.x491 == 0) m.c678", "Var(within=Reals,bounds=(0,None),initialize=0) m.x81 = Var(within=Reals,bounds=(0,None),initialize=0) m.x82 = Var(within=Reals,bounds=(0,None),initialize=0) m.x83 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b728 + m.b729 <= 1) m.c1188 = Constraint(expr= m.b728 +", "m.b599 - m.b602 >= 0) m.c1404 = Constraint(expr= m.b597 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x58 = Var(within=Reals,bounds=(0,None),initialize=0) m.x59 = Var(within=Reals,bounds=(0,None),initialize=0) m.x60 =", "= Constraint(expr= m.x409 == 0) m.c590 = Constraint(expr= m.x461 ==", "m.c49 = Constraint(expr= m.x175 - m.x184 - m.x187 == 0)", "Constraint(expr= 2*m.b710 + m.x800 == 0) m.c948 = Constraint(expr= 5*m.b711", "== 0) m.c984 = Constraint(expr= 5*m.b747 + m.x837 == 0)", "+ m.b706 <= 1) m.c1143 = Constraint(expr= m.b704 + m.b706", "= Constraint(expr= m.b752 + m.b754 <= 1) m.c1237 = Constraint(expr=", "m.b750 <= 1) m.c1232 = Constraint(expr= m.b750 + m.b751 <=", "m.c1095 = Constraint(expr= m.b677 - m.b679 <= 0) m.c1096 =", "0) m.c1402 = Constraint(expr= - m.b628 + m.b649 + m.b652", "Constraint(expr= m.b764 + m.b765 <= 1) m.c1260 = Constraint(expr= m.b764", "m.x127 = Var(within=Reals,bounds=(0,None),initialize=0) m.x128 = Var(within=Reals,bounds=(0,None),initialize=0) m.x129 = Var(within=Reals,bounds=(0,None),initialize=0) m.x130", "- m.x269 - m.x275 == 0) m.c267 = Constraint(expr= m.x42", "m.b639 >= 0) m.c1390 = Constraint(expr= - m.b619 + m.b637", "- m.b666 + m.b667 - m.b757 <= 0) m.c1355 =", "m.b695 + m.b697 <= 1) m.c1126 = Constraint(expr= m.b696 +", "m.x207 - m.x585 - m.x588 == 0) m.c883 = Constraint(expr=", "+ m.b727 <= 1) m.c1183 = Constraint(expr= m.b725 + m.b726", "= Constraint(expr= m.b734 + m.b735 <= 1) m.c1202 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x132 = Var(within=Reals,bounds=(0,None),initialize=0) m.x133 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x134 = Var(within=Reals,bounds=(0,None),initialize=0)", "0.999*m.b639)))*(0.001 + 0.999* m.b639) <= 0) m.c466 = Constraint(expr=(m.x436/(0.001 +", "- m.b696 <= 0) m.c1294 = Constraint(expr= - m.b605 -", "15) m.c201 = Constraint(expr= m.x282 + 15*m.b609 <= 15) m.c202", "= Constraint(expr= - m.b665 + m.b666 - m.b756 <= 0)", "= Constraint(expr= m.x165 - m.x501 - m.x504 == 0) m.c730", "m.c852 = Constraint(expr= m.x177 - m.x528 - m.x534 == 0)", "m.b653) <= 0) m.c612 = Constraint(expr=(m.x465/(0.001 + 0.999*m.b654) - 0.85*log(1", "Constraint(expr= 8*m.b768 + m.x858 == 0) m.c1006 = Constraint(expr= 6*m.b769", "0) m.c956 = Constraint(expr= 7*m.b719 + m.x809 == 0) m.c957", "= Constraint(expr= m.b716 + m.b717 <= 1) m.c1166 = Constraint(expr=", "+ m.x416 == 0) m.c366 = Constraint(expr= - 0.9*m.x318 +", "m.b747 + m.b748 <= 1) m.c1227 = Constraint(expr= m.b746 +", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b702 = Var(within=Binary,bounds=(0,1),initialize=0) m.b703 = Var(within=Binary,bounds=(0,1),initialize=0) m.b704 =", "Constraint(expr= m.b597 + m.b600 - m.b603 >= 0) m.c1405 =", "<= 1.26558121681553) m.c455 = Constraint(expr= m.x428 - 0.572481933717686*m.b635 <= 0)", "Constraint(expr=(m.x482/(0.001 + 0.999*m.b656) - log(1 + m.x470/(0.001 + 0.999*m.b656)))*(0.001 +", "Constraint(expr= m.x131 - m.x458 - m.x461 == 0) m.c597 =", "0) m.c995 = Constraint(expr= 10*m.b758 + m.x848 == 0) m.c996", "Var(within=Binary,bounds=(0,1),initialize=0) m.b661 = Var(within=Binary,bounds=(0,1),initialize=0) m.b662 = Var(within=Binary,bounds=(0,1),initialize=0) m.b663 = Var(within=Binary,bounds=(0,1),initialize=0)", "= Constraint(expr= m.x56 - m.x302 - m.x305 == 0) m.c321", "m.b681 >= 0) m.c1483 = Constraint(expr= m.b670 - m.b682 >=", "= Constraint(expr= m.x93 - m.x381 - m.x384 == 0) m.c481", "3.34221486003388) m.c249 = Constraint(expr= m.x294 + 3.34221486003388*m.b615 <= 3.34221486003388) m.c250", "= Constraint(expr= m.x434 - 
2.30162356062425*m.b638 <= 0) m.c498 = Constraint(expr=", "1.83548069293539*m.b629 <= 1.83548069293539) m.c393 = Constraint(expr= m.x324 + 1.83548069293539*m.b630 <=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x581 = Var(within=Reals,bounds=(0,None),initialize=0) m.x582 = Var(within=Reals,bounds=(0,None),initialize=0) m.x583 =", "= Constraint(expr= m.x177 - m.x528 - m.x534 == 0) m.c853", "0) m.c339 = Constraint(expr=(m.x357/(0.001 + 0.999*m.b627) - 1.1*log(1 + m.x309/(0.001", "Constraint(expr= m.x552 == 0) m.c820 = Constraint(expr= m.x553 == 0)", "Var(within=Binary,bounds=(0,1),initialize=0) m.b606 = Var(within=Binary,bounds=(0,1),initialize=0) m.b607 = Var(within=Binary,bounds=(0,1),initialize=0) m.b608 = Var(within=Binary,bounds=(0,1),initialize=0)", "= Constraint(expr= m.x367 == 0) m.c440 = Constraint(expr= m.x431 ==", "Constraint(expr= m.b605 - m.b607 <= 0) m.c1024 = Constraint(expr= m.b606", "+ 0.940066550763924*m.b660 <= 0.940066550763924) m.c691 = Constraint(expr= m.x493 + 0.940066550763924*m.b661", "m.c938 = Constraint(expr= 7*m.b701 + m.x791 == 0) m.c939 =", "== 0) m.c675 = Constraint(expr= m.x144 - m.x477 - m.x480", "= Constraint(expr= m.b655 - m.b658 >= 0) m.c1469 = Constraint(expr=", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b701 = Var(within=Binary,bounds=(0,1),initialize=0) m.b702 = Var(within=Binary,bounds=(0,1),initialize=0) m.b703 =", "m.b715 <= 1) m.c1163 = Constraint(expr= m.b716 + m.b717 <=", "= Constraint(expr= m.x558 + 15*m.b681 <= 15) m.c889 = Constraint(expr=", "- m.b724 <= 0) m.c1322 = Constraint(expr= m.b635 - m.b725", "Constraint(expr= m.x391 == 0) m.c509 = Constraint(expr= m.x443 == 0)", "Constraint(expr= m.x539 + 15*m.b668 <= 15) m.c786 = Constraint(expr= m.x540", "4*m.b767 - 8*m.b768 - 6*m.b769 - 2*m.b770 - m.b771 -", "+ m.b687 <= 1) m.c1106 = Constraint(expr= m.b687 + m.b688", "- 0.480234946352917*m.b675 <= 0) m.c838 = Constraint(expr= m.x574 - 0.480234946352917*m.b676", "4.45628648004517) m.c127 = Constraint(expr= 
m.x241 + 4.45628648004517*m.b604 <= 4.45628648004517) m.c128", "0) m.c291 = Constraint(expr= m.x348 == 0) m.c292 = Constraint(expr=", "m.x413 == 0) m.c621 = Constraint(expr= m.x108 - m.x411 -", "== 0) m.c208 = Constraint(expr= m.x289 == 0) m.c209 =", "== 0) m.c173 = Constraint(expr= m.x281 == 0) m.c174 =", "0) m.c467 = Constraint(expr= m.x371 == 0) m.c468 = Constraint(expr=", "+ m.b760 <= 1) m.c1251 = Constraint(expr= m.b758 + m.b760", "m.c191 = Constraint(expr= m.x254 - 30*m.b608 <= 0) m.c192 =", "m.c528 = Constraint(expr= m.x444 + 9*m.b642 <= 9) m.c529 =", "= Constraint(expr= m.x582 + 0.690184503917672*m.b678 <= 0.690184503917672) m.c868 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x21 = Var(within=Reals,bounds=(0,None),initialize=0) m.x22 = Var(within=Reals,bounds=(0,None),initialize=0) m.x23 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= 7*m.b763 + m.x853 == 0) m.c1001 = Constraint(expr=", "+ 0.705049913072943*m.b662 <= 0.705049913072943) m.c717 = Constraint(expr= m.x522 + 0.705049913072943*m.b663", "m.x232 = Var(within=Reals,bounds=(0,None),initialize=0) m.x233 = Var(within=Reals,bounds=(0,None),initialize=0) m.x234 = Var(within=Reals,bounds=(0,None),initialize=0) m.x235", "1.83548069293539) m.c228 = Constraint(expr= m.x321 + 1.83548069293539*m.b612 <= 1.83548069293539) m.c229", "<= 9) m.c549 = Constraint(expr= m.x396 + 9*m.b645 <= 9)", "- 9*m.b642 <= 0) m.c526 = Constraint(expr= m.x442 - 9*m.b643", "m.c1013 = Constraint(expr= m.b596 - m.b597 <= 0) m.c1014 =", "m.c1403 = Constraint(expr= m.b596 + m.b599 - m.b602 >= 0)", "m.c400 = Constraint(expr= m.x379 + 20*m.b631 <= 20) m.c401 =", "m.b676 <= 0) m.c1093 = Constraint(expr= m.b675 - m.b676 <=", "m.x282 = Var(within=Reals,bounds=(0,None),initialize=0) m.x283 = Var(within=Reals,bounds=(0,None),initialize=0) m.x284 = Var(within=Reals,bounds=(0,None),initialize=0) m.x285", "Constraint(expr= m.x252 == 0) m.c169 = Constraint(expr= m.x253 == 0)", "- 
1.18887736200171*m.b658 <= 0) m.c656 = Constraint(expr= m.x473 + 1.18887736200171*m.b656", "m.x522 = Var(within=Reals,bounds=(0,None),initialize=0) m.x523 = Var(within=Reals,bounds=(0,None),initialize=0) m.x524 = Var(within=Reals,bounds=(0,None),initialize=0) m.x525", "m.b693 <= 1) m.c1116 = Constraint(expr= m.b692 + m.b694 <=", "m.x98 - m.x392 - m.x395 == 0) m.c540 = Constraint(expr=", "m.x216 = Var(within=Reals,bounds=(0,None),initialize=0) m.x217 = Var(within=Reals,bounds=(0,None),initialize=0) m.x218 = Var(within=Reals,bounds=(0,None),initialize=0) m.x219", "1) m.c1184 = Constraint(expr= m.b726 + m.b727 <= 1) m.c1185", "Var(within=Reals,bounds=(None,None),initialize=0) m.x782 = Var(within=Reals,bounds=(None,None),initialize=0) m.x783 = Var(within=Reals,bounds=(None,None),initialize=0) m.x784 = Var(within=Reals,bounds=(None,None),initialize=0)", "m.c239 = Constraint(expr= m.x50 - m.x290 - m.x293 == 0)", "Constraint(expr= m.x442 - 9*m.b643 <= 0) m.c527 = Constraint(expr= m.x443", "0.666992981045719*m.b672 <= 0.666992981045719) m.c814 = Constraint(expr= m.x571 + 0.666992981045719*m.b673 <=", "m.c1284 = Constraint(expr= - m.b596 + m.b597 - m.b687 <=", "m.c391 = Constraint(expr= m.x319 - 1.83548069293539*m.b631 <= 0) m.c392 =", "<= 0) m.c150 = Constraint(expr= m.x243 - 4.45628648004517*m.b606 <= 0)", "+ 1.83548069293539*m.b631 <= 1.83548069293539) m.c395 = Constraint(expr= m.x374 - 20*m.b629", "6*m.b726 - 3*m.b727 - 4*m.b728 - 8*m.b729 - m.b730 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x72 = Var(within=Reals,bounds=(0,None),initialize=0) m.x73 = Var(within=Reals,bounds=(0,None),initialize=0) m.x74 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c678 = Constraint(expr= m.x150 - m.x489 - m.x492 == 0)", "Constraint(expr= 4*m.b735 + m.x825 == 0) m.c973 = Constraint(expr= 3*m.b736", "Var(within=Reals,bounds=(0,None),initialize=0) m.x358 = Var(within=Reals,bounds=(0,None),initialize=0) m.x359 = Var(within=Reals,bounds=(0,None),initialize=0) m.x360 = 
Var(within=Reals,bounds=(0,None),initialize=0)", "+ m.x212/(0.001 + 0.999*m.b596)))*(0.001 + 0.999*m.b596) <= 0) m.c54 =", "0) m.c946 = Constraint(expr= 7*m.b709 + m.x799 == 0) m.c947", "m.c1060 = Constraint(expr= m.b642 - m.b643 <= 0) m.c1061 =", "= Constraint(expr= m.x70 - m.x340 - m.x343 == 0) m.c272", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x43 = Var(within=Reals,bounds=(0,None),initialize=0) m.x44 = Var(within=Reals,bounds=(0,None),initialize=0) m.x45 =", "+ 3.34221486003388*m.b602 <= 3.34221486003388) m.c132 = Constraint(expr= m.x264 + 3.34221486003388*m.b603", "m.b713 + m.b714 <= 1) m.c1160 = Constraint(expr= m.b714 +", "m.c1050 = Constraint(expr= m.b632 - m.b634 <= 0) m.c1051 =", "m.c159 = Constraint(expr= m.x273 + 2.54515263975353*m.b606 <= 2.54515263975353) m.c160 =", "- 1.26558121681553*m.b619 <= 0) m.c281 = Constraint(expr= m.x341 + 1.26558121681553*m.b617", "m.x91 + m.x94 == 0) m.c26 = Constraint(expr= m.x74 -", "- m.b609 >= 0) m.c1411 = Constraint(expr= m.b598 + m.b601", "+ m.b715 <= 1) m.c1159 = Constraint(expr= m.b713 + m.b714", "m.x248 = Var(within=Reals,bounds=(0,None),initialize=0) m.x249 = Var(within=Reals,bounds=(0,None),initialize=0) m.x250 = Var(within=Reals,bounds=(0,None),initialize=0) m.x251", "+ m.x400/(0.001 + 0.999*m.b649)))*(0.001 + 0.999* m.b649) <= 0) m.c560", "0) m.c562 = Constraint(expr= m.x403 == 0) m.c563 = Constraint(expr=", "+ m.x537 == 0) m.c751 = Constraint(expr= - 0.5*m.x514 +", "m.x178 = Var(within=Reals,bounds=(0,None),initialize=0) m.x179 = Var(within=Reals,bounds=(0,None),initialize=0) m.x180 = Var(within=Reals,bounds=(0,None),initialize=0) m.x181", "0.480234946352917) m.c842 = Constraint(expr=(m.x578/(0.001 + 0.999*m.b677) - log(1 + m.x527/(0.001", "- m.b629 - m.b630 + m.b631 - m.b721 <= 0)", "+ 15*m.b623 <= 15) m.c330 = Constraint(expr= m.x306 + 15*m.b624", "Constraint(expr=(m.x524/(0.001 + 0.999*m.b665) - 1.5*log(1 + m.x500/(0.001 + 0.999*m.b665)))*(0.001 +", "- m.x578 - m.x581 == 0) m.c855 = 
Constraint(expr= m.x204", "0) m.c1369 = Constraint(expr= - m.b680 - m.b681 + m.b682", "Constraint(expr= m.b610 - m.b628 >= 0) m.c1430 = Constraint(expr= m.b611", "<= 0) m.c1363 = Constraint(expr= - m.b674 - m.b675 +", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x801 = Var(within=Reals,bounds=(None,None),initialize=0) m.x802 = Var(within=Reals,bounds=(None,None),initialize=0) m.x803 =", "m.x851 == 0) m.c999 = Constraint(expr= 8*m.b762 + m.x852 ==", "0) m.c1411 = Constraint(expr= m.b598 + m.b601 - m.b610 >=", "m.x294 = Var(within=Reals,bounds=(0,None),initialize=0) m.x295 = Var(within=Reals,bounds=(0,None),initialize=0) m.x296 = Var(within=Reals,bounds=(0,None),initialize=0) m.x297", "= Constraint(expr= m.x479 + 1.18887736200171*m.b659 <= 1.18887736200171) m.c684 = Constraint(expr=", "0.705049913072943*m.b674 <= 0) m.c831 = Constraint(expr= m.x549 - 0.705049913072943*m.b675 <=", "m.x280 - 15*m.b610 <= 0) m.c200 = Constraint(expr= m.x281 +", "m.c1178 = Constraint(expr= m.b723 + m.b724 <= 1) m.c1179 =", "m.x532 == 0) m.c728 = Constraint(expr= m.x164 - m.x500 -", "+ m.b739 <= 1) m.c1207 = Constraint(expr= m.b737 + m.b738", "m.b643 - m.b733 <= 0) m.c1331 = Constraint(expr= m.b644 -", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b692 = Var(within=Binary,bounds=(0,1),initialize=0) m.b693 = Var(within=Binary,bounds=(0,1),initialize=0) m.b694 =", "0) m.c770 = Constraint(expr= m.x506 - 0.940066550763924*m.b668 <= 0) m.c771", "= Constraint(expr= m.x534 + 0.994083415506506*m.b678 <= 0.994083415506506) m.c862 = Constraint(expr=", "0.999*m.b657) - log(1 + m.x471/(0.001 + 0.999*m.b657)))*(0.001 + 0.999*m.b657) <=", "m.x364 - 1.26558121681553*m.b637 <= 0) m.c452 = Constraint(expr= m.x365 +", "+ m.b640 >= 0) m.c1391 = Constraint(expr= - m.b608 +", "= Constraint(expr= - m.b620 + m.b638 >= 0) m.c1395 =", "Constraint(expr= - m.b599 + m.b600 - m.b690 <= 0) m.c1288", "Constraint(expr= - m.x146 - m.x149 + m.x152 == 0) m.c39", "Constraint(expr= m.x576 + 
0.480234946352917*m.b675 <= 0.480234946352917) m.c841 = Constraint(expr= m.x577", "= Constraint(expr= m.b653 - m.b656 >= 0) m.c1467 = Constraint(expr=", "Var(within=Reals,bounds=(None,None),initialize=0) m.x823 = Var(within=Reals,bounds=(None,None),initialize=0) m.x824 = Var(within=Reals,bounds=(None,None),initialize=0) m.x825 = Var(within=Reals,bounds=(None,None),initialize=0)", "0) m.c439 = Constraint(expr= m.x367 == 0) m.c440 = Constraint(expr=", "Constraint(expr= m.x314 - 1.83548069293539*m.b611 <= 0) m.c225 = Constraint(expr= m.x315", "10*m.b758 + m.x848 == 0) m.c996 = Constraint(expr= 6*m.b759 +", "0) m.c772 = Constraint(expr= m.x508 - 0.940066550763924*m.b670 <= 0) m.c773", "5*m.b686 - 4*m.b687 - 6*m.b688 - 8*m.b689 - 7*m.b690 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x146 = Var(within=Reals,bounds=(0,None),initialize=0) m.x147 = Var(within=Reals,bounds=(0,None),initialize=0) m.x148 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c325 = Constraint(expr= m.x76 - m.x352 - m.x355 == 0)", "0) m.c1091 = Constraint(expr= m.b674 - m.b675 <= 0) m.c1092", "m.b679 >= 0) m.c1481 = Constraint(expr= m.b668 - m.b680 >=", "Constraint(expr= m.b632 - m.b633 <= 0) m.c1050 = Constraint(expr= m.b632", "= Constraint(expr= m.x536 - 15*m.b668 <= 0) m.c783 = Constraint(expr=", "- 10*m.x86 - 5*m.x87 - 5*m.x88 - 5*m.x89 - 5*m.x90", "m.x580 - m.x583 == 0) m.c857 = Constraint(expr= m.x527 -", "= Constraint(expr= m.x506 - 0.940066550763924*m.b668 <= 0) m.c771 = Constraint(expr=", "<= 9) m.c557 = Constraint(expr=(m.x452/(0.001 + 0.999*m.b647) - 0.75*log(1 +", "Constraint(expr= m.b707 + m.b709 <= 1) m.c1150 = Constraint(expr= m.b708", "- m.b770 <= 0) m.c1368 = Constraint(expr= - m.b680 +", "= Constraint(expr= m.x325 == 0) m.c374 = Constraint(expr= m.x377 ==", "= Constraint(expr= m.b659 - m.b660 <= 0) m.c1077 = Constraint(expr=", "== 0) m.c210 = Constraint(expr= m.x321 == 0) m.c211 =", "- m.b749 <= 0) m.c1347 = Constraint(expr= - m.b659 +", "<= 0) m.c451 = 
Constraint(expr= m.x364 - 1.26558121681553*m.b637 <= 0)", "+ 10*m.x21 + 5*m.x22 - 2*m.x35 - m.x36 - 2*m.x37", ">= 0) m.c1414 = Constraint(expr= m.b604 - m.b613 >= 0)", "m.c1073 = Constraint(expr= m.b656 - m.b657 <= 0) m.c1074 =", "0) m.c661 = Constraint(expr= m.x484 - 0.78338879230327*m.b658 <= 0) m.c662", "m.b608 - m.b610 <= 0) m.c1027 = Constraint(expr= m.b609 -", "- m.x13 - m.x16 + m.x19 == 0) m.c8 =", "0) m.c990 = Constraint(expr= 8*m.b753 + m.x843 == 0) m.c991", "2.54515263975353*m.b605 <= 0) m.c156 = Constraint(expr= m.x267 - 2.54515263975353*m.b606 <=", "Constraint(expr= m.x583 == 0) m.c851 = Constraint(expr= m.x176 - m.x527", "= Constraint(expr= m.x6 - m.x213 - m.x216 == 0) m.c64", "0) m.c120 = Constraint(expr= m.x39 - m.x261 - m.x264 ==", "m.c426 = Constraint(expr= m.x336 + 1.32154609891348*m.b633 <= 1.32154609891348) m.c427 =", "m.c890 = Constraint(expr= m.x584 - 13.5*m.b680 <= 0) m.c891 =", "= Constraint(expr= m.b668 - m.b683 >= 0) m.c1485 = Constraint(expr=", "3*m.b772 - 8*m.b773 - 3*m.b774 - 4*m.b775, sense=maximize) m.c2 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x32 = Var(within=Reals,bounds=(0,None),initialize=0) m.x33 = Var(within=Reals,bounds=(0,None),initialize=0) m.x34 = Var(within=Reals,bounds=(0,None),initialize=0)", "- 3.04984759446376*m.b626 <= 0) m.c360 = Constraint(expr= m.x357 - 3.04984759446376*m.b627", "Constraint(expr= m.b725 + m.b727 <= 1) m.c1183 = Constraint(expr= m.b725", "m.x455 == 0) m.c570 = Constraint(expr= m.x129 - m.x453 -", "m.c1484 = Constraint(expr= m.b668 - m.b683 >= 0) m.c1485 =", "= Constraint(expr= m.x202 - m.x574 - m.x577 == 0) m.c830", "Var(within=Reals,bounds=(0,None),initialize=0) m.x576 = Var(within=Reals,bounds=(0,None),initialize=0) m.x577 = Var(within=Reals,bounds=(0,None),initialize=0) m.x578 = Var(within=Reals,bounds=(0,None),initialize=0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x236 = Var(within=Reals,bounds=(0,None),initialize=0) m.x237 = Var(within=Reals,bounds=(0,None),initialize=0) 
m.x238 = Var(within=Reals,bounds=(0,None),initialize=0)", "4.45628648004517*m.b605 <= 4.45628648004517) m.c153 = Constraint(expr= m.x246 + 4.45628648004517*m.b606 <=", "m.x417 - 20*m.b630 <= 0) m.c403 = Constraint(expr= m.x418 -", "m.x491 = Var(within=Reals,bounds=(0,None),initialize=0) m.x492 = Var(within=Reals,bounds=(0,None),initialize=0) m.x493 = Var(within=Reals,bounds=(0,None),initialize=0) m.x494", "= Constraint(expr= m.x460 - 1.11894339953103*m.b652 <= 0) m.c608 = Constraint(expr=", "m.c411 = Constraint(expr= m.x336 == 0) m.c412 = Constraint(expr= m.x337", "+ m.b621 - m.b711 <= 0) m.c1309 = Constraint(expr= -", "Constraint(expr= m.x236 - 4.45628648004517*m.b602 <= 0) m.c123 = Constraint(expr= m.x237", "1) m.c1212 = Constraint(expr= m.b740 + m.b742 <= 1) m.c1213", "<= 0) m.c328 = Constraint(expr= m.x304 - 15*m.b625 <= 0)", "m.c597 = Constraint(expr= m.x132 - m.x459 - m.x462 == 0)", "+ m.b617 >= 0) m.c1386 = Constraint(expr= - m.b606 +", "+ 20*m.b630 <= 20) m.c400 = Constraint(expr= m.x379 + 20*m.b631", "m.x451 = Var(within=Reals,bounds=(0,None),initialize=0) m.x452 = Var(within=Reals,bounds=(0,None),initialize=0) m.x453 = Var(within=Reals,bounds=(0,None),initialize=0) m.x454", "Var(within=Reals,bounds=(0,None),initialize=0) m.x112 = Var(within=Reals,bounds=(0,None),initialize=0) m.x113 = Var(within=Reals,bounds=(0,None),initialize=0) m.x114 = Var(within=Reals,bounds=(0,None),initialize=0)", "== 0) m.c765 = Constraint(expr= m.x171 - m.x513 - m.x516", "== 0) m.c571 = Constraint(expr= m.x130 - m.x454 - m.x457", "- m.b619 + m.b637 + m.b640 >= 0) m.c1391 =", "== 0) m.c645 = Constraint(expr= m.x486 == 0) m.c646 =", "1.18887736200171*m.b658 <= 1.18887736200171) m.c659 = Constraint(expr= m.x482 - 0.78338879230327*m.b656 <=", "m.x129 = Var(within=Reals,bounds=(0,None),initialize=0) m.x130 = Var(within=Reals,bounds=(0,None),initialize=0) m.x131 = Var(within=Reals,bounds=(0,None),initialize=0) m.x132", "== 0) m.c389 = Constraint(expr= m.x317 - 1.83548069293539*m.b629 
<= 0)", "m.b699 <= 1) m.c1128 = Constraint(expr= m.b698 + m.b700 <=", "0.999* m.b651) <= 0) m.c586 = Constraint(expr=(m.x460/(0.001 + 0.999*m.b652) -", "0) m.c615 = Constraint(expr= m.x414 == 0) m.c616 = Constraint(expr=", "Constraint(expr= m.b752 + m.b754 <= 1) m.c1240 = Constraint(expr= m.b753", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x323 = Var(within=Reals,bounds=(0,None),initialize=0) m.x324 = Var(within=Reals,bounds=(0,None),initialize=0) m.x325 =", "m.x374 - m.x377 == 0) m.c384 = Constraint(expr= m.x87 -", "0) m.c299 = Constraint(expr= m.x296 - 15*m.b620 <= 0) m.c300", "<= 0.705049913072943) m.c836 = Constraint(expr= m.x572 - 0.480234946352917*m.b674 <= 0)", "0) m.c116 = Constraint(expr= m.x26 - m.x236 - m.x239 ==", "m.b684 - m.b685 <= 0) m.c1103 = Constraint(expr= m.b686 +", "3.04984759446376) m.c631 = Constraint(expr= m.x415 + 3.04984759446376*m.b655 <= 3.04984759446376) m.c632", "m.x331/(0.001 + 0.999*m.b634)))*(0.001 + 0.999*m.b634) <= 0) m.c410 = Constraint(expr=", "4*m.b744 + m.x834 == 0) m.c982 = Constraint(expr= m.b745 +", "- m.x388 - m.x391 == 0) m.c515 = Constraint(expr= m.x122", "= Constraint(expr= m.b611 - m.b701 <= 0) m.c1299 = Constraint(expr=", "m.b727 <= 0) m.c1325 = Constraint(expr= m.b638 - m.b728 <=", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b736 = Var(within=Binary,bounds=(0,1),initialize=0) m.b737 = Var(within=Binary,bounds=(0,1),initialize=0) m.b738 =", "<= 0) m.c82 = Constraint(expr=(m.x232/(0.001 + 0.999*m.b601) - 1.2*log(1 +", "m.x70 - m.x82 - m.x85 == 0) m.c23 = Constraint(expr=", "log(1 + m.x470/(0.001 + 0.999*m.b656)))*(0.001 + 0.999*m.b656) <= 0) m.c639", "0.994083415506506*m.b679 <= 0.994083415506506) m.c863 = Constraint(expr= m.x578 - 0.690184503917672*m.b677 <=", "+ m.x549/(0.001 + 0.999*m.b675)))*(0.001 + 0.999* m.b675) <= 0) m.c817", "m.c1421 = Constraint(expr= m.b608 - m.b620 >= 0) m.c1422 =", "Var(within=Reals,bounds=(None,None),initialize=0) m.x798 = Var(within=Reals,bounds=(None,None),initialize=0) 
m.x799 = Var(within=Reals,bounds=(None,None),initialize=0) m.x800 = Var(within=Reals,bounds=(None,None),initialize=0)", "Constraint(expr= m.x524 - 0.994083415506506*m.b665 <= 0) m.c741 = Constraint(expr= m.x525", "= Constraint(expr= m.b731 + m.b732 <= 1) m.c1194 = Constraint(expr=", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x815 = Var(within=Reals,bounds=(None,None),initialize=0) m.x816 = Var(within=Reals,bounds=(None,None),initialize=0) m.x817 =", "- m.x376 - m.x379 == 0) m.c386 = Constraint(expr= m.x110", "Constraint(expr= m.x481 == 0) m.c671 = Constraint(expr= m.x491 == 0)", "- m.b660 <= 0) m.c1077 = Constraint(expr= m.b659 - m.b661", "m.x311 == 0) m.c342 = Constraint(expr= m.x312 == 0) m.c343", "= Constraint(expr= m.b602 - m.b614 >= 0) m.c1416 = Constraint(expr=", "= Constraint(expr=(m.x428/(0.001 + 0.999*m.b635) - 0.7*log(1 + m.x362/(0.001 + 0.999*m.b635)))*(0.001", "1) m.c1273 = Constraint(expr= m.b770 + m.b771 <= 1) m.c1274", "m.x485 + 0.78338879230327*m.b656 <= 0.78338879230327) m.c663 = Constraint(expr= m.x486 +", "Constraint(expr= m.x105 - m.x405 - m.x408 == 0) m.c595 =", "0) m.c137 = Constraint(expr= m.x245 == 0) m.c138 = Constraint(expr=", "= Constraint(expr= - m.x376 + m.x418 == 0) m.c371 =", "<= 1) m.c1115 = Constraint(expr= m.b692 + m.b693 <= 1)", "<= 1) m.c1239 = Constraint(expr= m.b752 + m.b754 <= 1)", "Var(within=Binary,bounds=(0,1),initialize=0) m.b762 = Var(within=Binary,bounds=(0,1),initialize=0) m.b763 = Var(within=Binary,bounds=(0,1),initialize=0) m.b764 = Var(within=Binary,bounds=(0,1),initialize=0)", "Constraint(expr= m.x283 == 0) m.c176 = Constraint(expr= m.x32 - m.x248", "m.x438 == 0) m.c484 = Constraint(expr= m.x121 - m.x436 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x587 = Var(within=Reals,bounds=(0,None),initialize=0) m.x588 = Var(within=Reals,bounds=(0,None),initialize=0) m.x589 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= - 0.75*m.x237 + m.x261 == 0) m.c109 =", "- 30*m.b670 <= 0) 
m.c779 = Constraint(expr= m.x515 + 30*m.b668", "Var(within=Reals,bounds=(0,None),initialize=0) m.x505 = Var(within=Reals,bounds=(0,None),initialize=0) m.x506 = Var(within=Reals,bounds=(0,None),initialize=0) m.x507 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c308 = Constraint(expr= m.x347 + 13.5*m.b620 <= 13.5) m.c309 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x226 = Var(within=Reals,bounds=(0,None),initialize=0) m.x227 = Var(within=Reals,bounds=(0,None),initialize=0) m.x228 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= m.b611 - m.b613 <= 0) m.c1030 = Constraint(expr=", "== 0) m.c207 = Constraint(expr= m.x288 == 0) m.c208 =", "m.b632 - m.b633 + m.b634 - m.b724 <= 0) m.c1322", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x179 = Var(within=Reals,bounds=(0,None),initialize=0) m.x180 = Var(within=Reals,bounds=(0,None),initialize=0) m.x181 =", "m.x302 - 15*m.b623 <= 0) m.c327 = Constraint(expr= m.x303 -", "- m.b765 <= 0) m.c1363 = Constraint(expr= - m.b674 -", "m.b720 <= 0) m.c1318 = Constraint(expr= - m.b629 - m.b630", "m.c386 = Constraint(expr= m.x110 - m.x416 - m.x419 == 0)", "m.c701 = Constraint(expr= m.x161 - m.x494 - m.x497 == 0)", "0) m.c1295 = Constraint(expr= m.b608 - m.b698 <= 0) m.c1296", "+ 40*m.b596 <= 40) m.c72 = Constraint(expr= m.x216 + 40*m.b597", "Constraint(expr= m.b596 + m.b599 - m.b608 >= 0) m.c1410 =", "0) m.c1466 = Constraint(expr= m.b653 - m.b656 >= 0) m.c1467", "- m.b658 >= 0) m.c1469 = Constraint(expr= m.b653 - m.b659", "Constraint(expr= m.x481 + 1.18887736200171*m.b661 <= 1.18887736200171) m.c686 = Constraint(expr= m.x488", "m.x835 == 0) m.c983 = Constraint(expr= 2*m.b746 + m.x836 ==", "m.b659 + m.b660 - m.b750 <= 0) m.c1348 = Constraint(expr=", "15*m.b622 <= 15) m.c305 = Constraint(expr= m.x344 - 13.5*m.b620 <=", "m.x82 - m.x85 == 0) m.c23 = Constraint(expr= - m.x71", "m.c387 = Constraint(expr= m.x111 - m.x417 - m.x420 == 0)", "m.c499 = Constraint(expr= m.x436 - 2.30162356062425*m.b640 <= 0) 
m.c500 =", "Constraint(expr= m.x126 - m.x447 - m.x450 == 0) m.c544 =", "m.c838 = Constraint(expr= m.x574 - 0.480234946352917*m.b676 <= 0) m.c839 =", "<= 1) m.c1132 = Constraint(expr= m.b699 + m.b700 <= 1)", "+ m.b775 <= 1) m.c1282 = Constraint(expr= m.b774 + m.b775", "<= 0.842233385663186) m.c433 = Constraint(expr= m.x427 + 0.842233385663186*m.b634 <= 0.842233385663186)", "m.b648 >= 0) m.c1450 = Constraint(expr= m.b628 - m.b649 >=", "m.x120 = Var(within=Reals,bounds=(0,None),initialize=0) m.x121 = Var(within=Reals,bounds=(0,None),initialize=0) m.x122 = Var(within=Reals,bounds=(0,None),initialize=0) m.x123", "m.x59 == 0) m.c18 = Constraint(expr= m.x45 - m.x54 -", "Constraint(expr= - m.b667 + m.b679 >= 0) m.c1466 = Constraint(expr=", "m.b678 - m.b679 <= 0) m.c1097 = Constraint(expr= m.b680 -", "m.x343 = Var(within=Reals,bounds=(0,None),initialize=0) m.x344 = Var(within=Reals,bounds=(0,None),initialize=0) m.x345 = Var(within=Reals,bounds=(0,None),initialize=0) m.x346", "m.x245 == 0) m.c144 = Constraint(expr= m.x30 - m.x243 -", "Constraint(expr= m.x57 - m.x303 - m.x306 == 0) m.c322 =", "Constraint(expr= m.x335 == 0) m.c411 = Constraint(expr= m.x336 == 0)", "m.b606 - m.b696 <= 0) m.c1294 = Constraint(expr= - m.b605", "- 0.65*log(1 + m.x382/(0.001 + 0.999*m.b640)))*(0.001 + 0.999* m.b640) <=", "0) m.c487 = Constraint(expr= m.x370 - 1.26558121681553*m.b640 <= 0) m.c488", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x280 = Var(within=Reals,bounds=(0,None),initialize=0) m.x281 = Var(within=Reals,bounds=(0,None),initialize=0) m.x282 =", "0) m.c838 = Constraint(expr= m.x574 - 0.480234946352917*m.b676 <= 0) m.c839", "<= 0) m.c1021 = Constraint(expr= m.b603 - m.b604 <= 0)", "m.x497 = Var(within=Reals,bounds=(0,None),initialize=0) m.x498 = Var(within=Reals,bounds=(0,None),initialize=0) m.x499 = Var(within=Reals,bounds=(0,None),initialize=0) m.x500", "m.x431 + 0.572481933717686*m.b635 <= 0.572481933717686) m.c459 = Constraint(expr= m.x432 +", "- m.b608 >= 0) m.c1410 = 
Constraint(expr= m.b597 + m.b600", "3*m.b760 - 4*m.b761 - 8*m.b762 - 7*m.b763 - 7*m.b764 -", "m.b731 + m.b733 <= 1) m.c1195 = Constraint(expr= m.b731 +", "m.c433 = Constraint(expr= m.x427 + 0.842233385663186*m.b634 <= 0.842233385663186) m.c434 =", "m.x319 = Var(within=Reals,bounds=(0,None),initialize=0) m.x320 = Var(within=Reals,bounds=(0,None),initialize=0) m.x321 = Var(within=Reals,bounds=(0,None),initialize=0) m.x322", "m.x173 - m.x518 - m.x521 == 0) m.c705 = Constraint(expr=", "== 0) m.c293 = Constraint(expr= m.x53 - m.x296 - m.x299", "1) m.c1282 = Constraint(expr= m.b774 + m.b775 <= 1) m.c1283", "- m.b612 + m.b613 - m.b703 <= 0) m.c1301 =", "m.b736 <= 1) m.c1201 = Constraint(expr= m.b734 + m.b735 <=", "m.x57 - m.x60 == 0) m.c19 = Constraint(expr= m.x46 -", "0) m.c1373 = Constraint(expr= m.b596 + m.b599 == 1) m.c1374", "m.c307 = Constraint(expr= m.x346 - 13.5*m.b622 <= 0) m.c308 =", "Constraint(expr= - m.b623 + m.b624 - m.b714 <= 0) m.c1312", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x397 = Var(within=Reals,bounds=(0,None),initialize=0) m.x398 = Var(within=Reals,bounds=(0,None),initialize=0) m.x399 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x63 = Var(within=Reals,bounds=(0,None),initialize=0) m.x64 = Var(within=Reals,bounds=(0,None),initialize=0) m.x65 =", "Constraint(expr= 7*m.b690 + m.x780 == 0) m.c928 = Constraint(expr= 6*m.b691", "Constraint(expr= m.b689 + m.b691 <= 1) m.c1111 = Constraint(expr= m.b689", "+ m.b724 <= 1) m.c1181 = Constraint(expr= m.b725 + m.b726", "0) m.c938 = Constraint(expr= 7*m.b701 + m.x791 == 0) m.c939", "m.b694 <= 1) m.c1119 = Constraint(expr= m.b692 + m.b694 <=", "1) m.c1249 = Constraint(expr= m.b758 + m.b759 <= 1) m.c1250", "- m.x331 - m.x337 == 0) m.c419 = Constraint(expr= m.x113", "0) m.c429 = Constraint(expr= m.x423 - 0.842233385663186*m.b633 <= 0) m.c430", "0) m.c642 = Constraint(expr= m.x474 == 0) m.c643 = Constraint(expr=", "Constraint(expr=(m.x436/(0.001 + 0.999*m.b640) - 0.65*log(1 + m.x382/(0.001 + 
0.999*m.b640)))*(0.001 +", "m.x813 == 0) m.c961 = Constraint(expr= 9*m.b724 + m.x814 ==", "m.x327 = Var(within=Reals,bounds=(0,None),initialize=0) m.x328 = Var(within=Reals,bounds=(0,None),initialize=0) m.x329 = Var(within=Reals,bounds=(0,None),initialize=0) m.x330", "= Constraint(expr= 6*m.b759 + m.x849 == 0) m.c997 = Constraint(expr=", "- m.b623 - m.b624 + m.b625 - m.b715 <= 0)", "m.x837 == 0) m.c985 = Constraint(expr= 2*m.b748 + m.x838 ==", "m.x830 = Var(within=Reals,bounds=(None,None),initialize=0) m.x831 = Var(within=Reals,bounds=(None,None),initialize=0) m.x832 = Var(within=Reals,bounds=(None,None),initialize=0) m.x833", "<= 1) m.c1196 = Constraint(expr= m.b732 + m.b733 <= 1)", "Var(within=Binary,bounds=(0,1),initialize=0) m.b624 = Var(within=Binary,bounds=(0,1),initialize=0) m.b625 = Var(within=Binary,bounds=(0,1),initialize=0) m.b626 = Var(within=Binary,bounds=(0,1),initialize=0)", "+ m.b724 <= 1) m.c1180 = Constraint(expr= m.b723 + m.b724", "== 0) m.c626 = Constraint(expr= m.x410 - 3.04984759446376*m.b653 <= 0)", ">= 0) m.c1454 = Constraint(expr= m.b626 - m.b653 >= 0)", "0) m.c803 = Constraint(expr= m.x542 - 0.705049913072943*m.b671 <= 0) m.c804", "0.999* m.b665) <= 0) m.c720 = Constraint(expr=(m.x525/(0.001 + 0.999*m.b666) -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x168 = Var(within=Reals,bounds=(0,None),initialize=0) m.x169 = Var(within=Reals,bounds=(0,None),initialize=0) m.x170 = Var(within=Reals,bounds=(0,30),initialize=0)", "0.999* m.b647) <= 0) m.c558 = Constraint(expr=(m.x453/(0.001 + 0.999*m.b648) -", "Constraint(expr= m.b680 - m.b682 <= 0) m.c1099 = Constraint(expr= m.b681", "= Constraint(expr=(m.x316/(0.001 + 0.999*m.b613) - 1.25*log(1 + m.x286/(0.001 + 0.999*m.b613)))*(0.001", "m.x565 + 15*m.b685 <= 15) m.c917 = Constraint(expr= m.x590 -", "m.c1010 = Constraint(expr= 8*m.b773 + m.x863 == 0) m.c1011 =", "Constraint(expr= m.b636 - m.b637 <= 0) m.c1055 = Constraint(expr= m.b638", "- 15*m.b621 <= 0) m.c301 = Constraint(expr= m.x298 - 
15*m.b622", "0) m.c648 = Constraint(expr= m.x141 - m.x471 - m.x474 ==", "== 0) m.c622 = Constraint(expr= m.x109 - m.x412 - m.x415", "m.x473 == 0) m.c648 = Constraint(expr= m.x141 - m.x471 -", "+ 20*m.x120 + 20*m.x121 + 35*m.x122 + 50*m.x123 + 20*m.x124", "+ 1.18887736200171*m.b660 <= 1.18887736200171) m.c685 = Constraint(expr= m.x481 + 1.18887736200171*m.b661", "m.x82 - m.x364 - m.x367 == 0) m.c446 = Constraint(expr=", "m.b640 = Var(within=Binary,bounds=(0,1),initialize=0) m.b641 = Var(within=Binary,bounds=(0,1),initialize=0) m.b642 = Var(within=Binary,bounds=(0,1),initialize=0) m.b643", "0.999*m.b667) - 1.5*log(1 + m.x502/(0.001 + 0.999*m.b667)))*(0.001 + 0.999* m.b667)", "m.c806 = Constraint(expr= m.x545 + 0.705049913072943*m.b671 <= 0.705049913072943) m.c807 =", "m.b615 - m.b616 <= 0) m.c1034 = Constraint(expr= m.b617 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x179 = Var(within=Reals,bounds=(0,None),initialize=0) m.x180 = Var(within=Reals,bounds=(0,None),initialize=0) m.x181 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c966 = Constraint(expr= 8*m.b729 + m.x819 == 0) m.c967", "Var(within=Reals,bounds=(0,None),initialize=0) m.x281 = Var(within=Reals,bounds=(0,None),initialize=0) m.x282 = Var(within=Reals,bounds=(0,None),initialize=0) m.x283 = Var(within=Reals,bounds=(0,None),initialize=0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x583 = Var(within=Reals,bounds=(0,None),initialize=0) m.x584 = Var(within=Reals,bounds=(0,None),initialize=0) m.x585 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr=(m.x436/(0.001 + 0.999*m.b640) - 0.65*log(1 + m.x370/(0.001 + 0.999*m.b640)))*(0.001 +", "= Constraint(expr= m.b755 + m.b756 <= 1) m.c1242 = Constraint(expr=", "0.75*m.x237 + m.x261 == 0) m.c109 = Constraint(expr= - 0.75*m.x238", "variable and 1 equation from pyomo.environ import * model =", "<= 1) m.c1263 = Constraint(expr= m.b764 + m.b766 <= 1)", "m.c856 = Constraint(expr= m.x205 - m.x580 - m.x583 == 0)", "- m.b606 + 
m.b618 >= 0) m.c1387 = Constraint(expr= -", "Constraint(expr= m.x575 == 0) m.c822 = Constraint(expr= m.x576 == 0)", "m.x445 + 9*m.b643 <= 9) m.c530 = Constraint(expr= - m.x392", "+ m.x854 == 0) m.c1002 = Constraint(expr= 3*m.b765 + m.x855", "= Constraint(expr= m.x353 + 9*m.b623 <= 9) m.c336 = Constraint(expr=", "- m.x47 - m.x50 == 0) m.c15 = Constraint(expr= m.x39", "= Constraint(expr= m.x395 == 0) m.c534 = Constraint(expr= m.x396 ==", "0.999* m.b676) <= 0) m.c818 = Constraint(expr= m.x551 == 0)", "m.b651 = Var(within=Binary,bounds=(0,1),initialize=0) m.b652 = Var(within=Binary,bounds=(0,1),initialize=0) m.b653 = Var(within=Binary,bounds=(0,1),initialize=0) m.b654", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x135 = Var(within=Reals,bounds=(0,None),initialize=0) m.x136 = Var(within=Reals,bounds=(0,None),initialize=0) m.x137 =", "Var(within=Binary,bounds=(0,1),initialize=0) m.b626 = Var(within=Binary,bounds=(0,1),initialize=0) m.b627 = Var(within=Binary,bounds=(0,1),initialize=0) m.b628 = Var(within=Binary,bounds=(0,1),initialize=0)", "20*m.b629 <= 0) m.c402 = Constraint(expr= m.x417 - 20*m.b630 <=", "0) m.c1030 = Constraint(expr= m.b612 - m.b613 <= 0) m.c1031", "Constraint(expr= m.b689 + m.b690 <= 1) m.c1112 = Constraint(expr= m.b690", "m.c51 = Constraint(expr= m.x180 - m.x189 - m.x192 - m.x195", "= Constraint(expr= m.b632 - m.b633 <= 0) m.c1050 = Constraint(expr=", "m.c1120 = Constraint(expr= m.b693 + m.b694 <= 1) m.c1121 =", "m.x285 = Var(within=Reals,bounds=(0,None),initialize=0) m.x286 = Var(within=Reals,bounds=(0,None),initialize=0) m.x287 = Var(within=Reals,bounds=(0,None),initialize=0) m.x288", "Constraint(expr= m.x443 + 9*m.b641 <= 9) m.c528 = Constraint(expr= m.x444", ">= 0) m.c1405 = Constraint(expr= m.b598 + m.b601 - m.b604", "Var(within=Reals,bounds=(0,None),initialize=0) m.x309 = Var(within=Reals,bounds=(0,None),initialize=0) m.x310 = Var(within=Reals,bounds=(0,None),initialize=0) m.x311 = Var(within=Reals,bounds=(0,None),initialize=0)", "= 
Constraint(expr= 5*m.b752 + m.x842 == 0) m.c990 = Constraint(expr=", "m.b678 >= 0) m.c1465 = Constraint(expr= - m.b667 + m.b679", "m.c1300 = Constraint(expr= - m.b611 - m.b612 + m.b613 -", "m.b598 + m.b601 == 1) m.c1376 = Constraint(expr= - m.b602", "= Constraint(expr= m.x42 - m.x270 - m.x276 == 0) m.c268", "m.x446 == 0) m.c531 = Constraint(expr= - m.x393 + m.x447", "Constraint(expr= m.x148 - m.x484 - m.x487 == 0) m.c653 =", "m.x342 == 0) m.c271 = Constraint(expr= m.x70 - m.x340 -", "m.x354 = Var(within=Reals,bounds=(0,None),initialize=0) m.x355 = Var(within=Reals,bounds=(0,None),initialize=0) m.x356 = Var(within=Reals,bounds=(0,None),initialize=0) m.x357", "m.x576 + 0.480234946352917*m.b675 <= 0.480234946352917) m.c841 = Constraint(expr= m.x577 +", "m.x51 = Var(within=Reals,bounds=(0,None),initialize=0) m.x52 = Var(within=Reals,bounds=(0,None),initialize=0) m.x53 = Var(within=Reals,bounds=(0,None),initialize=0) m.x54", "m.c671 = Constraint(expr= m.x491 == 0) m.c672 = Constraint(expr= m.x492", "m.x517 = Var(within=Reals,bounds=(0,None),initialize=0) m.x518 = Var(within=Reals,bounds=(0,None),initialize=0) m.x519 = Var(within=Reals,bounds=(0,None),initialize=0) m.x520", "- m.x213 - m.x216 == 0) m.c64 = Constraint(expr= m.x7", "Constraint(expr= m.x133 - m.x460 - m.x463 == 0) m.c599 =", "m.c1231 = Constraint(expr= m.b749 + m.b750 <= 1) m.c1232 =", "- 0.75*log(1 + m.x398/(0.001 + 0.999*m.b647)))*(0.001 + 0.999* m.b647) <=", "m.b737 + m.b739 <= 1) m.c1207 = Constraint(expr= m.b737 +", "- m.x321 == 0) m.c217 = Constraint(expr= m.x64 - m.x316", "1.1*log(1 + m.x309/(0.001 + 0.999*m.b627)))*(0.001 + 0.999* m.b627) <= 0)", "m.x133 - m.x460 - m.x463 == 0) m.c599 = Constraint(expr=", "m.c963 = Constraint(expr= 6*m.b726 + m.x816 == 0) m.c964 =", "= Constraint(expr= m.x264 == 0) m.c115 = Constraint(expr= m.x265 ==", "m.c644 = Constraint(expr= m.x485 == 0) m.c645 = Constraint(expr= m.x486", "= Constraint(expr= m.b762 + m.b763 <= 1) m.c1257 = Constraint(expr=", "= Constraint(expr= 
- m.b667 + m.b679 >= 0) m.c1466 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x304 = Var(within=Reals,bounds=(0,None),initialize=0) m.x305 = Var(within=Reals,bounds=(0,None),initialize=0) m.x306 =", "Var(within=Binary,bounds=(0,1),initialize=0) m.b697 = Var(within=Binary,bounds=(0,1),initialize=0) m.b698 = Var(within=Binary,bounds=(0,1),initialize=0) m.b699 = Var(within=Binary,bounds=(0,1),initialize=0)", "= Constraint(expr= m.x369 - 1.26558121681553*m.b639 <= 0) m.c487 = Constraint(expr=", "m.c725 = Constraint(expr= m.x530 == 0) m.c726 = Constraint(expr= m.x531", "0.690184503917672) m.c869 = Constraint(expr= - 0.9*m.x554 + m.x584 == 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x580 = Var(within=Reals,bounds=(0,None),initialize=0) m.x581 = Var(within=Reals,bounds=(0,None),initialize=0) m.x582 =", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b679 = Var(within=Binary,bounds=(0,1),initialize=0) m.b680 = Var(within=Binary,bounds=(0,1),initialize=0) m.b681 =", "<= 0) m.c720 = Constraint(expr=(m.x525/(0.001 + 0.999*m.b666) - 1.5*log(1 +", "- 1.5*log(1 + m.x243/(0.001 + 0.999*m.b606)))*(0.001 + 0.999* m.b606) <=", "- m.b685 <= 0) m.c1103 = Constraint(expr= m.b686 + m.b687", "<= 0) m.c1020 = Constraint(expr= m.b602 - m.b604 <= 0)", "0) m.c704 = Constraint(expr= m.x173 - m.x518 - m.x521 ==", "= Constraint(expr=(m.x568/(0.001 + 0.999*m.b673) - 1.25*log(1 + m.x544/(0.001 + 0.999*m.b673)))*(0.001", "Constraint(expr= 10*m.b758 + m.x848 == 0) m.c996 = Constraint(expr= 6*m.b759", "m.x276 + 2.54515263975353*m.b618 <= 2.54515263975353) m.c277 = Constraint(expr= m.x277 +", "0 0 0 0 0 0 0 0 # #", "m.b602 - m.b604 <= 0) m.c1021 = Constraint(expr= m.b603 -", "- 0.940066550763924*m.b666 <= 0) m.c736 = Constraint(expr= m.x502 - 0.940066550763924*m.b667", "0) m.c1345 = Constraint(expr= - m.b656 - m.b657 + m.b658", ">= 0) m.c1387 = Constraint(expr= - m.b607 + m.b619 >=", "m.c157 = Constraint(expr= m.x268 - 2.54515263975353*m.b607 <= 0) m.c158 =", "+ 
0.999*m.b632)))*(0.001 + 0.999*m.b632) <= 0) m.c408 = Constraint(expr=(m.x423/(0.001 +", "3.04984759446376) m.c363 = Constraint(expr= m.x360 + 3.04984759446376*m.b627 <= 3.04984759446376) m.c364", "m.x14 - m.x230 - m.x233 == 0) m.c93 = Constraint(expr=", "= Constraint(expr= m.b704 + m.b705 <= 1) m.c1142 = Constraint(expr=", "9*m.b646 <= 0) m.c554 = Constraint(expr= m.x449 + 9*m.b644 <=", "0) m.c436 = Constraint(expr=(m.x430/(0.001 + 0.999*m.b637) - 0.7*log(1 + m.x364/(0.001", "Var(within=Reals,bounds=(None,None),initialize=0) m.x818 = Var(within=Reals,bounds=(None,None),initialize=0) m.x819 = Var(within=Reals,bounds=(None,None),initialize=0) m.x820 = Var(within=Reals,bounds=(None,None),initialize=0)", "+ 9*m.b625 <= 9) m.c338 = Constraint(expr=(m.x356/(0.001 + 0.999*m.b626) -", "+ 0.999*m.b655)))*(0.001 + 0.999* m.b655) <= 0) m.c614 = Constraint(expr=", "== 0) m.c949 = Constraint(expr= 2*m.b712 + m.x802 == 0)", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b752 = Var(within=Binary,bounds=(0,1),initialize=0) m.b753 = Var(within=Binary,bounds=(0,1),initialize=0) m.b754 =", "m.c679 = Constraint(expr= m.x151 - m.x490 - m.x493 == 0)", "m.x589 == 0) m.c878 = Constraint(expr= m.x188 - m.x554 -", "== 0) m.c957 = Constraint(expr= 2*m.b720 + m.x810 == 0)", ">= 0) m.c1446 = Constraint(expr= m.b624 - m.b645 >= 0)", "+ 0.999*m.b671)))*(0.001 + 0.999* m.b671) <= 0) m.c789 = Constraint(expr=(m.x567/(0.001", "m.x109 == 0) m.c32 = Constraint(expr= m.x134 - m.x137 ==", "- m.x335 == 0) m.c417 = Constraint(expr= m.x66 - m.x330", "+ m.b709 <= 1) m.c1151 = Constraint(expr= m.b710 + m.b711", "m.x519 = Var(within=Reals,bounds=(0,None),initialize=0) m.x520 = Var(within=Reals,bounds=(0,None),initialize=0) m.x521 = Var(within=Reals,bounds=(0,None),initialize=0) m.x522", "m.c907 = Constraint(expr= m.x193 - m.x562 - m.x565 == 0)", "0.940066550763924*m.b659 <= 0.940066550763924) m.c690 = Constraint(expr= m.x492 + 0.940066550763924*m.b660 <=", "= 
Var(within=Reals,bounds=(None,None),initialize=0) m.x844 = Var(within=Reals,bounds=(None,None),initialize=0) m.x845 = Var(within=Reals,bounds=(None,None),initialize=0) m.x846 =", "m.x46 = Var(within=Reals,bounds=(0,None),initialize=0) m.x47 = Var(within=Reals,bounds=(0,None),initialize=0) m.x48 = Var(within=Reals,bounds=(0,None),initialize=0) m.x49", "Constraint(expr= m.b722 + m.b724 <= 1) m.c1177 = Constraint(expr= m.b722", "Constraint(expr= m.b656 - m.b746 <= 0) m.c1344 = Constraint(expr= -", "- m.b628 <= 0) m.c1046 = Constraint(expr= m.b629 - m.b630", "= Constraint(expr= m.b657 - m.b658 <= 0) m.c1076 = Constraint(expr=", "- 6*m.b739 - 2*m.b740 - 8*m.b741 - 4*m.b742 - m.b743", "m.x375 + m.x417 == 0) m.c370 = Constraint(expr= - m.x376", "m.c511 = Constraint(expr= m.x445 == 0) m.c512 = Constraint(expr= m.x95", "0) m.c1336 = Constraint(expr= - m.b647 - m.b648 + m.b649", "m.b683 - m.b773 <= 0) m.c1371 = Constraint(expr= - m.b683", "0) m.c122 = Constraint(expr= m.x236 - 4.45628648004517*m.b602 <= 0) m.c123", "- 13.5*m.b621 <= 0) m.c307 = Constraint(expr= m.x346 - 13.5*m.b622", "m.x565 == 0) m.c902 = Constraint(expr= m.x593 == 0) m.c903", "m.b628 - m.b718 <= 0) m.c1316 = Constraint(expr= m.b629 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x262 = Var(within=Reals,bounds=(0,None),initialize=0) m.x263 = Var(within=Reals,bounds=(0,None),initialize=0) m.x264 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x362 - 1.26558121681553*m.b635 <= 0) m.c450 = Constraint(expr= m.x363 -", "m.c1217 = Constraint(expr= m.b743 + m.b744 <= 1) m.c1218 =", "- 0.9*m.x554 + m.x584 == 0) m.c870 = Constraint(expr= -", ">= 0) m.c1442 = Constraint(expr= m.b623 - m.b641 >= 0)", "== 0) m.c422 = Constraint(expr= m.x329 - 1.32154609891348*m.b632 <= 0)", "0) m.c1439 = Constraint(expr= m.b617 - m.b638 >= 0) m.c1440", "m.b654 >= 0) m.c1456 = Constraint(expr= m.b628 - m.b655 >=", "+ 15*m.b622 <= 15) m.c305 = Constraint(expr= m.x344 - 13.5*m.b620", "= 
Var(within=Reals,bounds=(0,None),initialize=0) m.x274 = Var(within=Reals,bounds=(0,None),initialize=0) m.x275 = Var(within=Reals,bounds=(0,None),initialize=0) m.x276 =", "0) m.c514 = Constraint(expr= m.x97 - m.x388 - m.x391 ==", "+ m.b700 <= 1) m.c1132 = Constraint(expr= m.b699 + m.b700", "m.b745 <= 1) m.c1219 = Constraint(expr= m.b743 + m.b744 <=", "m.c544 = Constraint(expr= m.x127 - m.x448 - m.x451 == 0)", "= Constraint(expr= 9*m.b751 + m.x841 == 0) m.c989 = Constraint(expr=", "Constraint(expr= m.b635 - m.b636 <= 0) m.c1053 = Constraint(expr= m.b635", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x524 = Var(within=Reals,bounds=(0,None),initialize=0) m.x525 = Var(within=Reals,bounds=(0,None),initialize=0) m.x526 =", "== 0) m.c415 = Constraint(expr= m.x427 == 0) m.c416 =", "m.c197 = Constraint(expr= m.x278 - 15*m.b608 <= 0) m.c198 =", "m.x144 == 0) m.c37 = Constraint(expr= m.x139 - m.x142 -", "- m.x282 == 0) m.c184 = Constraint(expr= m.x46 - m.x280", "<= 0) m.c817 = Constraint(expr=(m.x574/(0.001 + 0.999*m.b676) - 0.9*log(1 +", "m.b597 - m.b598 <= 0) m.c1016 = Constraint(expr= m.b599 -", "Constraint(expr= 6*m.b708 + m.x798 == 0) m.c946 = Constraint(expr= 7*m.b709", "- m.b620 >= 0) m.c1422 = Constraint(expr= m.b609 - m.b621", "m.x115 - m.x424 - m.x427 == 0) m.c422 = Constraint(expr=", "Constraint(expr= m.x482 - 0.78338879230327*m.b656 <= 0) m.c660 = Constraint(expr= m.x483", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x414 = Var(within=Reals,bounds=(0,None),initialize=0) m.x415 = Var(within=Reals,bounds=(0,None),initialize=0) m.x416 =", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b725 = Var(within=Binary,bounds=(0,1),initialize=0) m.b726 = Var(within=Binary,bounds=(0,1),initialize=0) m.b727 =", "Constraint(expr= m.b740 + m.b742 <= 1) m.c1213 = Constraint(expr= m.b740", "m.x68 - m.x338 - m.x341 == 0) m.c270 = Constraint(expr=", "- m.b605 - m.b606 + m.b607 - m.b697 <= 0)", "m.x307 == 0) m.c317 = Constraint(expr= m.x353 == 0) m.c318", "m.x406 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x407 = Var(within=Reals,bounds=(0,None),initialize=0) m.x408 = Var(within=Reals,bounds=(0,None),initialize=0) m.x409", "- m.x394 - m.x397 == 0) m.c542 = Constraint(expr= m.x125", "0) m.c254 = Constraint(expr= m.x332 + 1.32154609891348*m.b614 <= 1.32154609891348) m.c255", "m.c1272 = Constraint(expr= m.b770 + m.b772 <= 1) m.c1273 =", "m.b734 + m.b736 <= 1) m.c1204 = Constraint(expr= m.b735 +", "0.940066550763924*m.b666 <= 0) m.c736 = Constraint(expr= m.x502 - 0.940066550763924*m.b667 <=", "m.x283 + 15*m.b610 <= 15) m.c203 = Constraint(expr=(m.x314/(0.001 + 0.999*m.b611)", "Constraint(expr= m.x475 == 0) m.c644 = Constraint(expr= m.x485 == 0)", "13.5) m.c311 = Constraint(expr= - 0.6*m.x302 + m.x350 == 0)", "7*m.b714 + m.x804 == 0) m.c952 = Constraint(expr= 4*m.b715 +", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b632 = Var(within=Binary,bounds=(0,1),initialize=0) m.b633 = Var(within=Binary,bounds=(0,1),initialize=0) m.b634 =", "m.x247 == 0) m.c140 = Constraint(expr= m.x272 == 0) m.c141", "m.b766 <= 1) m.c1261 = Constraint(expr= m.b764 + m.b765 <=", "m.b658 >= 0) m.c1469 = Constraint(expr= m.b653 - m.b659 >=", "== 0) m.c476 = Constraint(expr= m.x83 - m.x368 - m.x371", "* model = m = ConcreteModel() m.x2 = Var(within=Reals,bounds=(0,40),initialize=0) m.x3", "6*m.b692 - 9*m.b693 - 4*m.b694 - 10*m.b695 - 9*m.b696 -", "= Constraint(expr= - m.b635 - m.b636 + m.b637 - m.b727", "m.x78 - m.x357 - m.x360 == 0) m.c352 = Constraint(expr=", "== 0) m.c209 = Constraint(expr= m.x320 == 0) m.c210 =", "m.c1017 = Constraint(expr= m.b599 - m.b601 <= 0) m.c1018 =", "<= 1) m.c1124 = Constraint(expr= m.b696 + m.b697 <= 1)", "m.c821 = Constraint(expr= m.x575 == 0) m.c822 = Constraint(expr= m.x576", "== 0) m.c4 = Constraint(expr= m.x4 - m.x7 - m.x10", "= Constraint(expr= 3*m.b756 + m.x846 == 0) m.c994 = Constraint(expr=", "1) m.c1182 = Constraint(expr= m.b725 + m.b727 <= 1) m.c1183", "+ m.x476/(0.001 + 0.999*m.b659)))*(0.001 + 0.999* 
m.b659) <= 0) m.c666", "+ 9*m.b643 <= 9) m.c530 = Constraint(expr= - m.x392 +", "Constraint(expr= m.b628 - m.b655 >= 0) m.c1457 = Constraint(expr= -", "0) m.c209 = Constraint(expr= m.x320 == 0) m.c210 = Constraint(expr=", "m.c1112 = Constraint(expr= m.b690 + m.b691 <= 1) m.c1113 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x475 = Var(within=Reals,bounds=(0,None),initialize=0) m.x476 = Var(within=Reals,bounds=(0,None),initialize=0) m.x477 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x545 = Var(within=Reals,bounds=(0,None),initialize=0) m.x546 = Var(within=Reals,bounds=(0,None),initialize=0) m.x547 = Var(within=Reals,bounds=(0,None),initialize=0) m.x548", "Constraint(expr= m.x127 - m.x448 - m.x451 == 0) m.c545 =", "m.x402 == 0) m.c562 = Constraint(expr= m.x403 == 0) m.c563", "0) m.c768 = Constraint(expr= m.x180 - m.x537 - m.x540 ==", "0) m.c748 = Constraint(expr= - m.x508 + m.x538 == 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x595 = Var(within=Reals,bounds=(0,None),initialize=0) m.b596 = Var(within=Binary,bounds=(0,1),initialize=0) m.b597 =", "Constraint(expr= m.x265 + 3.34221486003388*m.b604 <= 3.34221486003388) m.c134 = Constraint(expr=(m.x266/(0.001 +", "= Constraint(expr= m.x577 + 0.480234946352917*m.b676 <= 0.480234946352917) m.c842 = Constraint(expr=(m.x578/(0.001", "1) m.c1225 = Constraint(expr= m.b746 + m.b747 <= 1) m.c1226", "m.b596 + m.b599 - m.b608 >= 0) m.c1410 = Constraint(expr=", "m.b666 - m.b678 >= 0) m.c1480 = Constraint(expr= m.b667 -", "<= 15) m.c330 = Constraint(expr= m.x306 + 15*m.b624 <= 15)", "m.c759 = Constraint(expr= m.x540 == 0) m.c760 = Constraint(expr= m.x541", "= Constraint(expr= m.x238 - 4.45628648004517*m.b604 <= 0) m.c125 = Constraint(expr=", "= Constraint(expr= - m.b613 + m.b631 >= 0) m.c1382 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x98 = Var(within=Reals,bounds=(0,None),initialize=0) m.x99 = Var(within=Reals,bounds=(0,None),initialize=0) m.x100 = 
Var(within=Reals,bounds=(0,None),initialize=0)", "- m.x393 + m.x447 == 0) m.c532 = Constraint(expr= -", "m.x7 = Var(within=Reals,bounds=(0,None),initialize=0) m.x8 = Var(within=Reals,bounds=(0,None),initialize=0) m.x9 = Var(within=Reals,bounds=(0,None),initialize=0) m.x10", "Var(within=Reals,bounds=(0,None),initialize=0) m.x590 = Var(within=Reals,bounds=(0,None),initialize=0) m.x591 = Var(within=Reals,bounds=(0,None),initialize=0) m.x592 = Var(within=Reals,bounds=(0,None),initialize=0)", "= ConcreteModel() m.x2 = Var(within=Reals,bounds=(0,40),initialize=0) m.x3 = Var(within=Reals,bounds=(0,40),initialize=0) m.x4 =", "= Constraint(expr= m.b722 + m.b723 <= 1) m.c1176 = Constraint(expr=", "m.b605 = Var(within=Binary,bounds=(0,1),initialize=0) m.b606 = Var(within=Binary,bounds=(0,1),initialize=0) m.b607 = Var(within=Binary,bounds=(0,1),initialize=0) m.b608", "m.c419 = Constraint(expr= m.x113 - m.x422 - m.x425 == 0)", "2.54515263975353*m.b618 <= 2.54515263975353) m.c277 = Constraint(expr= m.x277 + 2.54515263975353*m.b619 <=", "- 0.85*log(1 + m.x410/(0.001 + 0.999*m.b653)))*(0.001 + 0.999* m.b653) <=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x517 = Var(within=Reals,bounds=(0,None),initialize=0) m.x518 = Var(within=Reals,bounds=(0,None),initialize=0) m.x519 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x120 - m.x435 - m.x438 == 0) m.c484 =", "Constraint(expr= m.x302 - 15*m.b623 <= 0) m.c327 = Constraint(expr= m.x303", "1) m.c1140 = Constraint(expr= m.b704 + m.b706 <= 1) m.c1141", "0) m.c647 = Constraint(expr= m.x140 - m.x470 - m.x473 ==", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x168 = Var(within=Reals,bounds=(0,None),initialize=0) m.x169 = Var(within=Reals,bounds=(0,None),initialize=0) m.x170 =", "m.b761 = Var(within=Binary,bounds=(0,1),initialize=0) m.b762 = Var(within=Binary,bounds=(0,1),initialize=0) m.b763 = Var(within=Binary,bounds=(0,1),initialize=0) m.b764", "= Constraint(expr= m.x457 == 0) m.c566 = Constraint(expr= 
m.x101 -", "Constraint(expr= m.b609 - m.b610 <= 0) m.c1028 = Constraint(expr= m.b611", "== 0) m.c943 = Constraint(expr= 2*m.b706 + m.x796 == 0)", "= Constraint(expr= m.x47 - m.x284 - m.x287 == 0) m.c213", "+ 0.999*m.b627)))*(0.001 + 0.999* m.b627) <= 0) m.c340 = Constraint(expr=(m.x358/(0.001", "- m.b613 <= 0) m.c1031 = Constraint(expr= m.b614 - m.b615", "Var(within=Reals,bounds=(0,None),initialize=0) m.x116 = Var(within=Reals,bounds=(0,None),initialize=0) m.x117 = Var(within=Reals,bounds=(0,None),initialize=0) m.x118 = Var(within=Reals,bounds=(0,None),initialize=0)", "# Total cont binary integer sos1 sos2 scont sint #", "== 0) m.c322 = Constraint(expr= m.x58 - m.x304 - m.x307", "0) m.c367 = Constraint(expr= - 0.9*m.x319 + m.x418 == 0)", "- 0.65*log(1 + m.x380/(0.001 + 0.999*m.b638)))*(0.001 + 0.999* m.b638) <=", "+ 0.999* m.b666) <= 0) m.c721 = Constraint(expr=(m.x526/(0.001 + 0.999*m.b667)", "0.666992981045719*m.b672 <= 0) m.c811 = Constraint(expr= m.x568 - 0.666992981045719*m.b673 <=", "m.b654 - m.b655 <= 0) m.c1073 = Constraint(expr= m.b656 -", "m.b697 <= 1) m.c1125 = Constraint(expr= m.b695 + m.b697 <=", "- m.b748 <= 0) m.c1346 = Constraint(expr= m.b659 - m.b749", "- m.x51 == 0) m.c16 = Constraint(expr= m.x40 - m.x49", "m.b608 = Var(within=Binary,bounds=(0,1),initialize=0) m.b609 = Var(within=Binary,bounds=(0,1),initialize=0) m.b610 = Var(within=Binary,bounds=(0,1),initialize=0) m.b611", "m.c550 = Constraint(expr= m.x397 + 9*m.b646 <= 9) m.c551 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x438 = Var(within=Reals,bounds=(0,None),initialize=0) m.x439 = Var(within=Reals,bounds=(0,None),initialize=0) m.x440 = Var(within=Reals,bounds=(0,None),initialize=0)", "- 0.940066550763924*m.b663 <= 0) m.c709 = Constraint(expr= m.x496 - 0.940066550763924*m.b664", "0) m.c12 = Constraint(expr= m.x24 - m.x27 - m.x30 -", "m.b636 + m.b639 >= 0) m.c1390 = Constraint(expr= - m.b619", "<= 0) m.c281 = Constraint(expr= m.x341 + 1.26558121681553*m.b617 <= 1.26558121681553)", "+ 
m.b690 <= 1) m.c1112 = Constraint(expr= m.b690 + m.b691", "Var(within=Reals,bounds=(0,None),initialize=0) m.x447 = Var(within=Reals,bounds=(0,None),initialize=0) m.x448 = Var(within=Reals,bounds=(0,None),initialize=0) m.x449 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b739 = Var(within=Binary,bounds=(0,1),initialize=0) m.b740 = Var(within=Binary,bounds=(0,1),initialize=0) m.b741 = Var(within=Binary,bounds=(0,1),initialize=0) m.b742", "Var(within=Reals,bounds=(0,None),initialize=0) m.x571 = Var(within=Reals,bounds=(0,None),initialize=0) m.x572 = Var(within=Reals,bounds=(0,None),initialize=0) m.x573 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b706 = Var(within=Binary,bounds=(0,1),initialize=0) m.b707 = Var(within=Binary,bounds=(0,1),initialize=0) m.b708 =", "== 0) m.c14 = Constraint(expr= m.x38 - m.x47 - m.x50", "m.x472 - m.x475 == 0) m.c650 = Constraint(expr= m.x146 -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x195 = Var(within=Reals,bounds=(0,None),initialize=0) m.x196 = Var(within=Reals,bounds=(0,None),initialize=0) m.x197 =", "+ m.x351 == 0) m.c313 = Constraint(expr= - 0.6*m.x304 +", "= Constraint(expr= m.x457 + 1.04900943706034*m.b649 <= 1.04900943706034) m.c584 = Constraint(expr=(m.x458/(0.001", "m.x379 + 20*m.b631 <= 20) m.c401 = Constraint(expr= m.x416 -", "0.999* m.b653) <= 0) m.c612 = Constraint(expr=(m.x465/(0.001 + 0.999*m.b654) -", "1) m.c1134 = Constraint(expr= m.b701 + m.b703 <= 1) m.c1135", "- m.x61 == 0) m.c20 = Constraint(expr= m.x68 - m.x80", "- m.x386 + m.x440 == 0) m.c504 = Constraint(expr= -", "Var(within=Reals,bounds=(None,None),initialize=0) m.x863 = Var(within=Reals,bounds=(None,None),initialize=0) m.x864 = Var(within=Reals,bounds=(None,None),initialize=0) m.x865 = Var(within=Reals,bounds=(None,None),initialize=0)", "- m.x101 - m.x104 - m.x107 == 0) m.c30 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x343 = Var(within=Reals,bounds=(0,None),initialize=0) 
m.x344 = Var(within=Reals,bounds=(0,None),initialize=0) m.x345 =", "m.x221 = Var(within=Reals,bounds=(0,None),initialize=0) m.x222 = Var(within=Reals,bounds=(0,None),initialize=0) m.x223 = Var(within=Reals,bounds=(0,None),initialize=0) m.x224", "4*m.b715 - 3*m.b716 - 9*m.b717 - 3*m.b718 - 7*m.b719 -", "<= 1) m.c1147 = Constraint(expr= m.b707 + m.b708 <= 1)", "0) m.c616 = Constraint(expr= m.x415 == 0) m.c617 = Constraint(expr=", "0) m.c934 = Constraint(expr= 5*m.b697 + m.x787 == 0) m.c935", "0) m.c742 = Constraint(expr= m.x526 - 0.994083415506506*m.b667 <= 0) m.c743", "- m.b665 + m.b666 - m.b756 <= 0) m.c1354 =", "= Constraint(expr= m.x505 == 0) m.c725 = Constraint(expr= m.x530 ==", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b772 = Var(within=Binary,bounds=(0,1),initialize=0) m.b773 = Var(within=Binary,bounds=(0,1),initialize=0) m.b774 =", "m.c115 = Constraint(expr= m.x265 == 0) m.c116 = Constraint(expr= m.x26", "Constraint(expr= m.b596 + m.b599 == 1) m.c1374 = Constraint(expr= m.b597", "Constraint(expr= m.x46 - m.x280 - m.x283 == 0) m.c185 =", "Constraint(expr= m.x70 - m.x82 - m.x85 == 0) m.c23 =", "m.c339 = Constraint(expr=(m.x357/(0.001 + 0.999*m.b627) - 1.1*log(1 + m.x309/(0.001 +", "1) m.c1142 = Constraint(expr= m.b705 + m.b706 <= 1) m.c1143", "0) m.c413 = Constraint(expr= m.x425 == 0) m.c414 = Constraint(expr=", "m.x299 == 0) m.c294 = Constraint(expr= m.x54 - m.x297 -", "0) m.c1011 = Constraint(expr= 3*m.b774 + m.x864 == 0) m.c1012", "m.b619 - m.b637 >= 0) m.c1439 = Constraint(expr= m.b617 -", "Constraint(expr= m.b713 + m.b714 <= 1) m.c1160 = Constraint(expr= m.b714", "m.x448 == 0) m.c533 = Constraint(expr= m.x395 == 0) m.c534", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x510 = Var(within=Reals,bounds=(0,None),initialize=0) m.x511 = Var(within=Reals,bounds=(0,None),initialize=0) m.x512 =", "m.x408 == 0) m.c589 = Constraint(expr= m.x409 == 0) m.c590", "- m.b678 <= 0) m.c1095 = Constraint(expr= m.b677 - m.b679", "m.c1325 = Constraint(expr= m.b638 
- m.b728 <= 0) m.c1326 =", "Constraint(expr= m.x298 - 15*m.b622 <= 0) m.c302 = Constraint(expr= m.x299", "= Constraint(expr= m.x370 - 1.26558121681553*m.b640 <= 0) m.c488 = Constraint(expr=", "m.x840 == 0) m.c988 = Constraint(expr= 9*m.b751 + m.x841 ==", "m.b646 - m.b736 <= 0) m.c1334 = Constraint(expr= m.b647 -", "m.x236 - m.x239 == 0) m.c117 = Constraint(expr= m.x27 -", "+ 1.26558121681553*m.b635 <= 1.26558121681553) m.c453 = Constraint(expr= m.x366 + 1.26558121681553*m.b636", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x811 = Var(within=Reals,bounds=(None,None),initialize=0) m.x812 = Var(within=Reals,bounds=(None,None),initialize=0) m.x813 =", "m.b610 - m.b625 >= 0) m.c1427 = Constraint(expr= m.b608 -", "- m.x144 == 0) m.c37 = Constraint(expr= m.x139 - m.x142", "m.x220 - m.x223 == 0) m.c92 = Constraint(expr= m.x14 -", "= Constraint(expr= - m.b623 - m.b624 + m.b625 - m.b715", "+ 1.32154609891348*m.b634 <= 1.32154609891348) m.c428 = Constraint(expr= m.x422 - 0.842233385663186*m.b632", "+ 20*m.x132 + 20*m.x133 + 30*m.x155 + 40*m.x156 + 40*m.x157", "m.b668 - m.b669 <= 0) m.c1086 = Constraint(expr= m.b668 -", "180 0 0 0 0 0 # FX 0 0", "m.x249 + m.x279 == 0) m.c163 = Constraint(expr= - m.x250", "Constraint(expr= m.x541 == 0) m.c761 = Constraint(expr= m.x167 - m.x506", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x222 = Var(within=Reals,bounds=(0,None),initialize=0) m.x223 = Var(within=Reals,bounds=(0,None),initialize=0) m.x224 =", "0) m.c252 = Constraint(expr= m.x327 - 1.32154609891348*m.b615 <= 0) m.c253", "m.c702 = Constraint(expr= m.x162 - m.x495 - m.x498 == 0)", "Constraint(expr= m.x431 == 0) m.c441 = Constraint(expr= m.x432 == 0)", "m.x416 = Var(within=Reals,bounds=(0,None),initialize=0) m.x417 = Var(within=Reals,bounds=(0,None),initialize=0) m.x418 = Var(within=Reals,bounds=(0,None),initialize=0) m.x419", "m.c1202 = Constraint(expr= m.b735 + m.b736 <= 1) m.c1203 =", "Constraint(expr= m.b657 - m.b658 <= 0) m.c1076 = Constraint(expr= m.b659", 
"0.940066550763924) m.c713 = Constraint(expr= m.x518 - 0.705049913072943*m.b662 <= 0) m.c714", "<= 0) m.c681 = Constraint(expr= m.x477 - 1.18887736200171*m.b660 <= 0)", "m.x398 - 3.04984759446376*m.b647 <= 0) m.c573 = Constraint(expr= m.x399 -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x200 = Var(within=Reals,bounds=(0,None),initialize=0) m.x201 = Var(within=Reals,bounds=(0,None),initialize=0) m.x202 =", "0) m.c472 = Constraint(expr= m.x385 == 0) m.c473 = Constraint(expr=", "0) m.c481 = Constraint(expr= m.x94 - m.x382 - m.x385 ==", "m.c601 = Constraint(expr= m.x406 - 3.04984759446376*m.b652 <= 0) m.c602 =", "m.c847 = Constraint(expr= m.x535 == 0) m.c848 = Constraint(expr= m.x581", "= Constraint(expr= m.b608 - m.b610 <= 0) m.c1027 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x33 = Var(within=Reals,bounds=(0,None),initialize=0) m.x34 = Var(within=Reals,bounds=(0,None),initialize=0) m.x35 = Var(within=Reals,bounds=(0,30),initialize=0)", "33.5) m.c497 = Constraint(expr= m.x434 - 2.30162356062425*m.b638 <= 0) m.c498", "= Constraint(expr= m.b602 - m.b611 >= 0) m.c1413 = Constraint(expr=", "0) m.c1355 = Constraint(expr= m.b668 - m.b758 <= 0) m.c1356", "0) m.c1338 = Constraint(expr= - m.b650 + m.b651 - m.b741", "Var(within=Reals,bounds=(0,None),initialize=0) m.x496 = Var(within=Reals,bounds=(0,None),initialize=0) m.x497 = Var(within=Reals,bounds=(0,None),initialize=0) m.x498 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x209 = Var(within=Reals,bounds=(0,None),initialize=0) m.x210 = Var(within=Reals,bounds=(0,None),initialize=0) m.x211 = Var(within=Reals,bounds=(0,None),initialize=0) m.x212", "<= 3.71357206670431) m.c80 = Constraint(expr=(m.x230/(0.001 + 0.999*m.b599) - 1.2*log(1 +", "<= 0) m.c1328 = Constraint(expr= m.b641 - m.b731 <= 0)", "Constraint(expr= - m.x148 - m.x151 + m.x154 == 0) m.c41", "m.c101 = Constraint(expr= m.x230 - 4.45628648004517*m.b599 <= 0) m.c102 =", "1) m.c1250 = Constraint(expr= m.b759 + m.b760 <= 1) 
m.c1251", "m.x360 == 0) m.c352 = Constraint(expr= m.x79 - m.x358 -", "m.c581 = Constraint(expr= m.x455 + 1.04900943706034*m.b647 <= 1.04900943706034) m.c582 =", "0.999*m.b639) - 0.65*log(1 + m.x381/(0.001 + 0.999*m.b639)))*(0.001 + 0.999* m.b639)", "= Constraint(expr= m.b671 - m.b761 <= 0) m.c1359 = Constraint(expr=", "m.c221 = Constraint(expr= m.x287 + 3.34221486003388*m.b611 <= 3.34221486003388) m.c222 =", "Constraint(expr= m.x565 + 15*m.b685 <= 15) m.c917 = Constraint(expr= m.x590", "= Constraint(expr= m.b600 - m.b601 <= 0) m.c1019 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x537 = Var(within=Reals,bounds=(0,None),initialize=0) m.x538 = Var(within=Reals,bounds=(0,None),initialize=0) m.x539 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= m.x462 + 1.11894339953103*m.b651 <= 1.11894339953103) m.c610 = Constraint(expr=", ">= 0) m.c1450 = Constraint(expr= m.b628 - m.b649 >= 0)", "Var(within=Binary,bounds=(0,1),initialize=0) m.b633 = Var(within=Binary,bounds=(0,1),initialize=0) m.b634 = Var(within=Binary,bounds=(0,1),initialize=0) m.b635 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.x246 == 0) m.c139 = Constraint(expr= m.x247 == 0) m.c140", "m.c1146 = Constraint(expr= m.b707 + m.b709 <= 1) m.c1147 =", "- m.b760 <= 0) m.c1358 = Constraint(expr= m.b671 - m.b761", "3.34221486003388) m.c224 = Constraint(expr= m.x314 - 1.83548069293539*m.b611 <= 0) m.c225", "m.x582 == 0) m.c850 = Constraint(expr= m.x583 == 0) m.c851", "m.x530 = Var(within=Reals,bounds=(0,None),initialize=0) m.x531 = Var(within=Reals,bounds=(0,None),initialize=0) m.x532 = Var(within=Reals,bounds=(0,None),initialize=0) m.x533", "<= 15) m.c305 = Constraint(expr= m.x344 - 13.5*m.b620 <= 0)", "0.999*m.b599)))*(0.001 + 0.999* m.b599) <= 0) m.c81 = Constraint(expr=(m.x231/(0.001 +", "+ m.b771 <= 1) m.c1272 = Constraint(expr= m.b770 + m.b772", "Constraint(expr= m.b735 + m.b736 <= 1) m.c1205 = Constraint(expr= m.b737", "= Constraint(expr= m.x39 - m.x261 - 
m.x264 == 0) m.c121", "<= 0) m.c519 = Constraint(expr= m.x387 - 9*m.b642 <= 0)", "m.b718 <= 0) m.c1316 = Constraint(expr= m.b629 - m.b719 <=", "= Constraint(expr= m.b713 + m.b714 <= 1) m.c1158 = Constraint(expr=", "0) m.c709 = Constraint(expr= m.x496 - 0.940066550763924*m.b664 <= 0) m.c710", "0) m.c640 = Constraint(expr=(m.x484/(0.001 + 0.999*m.b658) - log(1 + m.x472/(0.001", "Constraint(expr= m.x88 - m.x376 - m.x379 == 0) m.c386 =", "Constraint(expr= m.x244 - 4.45628648004517*m.b607 <= 0) m.c152 = Constraint(expr= m.x245", "Var(within=Reals,bounds=(None,None),initialize=0) m.x854 = Var(within=Reals,bounds=(None,None),initialize=0) m.x855 = Var(within=Reals,bounds=(None,None),initialize=0) m.x856 = Var(within=Reals,bounds=(None,None),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x129 = Var(within=Reals,bounds=(0,None),initialize=0) m.x130 = Var(within=Reals,bounds=(0,None),initialize=0) m.x131 =", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b635 = Var(within=Binary,bounds=(0,1),initialize=0) m.b636 = Var(within=Binary,bounds=(0,1),initialize=0) m.b637 =", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b607 = Var(within=Binary,bounds=(0,1),initialize=0) m.b608 = Var(within=Binary,bounds=(0,1),initialize=0) m.b609 =", ">= 0) m.c1401 = Constraint(expr= - m.b627 + m.b648 +", "m.x358 - 3.04984759446376*m.b628 <= 0) m.c362 = Constraint(expr= m.x359 +", "== 0) m.c795 = Constraint(expr= m.x570 == 0) m.c796 =", "Constraint(expr= m.x336 == 0) m.c412 = Constraint(expr= m.x337 == 0)", "Constraint(expr= m.b596 + m.b599 - m.b602 >= 0) m.c1404 =", "+ m.x518 == 0) m.c693 = Constraint(expr= - 0.75*m.x495 +", "0) m.c572 = Constraint(expr= m.x398 - 3.04984759446376*m.b647 <= 0) m.c573", "m.x100 - m.x394 - m.x397 == 0) m.c542 = Constraint(expr=", "+ 290*m.x200 + 405*m.x201 + 190*m.x202 + 280*m.x203 + 400*m.x204", "4.45628648004517*m.b609 <= 4.45628648004517) m.c190 = Constraint(expr= m.x253 + 4.45628648004517*m.b610 <=", "0.842233385663186*m.b633 <= 0) 
m.c430 = Constraint(expr= m.x424 - 0.842233385663186*m.b634 <=", "- m.x455 == 0) m.c570 = Constraint(expr= m.x129 - m.x453", "0.480234946352917*m.b674 <= 0.480234946352917) m.c840 = Constraint(expr= m.x576 + 0.480234946352917*m.b675 <=", "m.b731 = Var(within=Binary,bounds=(0,1),initialize=0) m.b732 = Var(within=Binary,bounds=(0,1),initialize=0) m.b733 = Var(within=Binary,bounds=(0,1),initialize=0) m.b734", "- m.x272 == 0) m.c147 = Constraint(expr= m.x42 - m.x267", "0) m.c1325 = Constraint(expr= m.b638 - m.b728 <= 0) m.c1326", "= Constraint(expr= m.x594 + 9*m.b684 <= 9) m.c922 = Constraint(expr=", "<= 3.04984759446376) m.c603 = Constraint(expr= m.x408 + 3.04984759446376*m.b651 <= 3.04984759446376)", "Constraint(expr= m.x344 - 13.5*m.b620 <= 0) m.c306 = Constraint(expr= m.x345", "m.c367 = Constraint(expr= - 0.9*m.x319 + m.x418 == 0) m.c368", "m.x857 = Var(within=Reals,bounds=(None,None),initialize=0) m.x858 = Var(within=Reals,bounds=(None,None),initialize=0) m.x859 = Var(within=Reals,bounds=(None,None),initialize=0) m.x860", "- m.x595 == 0) m.c911 = Constraint(expr= m.x560 - 15*m.b683", "= Constraint(expr= m.x265 == 0) m.c116 = Constraint(expr= m.x26 -", "= Constraint(expr= - m.b638 + m.b639 - m.b729 <= 0)", "== 0) m.c478 = Constraint(expr= m.x85 - m.x370 - m.x373", "+ 0.999*m.b679)))*(0.001 + 0.999*m.b679) <= 0) m.c845 = Constraint(expr= m.x533", "m.c503 = Constraint(expr= - m.x386 + m.x440 == 0) m.c504", "- log(1 + m.x527/(0.001 + 0.999*m.b677)))*(0.001 + 0.999*m.b677) <= 0)", "Constraint(expr= m.x351 - 9*m.b624 <= 0) m.c334 = Constraint(expr= m.x352", "1.32154609891348) m.c427 = Constraint(expr= m.x337 + 1.32154609891348*m.b634 <= 1.32154609891348) m.c428", "m.b735 + m.b736 <= 1) m.c1203 = Constraint(expr= m.b734 +", "0.842233385663186*m.b634 <= 0) m.c431 = Constraint(expr= m.x425 + 0.842233385663186*m.b632 <=", "m.c1253 = Constraint(expr= m.b761 + m.b762 <= 1) m.c1254 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x275 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x276 = Var(within=Reals,bounds=(0,None),initialize=0) m.x277 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x515 = Var(within=Reals,bounds=(0,None),initialize=0) m.x516 = Var(within=Reals,bounds=(0,None),initialize=0) m.x517 = Var(within=Reals,bounds=(0,None),initialize=0)", "- m.x262 - m.x265 == 0) m.c122 = Constraint(expr= m.x236", "Var(within=Binary,bounds=(0,1),initialize=0) m.b666 = Var(within=Binary,bounds=(0,1),initialize=0) m.b667 = Var(within=Binary,bounds=(0,1),initialize=0) m.b668 = Var(within=Binary,bounds=(0,1),initialize=0)", "0) m.c526 = Constraint(expr= m.x442 - 9*m.b643 <= 0) m.c527", "+ m.b682 - m.b772 <= 0) m.c1370 = Constraint(expr= m.b683", "m.x831 = Var(within=Reals,bounds=(None,None),initialize=0) m.x832 = Var(within=Reals,bounds=(None,None),initialize=0) m.x833 = Var(within=Reals,bounds=(None,None),initialize=0) m.x834", "= Constraint(expr= m.x226 - 3.71357206670431*m.b598 <= 0) m.c77 = Constraint(expr=", "- 0.9*m.x317 + m.x416 == 0) m.c366 = Constraint(expr= -", "- m.b656 >= 0) m.c1467 = Constraint(expr= m.b654 - m.b657", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x339 = Var(within=Reals,bounds=(0,None),initialize=0) m.x340 = Var(within=Reals,bounds=(0,None),initialize=0) m.x341 =", "<= 0) m.c1089 = Constraint(expr= m.b671 - m.b673 <= 0)", "0) m.c140 = Constraint(expr= m.x272 == 0) m.c141 = Constraint(expr=", "Constraint(expr= m.x474 + 1.18887736200171*m.b657 <= 1.18887736200171) m.c658 = Constraint(expr= m.x475", "0) m.c444 = Constraint(expr= m.x81 - m.x363 - m.x366 ==", "Constraint(expr= m.x577 + 0.480234946352917*m.b676 <= 0.480234946352917) m.c842 = Constraint(expr=(m.x578/(0.001 +", "= Constraint(expr= m.x311 == 0) m.c342 = Constraint(expr= m.x312 ==", "0) m.c1450 = Constraint(expr= m.b628 - m.b649 >= 0) m.c1451", "m.c225 = Constraint(expr= m.x315 - 1.83548069293539*m.b612 <= 0) m.c226 =", "+ 0.999*m.b667)))*(0.001 + 0.999* m.b667) <= 0) m.c722 = Constraint(expr=", 
"m.b647 + m.b648 - m.b738 <= 0) m.c1336 = Constraint(expr=", "- m.x410 - m.x413 == 0) m.c621 = Constraint(expr= m.x108", "1) m.c1283 = Constraint(expr= m.b596 - m.b686 <= 0) m.c1284", "m.c215 = Constraint(expr= m.x62 - m.x314 - m.x320 == 0)", "15*m.b628 <= 15) m.c359 = Constraint(expr= m.x356 - 3.04984759446376*m.b626 <=", "<= 0) m.c812 = Constraint(expr= m.x569 + 0.666992981045719*m.b671 <= 0.666992981045719)", "= Constraint(expr= m.x273 + 2.54515263975353*m.b606 <= 2.54515263975353) m.c160 = Constraint(expr=", "== 0) m.c649 = Constraint(expr= m.x142 - m.x472 - m.x475", "+ m.b711 <= 1) m.c1152 = Constraint(expr= m.b710 + m.b712", "m.b598 + m.b601 - m.b607 >= 0) m.c1409 = Constraint(expr=", "40) m.c73 = Constraint(expr= m.x217 + 40*m.b598 <= 40) m.c74", "= Constraint(expr= m.x124 - m.x442 - m.x445 == 0) m.c518", "= Constraint(expr= m.b655 - m.b661 >= 0) m.c1472 = Constraint(expr=", "m.x54 = Var(within=Reals,bounds=(0,None),initialize=0) m.x55 = Var(within=Reals,bounds=(0,None),initialize=0) m.x56 = Var(within=Reals,bounds=(0,None),initialize=0) m.x57", "Var(within=Reals,bounds=(0,None),initialize=0) m.x485 = Var(within=Reals,bounds=(0,None),initialize=0) m.x486 = Var(within=Reals,bounds=(0,None),initialize=0) m.x487 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x535 = Var(within=Reals,bounds=(0,None),initialize=0) m.x536 = Var(within=Reals,bounds=(0,None),initialize=0) m.x537 =", "m.c1125 = Constraint(expr= m.b695 + m.b697 <= 1) m.c1126 =", "m.c55 = Constraint(expr=(m.x226/(0.001 + 0.999*m.b598) - log(1 + m.x214/(0.001 +", "Constraint(expr= m.x241 == 0) m.c113 = Constraint(expr= m.x263 == 0)", "<= 0) m.c1310 = Constraint(expr= m.b623 - m.b713 <= 0)", "= Constraint(expr= m.x366 + 1.26558121681553*m.b636 <= 1.26558121681553) m.c454 = Constraint(expr=", "3.04984759446376*m.b655 <= 3.04984759446376) m.c632 = Constraint(expr= m.x464 - 1.18887736200171*m.b653 <=", "Constraint(expr= m.x28 - m.x238 - m.x241 == 0) 
m.c119 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x487 = Var(within=Reals,bounds=(0,None),initialize=0) m.x488 = Var(within=Reals,bounds=(0,None),initialize=0) m.x489 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x591 = Var(within=Reals,bounds=(0,None),initialize=0) m.x592 = Var(within=Reals,bounds=(0,None),initialize=0) m.x593 =", "0) m.c66 = Constraint(expr= m.x12 - m.x225 - m.x228 ==", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x484 = Var(within=Reals,bounds=(0,None),initialize=0) m.x485 = Var(within=Reals,bounds=(0,None),initialize=0) m.x486 =", "Var(within=Reals,bounds=(None,None),initialize=0) m.x825 = Var(within=Reals,bounds=(None,None),initialize=0) m.x826 = Var(within=Reals,bounds=(None,None),initialize=0) m.x827 = Var(within=Reals,bounds=(None,None),initialize=0)", "m.x436 = Var(within=Reals,bounds=(0,None),initialize=0) m.x437 = Var(within=Reals,bounds=(0,None),initialize=0) m.x438 = Var(within=Reals,bounds=(0,None),initialize=0) m.x439", "== 0) m.c13 = Constraint(expr= m.x25 - m.x28 - m.x31", "0) m.c962 = Constraint(expr= 2*m.b725 + m.x815 == 0) m.c963", "7*m.b764 + m.x854 == 0) m.c1002 = Constraint(expr= 3*m.b765 +", "m.c1006 = Constraint(expr= 6*m.b769 + m.x859 == 0) m.c1007 =", "Var(within=Reals,bounds=(None,None),initialize=0) m.x827 = Var(within=Reals,bounds=(None,None),initialize=0) m.x828 = Var(within=Reals,bounds=(None,None),initialize=0) m.x829 = Var(within=Reals,bounds=(None,None),initialize=0)", "== 0) m.c985 = Constraint(expr= 2*m.b748 + m.x838 == 0)", "m.x22 - m.x25 == 0) m.c11 = Constraint(expr= m.x23 -", "m.x230 - m.x233 == 0) m.c93 = Constraint(expr= m.x15 -", "- m.b635 + m.b636 - m.b726 <= 0) m.c1324 =", "Constraint(expr= m.x177 - m.x528 - m.x534 == 0) m.c853 =", "m.c218 = Constraint(expr= m.x284 - 3.34221486003388*m.b611 <= 0) m.c219 =", "+ m.b727 <= 1) m.c1186 = Constraint(expr= m.b726 + m.b727", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x162 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x163 = Var(within=Reals,bounds=(0,None),initialize=0) m.x164 =", "0) m.c243 = Constraint(expr= m.x66 - m.x327 - m.x333 ==", "Var(within=Reals,bounds=(0,None),initialize=0) m.x296 = Var(within=Reals,bounds=(0,None),initialize=0) m.x297 = Var(within=Reals,bounds=(0,None),initialize=0) m.x298 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c826 = Constraint(expr= m.x187 - m.x550 - m.x553 ==", "m.x250 - m.x253 == 0) m.c179 = Constraint(expr= m.x35 -", "1) m.c1232 = Constraint(expr= m.b750 + m.b751 <= 1) m.c1233", "Constraint(expr= m.b747 + m.b748 <= 1) m.c1227 = Constraint(expr= m.b746", "+ 0.999*m.b661) - 1.2*log(1 + m.x478/(0.001 + 0.999*m.b661)))*(0.001 + 0.999*", "0) m.c441 = Constraint(expr= m.x432 == 0) m.c442 = Constraint(expr=", "m.c705 = Constraint(expr= m.x174 - m.x519 - m.x522 == 0)", "m.x526 = Var(within=Reals,bounds=(0,None),initialize=0) m.x527 = Var(within=Reals,bounds=(0,None),initialize=0) m.x528 = Var(within=Reals,bounds=(0,None),initialize=0) m.x529", "m.b649 - m.b739 <= 0) m.c1337 = Constraint(expr= m.b650 -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x447 = Var(within=Reals,bounds=(0,None),initialize=0) m.x448 = Var(within=Reals,bounds=(0,None),initialize=0) m.x449 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x409 = Var(within=Reals,bounds=(0,None),initialize=0) m.x410 = Var(within=Reals,bounds=(0,None),initialize=0) m.x411 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b606 + m.b618 >= 0) m.c1387 = Constraint(expr= - m.b607", "- 0.7*log(1 + m.x362/(0.001 + 0.999*m.b635)))*(0.001 + 0.999* m.b635) <=", "m.b680 + m.b681 - m.b771 <= 0) m.c1369 = Constraint(expr=", "0) m.c171 = Constraint(expr= m.x258 == 0) m.c172 = Constraint(expr=", "+ m.x542/(0.001 + 0.999*m.b671)))*(0.001 + 0.999* m.b671) <= 0) m.c789", "+ 0.999*m.b673)))*(0.001 + 0.999* m.b673) <= 0) m.c791 = Constraint(expr=", "- 4.45628648004517*m.b605 <= 0) m.c150 = Constraint(expr= m.x243 - 
4.45628648004517*m.b606", "m.b652 <= 0) m.c1069 = Constraint(expr= m.b651 - m.b652 <=", "m.x501 = Var(within=Reals,bounds=(0,None),initialize=0) m.x502 = Var(within=Reals,bounds=(0,None),initialize=0) m.x503 = Var(within=Reals,bounds=(0,None),initialize=0) m.x504", "Constraint(expr= 4*m.b713 + m.x803 == 0) m.c951 = Constraint(expr= 7*m.b714", "<= 0) m.c1037 = Constraint(expr= m.b620 - m.b621 <= 0)", "+ m.b699 <= 1) m.c1128 = Constraint(expr= m.b698 + m.b700", "+ 9*m.b642 <= 9) m.c523 = Constraint(expr= m.x391 + 9*m.b643", "+ 1.18887736200171*m.b653 <= 1.18887736200171) m.c636 = Constraint(expr= m.x468 + 1.18887736200171*m.b654", "m.b717 <= 1) m.c1166 = Constraint(expr= m.b717 + m.b718 <=", "Constraint(expr= m.b662 - m.b752 <= 0) m.c1350 = Constraint(expr= -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x57 = Var(within=Reals,bounds=(0,None),initialize=0) m.x58 = Var(within=Reals,bounds=(0,None),initialize=0) m.x59 =", "2.54515263975353*m.b606 <= 0) m.c157 = Constraint(expr= m.x268 - 2.54515263975353*m.b607 <=", "- 7*m.b702 - 4*m.b703 - 4*m.b704 - 3*m.b705 - 2*m.b706", "m.x42 - m.x270 - m.x276 == 0) m.c268 = Constraint(expr=", "+ m.x363/(0.001 + 0.999*m.b636)))*(0.001 + 0.999* m.b636) <= 0) m.c436", "0) m.c141 = Constraint(expr= m.x273 == 0) m.c142 = Constraint(expr=", "0) m.c536 = Constraint(expr= m.x449 == 0) m.c537 = Constraint(expr=", "- m.b614 + m.b615 - m.b705 <= 0) m.c1303 =", "m.c76 = Constraint(expr= m.x226 - 3.71357206670431*m.b598 <= 0) m.c77 =", "Var(within=Reals,bounds=(None,None),initialize=0) m.x780 = Var(within=Reals,bounds=(None,None),initialize=0) m.x781 = Var(within=Reals,bounds=(None,None),initialize=0) m.x782 = Var(within=Reals,bounds=(None,None),initialize=0)", "+ m.b616 >= 0) m.c1379 = Constraint(expr= - m.b611 +", "0) m.c114 = Constraint(expr= m.x264 == 0) m.c115 = Constraint(expr=", "9) m.c338 = Constraint(expr=(m.x356/(0.001 + 0.999*m.b626) - 1.1*log(1 + m.x308/(0.001", "<= 1) m.c1177 = Constraint(expr= m.b722 + m.b723 <= 1)", "= 
Var(within=Reals,bounds=(0,None),initialize=0) m.x327 = Var(within=Reals,bounds=(0,None),initialize=0) m.x328 = Var(within=Reals,bounds=(0,None),initialize=0) m.x329 =", "m.c961 = Constraint(expr= 9*m.b724 + m.x814 == 0) m.c962 =", "- m.x20 - m.x23 == 0) m.c9 = Constraint(expr= m.x18", "Constraint(expr= m.b749 + m.b751 <= 1) m.c1231 = Constraint(expr= m.b749", "0.999*m.b655)))*(0.001 + 0.999* m.b655) <= 0) m.c614 = Constraint(expr= m.x413", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x278 = Var(within=Reals,bounds=(0,None),initialize=0) m.x279 = Var(within=Reals,bounds=(0,None),initialize=0) m.x280 =", "<= 0) m.c220 = Constraint(expr= m.x286 - 3.34221486003388*m.b613 <= 0)", "0.999*m.b639) - 0.65*log(1 + m.x369/(0.001 + 0.999*m.b639)))*(0.001 + 0.999* m.b639)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x152 = Var(within=Reals,bounds=(0,None),initialize=0) m.x153 = Var(within=Reals,bounds=(0,None),initialize=0) m.x154 =", "m.x475 == 0) m.c644 = Constraint(expr= m.x485 == 0) m.c645", "m.x430 = Var(within=Reals,bounds=(0,None),initialize=0) m.x431 = Var(within=Reals,bounds=(0,None),initialize=0) m.x432 = Var(within=Reals,bounds=(0,None),initialize=0) m.x433", "Constraint(expr= m.x435 - 2.30162356062425*m.b639 <= 0) m.c499 = Constraint(expr= m.x436", "m.x553 == 0) m.c821 = Constraint(expr= m.x575 == 0) m.c822", "- m.b655 <= 0) m.c1073 = Constraint(expr= m.b656 - m.b657", "Constraint(expr= m.x522 + 0.705049913072943*m.b663 <= 0.705049913072943) m.c718 = Constraint(expr= m.x523", "Var(within=Reals,bounds=(0,None),initialize=0) m.x322 = Var(within=Reals,bounds=(0,None),initialize=0) m.x323 = Var(within=Reals,bounds=(0,None),initialize=0) m.x324 = Var(within=Reals,bounds=(0,None),initialize=0)", ">= 0) m.c1406 = Constraint(expr= m.b596 + m.b599 - m.b605", "m.c700 = Constraint(expr= m.x523 == 0) m.c701 = Constraint(expr= m.x161", "<= 0) m.c818 = Constraint(expr= m.x551 == 0) m.c819 =", "+ 9*m.b683 <= 9) m.c921 = Constraint(expr= m.x594 + 9*m.b684", "m.c816 
= Constraint(expr=(m.x573/(0.001 + 0.999*m.b675) - 0.9*log(1 + m.x549/(0.001 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x526 = Var(within=Reals,bounds=(0,None),initialize=0) m.x527 = Var(within=Reals,bounds=(0,None),initialize=0) m.x528 =", "= Constraint(expr= m.x349 == 0) m.c293 = Constraint(expr= m.x53 -", "<= 0) m.c1083 = Constraint(expr= m.b665 - m.b667 <= 0)", "m.b742 <= 1) m.c1215 = Constraint(expr= m.b740 + m.b742 <=", "- m.b628 + m.b649 + m.b652 + m.b655 >= 0)", "= Constraint(expr= - m.b671 - m.b672 + m.b673 - m.b763", "Constraint(expr= m.x544 - 0.705049913072943*m.b673 <= 0) m.c806 = Constraint(expr= m.x545", "= Constraint(expr= 3*m.b772 + m.x862 == 0) m.c1010 = Constraint(expr=", "m.b630 >= 0) m.c1432 = Constraint(expr= m.b613 - m.b631 >=", "Constraint(expr= m.x117 - m.x429 - m.x432 == 0) m.c448 =", "+ m.b663 - m.b753 <= 0) m.c1351 = Constraint(expr= -", "0.5*m.x256 + m.x280 == 0) m.c167 = Constraint(expr= m.x251 ==", "<= 1) m.c1261 = Constraint(expr= m.b764 + m.b765 <= 1)", "m.x171 = Var(within=Reals,bounds=(0,30),initialize=0) m.x172 = Var(within=Reals,bounds=(0,30),initialize=0) m.x173 = Var(within=Reals,bounds=(0,None),initialize=0) m.x174", "Var(within=Binary,bounds=(0,1),initialize=0) m.b748 = Var(within=Binary,bounds=(0,1),initialize=0) m.b749 = Var(within=Binary,bounds=(0,1),initialize=0) m.b750 = Var(within=Binary,bounds=(0,1),initialize=0)", "Constraint(expr= m.x325 + 1.83548069293539*m.b631 <= 1.83548069293539) m.c395 = Constraint(expr= m.x374", "+ m.x807 == 0) m.c955 = Constraint(expr= 3*m.b718 + m.x808", "Constraint(expr= m.x405 - 3.04984759446376*m.b651 <= 0) m.c601 = Constraint(expr= m.x406", "Constraint(expr= 3*m.b727 + m.x817 == 0) m.c965 = Constraint(expr= 4*m.b728", "m.b768 <= 1) m.c1268 = Constraint(expr= m.b768 + m.b769 <=", "= Constraint(expr= m.x81 - m.x363 - m.x366 == 0) m.c445", "+ 300*m.x207 + 240*m.x208 + 350*m.x209 + 250*m.x210 + 300*m.x211", "m.c629 = Constraint(expr= m.x413 + 3.04984759446376*m.b653 <= 
3.04984759446376) m.c630 =", "0.999*m.b618) <= 0) m.c259 = Constraint(expr=(m.x340/(0.001 + 0.999*m.b619) - log(1", "m.x387 + m.x441 == 0) m.c505 = Constraint(expr= - m.x388", "+ m.b688 <= 1) m.c1107 = Constraint(expr= m.b686 + m.b688", "m.b733 <= 1) m.c1195 = Constraint(expr= m.b731 + m.b732 <=", "40*m.b601 <= 0) m.c98 = Constraint(expr= m.x221 + 40*m.b599 <=", "5*m.b737 - 7*m.b738 - 6*m.b739 - 2*m.b740 - 8*m.b741 -", "+ 2.54515263975353*m.b619 <= 2.54515263975353) m.c278 = Constraint(expr= m.x338 - 1.26558121681553*m.b617", "= Constraint(expr= m.x375 - 20*m.b630 <= 0) m.c397 = Constraint(expr=", "m.x843 = Var(within=Reals,bounds=(None,None),initialize=0) m.x844 = Var(within=Reals,bounds=(None,None),initialize=0) m.x845 = Var(within=Reals,bounds=(None,None),initialize=0) m.x846", "m.c554 = Constraint(expr= m.x449 + 9*m.b644 <= 9) m.c555 =", "= Constraint(expr= m.b680 - m.b681 <= 0) m.c1098 = Constraint(expr=", "m.b719 + m.b721 <= 1) m.c1174 = Constraint(expr= m.b720 +", "+ m.b736 <= 1) m.c1203 = Constraint(expr= m.b734 + m.b736", "== 0) m.c473 = Constraint(expr= m.x437 == 0) m.c474 =", "m.x552 == 0) m.c820 = Constraint(expr= m.x553 == 0) m.c821", "+ m.b775 <= 1) m.c1283 = Constraint(expr= m.b596 - m.b686", "0) m.c109 = Constraint(expr= - 0.75*m.x238 + m.x262 == 0)", "0) m.c1366 = Constraint(expr= - m.b677 - m.b678 + m.b679", "s2s sc si # Total cont binary integer sos1 sos2", "- 8*m.b768 - 6*m.b769 - 2*m.b770 - m.b771 - 3*m.b772", "m.x359 == 0) m.c345 = Constraint(expr= m.x360 == 0) m.c346", "0) m.c1389 = Constraint(expr= - m.b618 + m.b636 + m.b639", "== 0) m.c599 = Constraint(expr= m.x404 - 3.04984759446376*m.b650 <= 0)", "+ 0.940066550763924*m.b664 <= 0.940066550763924) m.c713 = Constraint(expr= m.x518 - 0.705049913072943*m.b662", "Var(within=Reals,bounds=(0,None),initialize=0) m.x177 = Var(within=Reals,bounds=(0,None),initialize=0) m.x178 = Var(within=Reals,bounds=(0,None),initialize=0) m.x179 = Var(within=Reals,bounds=(0,None),initialize=0)", "+ 
0.999*m.b638)))*(0.001 + 0.999* m.b638) <= 0) m.c462 = Constraint(expr=(m.x435/(0.001", "m.b685 - m.b775 <= 0) m.c1373 = Constraint(expr= m.b596 +", "<= 1) m.c1251 = Constraint(expr= m.b758 + m.b760 <= 1)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x427 = Var(within=Reals,bounds=(0,None),initialize=0) m.x428 = Var(within=Reals,bounds=(0,None),initialize=0) m.x429 =", "m.c842 = Constraint(expr=(m.x578/(0.001 + 0.999*m.b677) - log(1 + m.x527/(0.001 +", "m.x366 = Var(within=Reals,bounds=(0,None),initialize=0) m.x367 = Var(within=Reals,bounds=(0,None),initialize=0) m.x368 = Var(within=Reals,bounds=(0,None),initialize=0) m.x369", "3.04984759446376*m.b653 <= 3.04984759446376) m.c630 = Constraint(expr= m.x414 + 3.04984759446376*m.b654 <=", "Constraint(expr= m.x594 + 9*m.b684 <= 9) m.c922 = Constraint(expr= m.x595", "- 1.26558121681553*m.b640 <= 0) m.c488 = Constraint(expr= m.x371 + 1.26558121681553*m.b638", "Var(within=Binary,bounds=(0,1),initialize=0) m.b765 = Var(within=Binary,bounds=(0,1),initialize=0) m.b766 = Var(within=Binary,bounds=(0,1),initialize=0) m.b767 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.b721 <= 0) m.c1319 = Constraint(expr= m.b632 - m.b722 <=", "0.999*m.b614) - 0.9*log(1 + m.x290/(0.001 + 0.999*m.b614)))*(0.001 + 0.999* m.b614)", "0) m.c377 = Constraint(expr= m.x419 == 0) m.c378 = Constraint(expr=", "= Constraint(expr= m.x569 + 0.666992981045719*m.b671 <= 0.666992981045719) m.c813 = Constraint(expr=", "m.b597 + m.b600 - m.b609 >= 0) m.c1411 = Constraint(expr=", "= Constraint(expr= 3*m.b718 + m.x808 == 0) m.c956 = Constraint(expr=", "+ m.b748 <= 1) m.c1225 = Constraint(expr= m.b746 + m.b747", "0) m.c1440 = Constraint(expr= m.b618 - m.b639 >= 0) m.c1441", "1) m.c1223 = Constraint(expr= m.b746 + m.b747 <= 1) m.c1224", "0) m.c59 = Constraint(expr= m.x227 == 0) m.c60 = Constraint(expr=", "= Constraint(expr= m.b704 + m.b705 <= 1) m.c1140 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x188 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x189 = Var(within=Reals,bounds=(0,None),initialize=0) m.x190 =", "<= 0) m.c187 = Constraint(expr= m.x250 - 4.45628648004517*m.b610 <= 0)", "m.c297 = Constraint(expr= m.x72 - m.x345 - m.x348 == 0)", "m.b628 <= 0) m.c1045 = Constraint(expr= m.b627 - m.b628 <=", "390*m.x198 + 350*m.x199 + 290*m.x200 + 405*m.x201 + 190*m.x202 +", "m.x475 == 0) m.c650 = Constraint(expr= m.x146 - m.x482 -", "Constraint(expr= m.x486 + 0.78338879230327*m.b657 <= 0.78338879230327) m.c664 = Constraint(expr= m.x487", "= Constraint(expr=(m.x424/(0.001 + 0.999*m.b634) - log(1 + m.x331/(0.001 + 0.999*m.b634)))*(0.001", "== 0) m.c3 = Constraint(expr= m.x3 - m.x6 - m.x9", "20) m.c405 = Constraint(expr= m.x420 + 20*m.b630 <= 20) m.c406", "m.c1452 = Constraint(expr= m.b627 - m.b651 >= 0) m.c1453 =", "Var(within=Reals,bounds=(None,None),initialize=0) m.x824 = Var(within=Reals,bounds=(None,None),initialize=0) m.x825 = Var(within=Reals,bounds=(None,None),initialize=0) m.x826 = Var(within=Reals,bounds=(None,None),initialize=0)", "m.x256 - 30*m.b610 <= 0) m.c194 = Constraint(expr= m.x257 +", "= Constraint(expr= 5*m.b732 + m.x822 == 0) m.c970 = Constraint(expr=", "m.x392 + m.x446 == 0) m.c531 = Constraint(expr= - m.x393", "m.x286 - 3.34221486003388*m.b613 <= 0) m.c221 = Constraint(expr= m.x287 +", "= Constraint(expr=(m.x435/(0.001 + 0.999*m.b639) - 0.65*log(1 + m.x369/(0.001 + 0.999*m.b639)))*(0.001", "m.c754 = Constraint(expr= m.x511 == 0) m.c755 = Constraint(expr= m.x515", "<= 0) m.c1345 = Constraint(expr= - m.b656 - m.b657 +", "m.c200 = Constraint(expr= m.x281 + 15*m.b608 <= 15) m.c201 =", "- m.x160 == 0) m.c44 = Constraint(expr= m.x158 - m.x161", "0) m.c1090 = Constraint(expr= m.b672 - m.b673 <= 0) m.c1091", "Constraint(expr= m.x403 + 3.04984759446376*m.b649 <= 3.04984759446376) m.c578 = Constraint(expr= m.x452", "<= 1.26558121681553) m.c490 = Constraint(expr= m.x373 + 1.26558121681553*m.b640 <= 1.26558121681553)", 
"Var(within=Reals,bounds=(0,None),initialize=0) m.x10 = Var(within=Reals,bounds=(0,None),initialize=0) m.x11 = Var(within=Reals,bounds=(0,None),initialize=0) m.x12 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.b695 + m.b696 <= 1) m.c1122 = Constraint(expr= m.b695", "+ 3.71357206670431*m.b598 <= 3.71357206670431) m.c80 = Constraint(expr=(m.x230/(0.001 + 0.999*m.b599) -", "- 15*m.b685 <= 0) m.c914 = Constraint(expr= m.x563 + 15*m.b683", "- 30*m.b669 <= 0) m.c778 = Constraint(expr= m.x514 - 30*m.b670", "0.65*log(1 + m.x369/(0.001 + 0.999*m.b639)))*(0.001 + 0.999* m.b639) <= 0)", "<= 1) m.c1104 = Constraint(expr= m.b686 + m.b688 <= 1)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x56 = Var(within=Reals,bounds=(0,None),initialize=0) m.x57 = Var(within=Reals,bounds=(0,None),initialize=0) m.x58 =", "m.b767 + m.b769 <= 1) m.c1267 = Constraint(expr= m.b767 +", "m.b659 - m.b660 <= 0) m.c1077 = Constraint(expr= m.b659 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x292 = Var(within=Reals,bounds=(0,None),initialize=0) m.x293 = Var(within=Reals,bounds=(0,None),initialize=0) m.x294 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x570 == 0) m.c796 = Constraint(expr= m.x571 == 0) m.c797", "Constraint(expr= m.x261 - 3.34221486003388*m.b603 <= 0) m.c130 = Constraint(expr= m.x262", "10*m.x21 + 5*m.x22 - 2*m.x35 - m.x36 - 2*m.x37 -", "m.c1233 = Constraint(expr= m.b749 + m.b751 <= 1) m.c1234 =", "m.b603 - m.b615 >= 0) m.c1417 = Constraint(expr= m.b604 -", "= Constraint(expr= m.x43 - m.x271 - m.x277 == 0) m.c269", "Constraint(expr= m.x312 == 0) m.c343 = Constraint(expr= m.x313 == 0)", "m.x162 - m.x495 - m.x498 == 0) m.c703 = Constraint(expr=", "<= 0) m.c1018 = Constraint(expr= m.b600 - m.b601 <= 0)", "Constraint(expr= 4*m.b761 + m.x851 == 0) m.c999 = Constraint(expr= 8*m.b762", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b761 = Var(within=Binary,bounds=(0,1),initialize=0) m.b762 = Var(within=Binary,bounds=(0,1),initialize=0) m.b763 =", 
"= Var(within=Reals,bounds=(0,20),initialize=0) m.x89 = Var(within=Reals,bounds=(0,20),initialize=0) m.x90 = Var(within=Reals,bounds=(0,20),initialize=0) m.x91 =", "- m.b664 <= 0) m.c1082 = Constraint(expr= m.b665 - m.b666", "= Constraint(expr= 4*m.b703 + m.x793 == 0) m.c941 = Constraint(expr=", "+ 0.999*m.b605) - 1.5*log(1 + m.x242/(0.001 + 0.999*m.b605)))*(0.001 + 0.999*", "m.x59 - m.x308 - m.x311 == 0) m.c348 = Constraint(expr=", "m.b640) <= 0) m.c464 = Constraint(expr=(m.x434/(0.001 + 0.999*m.b638) - 0.65*log(1", "m.b635 + m.b636 - m.b726 <= 0) m.c1324 = Constraint(expr=", "m.c504 = Constraint(expr= - m.x387 + m.x441 == 0) m.c505", "0) m.c93 = Constraint(expr= m.x15 - m.x231 - m.x234 ==", "+ m.x538 == 0) m.c752 = Constraint(expr= m.x509 == 0)", "0) m.c1028 = Constraint(expr= m.b611 - m.b612 <= 0) m.c1029", "1) m.c1376 = Constraint(expr= - m.b602 + m.b611 + m.b614", "<= 0) m.c499 = Constraint(expr= m.x436 - 2.30162356062425*m.b640 <= 0)", "m.b662 - m.b663 <= 0) m.c1080 = Constraint(expr= m.b662 -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x326 = Var(within=Reals,bounds=(0,None),initialize=0) m.x327 = Var(within=Reals,bounds=(0,None),initialize=0) m.x328 =", "Constraint(expr= m.b614 - m.b704 <= 0) m.c1302 = Constraint(expr= -", "Constraint(expr= m.x282 == 0) m.c175 = Constraint(expr= m.x283 == 0)", "- m.b637 <= 0) m.c1054 = Constraint(expr= m.b636 - m.b637", "m.x266 - m.x272 == 0) m.c147 = Constraint(expr= m.x42 -", "m.x484 - 0.78338879230327*m.b658 <= 0) m.c662 = Constraint(expr= m.x485 +", "<= 0) m.c1301 = Constraint(expr= m.b614 - m.b704 <= 0)", "0) m.c700 = Constraint(expr= m.x523 == 0) m.c701 = Constraint(expr=", "m.x471 - m.x474 == 0) m.c649 = Constraint(expr= m.x142 -", "0) m.c839 = Constraint(expr= m.x575 + 0.480234946352917*m.b674 <= 0.480234946352917) m.c840", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x446 = Var(within=Reals,bounds=(0,None),initialize=0) m.x447 = Var(within=Reals,bounds=(0,None),initialize=0) m.x448 =", "<= 0) 
m.c779 = Constraint(expr= m.x515 + 30*m.b668 <= 30)", "m.x516 == 0) m.c766 = Constraint(expr= m.x172 - m.x514 -", "m.x396 = Var(within=Reals,bounds=(0,None),initialize=0) m.x397 = Var(within=Reals,bounds=(0,None),initialize=0) m.x398 = Var(within=Reals,bounds=(0,None),initialize=0) m.x399", "0) m.c1063 = Constraint(expr= m.b645 - m.b646 <= 0) m.c1064", "+ m.x285/(0.001 + 0.999*m.b612)))*(0.001 + 0.999* m.b612) <= 0) m.c205", "== 0) m.c143 = Constraint(expr= m.x29 - m.x242 - m.x245", "== 0) m.c769 = Constraint(expr= m.x181 - m.x538 - m.x541", "== 0) m.c695 = Constraint(expr= m.x497 == 0) m.c696 =", "m.x565 == 0) m.c908 = Constraint(expr= m.x209 - m.x590 -", "+ m.b730 <= 1) m.c1189 = Constraint(expr= m.b728 + m.b729", "+ 0.999* m.b612) <= 0) m.c205 = Constraint(expr=(m.x316/(0.001 + 0.999*m.b613)", "0) m.c1024 = Constraint(expr= m.b606 - m.b607 <= 0) m.c1025", "0) m.c602 = Constraint(expr= m.x407 + 3.04984759446376*m.b650 <= 3.04984759446376) m.c603", "m.c287 = Constraint(expr= m.x299 == 0) m.c288 = Constraint(expr= m.x300", "Var(within=Binary,bounds=(0,1),initialize=0) m.b768 = Var(within=Binary,bounds=(0,1),initialize=0) m.b769 = Var(within=Binary,bounds=(0,1),initialize=0) m.b770 = Var(within=Binary,bounds=(0,1),initialize=0)", "15) m.c202 = Constraint(expr= m.x283 + 15*m.b610 <= 15) m.c203", "+ 1.11894339953103*m.b652 <= 1.11894339953103) m.c611 = Constraint(expr=(m.x464/(0.001 + 0.999*m.b653) -", "m.b615 = Var(within=Binary,bounds=(0,1),initialize=0) m.b616 = Var(within=Binary,bounds=(0,1),initialize=0) m.b617 = Var(within=Binary,bounds=(0,1),initialize=0) m.b618", "m.b709 = Var(within=Binary,bounds=(0,1),initialize=0) m.b710 = Var(within=Binary,bounds=(0,1),initialize=0) m.b711 = Var(within=Binary,bounds=(0,1),initialize=0) m.b712", "- m.b711 <= 0) m.c1309 = Constraint(expr= - m.b620 -", "m.x464 = Var(within=Reals,bounds=(0,None),initialize=0) m.x465 = Var(within=Reals,bounds=(0,None),initialize=0) m.x466 = Var(within=Reals,bounds=(0,None),initialize=0) 
m.x467", "+ m.x777 == 0) m.c925 = Constraint(expr= 6*m.b688 + m.x778", "0) m.c1474 = Constraint(expr= m.b664 - m.b673 >= 0) m.c1475", "== 0) m.c821 = Constraint(expr= m.x575 == 0) m.c822 =", "= Constraint(expr= 3*m.b765 + m.x855 == 0) m.c1003 = Constraint(expr=", "Constraint(expr= 2*m.b770 + m.x860 == 0) m.c1008 = Constraint(expr= m.b771", "m.b691 <= 0) m.c1289 = Constraint(expr= m.b602 - m.b692 <=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x463 = Var(within=Reals,bounds=(0,None),initialize=0) m.x464 = Var(within=Reals,bounds=(0,None),initialize=0) m.x465 = Var(within=Reals,bounds=(0,None),initialize=0)", ">= 0) m.c1425 = Constraint(expr= m.b609 - m.b624 >= 0)", "+ 0.999*m.b679) - log(1 + m.x529/(0.001 + 0.999*m.b679)))*(0.001 + 0.999*m.b679)", "m.x181 - m.x190 - m.x193 - m.x196 == 0) m.c53", "0) m.c950 = Constraint(expr= 4*m.b713 + m.x803 == 0) m.c951", "Constraint(expr= - 0.9*m.x555 + m.x585 == 0) m.c871 = Constraint(expr=", "m.x66 - m.x330 - m.x336 == 0) m.c418 = Constraint(expr=", "- m.x371 == 0) m.c477 = Constraint(expr= m.x84 - m.x369", "m.c1091 = Constraint(expr= m.b674 - m.b675 <= 0) m.c1092 =", "= Constraint(expr= m.x513 - 30*m.b669 <= 0) m.c778 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x493 = Var(within=Reals,bounds=(0,None),initialize=0) m.x494 = Var(within=Reals,bounds=(0,None),initialize=0) m.x495 =", "Constraint(expr= m.x113 - m.x422 - m.x425 == 0) m.c420 =", "- m.x358 - m.x361 == 0) m.c353 = Constraint(expr= m.x308", "+ 0.999* m.b652) <= 0) m.c587 = Constraint(expr= m.x407 ==", "= Constraint(expr= m.b597 + m.b600 - m.b603 >= 0) m.c1405", "m.x803 == 0) m.c951 = Constraint(expr= 7*m.b714 + m.x804 ==", "- m.b669 <= 0) m.c1086 = Constraint(expr= m.b668 - m.b670", "== 0) m.c51 = Constraint(expr= m.x180 - m.x189 - m.x192", "m.x308/(0.001 + 0.999*m.b626)))*(0.001 + 0.999* m.b626) <= 0) m.c339 =", "m.x494 = Var(within=Reals,bounds=(0,None),initialize=0) m.x495 = Var(within=Reals,bounds=(0,None),initialize=0) m.x496 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x497", "- 5*m.b747 - 2*m.b748 - 9*m.b749 - 2*m.b750 - 9*m.b751", "+ 0.705049913072943*m.b664 <= 0.705049913072943) m.c719 = Constraint(expr=(m.x524/(0.001 + 0.999*m.b665) -", "Constraint(expr= m.x134 - m.x464 - m.x467 == 0) m.c624 =", "== 0) m.c95 = Constraint(expr= m.x218 - 40*m.b599 <= 0)", "m.c280 = Constraint(expr= m.x340 - 1.26558121681553*m.b619 <= 0) m.c281 =", "== 0) m.c747 = Constraint(expr= - m.x507 + m.x537 ==", "m.b609 - m.b621 >= 0) m.c1423 = Constraint(expr= m.b610 -", "== 0) m.c62 = Constraint(expr= m.x5 - m.x212 - m.x215", "m.c978 = Constraint(expr= 8*m.b741 + m.x831 == 0) m.c979 =", "= Constraint(expr= m.x456 == 0) m.c565 = Constraint(expr= m.x457 ==", "+ m.x411/(0.001 + 0.999*m.b654)))*(0.001 + 0.999* m.b654) <= 0) m.c613", "0) m.c391 = Constraint(expr= m.x319 - 1.83548069293539*m.b631 <= 0) m.c392", "m.b664 - m.b673 >= 0) m.c1475 = Constraint(expr= m.b662 -", "m.b656 = Var(within=Binary,bounds=(0,1),initialize=0) m.b657 = Var(within=Binary,bounds=(0,1),initialize=0) m.b658 = Var(within=Binary,bounds=(0,1),initialize=0) m.b659", "0.940066550763924) m.c712 = Constraint(expr= m.x499 + 0.940066550763924*m.b664 <= 0.940066550763924) m.c713", "m.b648 - m.b738 <= 0) m.c1336 = Constraint(expr= - m.b647", "Var(within=Reals,bounds=(None,None),initialize=0) m.x819 = Var(within=Reals,bounds=(None,None),initialize=0) m.x820 = Var(within=Reals,bounds=(None,None),initialize=0) m.x821 = Var(within=Reals,bounds=(None,None),initialize=0)", "0) m.c649 = Constraint(expr= m.x142 - m.x472 - m.x475 ==", "0) m.c1027 = Constraint(expr= m.b609 - m.b610 <= 0) m.c1028", "== 0) m.c213 = Constraint(expr= m.x48 - m.x285 - m.x288", "<= 0) m.c1296 = Constraint(expr= - m.b608 + m.b609 -", "13.5*m.b620 <= 13.5) m.c309 = Constraint(expr= m.x348 + 13.5*m.b621 <=", "m.x588 == 0) m.c883 = Constraint(expr= m.x208 - m.x586 -", "Constraint(expr= m.b628 - m.b652 >= 0) m.c1454 = Constraint(expr= m.b626", "= 
Var(within=Reals,bounds=(0,None),initialize=0) m.x530 = Var(within=Reals,bounds=(0,None),initialize=0) m.x531 = Var(within=Reals,bounds=(0,None),initialize=0) m.x532 =", "m.b759 = Var(within=Binary,bounds=(0,1),initialize=0) m.b760 = Var(within=Binary,bounds=(0,1),initialize=0) m.b761 = Var(within=Binary,bounds=(0,1),initialize=0) m.b762", "m.c1186 = Constraint(expr= m.b726 + m.b727 <= 1) m.c1187 =", "<= 0) m.c488 = Constraint(expr= m.x371 + 1.26558121681553*m.b638 <= 1.26558121681553)", "= Constraint(expr= m.x396 + 9*m.b645 <= 9) m.c550 = Constraint(expr=", "m.b751 <= 1) m.c1233 = Constraint(expr= m.b749 + m.b751 <=", "m.x37 - m.x256 - m.x259 == 0) m.c182 = Constraint(expr=", "m.x150 = Var(within=Reals,bounds=(0,None),initialize=0) m.x151 = Var(within=Reals,bounds=(0,None),initialize=0) m.x152 = Var(within=Reals,bounds=(0,None),initialize=0) m.x153", "Var(within=Binary,bounds=(0,1),initialize=0) m.b641 = Var(within=Binary,bounds=(0,1),initialize=0) m.b642 = Var(within=Binary,bounds=(0,1),initialize=0) m.b643 = Var(within=Binary,bounds=(0,1),initialize=0)", "0) m.c318 = Constraint(expr= m.x354 == 0) m.c319 = Constraint(expr=", "1.26558121681553*m.b617 <= 1.26558121681553) m.c282 = Constraint(expr= m.x342 + 1.26558121681553*m.b618 <=", "= Constraint(expr= m.b624 - m.b625 <= 0) m.c1043 = Constraint(expr=", "0) m.c714 = Constraint(expr= m.x519 - 0.705049913072943*m.b663 <= 0) m.c715", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x779 = Var(within=Reals,bounds=(None,None),initialize=0) m.x780 = Var(within=Reals,bounds=(None,None),initialize=0) m.x781 =", "= Constraint(expr= 4*m.b687 + m.x777 == 0) m.c925 = Constraint(expr=", "m.c900 = Constraint(expr= m.x564 == 0) m.c901 = Constraint(expr= m.x565", "= Constraint(expr= - m.x387 + m.x441 == 0) m.c505 =", "Constraint(expr= m.x341 == 0) m.c264 = Constraint(expr= m.x342 == 0)", "<gh_stars>0 # MINLP written by GAMS Convert at 01/15/21 11:37:33", "- m.x312 == 0) m.c349 = Constraint(expr= m.x61 - m.x310", "- m.x360 
== 0) m.c352 = Constraint(expr= m.x79 - m.x358", "+ 0.999*m.b651) - 0.8*log(1 + m.x405/(0.001 + 0.999*m.b651)))*(0.001 + 0.999*", "0) m.c757 = Constraint(expr= m.x517 == 0) m.c758 = Constraint(expr=", "+ 0.999*m.b676)))*(0.001 + 0.999* m.b676) <= 0) m.c818 = Constraint(expr=", "- 2*m.b755 - 3*m.b756 - 8*m.b757 - 10*m.b758 - 6*m.b759", "m.c103 = Constraint(expr= m.x232 - 4.45628648004517*m.b601 <= 0) m.c104 =", "m.b741 + m.b742 <= 1) m.c1217 = Constraint(expr= m.b743 +", "<= 1.83548069293539) m.c394 = Constraint(expr= m.x325 + 1.83548069293539*m.b631 <= 1.83548069293539)", "0) m.c635 = Constraint(expr= m.x467 + 1.18887736200171*m.b653 <= 1.18887736200171) m.c636", "<= 0) m.c1340 = Constraint(expr= m.b653 - m.b743 <= 0)", "- m.x81 - m.x84 == 0) m.c22 = Constraint(expr= m.x70", "0) m.c371 = Constraint(expr= m.x323 == 0) m.c372 = Constraint(expr=", "Var(within=Binary,bounds=(0,1),initialize=0) m.b643 = Var(within=Binary,bounds=(0,1),initialize=0) m.b644 = Var(within=Binary,bounds=(0,1),initialize=0) m.b645 = Var(within=Binary,bounds=(0,1),initialize=0)", "<= 1) m.c1107 = Constraint(expr= m.b686 + m.b688 <= 1)", "Constraint(expr= 7*m.b714 + m.x804 == 0) m.c952 = Constraint(expr= 4*m.b715", "Constraint(expr= m.b774 + m.b775 <= 1) m.c1281 = Constraint(expr= m.b773", "m.x787 == 0) m.c935 = Constraint(expr= 6*m.b698 + m.x788 ==", "m.b655 - m.b745 <= 0) m.c1343 = Constraint(expr= m.b656 -", "m.c78 = Constraint(expr= m.x228 + 3.71357206670431*m.b597 <= 3.71357206670431) m.c79 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x39 = Var(within=Reals,bounds=(0,None),initialize=0) m.x40 = Var(within=Reals,bounds=(0,None),initialize=0) m.x41 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c1029 = Constraint(expr= m.b611 - m.b613 <= 0) m.c1030 =", "m.b648 = Var(within=Binary,bounds=(0,1),initialize=0) m.b649 = Var(within=Binary,bounds=(0,1),initialize=0) m.b650 = Var(within=Binary,bounds=(0,1),initialize=0) m.b651", "0) m.c1083 = Constraint(expr= m.b665 - m.b667 <= 0) 
m.c1084", "40) m.c101 = Constraint(expr= m.x230 - 4.45628648004517*m.b599 <= 0) m.c102", "m.x369 = Var(within=Reals,bounds=(0,None),initialize=0) m.x370 = Var(within=Reals,bounds=(0,None),initialize=0) m.x371 = Var(within=Reals,bounds=(0,None),initialize=0) m.x372", "+ 0.999*m.b656) <= 0) m.c639 = Constraint(expr=(m.x483/(0.001 + 0.999*m.b657) -", "<= 0) m.c688 = Constraint(expr= m.x490 - 0.940066550763924*m.b661 <= 0)", "Constraint(expr= m.b730 + m.x820 == 0) m.c968 = Constraint(expr= 2*m.b731", "+ m.b739 <= 1) m.c1211 = Constraint(expr= m.b740 + m.b741", "0) m.c1350 = Constraint(expr= - m.b662 + m.b663 - m.b753", "m.c1288 = Constraint(expr= - m.b599 - m.b600 + m.b601 -", "+ m.b651 + m.b654 >= 0) m.c1402 = Constraint(expr= -", "Constraint(expr= m.x521 == 0) m.c699 = Constraint(expr= m.x522 == 0)", "== 0) m.c760 = Constraint(expr= m.x541 == 0) m.c761 =", "m.x192 - m.x195 == 0) m.c52 = Constraint(expr= m.x181 -", "m.x25 == 0) m.c11 = Constraint(expr= m.x23 - m.x26 -", "== 0) m.c118 = Constraint(expr= m.x28 - m.x238 - m.x241", "m.x166 - m.x502 - m.x505 == 0) m.c731 = Constraint(expr=", "Constraint(expr= m.b719 + m.b720 <= 1) m.c1172 = Constraint(expr= m.b720", "m.b607 - m.b697 <= 0) m.c1295 = Constraint(expr= m.b608 -", "m.b706 <= 0) m.c1304 = Constraint(expr= m.b617 - m.b707 <=", "Constraint(expr= m.x154 - m.x157 - m.x160 == 0) m.c44 =", "m.x366 == 0) m.c439 = Constraint(expr= m.x367 == 0) m.c440", "- 3.04984759446376*m.b654 <= 0) m.c628 = Constraint(expr= m.x412 - 3.04984759446376*m.b655", "== 0) m.c37 = Constraint(expr= m.x139 - m.x142 - m.x145", "- 0.5*m.x514 + m.x538 == 0) m.c752 = Constraint(expr= m.x509", "m.c394 = Constraint(expr= m.x325 + 1.83548069293539*m.b631 <= 1.83548069293539) m.c395 =", "Constraint(expr= m.x174 - m.x519 - m.x522 == 0) m.c706 =", "<= 0) m.c1364 = Constraint(expr= m.b677 - m.b767 <= 0)", "= Constraint(expr= m.x558 == 0) m.c874 = Constraint(expr= m.x559 ==", "m.x495 - 0.940066550763924*m.b663 <= 0) m.c709 = Constraint(expr= m.x496 -", 
"Var(within=Binary,bounds=(0,1),initialize=0) m.b673 = Var(within=Binary,bounds=(0,1),initialize=0) m.b674 = Var(within=Binary,bounds=(0,1),initialize=0) m.b675 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.x826 = Var(within=Reals,bounds=(None,None),initialize=0) m.x827 = Var(within=Reals,bounds=(None,None),initialize=0) m.x828 = Var(within=Reals,bounds=(None,None),initialize=0) m.x829", "Constraint(expr= m.x230 - 4.45628648004517*m.b599 <= 0) m.c102 = Constraint(expr= m.x231", "m.x306 == 0) m.c316 = Constraint(expr= m.x307 == 0) m.c317", "- 0.572481933717686*m.b636 <= 0) m.c457 = Constraint(expr= m.x430 - 0.572481933717686*m.b637", "m.b605 - m.b607 <= 0) m.c1024 = Constraint(expr= m.b606 -", "m.c1158 = Constraint(expr= m.b713 + m.b715 <= 1) m.c1159 =", "Constraint(expr= m.x45 - m.x54 - m.x57 - m.x60 == 0)", "0) m.c975 = Constraint(expr= 7*m.b738 + m.x828 == 0) m.c976", "m.c1286 = Constraint(expr= m.b599 - m.b689 <= 0) m.c1287 =", "0) m.c351 = Constraint(expr= m.x78 - m.x357 - m.x360 ==", "Var(within=Binary,bounds=(0,1),initialize=0) m.b684 = Var(within=Binary,bounds=(0,1),initialize=0) m.b685 = Var(within=Binary,bounds=(0,1),initialize=0) m.b686 = Var(within=Binary,bounds=(0,1),initialize=0)", "33.5*m.b639 <= 33.5) m.c496 = Constraint(expr= m.x385 + 33.5*m.b640 <=", "- m.x517 == 0) m.c767 = Constraint(expr= m.x179 - m.x536", "== 0) m.c935 = Constraint(expr= 6*m.b698 + m.x788 == 0)", "m.c634 = Constraint(expr= m.x466 - 1.18887736200171*m.b655 <= 0) m.c635 =", "Constraint(expr= m.x101 - m.x398 - m.x401 == 0) m.c567 =", "= Constraint(expr= m.x223 == 0) m.c86 = Constraint(expr= m.x233 ==", "0) m.c878 = Constraint(expr= m.x188 - m.x554 - m.x557 ==", "+ 0.999*m.b659) - 1.2*log(1 + m.x476/(0.001 + 0.999*m.b659)))*(0.001 + 0.999*", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x368 = Var(within=Reals,bounds=(0,None),initialize=0) m.x369 = Var(within=Reals,bounds=(0,None),initialize=0) m.x370 =", "m.x548 = Var(within=Reals,bounds=(0,None),initialize=0) m.x549 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x550 = Var(within=Reals,bounds=(0,None),initialize=0) m.x551", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b598 = Var(within=Binary,bounds=(0,1),initialize=0) m.b599 = Var(within=Binary,bounds=(0,1),initialize=0) m.b600 =", "m.b645 - m.b646 <= 0) m.c1064 = Constraint(expr= m.b647 -", "<= 0) m.c887 = Constraint(expr= m.x557 + 15*m.b680 <= 15)", "m.c227 = Constraint(expr= m.x320 + 1.83548069293539*m.b611 <= 1.83548069293539) m.c228 =", "m.c1294 = Constraint(expr= - m.b605 - m.b606 + m.b607 -", "0) m.c914 = Constraint(expr= m.x563 + 15*m.b683 <= 15) m.c915", "<= 4.45628648004517) m.c189 = Constraint(expr= m.x252 + 4.45628648004517*m.b609 <= 4.45628648004517)", "m.b698 + m.b700 <= 1) m.c1129 = Constraint(expr= m.b698 +", "m.x35 = Var(within=Reals,bounds=(0,30),initialize=0) m.x36 = Var(within=Reals,bounds=(0,30),initialize=0) m.x37 = Var(within=Reals,bounds=(0,30),initialize=0) m.x38", "0) m.c117 = Constraint(expr= m.x27 - m.x237 - m.x240 ==", "Var(within=Reals,bounds=(0,None),initialize=0) m.x123 = Var(within=Reals,bounds=(0,None),initialize=0) m.x124 = Var(within=Reals,bounds=(0,None),initialize=0) m.x125 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= m.x477 - 1.18887736200171*m.b660 <= 0) m.c682 = Constraint(expr=", "9*m.b685 <= 9) m.c923 = Constraint(expr= 5*m.b686 + m.x776 ==", "= Constraint(expr= m.x554 - 15*m.b680 <= 0) m.c885 = Constraint(expr=", "Var(within=Reals,bounds=(None,None),initialize=0) m.x830 = Var(within=Reals,bounds=(None,None),initialize=0) m.x831 = Var(within=Reals,bounds=(None,None),initialize=0) m.x832 = Var(within=Reals,bounds=(None,None),initialize=0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x145 = Var(within=Reals,bounds=(0,None),initialize=0) m.x146 = Var(within=Reals,bounds=(0,None),initialize=0) m.x147 = Var(within=Reals,bounds=(0,None),initialize=0)", "2.54515263975353*m.b617 <= 2.54515263975353) m.c276 = Constraint(expr= m.x276 + 
2.54515263975353*m.b618 <=", "m.x424 - 0.842233385663186*m.b634 <= 0) m.c431 = Constraint(expr= m.x425 +", "- 6*m.b726 - 3*m.b727 - 4*m.b728 - 8*m.b729 - m.b730", "m.b638 >= 0) m.c1389 = Constraint(expr= - m.b618 + m.b636", "m.b652 <= 0) m.c1070 = Constraint(expr= m.b653 - m.b654 <=", "- m.b624 >= 0) m.c1426 = Constraint(expr= m.b610 - m.b625", "m.c1199 = Constraint(expr= m.b734 + m.b735 <= 1) m.c1200 =", "C B # 1486 571 111 804 0 0 0", "= Constraint(expr= m.b686 + m.b688 <= 1) m.c1105 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x242 = Var(within=Reals,bounds=(0,None),initialize=0) m.x243 = Var(within=Reals,bounds=(0,None),initialize=0) m.x244 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c535 = Constraint(expr= m.x397 == 0) m.c536 = Constraint(expr=", "<= 0) m.c425 = Constraint(expr= m.x335 + 1.32154609891348*m.b632 <= 1.32154609891348)", "m.x222 == 0) m.c85 = Constraint(expr= m.x223 == 0) m.c86", "= Constraint(expr= m.x547 + 0.705049913072943*m.b673 <= 0.705049913072943) m.c809 = Constraint(expr=", "0) m.c411 = Constraint(expr= m.x336 == 0) m.c412 = Constraint(expr=", "m.c441 = Constraint(expr= m.x432 == 0) m.c442 = Constraint(expr= m.x433", "0) m.c731 = Constraint(expr= m.x176 - m.x524 - m.x530 ==", "m.x102 - m.x105 - m.x108 == 0) m.c31 = Constraint(expr=", "m.x26 - m.x236 - m.x239 == 0) m.c117 = Constraint(expr=", "m.x374 + m.x416 == 0) m.c369 = Constraint(expr= - m.x375", "0) m.c1095 = Constraint(expr= m.b677 - m.b679 <= 0) m.c1096", "m.b626 - m.b647 >= 0) m.c1449 = Constraint(expr= m.b627 -", "= Constraint(expr= m.x392 - 9*m.b644 <= 0) m.c546 = Constraint(expr=", "Constraint(expr= m.x573 - 0.480234946352917*m.b675 <= 0) m.c838 = Constraint(expr= m.x574", "0.705049913072943) m.c836 = Constraint(expr= m.x572 - 0.480234946352917*m.b674 <= 0) m.c837", "+ 0.999* m.b654) <= 0) m.c613 = Constraint(expr=(m.x466/(0.001 + 0.999*m.b655)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x321 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x322 = Var(within=Reals,bounds=(0,None),initialize=0) m.x323 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.b620 - m.b622 <= 0) m.c1039 = Constraint(expr= m.b621", "== 0) m.c373 = Constraint(expr= m.x325 == 0) m.c374 =", "m.x545 + 0.705049913072943*m.b671 <= 0.705049913072943) m.c807 = Constraint(expr= m.x546 +", "7*m.b763 - 7*m.b764 - 3*m.b765 - 9*m.b766 - 4*m.b767 -", "m.x853 = Var(within=Reals,bounds=(None,None),initialize=0) m.x854 = Var(within=Reals,bounds=(None,None),initialize=0) m.x855 = Var(within=Reals,bounds=(None,None),initialize=0) m.x856", "9*m.b645 <= 0) m.c553 = Constraint(expr= m.x448 - 9*m.b646 <=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x81 = Var(within=Reals,bounds=(0,None),initialize=0) m.x82 = Var(within=Reals,bounds=(0,None),initialize=0) m.x83 =", "m.x420 == 0) m.c379 = Constraint(expr= m.x421 == 0) m.c380", "Var(within=Binary,bounds=(0,1),initialize=0) m.b652 = Var(within=Binary,bounds=(0,1),initialize=0) m.b653 = Var(within=Binary,bounds=(0,1),initialize=0) m.b654 = Var(within=Binary,bounds=(0,1),initialize=0)", "- m.b740 <= 0) m.c1338 = Constraint(expr= - m.b650 +", "= m = ConcreteModel() m.x2 = Var(within=Reals,bounds=(0,40),initialize=0) m.x3 = Var(within=Reals,bounds=(0,40),initialize=0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x293 = Var(within=Reals,bounds=(0,None),initialize=0) m.x294 = Var(within=Reals,bounds=(0,None),initialize=0) m.x295 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x861 == 0) m.c1009 = Constraint(expr= 3*m.b772 + m.x862 ==", "0) m.c1100 = Constraint(expr= m.b683 - m.b684 <= 0) m.c1101", "m.x97 = Var(within=Reals,bounds=(0,None),initialize=0) m.x98 = Var(within=Reals,bounds=(0,None),initialize=0) m.x99 = Var(within=Reals,bounds=(0,None),initialize=0) m.x100", "0.999* m.b652) <= 0) m.c587 = Constraint(expr= m.x407 == 0)", "m.x209 - m.x590 - m.x593 == 0) m.c909 = Constraint(expr=", "m.b631 <= 0) m.c1049 = 
Constraint(expr= m.b632 - m.b633 <=", "m.x255 - 30*m.b609 <= 0) m.c193 = Constraint(expr= m.x256 -", "- 1.5*log(1 + m.x242/(0.001 + 0.999*m.b605)))*(0.001 + 0.999* m.b605) <=", "m.x347 + 13.5*m.b620 <= 13.5) m.c309 = Constraint(expr= m.x348 +", "+ 0.999*m.b614)))*(0.001 + 0.999* m.b614) <= 0) m.c231 = Constraint(expr=(m.x327/(0.001", "2*m.b733 + m.x823 == 0) m.c971 = Constraint(expr= 3*m.b734 +", "m.c466 = Constraint(expr=(m.x436/(0.001 + 0.999*m.b640) - 0.65*log(1 + m.x382/(0.001 +", "m.b619 >= 0) m.c1388 = Constraint(expr= - m.b617 + m.b635", "= Constraint(expr= m.b664 - m.b673 >= 0) m.c1475 = Constraint(expr=", "m.b597 <= 0) m.c1014 = Constraint(expr= m.b596 - m.b598 <=", "m.b683 - m.b685 <= 0) m.c1102 = Constraint(expr= m.b684 -", "m.c1479 = Constraint(expr= m.b666 - m.b678 >= 0) m.c1480 =", "- m.x559 == 0) m.c881 = Constraint(expr= m.x206 - m.x584", "- m.x386 - m.x389 == 0) m.c513 = Constraint(expr= m.x96", "3*m.b774 + m.x864 == 0) m.c1012 = Constraint(expr= 4*m.b775 +", "m.b720 + m.b721 <= 1) m.c1173 = Constraint(expr= m.b719 +", "= Constraint(expr= m.b755 + m.b756 <= 1) m.c1244 = Constraint(expr=", "0.999*m.b605) - 1.5*log(1 + m.x242/(0.001 + 0.999*m.b605)))*(0.001 + 0.999* m.b605)", "- m.b678 + m.b679 - m.b769 <= 0) m.c1367 =", "m.x159 - m.x162 - m.x165 - m.x168 == 0) m.c46", "<= 0.705049913072943) m.c808 = Constraint(expr= m.x547 + 0.705049913072943*m.b673 <= 0.705049913072943)", "<= 1) m.c1183 = Constraint(expr= m.b725 + m.b726 <= 1)", "m.x99 - m.x393 - m.x396 == 0) m.c541 = Constraint(expr=", "- m.x497 == 0) m.c702 = Constraint(expr= m.x162 - m.x495", "m.x184 = Var(within=Reals,bounds=(0,None),initialize=0) m.x185 = Var(within=Reals,bounds=(0,None),initialize=0) m.x186 = Var(within=Reals,bounds=(0,None),initialize=0) m.x187", "m.b749 = Var(within=Binary,bounds=(0,1),initialize=0) m.b750 = Var(within=Binary,bounds=(0,1),initialize=0) m.b751 = Var(within=Binary,bounds=(0,1),initialize=0) m.b752", "== 0) m.c511 = Constraint(expr= m.x445 == 0) m.c512 =", "<= 
0) m.c1305 = Constraint(expr= - m.b617 + m.b618 -", "Constraint(expr= - m.b614 - m.b615 + m.b616 - m.b706 <=", "- m.x367 == 0) m.c446 = Constraint(expr= m.x116 - m.x428", "m.c1280 = Constraint(expr= m.b774 + m.b775 <= 1) m.c1281 =", "0.999* m.b660) <= 0) m.c667 = Constraint(expr=(m.x490/(0.001 + 0.999*m.b661) -", "0) m.c563 = Constraint(expr= m.x455 == 0) m.c564 = Constraint(expr=", "m.x276 = Var(within=Reals,bounds=(0,None),initialize=0) m.x277 = Var(within=Reals,bounds=(0,None),initialize=0) m.x278 = Var(within=Reals,bounds=(0,None),initialize=0) m.x279", "1.04900943706034) m.c584 = Constraint(expr=(m.x458/(0.001 + 0.999*m.b650) - 0.8*log(1 + m.x404/(0.001", "m.b705 <= 0) m.c1303 = Constraint(expr= - m.b614 - m.b615", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x849 = Var(within=Reals,bounds=(None,None),initialize=0) m.x850 = Var(within=Reals,bounds=(None,None),initialize=0) m.x851 =", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b729 = Var(within=Binary,bounds=(0,1),initialize=0) m.b730 = Var(within=Binary,bounds=(0,1),initialize=0) m.b731 =", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b766 = Var(within=Binary,bounds=(0,1),initialize=0) m.b767 = Var(within=Binary,bounds=(0,1),initialize=0) m.b768 =", "= Constraint(expr= m.x364 - 1.26558121681553*m.b637 <= 0) m.c452 = Constraint(expr=", "Constraint(expr= m.x375 - 20*m.b630 <= 0) m.c397 = Constraint(expr= m.x376", "== 0) m.c63 = Constraint(expr= m.x6 - m.x213 - m.x216", "= Var(within=Reals,bounds=(0,None),initialize=0) m.b596 = Var(within=Binary,bounds=(0,1),initialize=0) m.b597 = Var(within=Binary,bounds=(0,1),initialize=0) m.b598 =", "1) m.c1261 = Constraint(expr= m.b764 + m.b765 <= 1) m.c1262", "m.c302 = Constraint(expr= m.x299 + 15*m.b620 <= 15) m.c303 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x178 = Var(within=Reals,bounds=(0,None),initialize=0) m.x179 = Var(within=Reals,bounds=(0,None),initialize=0) m.x180 = Var(within=Reals,bounds=(0,None),initialize=0)", "- m.x95 - m.x98 
== 0) m.c27 = Constraint(expr= m.x75", "= Constraint(expr= m.x475 + 1.18887736200171*m.b658 <= 1.18887736200171) m.c659 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x451 = Var(within=Reals,bounds=(0,None),initialize=0) m.x452 = Var(within=Reals,bounds=(0,None),initialize=0) m.x453 = Var(within=Reals,bounds=(0,None),initialize=0)", "3.04984759446376*m.b651 <= 3.04984759446376) m.c604 = Constraint(expr= m.x409 + 3.04984759446376*m.b652 <=", "= Constraint(expr= m.x539 + 15*m.b668 <= 15) m.c786 = Constraint(expr=", "3*m.b734 + m.x824 == 0) m.c972 = Constraint(expr= 4*m.b735 +", "m.c1009 = Constraint(expr= 3*m.b772 + m.x862 == 0) m.c1010 =", "= Constraint(expr= 7*m.b709 + m.x799 == 0) m.c947 = Constraint(expr=", "= Constraint(expr= m.x216 + 40*m.b597 <= 40) m.c73 = Constraint(expr=", "0) m.c1465 = Constraint(expr= - m.b667 + m.b679 >= 0)", "m.x37 = Var(within=Reals,bounds=(0,30),initialize=0) m.x38 = Var(within=Reals,bounds=(0,None),initialize=0) m.x39 = Var(within=Reals,bounds=(0,None),initialize=0) m.x40", "Constraint(expr= - m.b618 + m.b636 + m.b639 >= 0) m.c1390", "0) m.c560 = Constraint(expr= m.x401 == 0) m.c561 = Constraint(expr=", "Constraint(expr= m.x444 == 0) m.c511 = Constraint(expr= m.x445 == 0)", "0.9*log(1 + m.x548/(0.001 + 0.999*m.b674)))*(0.001 + 0.999* m.b674) <= 0)", "Constraint(expr= m.b598 + m.b601 - m.b610 >= 0) m.c1412 =", "= Constraint(expr= m.x101 - m.x398 - m.x401 == 0) m.c567", "Var(within=Binary,bounds=(0,1),initialize=0) m.b752 = Var(within=Binary,bounds=(0,1),initialize=0) m.b753 = Var(within=Binary,bounds=(0,1),initialize=0) m.b754 = Var(within=Binary,bounds=(0,1),initialize=0)", "- 9*m.b717 - 3*m.b718 - 7*m.b719 - 2*m.b720 - 9*m.b721", "0.999*m.b635) - 0.7*log(1 + m.x362/(0.001 + 0.999*m.b635)))*(0.001 + 0.999* m.b635)", "m.b755 + m.b756 <= 1) m.c1242 = Constraint(expr= m.b755 +", "= Constraint(expr= m.x327 - 1.32154609891348*m.b615 <= 0) m.c253 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) 
m.x137 = Var(within=Reals,bounds=(0,None),initialize=0) m.x138 = Var(within=Reals,bounds=(0,None),initialize=0) m.x139 =", "== 0) m.c959 = Constraint(expr= 3*m.b722 + m.x812 == 0)", "= Constraint(expr= m.x38 - m.x260 - m.x263 == 0) m.c120", "0) m.c26 = Constraint(expr= m.x74 - m.x95 - m.x98 ==", "0) m.c313 = Constraint(expr= - 0.6*m.x304 + m.x352 == 0)", "Var(within=Binary,bounds=(0,1),initialize=0) m.b689 = Var(within=Binary,bounds=(0,1),initialize=0) m.b690 = Var(within=Binary,bounds=(0,1),initialize=0) m.b691 = Var(within=Binary,bounds=(0,1),initialize=0)", "Constraint(expr= m.x323 + 1.83548069293539*m.b629 <= 1.83548069293539) m.c393 = Constraint(expr= m.x324", "m.c1078 = Constraint(expr= m.b660 - m.b661 <= 0) m.c1079 =", "m.b596 + m.b597 - m.b687 <= 0) m.c1285 = Constraint(expr=", "0.572481933717686*m.b636 <= 0) m.c457 = Constraint(expr= m.x430 - 0.572481933717686*m.b637 <=", "m.x460 - 1.11894339953103*m.b652 <= 0) m.c608 = Constraint(expr= m.x461 +", "Var(within=Reals,bounds=(0,None),initialize=0) m.x25 = Var(within=Reals,bounds=(0,None),initialize=0) m.x26 = Var(within=Reals,bounds=(0,None),initialize=0) m.x27 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c821 = Constraint(expr= m.x575 == 0) m.c822 = Constraint(expr=", "+ m.x848 == 0) m.c996 = Constraint(expr= 6*m.b759 + m.x849", "== 0) m.c563 = Constraint(expr= m.x455 == 0) m.c564 =", "0) m.c75 = Constraint(expr= m.x225 - 3.71357206670431*m.b597 <= 0) m.c76", "- 9*m.b684 <= 0) m.c919 = Constraint(expr= m.x592 - 9*m.b685", "1) m.c1242 = Constraint(expr= m.b755 + m.b757 <= 1) m.c1243", "4*m.b735 - 3*m.b736 - 5*m.b737 - 7*m.b738 - 6*m.b739 -", "m.x414 == 0) m.c622 = Constraint(expr= m.x109 - m.x412 -", "m.x579 = Var(within=Reals,bounds=(0,None),initialize=0) m.x580 = Var(within=Reals,bounds=(0,None),initialize=0) m.x581 = Var(within=Reals,bounds=(0,None),initialize=0) m.x582", "= Constraint(expr= m.b660 - m.b661 <= 0) m.c1079 = Constraint(expr=", "+ m.b774 <= 1) m.c1280 = Constraint(expr= m.b774 + 
m.b775", "<= 0) m.c1320 = Constraint(expr= - m.b632 + m.b633 -", "0) m.c749 = Constraint(expr= - 0.5*m.x512 + m.x536 == 0)", "m.b651 + m.b654 >= 0) m.c1402 = Constraint(expr= - m.b628", "m.b668 + m.b669 - m.b759 <= 0) m.c1357 = Constraint(expr=", "+ 15*m.b609 <= 15) m.c202 = Constraint(expr= m.x283 + 15*m.b610", "- m.x82 - m.x85 == 0) m.c23 = Constraint(expr= -", "Constraint(expr= m.x545 == 0) m.c792 = Constraint(expr= m.x546 == 0)", "= Constraint(expr= m.b738 + m.b739 <= 1) m.c1209 = Constraint(expr=", "0) m.c52 = Constraint(expr= m.x181 - m.x190 - m.x193 -", "m.b620 + m.b638 >= 0) m.c1395 = Constraint(expr= - m.b621", "7*m.b702 + m.x792 == 0) m.c940 = Constraint(expr= 4*m.b703 +", "m.x17 - m.x20 - m.x23 == 0) m.c9 = Constraint(expr=", "1) m.c1272 = Constraint(expr= m.b770 + m.b772 <= 1) m.c1273", "- m.x580 - m.x583 == 0) m.c857 = Constraint(expr= m.x527", "= Constraint(expr= m.x191 - m.x560 - m.x563 == 0) m.c906", "0) m.c591 = Constraint(expr= m.x462 == 0) m.c592 = Constraint(expr=", "0) m.c741 = Constraint(expr= m.x525 - 0.994083415506506*m.b666 <= 0) m.c742", "1) m.c1167 = Constraint(expr= m.b716 + m.b718 <= 1) m.c1168", "40*m.b596 <= 0) m.c69 = Constraint(expr= m.x213 - 40*m.b597 <=", "0) m.c511 = Constraint(expr= m.x445 == 0) m.c512 = Constraint(expr=", "Constraint(expr= m.x421 == 0) m.c380 = Constraint(expr= m.x62 - m.x317", "= Constraint(expr= m.b728 + m.b730 <= 1) m.c1189 = Constraint(expr=", "m.x407 + 3.04984759446376*m.b650 <= 3.04984759446376) m.c603 = Constraint(expr= m.x408 +", "+ 2.54515263975353*m.b606 <= 2.54515263975353) m.c160 = Constraint(expr= m.x274 + 2.54515263975353*m.b607", "<= 0) m.c204 = Constraint(expr=(m.x315/(0.001 + 0.999*m.b612) - 1.25*log(1 +", "Constraint(expr=(m.x568/(0.001 + 0.999*m.b673) - 1.25*log(1 + m.x544/(0.001 + 0.999*m.b673)))*(0.001 +", "m.x373 == 0) m.c479 = Constraint(expr= m.x92 - m.x380 -", "m.c1242 = Constraint(expr= m.b755 + m.b757 <= 1) m.c1243 =", "m.b615 - m.b633 >= 0) m.c1435 = Constraint(expr= m.b616 -", "- 
7*m.b763 - 7*m.b764 - 3*m.b765 - 9*m.b766 - 4*m.b767", "m.b725 + m.b726 <= 1) m.c1184 = Constraint(expr= m.b726 +", "+ m.b634 - m.b724 <= 0) m.c1322 = Constraint(expr= m.b635", "Constraint(expr= m.x371 + 1.26558121681553*m.b638 <= 1.26558121681553) m.c489 = Constraint(expr= m.x372", "5*m.b752 + m.x842 == 0) m.c990 = Constraint(expr= 8*m.b753 +", "m.c1313 = Constraint(expr= m.b626 - m.b716 <= 0) m.c1314 =", "m.x540 + 15*m.b669 <= 15) m.c787 = Constraint(expr= m.x541 +", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x788 = Var(within=Reals,bounds=(None,None),initialize=0) m.x789 = Var(within=Reals,bounds=(None,None),initialize=0) m.x790 =", "m.b609 + m.b610 - m.b700 <= 0) m.c1298 = Constraint(expr=", "Constraint(expr= m.b770 + m.b771 <= 1) m.c1274 = Constraint(expr= m.b771", "= Constraint(expr= - m.b615 + m.b633 >= 0) m.c1384 =", "m.x246 = Var(within=Reals,bounds=(0,None),initialize=0) m.x247 = Var(within=Reals,bounds=(0,None),initialize=0) m.x248 = Var(within=Reals,bounds=(0,None),initialize=0) m.x249", "- m.b671 - m.b672 + m.b673 - m.b763 <= 0)", "m.x302 = Var(within=Reals,bounds=(0,None),initialize=0) m.x303 = Var(within=Reals,bounds=(0,None),initialize=0) m.x304 = Var(within=Reals,bounds=(0,None),initialize=0) m.x305", "m.c804 = Constraint(expr= m.x543 - 0.705049913072943*m.b672 <= 0) m.c805 =", "m.b602 + m.b603 - m.b693 <= 0) m.c1291 = Constraint(expr=", "- 2*m.b731 - 5*m.b732 - 2*m.b733 - 3*m.b734 - 4*m.b735", "== 0) m.c59 = Constraint(expr= m.x227 == 0) m.c60 =", "0) m.c1370 = Constraint(expr= m.b683 - m.b773 <= 0) m.c1371", "= Constraint(expr= - m.b619 + m.b637 + m.b640 >= 0)", "m.c279 = Constraint(expr= m.x339 - 1.26558121681553*m.b618 <= 0) m.c280 =", "m.x827 == 0) m.c975 = Constraint(expr= 7*m.b738 + m.x828 ==", "Var(within=Reals,bounds=(0,None),initialize=0) m.x452 = Var(within=Reals,bounds=(0,None),initialize=0) m.x453 = Var(within=Reals,bounds=(0,None),initialize=0) m.x454 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c1197 = 
Constraint(expr= m.b731 + m.b733 <= 1) m.c1198 =", "m.x451 + 9*m.b646 <= 9) m.c557 = Constraint(expr=(m.x452/(0.001 + 0.999*m.b647)", "0.999*m.b674) - 0.9*log(1 + m.x548/(0.001 + 0.999*m.b674)))*(0.001 + 0.999* m.b674)", "Constraint(expr=(m.x454/(0.001 + 0.999*m.b649) - 0.75*log(1 + m.x400/(0.001 + 0.999*m.b649)))*(0.001 +", "Constraint(expr= m.b635 - m.b725 <= 0) m.c1323 = Constraint(expr= -", "m.x331 = Var(within=Reals,bounds=(0,None),initialize=0) m.x332 = Var(within=Reals,bounds=(0,None),initialize=0) m.x333 = Var(within=Reals,bounds=(0,None),initialize=0) m.x334", "m.c571 = Constraint(expr= m.x130 - m.x454 - m.x457 == 0)", "+ m.x346 == 0) m.c287 = Constraint(expr= m.x299 == 0)", "4*m.b703 - 4*m.b704 - 3*m.b705 - 2*m.b706 - 5*m.b707 -", "0) m.c416 = Constraint(expr= m.x65 - m.x329 - m.x335 ==", "m.b678 >= 0) m.c1480 = Constraint(expr= m.b667 - m.b679 >=", "m.c54 = Constraint(expr=(m.x225/(0.001 + 0.999*m.b597) - log(1 + m.x213/(0.001 +", "m.x854 = Var(within=Reals,bounds=(None,None),initialize=0) m.x855 = Var(within=Reals,bounds=(None,None),initialize=0) m.x856 = Var(within=Reals,bounds=(None,None),initialize=0) m.x857", "m.x203 = Var(within=Reals,bounds=(0,None),initialize=0) m.x204 = Var(within=Reals,bounds=(0,None),initialize=0) m.x205 = Var(within=Reals,bounds=(0,None),initialize=0) m.x206", "+ 0.999*m.b612) - 1.25*log(1 + m.x285/(0.001 + 0.999*m.b612)))*(0.001 + 0.999*", "1.11894339953103*m.b650 <= 0) m.c606 = Constraint(expr= m.x459 - 1.11894339953103*m.b651 <=", "- m.b619 <= 0) m.c1036 = Constraint(expr= m.b618 - m.b619", "m.c1140 = Constraint(expr= m.b704 + m.b706 <= 1) m.c1141 =", "= Constraint(expr= - m.b666 + m.b678 >= 0) m.c1465 =", "== 0) m.c535 = Constraint(expr= m.x397 == 0) m.c536 =", "== 0) m.c673 = Constraint(expr= m.x493 == 0) m.c674 =", "<= 0) m.c129 = Constraint(expr= m.x261 - 3.34221486003388*m.b603 <= 0)", "m.b648 + m.b649 - m.b739 <= 0) m.c1337 = Constraint(expr=", "Constraint(expr= m.x295 + 3.34221486003388*m.b616 <= 3.34221486003388) 
m.c251 = Constraint(expr= m.x326", "0.9*m.x296 + m.x344 == 0) m.c285 = Constraint(expr= - 0.9*m.x297", "m.b678 - m.b768 <= 0) m.c1366 = Constraint(expr= - m.b677", "0.75*m.x495 + m.x519 == 0) m.c694 = Constraint(expr= - 0.75*m.x496", "m.x298 - m.x301 == 0) m.c296 = Constraint(expr= m.x71 -", "+ m.x855 == 0) m.c1003 = Constraint(expr= 9*m.b766 + m.x856", "Var(within=Reals,bounds=(0,None),initialize=0) m.x102 = Var(within=Reals,bounds=(0,None),initialize=0) m.x103 = Var(within=Reals,bounds=(0,None),initialize=0) m.x104 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= m.b701 + m.b702 <= 1) m.c1134 = Constraint(expr=", "= Constraint(expr= m.b737 + m.b738 <= 1) m.c1208 = Constraint(expr=", "Constraint(expr= m.x580 - 0.690184503917672*m.b679 <= 0) m.c866 = Constraint(expr= m.x581", "== 0) m.c22 = Constraint(expr= m.x70 - m.x82 - m.x85", "m.x259 = Var(within=Reals,bounds=(0,None),initialize=0) m.x260 = Var(within=Reals,bounds=(0,None),initialize=0) m.x261 = Var(within=Reals,bounds=(0,None),initialize=0) m.x262", "Constraint(expr= m.x466 - 1.18887736200171*m.b655 <= 0) m.c635 = Constraint(expr= m.x467", "<= 1.32154609891348) m.c427 = Constraint(expr= m.x337 + 1.32154609891348*m.b634 <= 1.32154609891348)", "= Constraint(expr= 9*m.b693 + m.x783 == 0) m.c931 = Constraint(expr=", "9) m.c921 = Constraint(expr= m.x594 + 9*m.b684 <= 9) m.c922", "m.c1034 = Constraint(expr= m.b617 - m.b618 <= 0) m.c1035 =", "Constraint(expr= 6*m.b726 + m.x816 == 0) m.c964 = Constraint(expr= 3*m.b727", "= Constraint(expr= m.x515 == 0) m.c756 = Constraint(expr= m.x516 ==", "= Constraint(expr= m.x567 - 0.666992981045719*m.b672 <= 0) m.c811 = Constraint(expr=", "m.x2 = Var(within=Reals,bounds=(0,40),initialize=0) m.x3 = Var(within=Reals,bounds=(0,40),initialize=0) m.x4 = Var(within=Reals,bounds=(0,40),initialize=0) m.x5", "+ 15*m.b682 <= 15) m.c890 = Constraint(expr= m.x584 - 13.5*m.b680", "m.b701 + m.b702 <= 1) m.c1134 = Constraint(expr= m.b701 +", "+ m.x528/(0.001 + 
0.999*m.b678)))*(0.001 + 0.999*m.b678) <= 0) m.c844 =", "0.5*m.x513 + m.x537 == 0) m.c751 = Constraint(expr= - 0.5*m.x514", "= Constraint(expr= m.x453 - 1.04900943706034*m.b648 <= 0) m.c580 = Constraint(expr=", "- m.x586 - m.x589 == 0) m.c884 = Constraint(expr= m.x554", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x356 = Var(within=Reals,bounds=(0,None),initialize=0) m.x357 = Var(within=Reals,bounds=(0,None),initialize=0) m.x358 =", "Constraint(expr= m.x175 - m.x184 - m.x187 == 0) m.c50 =", "+ 0.999* m.b616) <= 0) m.c233 = Constraint(expr= m.x293 ==", "m.c909 = Constraint(expr= m.x210 - m.x591 - m.x594 == 0)", "m.b729 + m.b730 <= 1) m.c1193 = Constraint(expr= m.b731 +", "= Constraint(expr= m.x302 - 15*m.b623 <= 0) m.c327 = Constraint(expr=", "Constraint(expr= m.b662 - m.b664 <= 0) m.c1081 = Constraint(expr= m.b663", "0) m.c14 = Constraint(expr= m.x38 - m.x47 - m.x50 ==", "m.b605) <= 0) m.c135 = Constraint(expr=(m.x267/(0.001 + 0.999*m.b606) - 1.5*log(1", "m.x278 - 15*m.b608 <= 0) m.c198 = Constraint(expr= m.x279 -", "<= 0) m.c437 = Constraint(expr= m.x365 == 0) m.c438 =", "m.x459 - 1.11894339953103*m.b651 <= 0) m.c607 = Constraint(expr= m.x460 -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x9 = Var(within=Reals,bounds=(0,None),initialize=0) m.x10 = Var(within=Reals,bounds=(0,None),initialize=0) m.x11 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x121 = Var(within=Reals,bounds=(0,None),initialize=0) m.x122 = Var(within=Reals,bounds=(0,None),initialize=0) m.x123 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x421 = Var(within=Reals,bounds=(0,None),initialize=0) m.x422 = Var(within=Reals,bounds=(0,None),initialize=0) m.x423 =", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x807 = Var(within=Reals,bounds=(None,None),initialize=0) m.x808 = Var(within=Reals,bounds=(None,None),initialize=0) m.x809 =", "m.x382 - m.x385 == 0) m.c482 = Constraint(expr= m.x119 -", "Constraint(expr= m.b630 - m.b631 <= 0) m.c1049 = Constraint(expr= 
m.b632", "Constraint(expr= m.x144 - m.x477 - m.x480 == 0) m.c676 =", "1) m.c1190 = Constraint(expr= m.b729 + m.b730 <= 1) m.c1191", "m.c1415 = Constraint(expr= m.b602 - m.b614 >= 0) m.c1416 =", "m.c418 = Constraint(expr= m.x67 - m.x331 - m.x337 == 0)", "= Constraint(expr= m.x386 - 9*m.b641 <= 0) m.c519 = Constraint(expr=", "== 0) m.c987 = Constraint(expr= 2*m.b750 + m.x840 == 0)", "Constraint(expr= - m.b607 + m.b619 >= 0) m.c1388 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x194 = Var(within=Reals,bounds=(0,None),initialize=0) m.x195 = Var(within=Reals,bounds=(0,None),initialize=0) m.x196 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c873 = Constraint(expr= m.x558 == 0) m.c874 = Constraint(expr=", "<= 4.45628648004517) m.c126 = Constraint(expr= m.x240 + 4.45628648004517*m.b603 <= 4.45628648004517)", "m.c340 = Constraint(expr=(m.x358/(0.001 + 0.999*m.b628) - 1.1*log(1 + m.x310/(0.001 +", "- m.x299 == 0) m.c294 = Constraint(expr= m.x54 - m.x297", "= Constraint(expr= m.b608 - m.b623 >= 0) m.c1425 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x522 = Var(within=Reals,bounds=(0,None),initialize=0) m.x523 = Var(within=Reals,bounds=(0,None),initialize=0) m.x524 =", "Constraint(expr= m.x293 + 3.34221486003388*m.b614 <= 3.34221486003388) m.c249 = Constraint(expr= m.x294", "0) m.c232 = Constraint(expr=(m.x328/(0.001 + 0.999*m.b616) - 0.9*log(1 + m.x292/(0.001", "Var(within=Binary,bounds=(0,1),initialize=0) m.b599 = Var(within=Binary,bounds=(0,1),initialize=0) m.b600 = Var(within=Binary,bounds=(0,1),initialize=0) m.b601 = Var(within=Binary,bounds=(0,1),initialize=0)", "Constraint(expr=(m.x339/(0.001 + 0.999*m.b618) - log(1 + m.x270/(0.001 + 0.999*m.b618)))*(0.001 +", "m.b714 <= 1) m.c1160 = Constraint(expr= m.b714 + m.b715 <=", "0) m.c352 = Constraint(expr= m.x79 - m.x358 - m.x361 ==", "= Constraint(expr= m.x61 - m.x310 - m.x313 == 0) m.c350", "m.x180 - m.x537 - m.x540 == 0) m.c769 = Constraint(expr=", "<= 0) 
m.c1357 = Constraint(expr= - m.b668 - m.b669 +", "m.c646 = Constraint(expr= m.x487 == 0) m.c647 = Constraint(expr= m.x140", "30*m.b669 <= 0) m.c778 = Constraint(expr= m.x514 - 30*m.b670 <=", "= Constraint(expr= m.x454 - 1.04900943706034*m.b649 <= 0) m.c581 = Constraint(expr=", "Constraint(expr= m.b674 - m.b675 <= 0) m.c1092 = Constraint(expr= m.b674", "m.b667 + m.b679 >= 0) m.c1466 = Constraint(expr= m.b653 -", "m.b677 >= 0) m.c1479 = Constraint(expr= m.b666 - m.b678 >=", "<= 1) m.c1282 = Constraint(expr= m.b774 + m.b775 <= 1)", "0.9*m.x297 + m.x345 == 0) m.c286 = Constraint(expr= - 0.9*m.x298", "m.x487 == 0) m.c653 = Constraint(expr= m.x470 - 1.18887736200171*m.b656 <=", "15*m.b608 <= 15) m.c201 = Constraint(expr= m.x282 + 15*m.b609 <=", "m.x586 == 0) m.c872 = Constraint(expr= m.x557 == 0) m.c873", "Var(within=Binary,bounds=(0,1),initialize=0) m.b681 = Var(within=Binary,bounds=(0,1),initialize=0) m.b682 = Var(within=Binary,bounds=(0,1),initialize=0) m.b683 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.b740 <= 0) m.c1338 = Constraint(expr= - m.b650 + m.b651", "Var(within=Reals,bounds=(None,None),initialize=0) m.x787 = Var(within=Reals,bounds=(None,None),initialize=0) m.x788 = Var(within=Reals,bounds=(None,None),initialize=0) m.x789 = Var(within=Reals,bounds=(None,None),initialize=0)", "= Constraint(expr= m.x44 - m.x278 - m.x281 == 0) m.c183", "30) m.c782 = Constraint(expr= m.x536 - 15*m.b668 <= 0) m.c783", "0) m.c248 = Constraint(expr= m.x293 + 3.34221486003388*m.b614 <= 3.34221486003388) m.c249", "== 0) m.c1003 = Constraint(expr= 9*m.b766 + m.x856 == 0)", "== 0) m.c824 = Constraint(expr= m.x185 - m.x548 - m.x551", "Constraint(expr= - m.b602 + m.b603 - m.b693 <= 0) m.c1291", "- m.x294 == 0) m.c241 = Constraint(expr= m.x52 - m.x292", "m.x10 - m.x220 - m.x223 == 0) m.c92 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x445 = Var(within=Reals,bounds=(0,None),initialize=0) m.x446 = Var(within=Reals,bounds=(0,None),initialize=0) m.x447 = 
Var(within=Reals,bounds=(0,None),initialize=0)", "m.x384 = Var(within=Reals,bounds=(0,None),initialize=0) m.x385 = Var(within=Reals,bounds=(0,None),initialize=0) m.x386 = Var(within=Reals,bounds=(0,None),initialize=0) m.x387", "<= 1.26558121681553) m.c489 = Constraint(expr= m.x372 + 1.26558121681553*m.b639 <= 1.26558121681553)", "0.999*m.b632) <= 0) m.c408 = Constraint(expr=(m.x423/(0.001 + 0.999*m.b633) - log(1", "Constraint(expr= m.x78 - m.x357 - m.x360 == 0) m.c352 =", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b656 = Var(within=Binary,bounds=(0,1),initialize=0) m.b657 = Var(within=Binary,bounds=(0,1),initialize=0) m.b658 =", "0) m.c369 = Constraint(expr= - m.x375 + m.x417 == 0)", "m.c436 = Constraint(expr=(m.x430/(0.001 + 0.999*m.b637) - 0.7*log(1 + m.x364/(0.001 +", "= Constraint(expr= 4*m.b704 + m.x794 == 0) m.c942 = Constraint(expr=", "0 0 # # Nonzero counts # Total const NL", "m.b705 + m.b706 <= 1) m.c1145 = Constraint(expr= m.b707 +", "m.x365 == 0) m.c438 = Constraint(expr= m.x366 == 0) m.c439", "Constraint(expr= m.x397 + 9*m.b646 <= 9) m.c551 = Constraint(expr= m.x446", "0) m.c181 = Constraint(expr= m.x37 - m.x256 - m.x259 ==", "m.x417 == 0) m.c370 = Constraint(expr= - m.x376 + m.x418", "m.x391 = Var(within=Reals,bounds=(0,None),initialize=0) m.x392 = Var(within=Reals,bounds=(0,None),initialize=0) m.x393 = Var(within=Reals,bounds=(0,None),initialize=0) m.x394", "m.b606 <= 0) m.c1023 = Constraint(expr= m.b605 - m.b607 <=", "1.83548069293539) m.c393 = Constraint(expr= m.x324 + 1.83548069293539*m.b630 <= 1.83548069293539) m.c394", "== 0) m.c948 = Constraint(expr= 5*m.b711 + m.x801 == 0)", "m.b727 = Var(within=Binary,bounds=(0,1),initialize=0) m.b728 = Var(within=Binary,bounds=(0,1),initialize=0) m.b729 = Var(within=Binary,bounds=(0,1),initialize=0) m.b730", "- m.x379 == 0) m.c386 = Constraint(expr= m.x110 - m.x416", "== 0) m.c909 = Constraint(expr= m.x210 - m.x591 - m.x594", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x61 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x62 = Var(within=Reals,bounds=(0,None),initialize=0) m.x63 =", "+ m.b726 <= 1) m.c1182 = Constraint(expr= m.b725 + m.b727", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x517 = Var(within=Reals,bounds=(0,None),initialize=0) m.x518 = Var(within=Reals,bounds=(0,None),initialize=0) m.x519 =", "= Constraint(expr= m.x426 + 0.842233385663186*m.b633 <= 0.842233385663186) m.c433 = Constraint(expr=", "m.x480 == 0) m.c676 = Constraint(expr= m.x145 - m.x478 -", "Constraint(expr= m.x229 == 0) m.c62 = Constraint(expr= m.x5 - m.x212", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x583 = Var(within=Reals,bounds=(0,None),initialize=0) m.x584 = Var(within=Reals,bounds=(0,None),initialize=0) m.x585 =", "<= 0) m.c805 = Constraint(expr= m.x544 - 0.705049913072943*m.b673 <= 0)", "Var(within=Binary,bounds=(0,1),initialize=0) m.b642 = Var(within=Binary,bounds=(0,1),initialize=0) m.b643 = Var(within=Binary,bounds=(0,1),initialize=0) m.b644 = Var(within=Binary,bounds=(0,1),initialize=0)", "1) m.c1160 = Constraint(expr= m.b714 + m.b715 <= 1) m.c1161", "m.b662 - m.b664 <= 0) m.c1081 = Constraint(expr= m.b663 -", "m.b718 <= 1) m.c1169 = Constraint(expr= m.b719 + m.b720 <=", "m.c149 = Constraint(expr= m.x242 - 4.45628648004517*m.b605 <= 0) m.c150 =", "0.705049913072943*m.b663 <= 0) m.c715 = Constraint(expr= m.x520 - 0.705049913072943*m.b664 <=", "= Constraint(expr= m.x245 == 0) m.c138 = Constraint(expr= m.x246 ==", "1) m.c1236 = Constraint(expr= m.b752 + m.b754 <= 1) m.c1237", "Constraint(expr= m.x23 - m.x26 - m.x29 - m.x32 == 0)", "= Constraint(expr= m.x589 == 0) m.c878 = Constraint(expr= m.x188 -", "= Constraint(expr= m.x100 - m.x394 - m.x397 == 0) m.c542", "m.b750 + m.b751 <= 1) m.c1235 = Constraint(expr= m.b752 +", "m.b775 <= 1) m.c1282 = Constraint(expr= m.b774 + m.b775 <=", "m.c781 = Constraint(expr= m.x517 + 30*m.b670 <= 30) m.c782 =", "== 1) m.c1374 = Constraint(expr= m.b597 + m.b600 == 1)", "<= 0) m.c247 = Constraint(expr= 
m.x292 - 3.34221486003388*m.b616 <= 0)", "+ 0.999*m.b640) - 0.65*log(1 + m.x382/(0.001 + 0.999*m.b640)))*(0.001 + 0.999*", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x809 = Var(within=Reals,bounds=(None,None),initialize=0) m.x810 = Var(within=Reals,bounds=(None,None),initialize=0) m.x811 =", "m.x550 - m.x553 == 0) m.c827 = Constraint(expr= m.x200 -", "m.c1356 = Constraint(expr= - m.b668 + m.b669 - m.b759 <=", "= Constraint(expr= m.x577 == 0) m.c824 = Constraint(expr= m.x185 -", "m.c595 = Constraint(expr= m.x106 - m.x406 - m.x409 == 0)", "= Constraint(expr= m.b746 + m.b748 <= 1) m.c1225 = Constraint(expr=", "m.x6 - m.x9 == 0) m.c4 = Constraint(expr= m.x4 -", "m.b741 + m.b742 <= 1) m.c1215 = Constraint(expr= m.b740 +", "m.c209 = Constraint(expr= m.x320 == 0) m.c210 = Constraint(expr= m.x321", "m.x830 == 0) m.c978 = Constraint(expr= 8*m.b741 + m.x831 ==", "= Constraint(expr= m.x296 - 15*m.b620 <= 0) m.c300 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x525 = Var(within=Reals,bounds=(0,None),initialize=0) m.x526 = Var(within=Reals,bounds=(0,None),initialize=0) m.x527 =", "m.b715 <= 1) m.c1161 = Constraint(expr= m.b713 + m.b715 <=", "<= 0) m.c633 = Constraint(expr= m.x465 - 1.18887736200171*m.b654 <= 0)", "m.x846 = Var(within=Reals,bounds=(None,None),initialize=0) m.x847 = Var(within=Reals,bounds=(None,None),initialize=0) m.x848 = Var(within=Reals,bounds=(None,None),initialize=0) m.x849", "Var(within=Reals,bounds=(0,None),initialize=0) m.x484 = Var(within=Reals,bounds=(0,None),initialize=0) m.x485 = Var(within=Reals,bounds=(0,None),initialize=0) m.x486 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c996 = Constraint(expr= 6*m.b759 + m.x849 == 0) m.c997 =", "- m.x438 == 0) m.c484 = Constraint(expr= m.x121 - m.x436", "+ 20*m.b629 <= 20) m.c405 = Constraint(expr= m.x420 + 20*m.b630", "- m.b646 <= 0) m.c1063 = Constraint(expr= m.b645 - m.b646", "Constraint(expr= m.b641 - m.b642 <= 0) m.c1059 = Constraint(expr= m.b641", "m.c526 = 
Constraint(expr= m.x442 - 9*m.b643 <= 0) m.c527 =", "= Constraint(expr= m.b698 + m.b699 <= 1) m.c1128 = Constraint(expr=", "m.c1304 = Constraint(expr= m.b617 - m.b707 <= 0) m.c1305 =", "0.705049913072943*m.b664 <= 0.705049913072943) m.c719 = Constraint(expr=(m.x524/(0.001 + 0.999*m.b665) - 1.5*log(1", "Var(within=Reals,bounds=(0,None),initialize=0) m.x278 = Var(within=Reals,bounds=(0,None),initialize=0) m.x279 = Var(within=Reals,bounds=(0,None),initialize=0) m.x280 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b639 - m.b640 <= 0) m.c1058 = Constraint(expr= m.b641 -", "m.x568 = Var(within=Reals,bounds=(0,None),initialize=0) m.x569 = Var(within=Reals,bounds=(0,None),initialize=0) m.x570 = Var(within=Reals,bounds=(0,None),initialize=0) m.x571", "m.c610 = Constraint(expr= m.x463 + 1.11894339953103*m.b652 <= 1.11894339953103) m.c611 =", "2.30162356062425*m.b638 <= 0) m.c498 = Constraint(expr= m.x435 - 2.30162356062425*m.b639 <=", "4*m.b704 + m.x794 == 0) m.c942 = Constraint(expr= 3*m.b705 +", "m.x15 + m.x18 == 0) m.c7 = Constraint(expr= - m.x13", "- m.x172 + 80*m.x194 + 90*m.x195 + 120*m.x196 + 285*m.x197", "Constraint(expr= m.x534 == 0) m.c847 = Constraint(expr= m.x535 == 0)", "equation from pyomo.environ import * model = m = ConcreteModel()", "m.x834 == 0) m.c982 = Constraint(expr= m.b745 + m.x835 ==", "# Nonzero counts # Total const NL DLL # 3373", "Var(within=Reals,bounds=(0,None),initialize=0) m.x364 = Var(within=Reals,bounds=(0,None),initialize=0) m.x365 = Var(within=Reals,bounds=(0,None),initialize=0) m.x366 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x242 = Var(within=Reals,bounds=(0,None),initialize=0) m.x243 = Var(within=Reals,bounds=(0,None),initialize=0) m.x244 =", "0.999* m.b673) <= 0) m.c791 = Constraint(expr= m.x545 == 0)", "m.c1107 = Constraint(expr= m.b686 + m.b688 <= 1) m.c1108 =", "m.c439 = Constraint(expr= m.x367 == 0) m.c440 = Constraint(expr= m.x431", "m.x487 + 
0.78338879230327*m.b658 <= 0.78338879230327) m.c665 = Constraint(expr=(m.x488/(0.001 + 0.999*m.b659)", "m.b692 + m.b693 <= 1) m.c1116 = Constraint(expr= m.b692 +", "m.x189 = Var(within=Reals,bounds=(0,None),initialize=0) m.x190 = Var(within=Reals,bounds=(0,None),initialize=0) m.x191 = Var(within=Reals,bounds=(0,None),initialize=0) m.x192", "m.x432 == 0) m.c442 = Constraint(expr= m.x433 == 0) m.c443", "+ 9*m.b644 <= 9) m.c555 = Constraint(expr= m.x450 + 9*m.b645", "= Constraint(expr= m.b668 - m.b670 <= 0) m.c1087 = Constraint(expr=", "Var(within=Binary,bounds=(0,1),initialize=0) m.b729 = Var(within=Binary,bounds=(0,1),initialize=0) m.b730 = Var(within=Binary,bounds=(0,1),initialize=0) m.b731 = Var(within=Binary,bounds=(0,1),initialize=0)", "Constraint(expr= m.b687 + m.b688 <= 1) m.c1107 = Constraint(expr= m.b686", "m.c1333 = Constraint(expr= - m.b644 - m.b645 + m.b646 -", "1.83548069293539*m.b630 <= 1.83548069293539) m.c394 = Constraint(expr= m.x325 + 1.83548069293539*m.b631 <=", "m.x12 - m.x15 + m.x18 == 0) m.c7 = Constraint(expr=", "m.c1059 = Constraint(expr= m.b641 - m.b643 <= 0) m.c1060 =", "Constraint(expr= m.x384 + 33.5*m.b639 <= 33.5) m.c496 = Constraint(expr= m.x385", "m.c580 = Constraint(expr= m.x454 - 1.04900943706034*m.b649 <= 0) m.c581 =", "- m.x442 - m.x445 == 0) m.c518 = Constraint(expr= m.x386", "Constraint(expr= - 0.9*m.x556 + m.x586 == 0) m.c872 = Constraint(expr=", "- m.b710 <= 0) m.c1308 = Constraint(expr= - m.b620 +", "= Constraint(expr=(m.x225/(0.001 + 0.999*m.b597) - log(1 + m.x213/(0.001 + 0.999*m.b597)))*(0.001", "Constraint(expr= m.x556 - 15*m.b682 <= 0) m.c887 = Constraint(expr= m.x557", "Constraint(expr= m.x377 == 0) m.c375 = Constraint(expr= m.x378 == 0)", "Constraint(expr= m.x404 - 3.04984759446376*m.b650 <= 0) m.c600 = Constraint(expr= m.x405", "Constraint(expr= m.x380 - 33.5*m.b638 <= 0) m.c492 = Constraint(expr= m.x381", "Var(within=Reals,bounds=(0,None),initialize=0) m.x562 = Var(within=Reals,bounds=(0,None),initialize=0) m.x563 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x564 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x301 + 15*m.b622 <= 15) m.c305 = Constraint(expr= m.x344 -", "= Constraint(expr= m.b770 + m.b771 <= 1) m.c1272 = Constraint(expr=", "m.x437 = Var(within=Reals,bounds=(0,None),initialize=0) m.x438 = Var(within=Reals,bounds=(0,None),initialize=0) m.x439 = Var(within=Reals,bounds=(0,None),initialize=0) m.x440", "m.x216 + 40*m.b597 <= 40) m.c73 = Constraint(expr= m.x217 +", "Constraint(expr= m.x557 == 0) m.c873 = Constraint(expr= m.x558 == 0)", "L N X C B # 1486 571 111 804", "<= 1.18887736200171) m.c638 = Constraint(expr=(m.x482/(0.001 + 0.999*m.b656) - log(1 +", "= Constraint(expr= m.x442 - 9*m.b643 <= 0) m.c527 = Constraint(expr=", "1) m.c1106 = Constraint(expr= m.b687 + m.b688 <= 1) m.c1107", "<= 9) m.c336 = Constraint(expr= m.x354 + 9*m.b624 <= 9)", "m.x28 = Var(within=Reals,bounds=(0,None),initialize=0) m.x29 = Var(within=Reals,bounds=(0,None),initialize=0) m.x30 = Var(within=Reals,bounds=(0,None),initialize=0) m.x31", "= Constraint(expr= m.x222 == 0) m.c85 = Constraint(expr= m.x223 ==", "+ 0.940066550763924*m.b662 <= 0.940066550763924) m.c711 = Constraint(expr= m.x498 + 0.940066550763924*m.b663", "Constraint(expr= m.b626 - m.b716 <= 0) m.c1314 = Constraint(expr= -", "m.b618 - m.b636 >= 0) m.c1438 = Constraint(expr= m.b619 -", "- m.b657 >= 0) m.c1468 = Constraint(expr= m.b655 - m.b658", "0) m.c131 = Constraint(expr= m.x263 + 3.34221486003388*m.b602 <= 3.34221486003388) m.c132", "m.c448 = Constraint(expr= m.x118 - m.x430 - m.x433 == 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x14 = Var(within=Reals,bounds=(0,None),initialize=0) m.x15 = Var(within=Reals,bounds=(0,None),initialize=0) m.x16 = Var(within=Reals,bounds=(0,None),initialize=0)", "9*m.b766 - 4*m.b767 - 8*m.b768 - 6*m.b769 - 2*m.b770 -", "m.c1185 = Constraint(expr= m.b725 + m.b727 <= 1) m.c1186 =", "Constraint(expr= m.x67 - m.x328 - m.x334 == 0) m.c245 =", "m.x289 == 0) m.c209 = 
Constraint(expr= m.x320 == 0) m.c210", "4*m.b704 - 3*m.b705 - 2*m.b706 - 5*m.b707 - 6*m.b708 -", "- 4*m.b715 - 3*m.b716 - 9*m.b717 - 3*m.b718 - 7*m.b719", "Var(within=Reals,bounds=(None,None),initialize=0) m.x849 = Var(within=Reals,bounds=(None,None),initialize=0) m.x850 = Var(within=Reals,bounds=(None,None),initialize=0) m.x851 = Var(within=Reals,bounds=(None,None),initialize=0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x156 = Var(within=Reals,bounds=(0,None),initialize=0) m.x157 = Var(within=Reals,bounds=(0,None),initialize=0) m.x158 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x171 - m.x513 - m.x516 == 0) m.c766 = Constraint(expr=", "10*m.b695 + m.x785 == 0) m.c933 = Constraint(expr= 9*m.b696 +", "== 0) m.c936 = Constraint(expr= 10*m.b699 + m.x789 == 0)", "m.c1072 = Constraint(expr= m.b654 - m.b655 <= 0) m.c1073 =", "- m.b716 <= 0) m.c1314 = Constraint(expr= - m.b626 +", "+ m.b673 - m.b763 <= 0) m.c1361 = Constraint(expr= m.b674", "sos2 scont sint # 865 685 180 0 0 0", "1.26558121681553) m.c455 = Constraint(expr= m.x428 - 0.572481933717686*m.b635 <= 0) m.c456", "m.x557 = Var(within=Reals,bounds=(0,None),initialize=0) m.x558 = Var(within=Reals,bounds=(0,None),initialize=0) m.x559 = Var(within=Reals,bounds=(0,None),initialize=0) m.x560", "Equation counts # Total E G L N X C", "m.x410 - m.x413 == 0) m.c621 = Constraint(expr= m.x108 -", "50*m.x129 + 10*m.x130 + 15*m.x131 + 20*m.x132 + 20*m.x133 +", "0) m.c1040 = Constraint(expr= m.b623 - m.b624 <= 0) m.c1041", "= Constraint(expr=(m.x423/(0.001 + 0.999*m.b633) - log(1 + m.x330/(0.001 + 0.999*m.b633)))*(0.001", "m.c423 = Constraint(expr= m.x330 - 1.32154609891348*m.b633 <= 0) m.c424 =", "Constraint(expr=(m.x458/(0.001 + 0.999*m.b650) - 0.8*log(1 + m.x404/(0.001 + 0.999*m.b650)))*(0.001 +", "Constraint(expr= m.x319 - 1.83548069293539*m.b631 <= 0) m.c392 = Constraint(expr= m.x323", "Var(within=Reals,bounds=(0,None),initialize=0) m.x190 = Var(within=Reals,bounds=(0,None),initialize=0) m.x191 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x192 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c1378 = Constraint(expr= - m.b604 + m.b613 + m.b616 >=", "<= 0) m.c188 = Constraint(expr= m.x251 + 4.45628648004517*m.b608 <= 4.45628648004517)", "m.x269 - m.x275 == 0) m.c267 = Constraint(expr= m.x42 -", "Var(within=Reals,bounds=(0,40),initialize=0) m.x4 = Var(within=Reals,bounds=(0,40),initialize=0) m.x5 = Var(within=Reals,bounds=(0,None),initialize=0) m.x6 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x513 = Var(within=Reals,bounds=(0,None),initialize=0) m.x514 = Var(within=Reals,bounds=(0,None),initialize=0) m.x515 =", "m.c50 = Constraint(expr= m.x179 - m.x188 - m.x191 - m.x194", ">= 0) m.c1424 = Constraint(expr= m.b608 - m.b623 >= 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x453 = Var(within=Reals,bounds=(0,None),initialize=0) m.x454 = Var(within=Reals,bounds=(0,None),initialize=0) m.x455 =", "m.c87 = Constraint(expr= m.x234 == 0) m.c88 = Constraint(expr= m.x235", "m.x369 - 1.26558121681553*m.b639 <= 0) m.c487 = Constraint(expr= m.x370 -", "== 0) m.c163 = Constraint(expr= - m.x250 + m.x280 ==", "<= 1) m.c1138 = Constraint(expr= m.b702 + m.b703 <= 1)", "X C B # 1486 571 111 804 0 0", "m.x822 == 0) m.c970 = Constraint(expr= 2*m.b733 + m.x823 ==", "= Constraint(expr= - m.b683 - m.b684 + m.b685 - m.b775", "m.b644 >= 0) m.c1446 = Constraint(expr= m.b624 - m.b645 >=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x514 = Var(within=Reals,bounds=(0,None),initialize=0) m.x515 = Var(within=Reals,bounds=(0,None),initialize=0) m.x516 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x366 == 0) m.c439 = Constraint(expr= m.x367 == 0)", "== 0) m.c296 = Constraint(expr= m.x71 - m.x344 - m.x347", "- m.b604 <= 0) m.c1022 = Constraint(expr= m.b605 - m.b606", "m.x405 = Var(within=Reals,bounds=(0,None),initialize=0) m.x406 = Var(within=Reals,bounds=(0,None),initialize=0) m.x407 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x408", "0.572481933717686) m.c460 = Constraint(expr= m.x433 + 0.572481933717686*m.b637 <= 0.572481933717686) m.c461", "m.c1153 = Constraint(expr= m.b710 + m.b711 <= 1) m.c1154 =", "+ 0.999*m.b600) - 1.2*log(1 + m.x219/(0.001 + 0.999*m.b600)))*(0.001 + 0.999*", "= Constraint(expr= m.b618 - m.b636 >= 0) m.c1438 = Constraint(expr=", "+ m.b655 - m.b745 <= 0) m.c1343 = Constraint(expr= m.b656", "Constraint(expr= m.b632 - m.b634 <= 0) m.c1051 = Constraint(expr= m.b633", "- 4.45628648004517*m.b599 <= 0) m.c102 = Constraint(expr= m.x231 - 4.45628648004517*m.b600", "m.c1090 = Constraint(expr= m.b672 - m.b673 <= 0) m.c1091 =", "m.x488 - m.x491 == 0) m.c678 = Constraint(expr= m.x150 -", "0) m.c702 = Constraint(expr= m.x162 - m.x495 - m.x498 ==", "m.b672 >= 0) m.c1474 = Constraint(expr= m.b664 - m.b673 >=", "m.c1390 = Constraint(expr= - m.b619 + m.b637 + m.b640 >=", "= Var(within=Reals,bounds=(0,30),initialize=0) m.x38 = Var(within=Reals,bounds=(0,None),initialize=0) m.x39 = Var(within=Reals,bounds=(0,None),initialize=0) m.x40 =", "m.x278 == 0) m.c162 = Constraint(expr= - m.x249 + m.x279", "0) m.c479 = Constraint(expr= m.x92 - m.x380 - m.x383 ==", "= Constraint(expr=(m.x483/(0.001 + 0.999*m.b657) - log(1 + m.x471/(0.001 + 0.999*m.b657)))*(0.001", "Constraint(expr= m.b758 + m.b759 <= 1) m.c1250 = Constraint(expr= m.b759", "m.c1132 = Constraint(expr= m.b699 + m.b700 <= 1) m.c1133 =", "m.c990 = Constraint(expr= 8*m.b753 + m.x843 == 0) m.c991 =", "m.x314 - m.x320 == 0) m.c216 = Constraint(expr= m.x63 -", "= Constraint(expr= m.b698 + m.b700 <= 1) m.c1132 = Constraint(expr=", "<= 1) m.c1228 = Constraint(expr= m.b747 + m.b748 <= 1)", "0) m.c397 = Constraint(expr= m.x376 - 20*m.b631 <= 0) m.c398", "m.b629 - m.b630 + m.b631 - m.b721 <= 0) m.c1319", "m.b669 - m.b684 >= 0) m.c1486 = Constraint(expr= m.b670 -", "m.b762 <= 1) m.c1256 = Constraint(expr= m.b762 + m.b763 <=", "m.b680 = Var(within=Binary,bounds=(0,1),initialize=0) m.b681 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b682 = Var(within=Binary,bounds=(0,1),initialize=0) m.b683", "== 0) m.c318 = Constraint(expr= m.x354 == 0) m.c319 =", "- m.x195 == 0) m.c52 = Constraint(expr= m.x181 - m.x190", "- m.b743 <= 0) m.c1341 = Constraint(expr= - m.b653 +", "m.x252 + 4.45628648004517*m.b609 <= 4.45628648004517) m.c190 = Constraint(expr= m.x253 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x455 = Var(within=Reals,bounds=(0,None),initialize=0) m.x456 = Var(within=Reals,bounds=(0,None),initialize=0) m.x457 =", "m.x369 - m.x372 == 0) m.c478 = Constraint(expr= m.x85 -", "0) m.c532 = Constraint(expr= - m.x394 + m.x448 == 0)", "m.x332 = Var(within=Reals,bounds=(0,None),initialize=0) m.x333 = Var(within=Reals,bounds=(0,None),initialize=0) m.x334 = Var(within=Reals,bounds=(0,None),initialize=0) m.x335", "m.x227 == 0) m.c66 = Constraint(expr= m.x12 - m.x225 -", "Constraint(expr= m.b668 - m.b683 >= 0) m.c1485 = Constraint(expr= m.b669", "== 0) m.c23 = Constraint(expr= - m.x71 - m.x89 +", "== 0) m.c449 = Constraint(expr= m.x362 - 1.26558121681553*m.b635 <= 0)", "+ m.x279 == 0) m.c166 = Constraint(expr= - 0.5*m.x256 +", ">= 0) m.c1461 = Constraint(expr= - m.b663 + m.b672 +", "= Constraint(expr= m.x250 - 4.45628648004517*m.b610 <= 0) m.c188 = Constraint(expr=", "m.x76 - m.x352 - m.x355 == 0) m.c326 = Constraint(expr=", "0.999*m.b619) - log(1 + m.x271/(0.001 + 0.999*m.b619)))*(0.001 + 0.999*m.b619) <=", "m.x537 - m.x540 == 0) m.c769 = Constraint(expr= m.x181 -", "<= 2.54515263975353) m.c161 = Constraint(expr= - m.x248 + m.x278 ==", "<= 0) m.c771 = Constraint(expr= m.x507 - 0.940066550763924*m.b669 <= 0)", "Constraint(expr= m.b608 - m.b626 >= 0) m.c1428 = Constraint(expr= m.b609", "0) m.c62 = Constraint(expr= m.x5 - m.x212 - m.x215 ==", "m.x503 == 0) m.c723 = Constraint(expr= m.x504 == 0) m.c724", "Var(within=Reals,bounds=(0,None),initialize=0) m.x103 = Var(within=Reals,bounds=(0,None),initialize=0) m.x104 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x105 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.b627 - m.b628 <= 0) m.c1046 = Constraint(expr= m.b629", "m.x848 = Var(within=Reals,bounds=(None,None),initialize=0) m.x849 = Var(within=Reals,bounds=(None,None),initialize=0) m.x850 = Var(within=Reals,bounds=(None,None),initialize=0) m.x851", "+ 2.54515263975353*m.b607 <= 2.54515263975353) m.c161 = Constraint(expr= - m.x248 +", "0) m.c909 = Constraint(expr= m.x210 - m.x591 - m.x594 ==", "m.c211 = Constraint(expr= m.x322 == 0) m.c212 = Constraint(expr= m.x47", "0) m.c887 = Constraint(expr= m.x557 + 15*m.b680 <= 15) m.c888", "m.x544 = Var(within=Reals,bounds=(0,None),initialize=0) m.x545 = Var(within=Reals,bounds=(0,None),initialize=0) m.x546 = Var(within=Reals,bounds=(0,None),initialize=0) m.x547", "Var(within=Reals,bounds=(None,None),initialize=0) m.x853 = Var(within=Reals,bounds=(None,None),initialize=0) m.x854 = Var(within=Reals,bounds=(None,None),initialize=0) m.x855 = Var(within=Reals,bounds=(None,None),initialize=0)", "m.b630 - m.b631 <= 0) m.c1049 = Constraint(expr= m.b632 -", "= Constraint(expr= m.x351 - 9*m.b624 <= 0) m.c334 = Constraint(expr=", "m.x238 - m.x241 == 0) m.c119 = Constraint(expr= m.x38 -", "Constraint(expr= m.x257 + 30*m.b608 <= 30) m.c195 = Constraint(expr= m.x258", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b609 = Var(within=Binary,bounds=(0,1),initialize=0) m.b610 = Var(within=Binary,bounds=(0,1),initialize=0) m.b611 =", "m.c920 = Constraint(expr= m.x593 + 9*m.b683 <= 9) m.c921 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x56 = Var(within=Reals,bounds=(0,None),initialize=0) m.x57 = Var(within=Reals,bounds=(0,None),initialize=0) m.x58 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c915 = Constraint(expr= m.x564 + 15*m.b684 <= 15) m.c916 =", "m.x354 == 0) m.c319 = Constraint(expr= m.x355 == 0) m.c320", "== 0) m.c924 = Constraint(expr= 4*m.b687 + m.x777 == 0)", "Constraint(expr= - m.b663 + m.b672 + 
m.b675 >= 0) m.c1462", "= Constraint(expr= m.x403 == 0) m.c563 = Constraint(expr= m.x455 ==", "Var(within=Reals,bounds=(0,None),initialize=0) m.x472 = Var(within=Reals,bounds=(0,None),initialize=0) m.x473 = Var(within=Reals,bounds=(0,None),initialize=0) m.x474 = Var(within=Reals,bounds=(0,None),initialize=0)", "- m.b647 - m.b648 + m.b649 - m.b739 <= 0)", "= Constraint(expr=(m.x435/(0.001 + 0.999*m.b639) - 0.65*log(1 + m.x381/(0.001 + 0.999*m.b639)))*(0.001", "== 0) m.c292 = Constraint(expr= m.x349 == 0) m.c293 =", "= Constraint(expr= - m.b625 + m.b643 + m.b646 >= 0)", "= Constraint(expr= m.x232 - 4.45628648004517*m.b601 <= 0) m.c104 = Constraint(expr=", "= Constraint(expr= m.b669 - m.b670 <= 0) m.c1088 = Constraint(expr=", "<= 0) m.c736 = Constraint(expr= m.x502 - 0.940066550763924*m.b667 <= 0)", "Constraint(expr= m.b602 - m.b604 <= 0) m.c1021 = Constraint(expr= m.b603", "- m.b766 <= 0) m.c1364 = Constraint(expr= m.b677 - m.b767", "m.x74 - m.x95 - m.x98 == 0) m.c27 = Constraint(expr=", "+ 9*m.b684 <= 9) m.c922 = Constraint(expr= m.x595 + 9*m.b685", "Constraint(expr= - m.b644 - m.b645 + m.b646 - m.b736 <=", "m.c305 = Constraint(expr= m.x344 - 13.5*m.b620 <= 0) m.c306 =", "= Constraint(expr= m.b764 + m.b765 <= 1) m.c1262 = Constraint(expr=", "0.999*m.b676) - 0.9*log(1 + m.x550/(0.001 + 0.999*m.b676)))*(0.001 + 0.999* m.b676)", "m.c730 = Constraint(expr= m.x166 - m.x502 - m.x505 == 0)", "Constraint(expr= 5*m.b686 + m.x776 == 0) m.c924 = Constraint(expr= 4*m.b687", "m.x799 = Var(within=Reals,bounds=(None,None),initialize=0) m.x800 = Var(within=Reals,bounds=(None,None),initialize=0) m.x801 = Var(within=Reals,bounds=(None,None),initialize=0) m.x802", "== 0) m.c749 = Constraint(expr= - 0.5*m.x512 + m.x536 ==", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x28 = Var(within=Reals,bounds=(0,None),initialize=0) m.x29 = Var(within=Reals,bounds=(0,None),initialize=0) m.x30 =", "m.c343 = Constraint(expr= m.x313 == 0) m.c344 = Constraint(expr= m.x359", "m.b769 <= 1) m.c1271 
= Constraint(expr= m.b770 + m.b771 <=", "- m.x562 - m.x565 == 0) m.c908 = Constraint(expr= m.x209", "m.x83 == 0) m.c21 = Constraint(expr= m.x69 - m.x81 -", "Constraint(expr= m.x455 == 0) m.c564 = Constraint(expr= m.x456 == 0)", "+ m.x809 == 0) m.c957 = Constraint(expr= 2*m.b720 + m.x810", "m.x21 - m.x24 == 0) m.c10 = Constraint(expr= m.x19 -", "m.x55 = Var(within=Reals,bounds=(0,None),initialize=0) m.x56 = Var(within=Reals,bounds=(0,None),initialize=0) m.x57 = Var(within=Reals,bounds=(0,None),initialize=0) m.x58", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x375 = Var(within=Reals,bounds=(0,None),initialize=0) m.x376 = Var(within=Reals,bounds=(0,None),initialize=0) m.x377 =", "- m.x30 - m.x33 == 0) m.c13 = Constraint(expr= m.x25", "1) m.c1200 = Constraint(expr= m.b734 + m.b736 <= 1) m.c1201", "Constraint(expr= m.b707 + m.b708 <= 1) m.c1146 = Constraint(expr= m.b707", "Constraint(expr= m.x254 - 30*m.b608 <= 0) m.c192 = Constraint(expr= m.x255", "m.b672 - m.b673 <= 0) m.c1091 = Constraint(expr= m.b674 -", "- m.b642 <= 0) m.c1059 = Constraint(expr= m.b641 - m.b643", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x194 = Var(within=Reals,bounds=(0,None),initialize=0) m.x195 = Var(within=Reals,bounds=(0,None),initialize=0) m.x196 =", "Constraint(expr= - m.x376 + m.x418 == 0) m.c371 = Constraint(expr=", "- m.x217 == 0) m.c65 = Constraint(expr= m.x11 - m.x224", "Var(within=Reals,bounds=(0,None),initialize=0) m.x548 = Var(within=Reals,bounds=(0,None),initialize=0) m.x549 = Var(within=Reals,bounds=(0,None),initialize=0) m.x550 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x64 - m.x319 - m.x325 == 0) m.c383 = Constraint(expr=", "1.11894339953103) m.c610 = Constraint(expr= m.x463 + 1.11894339953103*m.b652 <= 1.11894339953103) m.c611", "+ 3.34221486003388*m.b611 <= 3.34221486003388) m.c222 = Constraint(expr= m.x288 + 3.34221486003388*m.b612", "<= 1) m.c1159 = Constraint(expr= m.b713 + m.b714 <= 1)", "m.x554 - 15*m.b680 <= 0) m.c885 = Constraint(expr= m.x555 
-", "m.x777 == 0) m.c925 = Constraint(expr= 6*m.b688 + m.x778 ==", "Constraint(expr= - 0.6*m.x304 + m.x352 == 0) m.c314 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x228 = Var(within=Reals,bounds=(0,None),initialize=0) m.x229 = Var(within=Reals,bounds=(0,None),initialize=0) m.x230 = Var(within=Reals,bounds=(0,None),initialize=0)", "3.34221486003388) m.c251 = Constraint(expr= m.x326 - 1.32154609891348*m.b614 <= 0) m.c252", "m.b680 - m.b682 <= 0) m.c1099 = Constraint(expr= m.b681 -", "<= 1.18887736200171) m.c685 = Constraint(expr= m.x481 + 1.18887736200171*m.b661 <= 1.18887736200171)", "- m.b628 >= 0) m.c1430 = Constraint(expr= m.b611 - m.b629", "- m.b758 <= 0) m.c1356 = Constraint(expr= - m.b668 +", "m.c38 = Constraint(expr= - m.x146 - m.x149 + m.x152 ==", "m.b713 <= 0) m.c1311 = Constraint(expr= - m.b623 + m.b624", "Constraint(expr= m.x330 - 1.32154609891348*m.b633 <= 0) m.c424 = Constraint(expr= m.x331", "Constraint(expr= m.b686 + m.b688 <= 1) m.c1105 = Constraint(expr= m.b686", "Constraint(expr= m.x44 - m.x53 - m.x56 - m.x59 == 0)", "Constraint(expr=(m.x231/(0.001 + 0.999*m.b600) - 1.2*log(1 + m.x219/(0.001 + 0.999*m.b600)))*(0.001 +", "m.b729 + m.b730 <= 1) m.c1191 = Constraint(expr= m.b728 +", "= Constraint(expr= 2*m.b733 + m.x823 == 0) m.c971 = Constraint(expr=", "m.c603 = Constraint(expr= m.x408 + 3.04984759446376*m.b651 <= 3.04984759446376) m.c604 =", "m.b718 <= 1) m.c1168 = Constraint(expr= m.b717 + m.b718 <=", "== 0) m.c368 = Constraint(expr= - m.x374 + m.x416 ==", "1) m.c1197 = Constraint(expr= m.b731 + m.b733 <= 1) m.c1198", "Var(within=Reals,bounds=(0,None),initialize=0) m.x420 = Var(within=Reals,bounds=(0,None),initialize=0) m.x421 = Var(within=Reals,bounds=(0,None),initialize=0) m.x422 = Var(within=Reals,bounds=(0,None),initialize=0)", "Var(within=Reals,bounds=(None,None),initialize=0) m.x793 = Var(within=Reals,bounds=(None,None),initialize=0) m.x794 = Var(within=Reals,bounds=(None,None),initialize=0) m.x795 = 
Var(within=Reals,bounds=(None,None),initialize=0)", "m.x518 - m.x521 == 0) m.c705 = Constraint(expr= m.x174 -", "m.x241 == 0) m.c119 = Constraint(expr= m.x38 - m.x260 -", "m.c635 = Constraint(expr= m.x467 + 1.18887736200171*m.b653 <= 1.18887736200171) m.c636 =", "+ 0.999* m.b605) <= 0) m.c135 = Constraint(expr=(m.x267/(0.001 + 0.999*m.b606)", "- m.b734 <= 0) m.c1332 = Constraint(expr= - m.b644 +", "m.c5 = Constraint(expr= - m.x11 - m.x14 + m.x17 ==", "Constraint(expr= m.b683 - m.b684 <= 0) m.c1101 = Constraint(expr= m.b683", "m.c960 = Constraint(expr= m.b723 + m.x813 == 0) m.c961 =", "m.x253 + 4.45628648004517*m.b610 <= 4.45628648004517) m.c191 = Constraint(expr= m.x254 -", "<= 1) m.c1175 = Constraint(expr= m.b722 + m.b723 <= 1)", "m.x469 = Var(within=Reals,bounds=(0,None),initialize=0) m.x470 = Var(within=Reals,bounds=(0,None),initialize=0) m.x471 = Var(within=Reals,bounds=(0,None),initialize=0) m.x472", "m.x780 = Var(within=Reals,bounds=(None,None),initialize=0) m.x781 = Var(within=Reals,bounds=(None,None),initialize=0) m.x782 = Var(within=Reals,bounds=(None,None),initialize=0) m.x783", "Constraint(expr= m.x282 + 15*m.b609 <= 15) m.c202 = Constraint(expr= m.x283", "- m.b650 >= 0) m.c1452 = Constraint(expr= m.b627 - m.b651", "+ m.b599 == 1) m.c1374 = Constraint(expr= m.b597 + m.b600", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x256 = Var(within=Reals,bounds=(0,None),initialize=0) m.x257 = Var(within=Reals,bounds=(0,None),initialize=0) m.x258 =", "m.c488 = Constraint(expr= m.x371 + 1.26558121681553*m.b638 <= 1.26558121681553) m.c489 =", "1.83548069293539*m.b611 <= 0) m.c225 = Constraint(expr= m.x315 - 1.83548069293539*m.b612 <=", "= Constraint(expr= m.x59 - m.x308 - m.x311 == 0) m.c348", "- m.b645 <= 0) m.c1062 = Constraint(expr= m.b644 - m.b646", "= Constraint(expr= m.b672 - m.b673 <= 0) m.c1091 = Constraint(expr=", "m.x16 = Var(within=Reals,bounds=(0,None),initialize=0) m.x17 = Var(within=Reals,bounds=(0,None),initialize=0) m.x18 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x19", "0) m.c1301 = Constraint(expr= m.b614 - m.b704 <= 0) m.c1302", "0) m.c506 = Constraint(expr= m.x389 == 0) m.c507 = Constraint(expr=", "0) m.c1418 = Constraint(expr= m.b605 - m.b617 >= 0) m.c1419", "m.c189 = Constraint(expr= m.x252 + 4.45628648004517*m.b609 <= 4.45628648004517) m.c190 =", "m.c275 = Constraint(expr= m.x275 + 2.54515263975353*m.b617 <= 2.54515263975353) m.c276 =", "m.x277 == 0) m.c269 = Constraint(expr= m.x68 - m.x338 -", "m.b602 - m.b603 <= 0) m.c1020 = Constraint(expr= m.b602 -", "= Constraint(expr= m.b719 + m.b720 <= 1) m.c1170 = Constraint(expr=", "- m.x155 - m.x158 == 0) m.c42 = Constraint(expr= m.x153", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x373 = Var(within=Reals,bounds=(0,None),initialize=0) m.x374 = Var(within=Reals,bounds=(0,None),initialize=0) m.x375 =", "- log(1 + m.x470/(0.001 + 0.999*m.b656)))*(0.001 + 0.999*m.b656) <= 0)", "+ m.x502/(0.001 + 0.999*m.b667)))*(0.001 + 0.999* m.b667) <= 0) m.c722", "1) m.c1267 = Constraint(expr= m.b767 + m.b768 <= 1) m.c1268", "= Constraint(expr= m.x414 + 3.04984759446376*m.b654 <= 3.04984759446376) m.c631 = Constraint(expr=", "+ 0.999*m.b633) - log(1 + m.x330/(0.001 + 0.999*m.b633)))*(0.001 + 0.999*m.b633)", "m.c408 = Constraint(expr=(m.x423/(0.001 + 0.999*m.b633) - log(1 + m.x330/(0.001 +", "m.x382 - 33.5*m.b640 <= 0) m.c494 = Constraint(expr= m.x383 +", "= Constraint(expr= 2*m.b725 + m.x815 == 0) m.c963 = Constraint(expr=", "0.999*m.b600)))*(0.001 + 0.999* m.b600) <= 0) m.c82 = Constraint(expr=(m.x232/(0.001 +", "m.b684 >= 0) m.c1486 = Constraint(expr= m.b670 - m.b685 >=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x49 = Var(within=Reals,bounds=(0,None),initialize=0) m.x50 = Var(within=Reals,bounds=(0,None),initialize=0) m.x51 =", "m.x318 - m.x324 == 0) m.c382 = Constraint(expr= m.x64 -", "15*m.b682 <= 15) m.c890 = Constraint(expr= m.x584 - 13.5*m.b680 <=", "m.x357 = Var(within=Reals,bounds=(0,None),initialize=0) m.x358 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x359 = Var(within=Reals,bounds=(0,None),initialize=0) m.x360", "<= 40) m.c73 = Constraint(expr= m.x217 + 40*m.b598 <= 40)", "<= 3.34221486003388) m.c133 = Constraint(expr= m.x265 + 3.34221486003388*m.b604 <= 3.34221486003388)", "m.b678 <= 0) m.c1095 = Constraint(expr= m.b677 - m.b679 <=", "m.x575 == 0) m.c822 = Constraint(expr= m.x576 == 0) m.c823", "0) m.c1284 = Constraint(expr= - m.b596 + m.b597 - m.b687", "m.c498 = Constraint(expr= m.x435 - 2.30162356062425*m.b639 <= 0) m.c499 =", "m.c566 = Constraint(expr= m.x101 - m.x398 - m.x401 == 0)", "- 13.5*m.b680 <= 0) m.c891 = Constraint(expr= m.x585 - 13.5*m.b681", "5*m.b747 + m.x837 == 0) m.c985 = Constraint(expr= 2*m.b748 +", "== 0) m.c997 = Constraint(expr= 3*m.b760 + m.x850 == 0)", "m.x487 = Var(within=Reals,bounds=(0,None),initialize=0) m.x488 = Var(within=Reals,bounds=(0,None),initialize=0) m.x489 = Var(within=Reals,bounds=(0,None),initialize=0) m.x490", "== 0) m.c561 = Constraint(expr= m.x402 == 0) m.c562 =", "m.x62 - m.x314 - m.x320 == 0) m.c216 = Constraint(expr=", "= Constraint(expr= m.x473 + 1.18887736200171*m.b656 <= 1.18887736200171) m.c657 = Constraint(expr=", "Constraint(expr= m.x84 - m.x369 - m.x372 == 0) m.c478 =", "= Constraint(expr= m.x390 + 9*m.b642 <= 9) m.c523 = Constraint(expr=", "== 0) m.c25 = Constraint(expr= - m.x73 - m.x91 +", "== 0) m.c732 = Constraint(expr= m.x177 - m.x525 - m.x531", "m.x163 - m.x496 - m.x499 == 0) m.c704 = Constraint(expr=", "0.999*m.b640)))*(0.001 + 0.999* m.b640) <= 0) m.c467 = Constraint(expr= m.x371", "+ m.x839 == 0) m.c987 = Constraint(expr= 2*m.b750 + m.x840", "m.b728 + m.b729 <= 1) m.c1190 = Constraint(expr= m.b729 +", "Constraint(expr= m.x353 + 9*m.b623 <= 9) m.c336 = Constraint(expr= m.x354", "0) m.c534 = Constraint(expr= m.x396 == 0) m.c535 = Constraint(expr=", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b612 = Var(within=Binary,bounds=(0,1),initialize=0) m.b613 = Var(within=Binary,bounds=(0,1),initialize=0) 
m.b614 =", "= Constraint(expr= m.b654 - m.b660 >= 0) m.c1471 = Constraint(expr=", "m.b763 = Var(within=Binary,bounds=(0,1),initialize=0) m.b764 = Var(within=Binary,bounds=(0,1),initialize=0) m.b765 = Var(within=Binary,bounds=(0,1),initialize=0) m.b766", "+ 0.999* m.b638) <= 0) m.c462 = Constraint(expr=(m.x435/(0.001 + 0.999*m.b639)", "Constraint(expr= m.x504 + 0.940066550763924*m.b666 <= 0.940066550763924) m.c739 = Constraint(expr= m.x505", "- 2*m.x37 - 10*m.x86 - 5*m.x87 - 5*m.x88 - 5*m.x89", "m.b632 >= 0) m.c1383 = Constraint(expr= - m.b615 + m.b633", "m.b640 <= 0) m.c1058 = Constraint(expr= m.b641 - m.b642 <=", "0) m.c811 = Constraint(expr= m.x568 - 0.666992981045719*m.b673 <= 0) m.c812", "<= 0.940066550763924) m.c713 = Constraint(expr= m.x518 - 0.705049913072943*m.b662 <= 0)", "- 6*m.b769 - 2*m.b770 - m.b771 - 3*m.b772 - 8*m.b773", "Var(within=Binary,bounds=(0,1),initialize=0) m.b660 = Var(within=Binary,bounds=(0,1),initialize=0) m.b661 = Var(within=Binary,bounds=(0,1),initialize=0) m.b662 = Var(within=Binary,bounds=(0,1),initialize=0)", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b748 = Var(within=Binary,bounds=(0,1),initialize=0) m.b749 = Var(within=Binary,bounds=(0,1),initialize=0) m.b750 =", "m.c510 = Constraint(expr= m.x444 == 0) m.c511 = Constraint(expr= m.x445", "m.c1033 = Constraint(expr= m.b615 - m.b616 <= 0) m.c1034 =", "Constraint(expr= m.b666 - m.b667 <= 0) m.c1085 = Constraint(expr= m.b668", "m.c1385 = Constraint(expr= - m.b605 + m.b617 >= 0) m.c1386", "m.b626) <= 0) m.c339 = Constraint(expr=(m.x357/(0.001 + 0.999*m.b627) - 1.1*log(1", "m.x532 + 0.994083415506506*m.b667 <= 0.994083415506506) m.c746 = Constraint(expr= - m.x506", "Constraint(expr= m.x139 - m.x142 - m.x145 == 0) m.c38 =", "== 0) m.c319 = Constraint(expr= m.x355 == 0) m.c320 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x251 = Var(within=Reals,bounds=(0,None),initialize=0) m.x252 = Var(within=Reals,bounds=(0,None),initialize=0) m.x253 =", "- m.b695 <= 0) m.c1293 = 
Constraint(expr= - m.b605 +", "m.b616 >= 0) m.c1379 = Constraint(expr= - m.b611 + m.b629", "Constraint(expr= - m.b668 + m.b669 - m.b759 <= 0) m.c1357", "3.04984759446376*m.b628 <= 0) m.c362 = Constraint(expr= m.x359 + 3.04984759446376*m.b626 <=", "m.b763 <= 1) m.c1259 = Constraint(expr= m.b764 + m.b765 <=", "0.999*m.b600) - 1.2*log(1 + m.x219/(0.001 + 0.999*m.b600)))*(0.001 + 0.999* m.b600)", "Constraint(expr= m.b764 + m.b765 <= 1) m.c1262 = Constraint(expr= m.b765", "Var(within=Reals,bounds=(0,None),initialize=0) m.x100 = Var(within=Reals,bounds=(0,None),initialize=0) m.x101 = Var(within=Reals,bounds=(0,None),initialize=0) m.x102 = Var(within=Reals,bounds=(0,None),initialize=0)", "- m.b649 <= 0) m.c1066 = Constraint(expr= m.b648 - m.b649", "Constraint(expr=(m.x232/(0.001 + 0.999*m.b601) - 1.2*log(1 + m.x220/(0.001 + 0.999*m.b601)))*(0.001 +", "m.b727 <= 1) m.c1186 = Constraint(expr= m.b726 + m.b727 <=", "m.b745 <= 1) m.c1223 = Constraint(expr= m.b746 + m.b747 <=", "9) m.c555 = Constraint(expr= m.x450 + 9*m.b645 <= 9) m.c556", "m.x524 - m.x530 == 0) m.c732 = Constraint(expr= m.x177 -", "m.c1392 = Constraint(expr= - m.b609 + m.b621 + m.b624 +", "40*m.b600 <= 0) m.c97 = Constraint(expr= m.x220 - 40*m.b601 <=", "1.25*log(1 + m.x542/(0.001 + 0.999*m.b671)))*(0.001 + 0.999* m.b671) <= 0)", "Constraint(expr= m.x578 - 0.690184503917672*m.b677 <= 0) m.c864 = Constraint(expr= m.x579", "m.c212 = Constraint(expr= m.x47 - m.x284 - m.x287 == 0)", "m.c1047 = Constraint(expr= m.b629 - m.b631 <= 0) m.c1048 =", "- m.x258 == 0) m.c181 = Constraint(expr= m.x37 - m.x256", "<= 0) m.c885 = Constraint(expr= m.x555 - 15*m.b681 <= 0)", "- m.x52 == 0) m.c17 = Constraint(expr= m.x44 - m.x53", "3.34221486003388) m.c132 = Constraint(expr= m.x264 + 3.34221486003388*m.b603 <= 3.34221486003388) m.c133", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x234 = Var(within=Reals,bounds=(0,None),initialize=0) m.x235 = Var(within=Reals,bounds=(0,None),initialize=0) m.x236 =", "= Constraint(expr= 
m.x60 - m.x309 - m.x312 == 0) m.c349", "Constraint(expr= m.x485 + 0.78338879230327*m.b656 <= 0.78338879230327) m.c663 = Constraint(expr= m.x486", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x842 = Var(within=Reals,bounds=(None,None),initialize=0) m.x843 = Var(within=Reals,bounds=(None,None),initialize=0) m.x844 =", "m.x130 - m.x454 - m.x457 == 0) m.c572 = Constraint(expr=", "m.x367 = Var(within=Reals,bounds=(0,None),initialize=0) m.x368 = Var(within=Reals,bounds=(0,None),initialize=0) m.x369 = Var(within=Reals,bounds=(0,None),initialize=0) m.x370", "m.x849 = Var(within=Reals,bounds=(None,None),initialize=0) m.x850 = Var(within=Reals,bounds=(None,None),initialize=0) m.x851 = Var(within=Reals,bounds=(None,None),initialize=0) m.x852", "m.x569 == 0) m.c801 = Constraint(expr= m.x198 - m.x567 -", "m.x551 == 0) m.c825 = Constraint(expr= m.x186 - m.x549 -", "m.x303 - m.x306 == 0) m.c322 = Constraint(expr= m.x58 -", "<= 0) m.c845 = Constraint(expr= m.x533 == 0) m.c846 =", "+ m.x590 == 0) m.c897 = Constraint(expr= - 0.6*m.x561 +", "- m.b617 + m.b635 + m.b638 >= 0) m.c1389 =", "20*m.b630 <= 20) m.c406 = Constraint(expr= m.x421 + 20*m.b631 <=", "+ m.b757 <= 1) m.c1246 = Constraint(expr= m.b756 + m.b757", "= Constraint(expr= m.b626 - m.b716 <= 0) m.c1314 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x586 = Var(within=Reals,bounds=(0,None),initialize=0) m.x587 = Var(within=Reals,bounds=(0,None),initialize=0) m.x588 =", "removed 1 variable and 1 equation from pyomo.environ import *", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b770 = Var(within=Binary,bounds=(0,1),initialize=0) m.b771 = Var(within=Binary,bounds=(0,1),initialize=0) m.b772 =", "- m.b688 <= 0) m.c1286 = Constraint(expr= m.b599 - m.b689", "m.c202 = Constraint(expr= m.x283 + 15*m.b610 <= 15) m.c203 =", "m.c588 = Constraint(expr= m.x408 == 0) m.c589 = Constraint(expr= m.x409", "m.x416 - 20*m.b629 <= 0) m.c402 = Constraint(expr= m.x417 -", 
"Var(within=Binary,bounds=(0,1),initialize=0) m.b694 = Var(within=Binary,bounds=(0,1),initialize=0) m.b695 = Var(within=Binary,bounds=(0,1),initialize=0) m.b696 = Var(within=Binary,bounds=(0,1),initialize=0)", "== 0) m.c50 = Constraint(expr= m.x179 - m.x188 - m.x191", "- 20*m.b630 <= 0) m.c397 = Constraint(expr= m.x376 - 20*m.b631", "m.c1161 = Constraint(expr= m.b713 + m.b715 <= 1) m.c1162 =", "- m.b597 + m.b598 - m.b688 <= 0) m.c1286 =", "m.c1348 = Constraint(expr= - m.b659 - m.b660 + m.b661 -", "m.x338 - 1.26558121681553*m.b617 <= 0) m.c279 = Constraint(expr= m.x339 -", "= Constraint(expr= m.x280 - 15*m.b610 <= 0) m.c200 = Constraint(expr=", "Constraint(expr= m.x74 - m.x350 - m.x353 == 0) m.c324 =", "- m.b684 + m.b685 - m.b775 <= 0) m.c1373 =", "- 0.994083415506506*m.b679 <= 0) m.c860 = Constraint(expr= m.x533 + 0.994083415506506*m.b677", "0) m.c760 = Constraint(expr= m.x541 == 0) m.c761 = Constraint(expr=", "= Constraint(expr= m.b605 - m.b617 >= 0) m.c1419 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x84 = Var(within=Reals,bounds=(0,None),initialize=0) m.x85 = Var(within=Reals,bounds=(0,None),initialize=0) m.x86 = Var(within=Reals,bounds=(0,20),initialize=0)", "+ m.b754 <= 1) m.c1237 = Constraint(expr= m.b752 + m.b753", "- m.b728 <= 0) m.c1326 = Constraint(expr= - m.b638 +", "m.x276 == 0) m.c262 = Constraint(expr= m.x277 == 0) m.c263", "Var(within=Reals,bounds=(0,None),initialize=0) m.x554 = Var(within=Reals,bounds=(0,None),initialize=0) m.x555 = Var(within=Reals,bounds=(0,None),initialize=0) m.x556 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x318 - 1.83548069293539*m.b630 <= 0) m.c391 = Constraint(expr= m.x319 -", "m.x269 = Var(within=Reals,bounds=(0,None),initialize=0) m.x270 = Var(within=Reals,bounds=(0,None),initialize=0) m.x271 = Var(within=Reals,bounds=(0,None),initialize=0) m.x272", "0) m.c319 = Constraint(expr= m.x355 == 0) m.c320 = Constraint(expr=", "Var(within=Binary,bounds=(0,1),initialize=0) m.b743 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b744 = Var(within=Binary,bounds=(0,1),initialize=0) m.b745 = Var(within=Binary,bounds=(0,1),initialize=0)", "- m.x193 - m.x196 == 0) m.c53 = Constraint(expr=(m.x224/(0.001 +", "m.x398/(0.001 + 0.999*m.b647)))*(0.001 + 0.999* m.b647) <= 0) m.c558 =", "m.x537 == 0) m.c748 = Constraint(expr= - m.x508 + m.x538", "= Constraint(expr= m.x53 - m.x296 - m.x299 == 0) m.c294", "m.x563 == 0) m.c900 = Constraint(expr= m.x564 == 0) m.c901", "0) m.c96 = Constraint(expr= m.x219 - 40*m.b600 <= 0) m.c97", "== 0) m.c930 = Constraint(expr= 9*m.b693 + m.x783 == 0)", "m.x576 == 0) m.c829 = Constraint(expr= m.x202 - m.x574 -", "m.x853 == 0) m.c1001 = Constraint(expr= 7*m.b764 + m.x854 ==", "== 0) m.c42 = Constraint(expr= m.x153 - m.x156 - m.x159", "0) m.c912 = Constraint(expr= m.x561 - 15*m.b684 <= 0) m.c913", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x286 = Var(within=Reals,bounds=(0,None),initialize=0) m.x287 = Var(within=Reals,bounds=(0,None),initialize=0) m.x288 =", "- 40*m.b600 <= 0) m.c97 = Constraint(expr= m.x220 - 40*m.b601", "m.x489 - 0.940066550763924*m.b660 <= 0) m.c688 = Constraint(expr= m.x490 -", "m.x112 = Var(within=Reals,bounds=(0,None),initialize=0) m.x113 = Var(within=Reals,bounds=(0,None),initialize=0) m.x114 = Var(within=Reals,bounds=(0,None),initialize=0) m.x115", "m.x28 - m.x31 - m.x34 == 0) m.c14 = Constraint(expr=", "0.999*m.b606) - 1.5*log(1 + m.x243/(0.001 + 0.999*m.b606)))*(0.001 + 0.999* m.b606)", "<= 0) m.c300 = Constraint(expr= m.x297 - 15*m.b621 <= 0)", "m.c1307 = Constraint(expr= m.b620 - m.b710 <= 0) m.c1308 =", "2*m.x37 - 10*m.x86 - 5*m.x87 - 5*m.x88 - 5*m.x89 -", "Constraint(expr= m.x271 - 2.54515263975353*m.b619 <= 0) m.c275 = Constraint(expr= m.x275", "+ m.x836 == 0) m.c984 = Constraint(expr= 5*m.b747 + m.x837", "0) m.c1414 = Constraint(expr= m.b604 - m.b613 >= 0) m.c1415", "Var(within=Reals,bounds=(0,None),initialize=0) m.x99 = Var(within=Reals,bounds=(0,None),initialize=0) m.x100 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x101 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= m.x269 - 2.54515263975353*m.b617 <= 0) m.c273 = Constraint(expr=", "m.b681 <= 0) m.c1098 = Constraint(expr= m.b680 - m.b682 <=", "Var(within=Binary,bounds=(0,1),initialize=0) m.b760 = Var(within=Binary,bounds=(0,1),initialize=0) m.b761 = Var(within=Binary,bounds=(0,1),initialize=0) m.b762 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.b707 = Var(within=Binary,bounds=(0,1),initialize=0) m.b708 = Var(within=Binary,bounds=(0,1),initialize=0) m.b709 = Var(within=Binary,bounds=(0,1),initialize=0) m.b710", "== 0) m.c904 = Constraint(expr= m.x595 == 0) m.c905 =", "Var(within=Reals,bounds=(None,None),initialize=0) m.x789 = Var(within=Reals,bounds=(None,None),initialize=0) m.x790 = Var(within=Reals,bounds=(None,None),initialize=0) m.x791 = Var(within=Reals,bounds=(None,None),initialize=0)", "0) m.c644 = Constraint(expr= m.x485 == 0) m.c645 = Constraint(expr=", "0) m.c67 = Constraint(expr= m.x13 - m.x226 - m.x229 ==", "m.b759 + m.b760 <= 1) m.c1251 = Constraint(expr= m.b758 +", "- 0.666992981045719*m.b673 <= 0) m.c812 = Constraint(expr= m.x569 + 0.666992981045719*m.b671", "0) m.c92 = Constraint(expr= m.x14 - m.x230 - m.x233 ==", "m.x422 - 0.842233385663186*m.b632 <= 0) m.c429 = Constraint(expr= m.x423 -", "m.b716 + m.b718 <= 1) m.c1165 = Constraint(expr= m.b716 +", "4.45628648004517) m.c126 = Constraint(expr= m.x240 + 4.45628648004517*m.b603 <= 4.45628648004517) m.c127", "m.b633 + m.b634 - m.b724 <= 0) m.c1322 = Constraint(expr=", "Var(within=Binary,bounds=(0,1),initialize=0) m.b731 = Var(within=Binary,bounds=(0,1),initialize=0) m.b732 = Var(within=Binary,bounds=(0,1),initialize=0) m.b733 = Var(within=Binary,bounds=(0,1),initialize=0)", "= Constraint(expr= m.x248 - 4.45628648004517*m.b608 <= 0) m.c186 = Constraint(expr=", "m.x823 = Var(within=Reals,bounds=(None,None),initialize=0) m.x824 = Var(within=Reals,bounds=(None,None),initialize=0) m.x825 = 
Var(within=Reals,bounds=(None,None),initialize=0) m.x826", "Var(within=Binary,bounds=(0,1),initialize=0) m.b730 = Var(within=Binary,bounds=(0,1),initialize=0) m.b731 = Var(within=Binary,bounds=(0,1),initialize=0) m.b732 = Var(within=Binary,bounds=(0,1),initialize=0)", "- m.x235 == 0) m.c95 = Constraint(expr= m.x218 - 40*m.b599", "Var(within=Reals,bounds=(0,None),initialize=0) m.x584 = Var(within=Reals,bounds=(0,None),initialize=0) m.x585 = Var(within=Reals,bounds=(0,None),initialize=0) m.x586 = Var(within=Reals,bounds=(0,None),initialize=0)", "1) m.c1181 = Constraint(expr= m.b725 + m.b726 <= 1) m.c1182", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x501 = Var(within=Reals,bounds=(0,None),initialize=0) m.x502 = Var(within=Reals,bounds=(0,None),initialize=0) m.x503 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x560 = Var(within=Reals,bounds=(0,None),initialize=0) m.x561 = Var(within=Reals,bounds=(0,None),initialize=0) m.x562 =", "0) m.c1026 = Constraint(expr= m.b608 - m.b610 <= 0) m.c1027", "Constraint(expr= m.x594 == 0) m.c904 = Constraint(expr= m.x595 == 0)", "= Constraint(expr= m.b696 + m.b697 <= 1) m.c1127 = Constraint(expr=", "m.x428 - m.x431 == 0) m.c447 = Constraint(expr= m.x117 -", "+ 0.999*m.b597)))*(0.001 + 0.999*m.b597) <= 0) m.c55 = Constraint(expr=(m.x226/(0.001 +", "<= 0) m.c614 = Constraint(expr= m.x413 == 0) m.c615 =", "0.940066550763924) m.c691 = Constraint(expr= m.x493 + 0.940066550763924*m.b661 <= 0.940066550763924) m.c692", "+ m.x584 == 0) m.c870 = Constraint(expr= - 0.9*m.x555 +", "- m.x307 == 0) m.c323 = Constraint(expr= m.x74 - m.x350", "m.c906 = Constraint(expr= m.x192 - m.x561 - m.x564 == 0)", "- m.b723 <= 0) m.c1321 = Constraint(expr= - m.b632 -", "m.b627 - m.b648 >= 0) m.c1450 = Constraint(expr= m.b628 -", "Constraint(expr= m.x532 + 0.994083415506506*m.b667 <= 0.994083415506506) m.c746 = Constraint(expr= -", "m.b625 <= 0) m.c1042 = Constraint(expr= m.b624 - m.b625 <=", "m.b640 >= 0) m.c1442 = Constraint(expr= m.b623 - 
m.b641 >=", "== 0) m.c1007 = Constraint(expr= 2*m.b770 + m.x860 == 0)", "m.b697 <= 1) m.c1123 = Constraint(expr= m.b695 + m.b696 <=", "0.75*m.x236 + m.x260 == 0) m.c108 = Constraint(expr= - 0.75*m.x237", "6*m.b692 + m.x782 == 0) m.c930 = Constraint(expr= 9*m.b693 +", "m.b669 - m.b681 >= 0) m.c1483 = Constraint(expr= m.b670 -", "0) m.c948 = Constraint(expr= 5*m.b711 + m.x801 == 0) m.c949", "- m.x387 - m.x390 == 0) m.c514 = Constraint(expr= m.x97", "0) m.c668 = Constraint(expr= m.x479 == 0) m.c669 = Constraint(expr=", "1) m.c1206 = Constraint(expr= m.b737 + m.b739 <= 1) m.c1207", "m.x842 == 0) m.c990 = Constraint(expr= 8*m.b753 + m.x843 ==", "1 equation from pyomo.environ import * model = m =", "m.c1321 = Constraint(expr= - m.b632 - m.b633 + m.b634 -", "<= 0) m.c156 = Constraint(expr= m.x267 - 2.54515263975353*m.b606 <= 0)", "9) m.c556 = Constraint(expr= m.x451 + 9*m.b646 <= 9) m.c557", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x330 = Var(within=Reals,bounds=(0,None),initialize=0) m.x331 = Var(within=Reals,bounds=(0,None),initialize=0) m.x332 =", "m.b671 - m.b761 <= 0) m.c1359 = Constraint(expr= - m.b671", "<= 0) m.c785 = Constraint(expr= m.x539 + 15*m.b668 <= 15)", "40) m.c74 = Constraint(expr= m.x224 - 3.71357206670431*m.b596 <= 0) m.c75", "Var(within=Reals,bounds=(0,None),initialize=0) m.x471 = Var(within=Reals,bounds=(0,None),initialize=0) m.x472 = Var(within=Reals,bounds=(0,None),initialize=0) m.x473 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c161 = Constraint(expr= - m.x248 + m.x278 == 0) m.c162", "m.x235 == 0) m.c95 = Constraint(expr= m.x218 - 40*m.b599 <=", "m.x375 - m.x378 == 0) m.c385 = Constraint(expr= m.x88 -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x187 = Var(within=Reals,bounds=(0,None),initialize=0) m.x188 = Var(within=Reals,bounds=(0,None),initialize=0) m.x189 =", "m.x201 - m.x573 - m.x576 == 0) m.c829 = Constraint(expr=", "m.b709 <= 1) m.c1151 = Constraint(expr= m.b710 + m.b711 <=", "+ m.b645 >= 0) m.c1399 = 
Constraint(expr= - m.b625 +", "m.x66 - m.x327 - m.x333 == 0) m.c244 = Constraint(expr=", "- m.x502 - m.x505 == 0) m.c731 = Constraint(expr= m.x176", "- 5*m.x88 - 5*m.x89 - 5*m.x90 - 5*m.x91 + 40*m.x110", "0) m.c334 = Constraint(expr= m.x352 - 9*m.b625 <= 0) m.c335", "m.x571 == 0) m.c797 = Constraint(expr= m.x182 - m.x542 -", "+ m.b775 <= 1) m.c1281 = Constraint(expr= m.b773 + m.b775", "== 0) m.c877 = Constraint(expr= m.x589 == 0) m.c878 =", "m.x243 = Var(within=Reals,bounds=(0,None),initialize=0) m.x244 = Var(within=Reals,bounds=(0,None),initialize=0) m.x245 = Var(within=Reals,bounds=(0,None),initialize=0) m.x246", "m.x268 = Var(within=Reals,bounds=(0,None),initialize=0) m.x269 = Var(within=Reals,bounds=(0,None),initialize=0) m.x270 = Var(within=Reals,bounds=(0,None),initialize=0) m.x271", "m.x409 + 3.04984759446376*m.b652 <= 3.04984759446376) m.c605 = Constraint(expr= m.x458 -", "- m.b598 <= 0) m.c1015 = Constraint(expr= m.b597 - m.b598", "m.x595 == 0) m.c905 = Constraint(expr= m.x191 - m.x560 -", "== 0) m.c678 = Constraint(expr= m.x150 - m.x489 - m.x492", "+ 3.04984759446376*m.b648 <= 3.04984759446376) m.c577 = Constraint(expr= m.x403 + 3.04984759446376*m.b649", "Constraint(expr= m.x386 - 9*m.b641 <= 0) m.c519 = Constraint(expr= m.x387", "<= 1.11894339953103) m.c611 = Constraint(expr=(m.x464/(0.001 + 0.999*m.b653) - 0.85*log(1 +", "0) m.c298 = Constraint(expr= m.x73 - m.x346 - m.x349 ==", "== 0) m.c766 = Constraint(expr= m.x172 - m.x514 - m.x517", "m.c1427 = Constraint(expr= m.b608 - m.b626 >= 0) m.c1428 =", "Constraint(expr= m.x321 == 0) m.c211 = Constraint(expr= m.x322 == 0)", "m.c11 = Constraint(expr= m.x23 - m.x26 - m.x29 - m.x32", "== 0) m.c958 = Constraint(expr= 9*m.b721 + m.x811 == 0)", "m.c862 = Constraint(expr= m.x535 + 0.994083415506506*m.b679 <= 0.994083415506506) m.c863 =", "Constraint(expr= 4*m.b744 + m.x834 == 0) m.c982 = Constraint(expr= m.b745", "m.b653 - m.b659 >= 0) m.c1470 = Constraint(expr= m.b654 -", "== 0) m.c536 = Constraint(expr= m.x449 == 
0) m.c537 =", "m.x208 = Var(within=Reals,bounds=(0,None),initialize=0) m.x209 = Var(within=Reals,bounds=(0,None),initialize=0) m.x210 = Var(within=Reals,bounds=(0,None),initialize=0) m.x211", "0) m.c473 = Constraint(expr= m.x437 == 0) m.c474 = Constraint(expr=", "Constraint(expr= m.x545 + 0.705049913072943*m.b671 <= 0.705049913072943) m.c807 = Constraint(expr= m.x546", "= Constraint(expr= m.x491 + 0.940066550763924*m.b659 <= 0.940066550763924) m.c690 = Constraint(expr=", "m.c1252 = Constraint(expr= m.b759 + m.b760 <= 1) m.c1253 =", "15*m.b668 <= 0) m.c783 = Constraint(expr= m.x537 - 15*m.b669 <=", "+ 0.666992981045719*m.b672 <= 0.666992981045719) m.c814 = Constraint(expr= m.x571 + 0.666992981045719*m.b673", "= Constraint(expr= m.b617 - m.b618 <= 0) m.c1035 = Constraint(expr=", "m.c213 = Constraint(expr= m.x48 - m.x285 - m.x288 == 0)", "0) m.c425 = Constraint(expr= m.x335 + 1.32154609891348*m.b632 <= 1.32154609891348) m.c426", "- m.b625 >= 0) m.c1427 = Constraint(expr= m.b608 - m.b626", "Constraint(expr= m.b625 - m.b646 >= 0) m.c1448 = Constraint(expr= m.b626", "= Constraint(expr= m.b699 + m.b700 <= 1) m.c1133 = Constraint(expr=", "<= 0) m.c1031 = Constraint(expr= m.b614 - m.b615 <= 0)", "0) m.c1042 = Constraint(expr= m.b624 - m.b625 <= 0) m.c1043", "+ m.b624 - m.b714 <= 0) m.c1312 = Constraint(expr= -", "Constraint(expr= m.x346 - 13.5*m.b622 <= 0) m.c308 = Constraint(expr= m.x347", "- 1.25*log(1 + m.x542/(0.001 + 0.999*m.b671)))*(0.001 + 0.999* m.b671) <=", "Constraint(expr= m.b623 - m.b644 >= 0) m.c1446 = Constraint(expr= m.b624", "0) m.c589 = Constraint(expr= m.x409 == 0) m.c590 = Constraint(expr=", "m.x407 == 0) m.c588 = Constraint(expr= m.x408 == 0) m.c589", "= Constraint(expr= m.x84 - m.x369 - m.x372 == 0) m.c478", "15*m.b624 <= 0) m.c328 = Constraint(expr= m.x304 - 15*m.b625 <=", "m.x220 = Var(within=Reals,bounds=(0,None),initialize=0) m.x221 = Var(within=Reals,bounds=(0,None),initialize=0) m.x222 = Var(within=Reals,bounds=(0,None),initialize=0) m.x223", 
"m.c864 = Constraint(expr= m.x579 - 0.690184503917672*m.b678 <= 0) m.c865 =", "m.c98 = Constraint(expr= m.x221 + 40*m.b599 <= 40) m.c99 =", "- m.x216 == 0) m.c64 = Constraint(expr= m.x7 - m.x214", "+ m.b639 >= 0) m.c1396 = Constraint(expr= - m.b622 +", "m.x510 = Var(within=Reals,bounds=(0,None),initialize=0) m.x511 = Var(within=Reals,bounds=(0,None),initialize=0) m.x512 = Var(within=Reals,bounds=(0,None),initialize=0) m.x513", "m.x336 + 1.32154609891348*m.b633 <= 1.32154609891348) m.c427 = Constraint(expr= m.x337 +", "2.30162356062425*m.b639 <= 0) m.c499 = Constraint(expr= m.x436 - 2.30162356062425*m.b640 <=", "0) m.c136 = Constraint(expr=(m.x268/(0.001 + 0.999*m.b607) - 1.5*log(1 + m.x244/(0.001", "- m.x163 - m.x166 - m.x169 == 0) m.c47 =", "m.x299 == 0) m.c288 = Constraint(expr= m.x300 == 0) m.c289", "== 0) m.c145 = Constraint(expr= m.x31 - m.x244 - m.x247", "m.c1127 = Constraint(expr= m.b698 + m.b699 <= 1) m.c1128 =", "0) m.c1361 = Constraint(expr= m.b674 - m.b764 <= 0) m.c1362", "m.x96 - m.x99 == 0) m.c28 = Constraint(expr= m.x76 -", "<= 1.32154609891348) m.c256 = Constraint(expr= m.x334 + 1.32154609891348*m.b616 <= 1.32154609891348)", "- m.x238 - m.x241 == 0) m.c119 = Constraint(expr= m.x38", "9*m.b645 <= 9) m.c556 = Constraint(expr= m.x451 + 9*m.b646 <=", "m.x362/(0.001 + 0.999*m.b635)))*(0.001 + 0.999* m.b635) <= 0) m.c435 =", "m.c1018 = Constraint(expr= m.b600 - m.b601 <= 0) m.c1019 =", "- m.b677 + m.b678 - m.b768 <= 0) m.c1366 =", "m.b682 >= 0) m.c1484 = Constraint(expr= m.b668 - m.b683 >=", "m.x71 - m.x344 - m.x347 == 0) m.c297 = Constraint(expr=", "+ m.b708 <= 1) m.c1146 = Constraint(expr= m.b707 + m.b709", "= Constraint(expr= m.b716 + m.b717 <= 1) m.c1164 = Constraint(expr=", "m.c222 = Constraint(expr= m.x288 + 3.34221486003388*m.b612 <= 3.34221486003388) m.c223 =", "m.b708 <= 0) m.c1306 = Constraint(expr= - m.b617 - m.b618", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b599 = Var(within=Binary,bounds=(0,1),initialize=0) m.b600 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b601 =", "m.x458 = Var(within=Reals,bounds=(0,None),initialize=0) m.x459 = Var(within=Reals,bounds=(0,None),initialize=0) m.x460 = Var(within=Reals,bounds=(0,None),initialize=0) m.x461", "m.x214 - m.x217 == 0) m.c65 = Constraint(expr= m.x11 -", "Constraint(expr= m.b746 + m.b748 <= 1) m.c1228 = Constraint(expr= m.b747", "Var(within=Binary,bounds=(0,1),initialize=0) m.b682 = Var(within=Binary,bounds=(0,1),initialize=0) m.b683 = Var(within=Binary,bounds=(0,1),initialize=0) m.b684 = Var(within=Binary,bounds=(0,1),initialize=0)", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b629 = Var(within=Binary,bounds=(0,1),initialize=0) m.b630 = Var(within=Binary,bounds=(0,1),initialize=0) m.b631 =", "0) m.c237 = Constraint(expr= m.x333 == 0) m.c238 = Constraint(expr=", "0) m.c515 = Constraint(expr= m.x122 - m.x440 - m.x443 ==", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x829 = Var(within=Reals,bounds=(None,None),initialize=0) m.x830 = Var(within=Reals,bounds=(None,None),initialize=0) m.x831 =", "<= 1.83548069293539) m.c228 = Constraint(expr= m.x321 + 1.83548069293539*m.b612 <= 1.83548069293539)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x136 = Var(within=Reals,bounds=(0,None),initialize=0) m.x137 = Var(within=Reals,bounds=(0,None),initialize=0) m.x138 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b774 = Var(within=Binary,bounds=(0,1),initialize=0) m.b775 = Var(within=Binary,bounds=(0,1),initialize=0) m.x776 =", "+ 190*m.x202 + 280*m.x203 + 400*m.x204 + 430*m.x205 + 290*m.x206", "+ m.b754 <= 1) m.c1241 = Constraint(expr= m.b755 + m.b756", "= Constraint(expr= m.x274 + 2.54515263975353*m.b607 <= 2.54515263975353) m.c161 = Constraint(expr=", "- 1.2*log(1 + m.x477/(0.001 + 0.999*m.b660)))*(0.001 + 0.999* m.b660) <=", "<= 1) m.c1157 = Constraint(expr= m.b713 + m.b714 <= 1)", "<= 0) m.c920 = Constraint(expr= m.x593 + 9*m.b683 <= 9)", "= 
Var(within=Reals,bounds=(0,None),initialize=0) m.x146 = Var(within=Reals,bounds=(0,None),initialize=0) m.x147 = Var(within=Reals,bounds=(0,None),initialize=0) m.x148 =", "<= 1) m.c1205 = Constraint(expr= m.b737 + m.b738 <= 1)", ">= 0) m.c1393 = Constraint(expr= - m.b610 + m.b622 +", "20) m.c401 = Constraint(expr= m.x416 - 20*m.b629 <= 0) m.c402", "= Constraint(expr= m.b683 - m.b773 <= 0) m.c1371 = Constraint(expr=", "Constraint(expr= m.x378 == 0) m.c376 = Constraint(expr= m.x379 == 0)", "log(1 + m.x213/(0.001 + 0.999*m.b597)))*(0.001 + 0.999*m.b597) <= 0) m.c55", "Constraint(expr= m.x408 == 0) m.c589 = Constraint(expr= m.x409 == 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x380 = Var(within=Reals,bounds=(0,None),initialize=0) m.x381 = Var(within=Reals,bounds=(0,None),initialize=0) m.x382 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x320 == 0) m.c216 = Constraint(expr= m.x63 - m.x315 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x176 = Var(within=Reals,bounds=(0,None),initialize=0) m.x177 = Var(within=Reals,bounds=(0,None),initialize=0) m.x178 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b604 = Var(within=Binary,bounds=(0,1),initialize=0) m.b605 = Var(within=Binary,bounds=(0,1),initialize=0) m.b606 =", "m.c1053 = Constraint(expr= m.b635 - m.b637 <= 0) m.c1054 =", "m.x178 - m.x526 - m.x532 == 0) m.c734 = Constraint(expr=", "m.b722 + m.b724 <= 1) m.c1177 = Constraint(expr= m.b722 +", "- m.b683 - m.b684 + m.b685 - m.b775 <= 0)", "= Constraint(expr= m.x27 - m.x237 - m.x240 == 0) m.c118", "0) m.c219 = Constraint(expr= m.x285 - 3.34221486003388*m.b612 <= 0) m.c220", "m.c1206 = Constraint(expr= m.b737 + m.b739 <= 1) m.c1207 =", "m.x837 = Var(within=Reals,bounds=(None,None),initialize=0) m.x838 = Var(within=Reals,bounds=(None,None),initialize=0) m.x839 = Var(within=Reals,bounds=(None,None),initialize=0) m.x840", "10*m.x130 + 15*m.x131 + 20*m.x132 + 20*m.x133 + 30*m.x155 +", "0) m.c624 = 
Constraint(expr= m.x135 - m.x465 - m.x468 ==", "- 9*m.b683 <= 0) m.c918 = Constraint(expr= m.x591 - 9*m.b684", "Constraint(expr= m.x197 - m.x566 - m.x569 == 0) m.c801 =", "m.x246 == 0) m.c145 = Constraint(expr= m.x31 - m.x244 -", "== 0) m.c483 = Constraint(expr= m.x120 - m.x435 - m.x438", "0) m.c35 = Constraint(expr= m.x137 - m.x140 - m.x143 ==", "== 0) m.c513 = Constraint(expr= m.x96 - m.x387 - m.x390", "+ 3.34221486003388*m.b614 <= 3.34221486003388) m.c249 = Constraint(expr= m.x294 + 3.34221486003388*m.b615", "m.c1383 = Constraint(expr= - m.b615 + m.b633 >= 0) m.c1384", "Var(within=Reals,bounds=(0,None),initialize=0) m.x310 = Var(within=Reals,bounds=(0,None),initialize=0) m.x311 = Var(within=Reals,bounds=(0,None),initialize=0) m.x312 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c1318 = Constraint(expr= - m.b629 - m.b630 + m.b631", "Var(within=Binary,bounds=(0,1),initialize=0) m.b667 = Var(within=Binary,bounds=(0,1),initialize=0) m.b668 = Var(within=Binary,bounds=(0,1),initialize=0) m.b669 = Var(within=Binary,bounds=(0,1),initialize=0)", "Constraint(expr= m.x563 == 0) m.c900 = Constraint(expr= m.x564 == 0)", "- 0.6*m.x303 + m.x351 == 0) m.c313 = Constraint(expr= -", "= Constraint(expr= m.x568 - 0.666992981045719*m.b673 <= 0) m.c812 = Constraint(expr=", "m.x9 = Var(within=Reals,bounds=(0,None),initialize=0) m.x10 = Var(within=Reals,bounds=(0,None),initialize=0) m.x11 = Var(within=Reals,bounds=(0,None),initialize=0) m.x12", "m.b618 = Var(within=Binary,bounds=(0,1),initialize=0) m.b619 = Var(within=Binary,bounds=(0,1),initialize=0) m.b620 = Var(within=Binary,bounds=(0,1),initialize=0) m.b621", "== 0) m.c236 = Constraint(expr= m.x332 == 0) m.c237 =", "Constraint(expr= m.x558 == 0) m.c874 = Constraint(expr= m.x559 == 0)", "m.c1245 = Constraint(expr= m.b755 + m.b757 <= 1) m.c1246 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x42 = Var(within=Reals,bounds=(0,None),initialize=0) m.x43 = Var(within=Reals,bounds=(0,None),initialize=0) m.x44 =", "<= 0) 
m.c457 = Constraint(expr= m.x430 - 0.572481933717686*m.b637 <= 0)", "Constraint(expr= - m.b644 + m.b645 - m.b735 <= 0) m.c1333", "<= 4.45628648004517) m.c154 = Constraint(expr= m.x247 + 4.45628648004517*m.b607 <= 4.45628648004517)", "m.x478 - m.x481 == 0) m.c677 = Constraint(expr= m.x149 -", "= Constraint(expr= m.x168 - m.x507 - m.x510 == 0) m.c763", "= Constraint(expr= m.x373 + 1.26558121681553*m.b640 <= 1.26558121681553) m.c491 = Constraint(expr=", "0) m.c753 = Constraint(expr= m.x510 == 0) m.c754 = Constraint(expr=", "+ 0.999* m.b640) <= 0) m.c467 = Constraint(expr= m.x371 ==", "0) m.c471 = Constraint(expr= m.x384 == 0) m.c472 = Constraint(expr=", "== 0) m.c345 = Constraint(expr= m.x360 == 0) m.c346 =", "Constraint(expr= m.b599 - m.b689 <= 0) m.c1287 = Constraint(expr= -", "m.x49 - m.x52 == 0) m.c17 = Constraint(expr= m.x44 -", "0.994083415506506) m.c746 = Constraint(expr= - m.x506 + m.x536 == 0)", "+ m.b744 <= 1) m.c1220 = Constraint(expr= m.b744 + m.b745", "m.c1188 = Constraint(expr= m.b728 + m.b730 <= 1) m.c1189 =", "m.b656 >= 0) m.c1467 = Constraint(expr= m.b654 - m.b657 >=", "+ 1.18887736200171*m.b655 <= 1.18887736200171) m.c638 = Constraint(expr=(m.x482/(0.001 + 0.999*m.b656) -", "0.999*m.b633) - log(1 + m.x330/(0.001 + 0.999*m.b633)))*(0.001 + 0.999*m.b633) <=", "m.c1052 = Constraint(expr= m.b635 - m.b636 <= 0) m.c1053 =", "m.c1295 = Constraint(expr= m.b608 - m.b698 <= 0) m.c1296 =", "= Constraint(expr= m.x354 == 0) m.c319 = Constraint(expr= m.x355 ==", "1) m.c1172 = Constraint(expr= m.b720 + m.b721 <= 1) m.c1173", "# MINLP written by GAMS Convert at 01/15/21 11:37:33 #", "== 0) m.c542 = Constraint(expr= m.x125 - m.x446 - m.x449", "m.c661 = Constraint(expr= m.x484 - 0.78338879230327*m.b658 <= 0) m.c662 =", "+ m.x798 == 0) m.c946 = Constraint(expr= 7*m.b709 + m.x799", "= Constraint(expr= m.x74 - m.x95 - m.x98 == 0) m.c27", "m.x10 = Var(within=Reals,bounds=(0,None),initialize=0) m.x11 = Var(within=Reals,bounds=(0,None),initialize=0) m.x12 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x13", "m.x250 - 4.45628648004517*m.b610 <= 0) m.c188 = Constraint(expr= m.x251 +", "== 0) m.c386 = Constraint(expr= m.x110 - m.x416 - m.x419", "- 1.25*log(1 + m.x284/(0.001 + 0.999*m.b611)))*(0.001 + 0.999* m.b611) <=", "m.c1377 = Constraint(expr= - m.b603 + m.b612 + m.b615 >=", "m.x388 + m.x442 == 0) m.c506 = Constraint(expr= m.x389 ==", "== 0) m.c540 = Constraint(expr= m.x99 - m.x393 - m.x396", "1) m.c1374 = Constraint(expr= m.b597 + m.b600 == 1) m.c1375", "Constraint(expr= m.x134 - m.x137 == 0) m.c33 = Constraint(expr= m.x135", "Var(within=Reals,bounds=(0,None),initialize=0) m.x431 = Var(within=Reals,bounds=(0,None),initialize=0) m.x432 = Var(within=Reals,bounds=(0,None),initialize=0) m.x433 = Var(within=Reals,bounds=(0,None),initialize=0)", "== 0) m.c642 = Constraint(expr= m.x474 == 0) m.c643 =", "Constraint(expr= m.b607 - m.b619 >= 0) m.c1421 = Constraint(expr= m.b608", "0) m.c242 = Constraint(expr= m.x65 - m.x326 - m.x332 ==", "<= 0) m.c157 = Constraint(expr= m.x268 - 2.54515263975353*m.b607 <= 0)", "0) m.c677 = Constraint(expr= m.x149 - m.x488 - m.x491 ==", "0) m.c1003 = Constraint(expr= 9*m.b766 + m.x856 == 0) m.c1004", "Var(within=Reals,bounds=(0,None),initialize=0) m.x330 = Var(within=Reals,bounds=(0,None),initialize=0) m.x331 = Var(within=Reals,bounds=(0,None),initialize=0) m.x332 = Var(within=Reals,bounds=(0,None),initialize=0)", "- m.b681 + m.b682 - m.b772 <= 0) m.c1370 =", "Constraint(expr= m.x469 == 0) m.c620 = Constraint(expr= m.x107 - m.x410", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b650 = Var(within=Binary,bounds=(0,1),initialize=0) m.b651 = Var(within=Binary,bounds=(0,1),initialize=0) m.b652 =", "Constraint(expr= m.b667 - m.b679 >= 0) m.c1481 = Constraint(expr= m.b668", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x239 = Var(within=Reals,bounds=(0,None),initialize=0) m.x240 = Var(within=Reals,bounds=(0,None),initialize=0) m.x241 =", "+ m.x500/(0.001 + 0.999*m.b665)))*(0.001 + 
0.999* m.b665) <= 0) m.c720", "- m.x345 - m.x348 == 0) m.c298 = Constraint(expr= m.x73", "m.b639 - m.b729 <= 0) m.c1327 = Constraint(expr= - m.b638", "m.c129 = Constraint(expr= m.x261 - 3.34221486003388*m.b603 <= 0) m.c130 =", "= Var(within=Reals,bounds=(0,40),initialize=0) m.x5 = Var(within=Reals,bounds=(0,None),initialize=0) m.x6 = Var(within=Reals,bounds=(0,None),initialize=0) m.x7 =", "0) m.c1052 = Constraint(expr= m.b635 - m.b636 <= 0) m.c1053", "Constraint(expr= m.b732 + m.b733 <= 1) m.c1197 = Constraint(expr= m.b731", "m.c647 = Constraint(expr= m.x140 - m.x470 - m.x473 == 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x79 = Var(within=Reals,bounds=(0,None),initialize=0) m.x80 = Var(within=Reals,bounds=(0,None),initialize=0) m.x81 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x103 = Var(within=Reals,bounds=(0,None),initialize=0) m.x104 = Var(within=Reals,bounds=(0,None),initialize=0) m.x105 = Var(within=Reals,bounds=(0,None),initialize=0) m.x106", "<= 0) m.c360 = Constraint(expr= m.x357 - 3.04984759446376*m.b627 <= 0)", "0) m.c477 = Constraint(expr= m.x84 - m.x369 - m.x372 ==", "m.c1238 = Constraint(expr= m.b753 + m.b754 <= 1) m.c1239 =", "Constraint(expr= 9*m.b696 + m.x786 == 0) m.c934 = Constraint(expr= 5*m.b697", "m.c14 = Constraint(expr= m.x38 - m.x47 - m.x50 == 0)", "m.x188 - m.x554 - m.x557 == 0) m.c879 = Constraint(expr=", "m.x280 == 0) m.c164 = Constraint(expr= - 0.5*m.x254 + m.x278", "== 0) m.c289 = Constraint(expr= m.x301 == 0) m.c290 =", "<= 0) m.c860 = Constraint(expr= m.x533 + 0.994083415506506*m.b677 <= 0.994083415506506)", "m.x551 + 0.705049913072943*m.b674 <= 0.705049913072943) m.c834 = Constraint(expr= m.x552 +", "0) m.c680 = Constraint(expr= m.x476 - 1.18887736200171*m.b659 <= 0) m.c681", "Var(within=Reals,bounds=(0,None),initialize=0) m.x291 = Var(within=Reals,bounds=(0,None),initialize=0) m.x292 = Var(within=Reals,bounds=(0,None),initialize=0) m.x293 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c1451 = 
Constraint(expr= m.b626 - m.b650 >= 0) m.c1452", "m.x799 == 0) m.c947 = Constraint(expr= 2*m.b710 + m.x800 ==", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x440 = Var(within=Reals,bounds=(0,None),initialize=0) m.x441 = Var(within=Reals,bounds=(0,None),initialize=0) m.x442 =", "1) m.c1262 = Constraint(expr= m.b765 + m.b766 <= 1) m.c1263", "= Constraint(expr= m.b614 - m.b704 <= 0) m.c1302 = Constraint(expr=", "- m.b598 <= 0) m.c1016 = Constraint(expr= m.b599 - m.b600", "1) m.c1170 = Constraint(expr= m.b719 + m.b721 <= 1) m.c1171", "9*m.b646 <= 9) m.c557 = Constraint(expr=(m.x452/(0.001 + 0.999*m.b647) - 0.75*log(1", "= Constraint(expr= m.x95 - m.x386 - m.x389 == 0) m.c513", "<= 0) m.c232 = Constraint(expr=(m.x328/(0.001 + 0.999*m.b616) - 0.9*log(1 +", "- m.x555 - m.x558 == 0) m.c880 = Constraint(expr= m.x190", "m.c444 = Constraint(expr= m.x81 - m.x363 - m.x366 == 0)", "2*m.b746 + m.x836 == 0) m.c984 = Constraint(expr= 5*m.b747 +", "- m.b763 <= 0) m.c1361 = Constraint(expr= m.b674 - m.b764", "m.x290 = Var(within=Reals,bounds=(0,None),initialize=0) m.x291 = Var(within=Reals,bounds=(0,None),initialize=0) m.x292 = Var(within=Reals,bounds=(0,None),initialize=0) m.x293", "1.04900943706034) m.c583 = Constraint(expr= m.x457 + 1.04900943706034*m.b649 <= 1.04900943706034) m.c584", "m.x426 = Var(within=Reals,bounds=(0,None),initialize=0) m.x427 = Var(within=Reals,bounds=(0,None),initialize=0) m.x428 = Var(within=Reals,bounds=(0,None),initialize=0) m.x429", "m.x222 = Var(within=Reals,bounds=(0,None),initialize=0) m.x223 = Var(within=Reals,bounds=(0,None),initialize=0) m.x224 = Var(within=Reals,bounds=(0,None),initialize=0) m.x225", "m.x104 - m.x107 == 0) m.c30 = Constraint(expr= m.x78 -", "= Constraint(expr= m.x116 - m.x428 - m.x431 == 0) m.c447", "m.b667) <= 0) m.c722 = Constraint(expr= m.x503 == 0) m.c723", "<= 0) m.c1034 = Constraint(expr= m.b617 - m.b618 <= 0)", "m.x501 - 0.940066550763924*m.b666 <= 0) m.c736 = Constraint(expr= m.x502 -", "m.x570 + 
0.666992981045719*m.b672 <= 0.666992981045719) m.c814 = Constraint(expr= m.x571 +", "- m.b751 <= 0) m.c1349 = Constraint(expr= m.b662 - m.b752", "== 0) m.c242 = Constraint(expr= m.x65 - m.x326 - m.x332", "+ 90*m.x195 + 120*m.x196 + 285*m.x197 + 390*m.x198 + 350*m.x199", "+ 2.54515263975353*m.b618 <= 2.54515263975353) m.c277 = Constraint(expr= m.x277 + 2.54515263975353*m.b619", "- 0.940066550763924*m.b659 <= 0) m.c687 = Constraint(expr= m.x489 - 0.940066550763924*m.b660", "= Constraint(expr= m.x502 - 0.940066550763924*m.b667 <= 0) m.c737 = Constraint(expr=", "Constraint(expr= 4*m.b754 + m.x844 == 0) m.c992 = Constraint(expr= 2*m.b755", "Constraint(expr= m.x328 - 1.32154609891348*m.b616 <= 0) m.c254 = Constraint(expr= m.x332", "Var(within=Reals,bounds=(0,None),initialize=0) m.x13 = Var(within=Reals,bounds=(0,None),initialize=0) m.x14 = Var(within=Reals,bounds=(0,None),initialize=0) m.x15 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x542/(0.001 + 0.999*m.b671)))*(0.001 + 0.999* m.b671) <= 0) m.c789 =", "m.b738 + m.b739 <= 1) m.c1209 = Constraint(expr= m.b737 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x424 = Var(within=Reals,bounds=(0,None),initialize=0) m.x425 = Var(within=Reals,bounds=(0,None),initialize=0) m.x426 =", "m.c16 = Constraint(expr= m.x40 - m.x49 - m.x52 == 0)", "Constraint(expr= m.x233 + 4.45628648004517*m.b599 <= 4.45628648004517) m.c105 = Constraint(expr= m.x234", "0.65*log(1 + m.x382/(0.001 + 0.999*m.b640)))*(0.001 + 0.999* m.b640) <= 0)", "m.x496 - 0.940066550763924*m.b664 <= 0) m.c710 = Constraint(expr= m.x497 +", "m.x379 == 0) m.c386 = Constraint(expr= m.x110 - m.x416 -", "= Constraint(expr= m.x415 + 3.04984759446376*m.b655 <= 3.04984759446376) m.c632 = Constraint(expr=", "Constraint(expr=(m.x490/(0.001 + 0.999*m.b661) - 1.2*log(1 + m.x478/(0.001 + 0.999*m.b661)))*(0.001 +", "30) m.c781 = Constraint(expr= m.x517 + 30*m.b670 <= 30) m.c782", "<= 0) m.c1077 = Constraint(expr= m.b659 - m.b661 <= 0)", "m.b687 + m.b688 <= 1) m.c1107 
= Constraint(expr= m.b686 +", "m.b688 <= 1) m.c1105 = Constraint(expr= m.b686 + m.b687 <=", "m.x148 - m.x484 - m.x487 == 0) m.c653 = Constraint(expr=", "m.b639 >= 0) m.c1396 = Constraint(expr= - m.b622 + m.b640", "m.x466 - 1.18887736200171*m.b655 <= 0) m.c635 = Constraint(expr= m.x467 +", "m.c1394 = Constraint(expr= - m.b620 + m.b638 >= 0) m.c1395", "m.x215 == 0) m.c57 = Constraint(expr= m.x216 == 0) m.c58", "m.c1347 = Constraint(expr= - m.b659 + m.b660 - m.b750 <=", "m.b666 + m.b667 - m.b757 <= 0) m.c1355 = Constraint(expr=", "m.x269/(0.001 + 0.999*m.b617)))*(0.001 + 0.999*m.b617) <= 0) m.c258 = Constraint(expr=(m.x339/(0.001", "Var(within=Reals,bounds=(0,None),initialize=0) m.x257 = Var(within=Reals,bounds=(0,None),initialize=0) m.x258 = Var(within=Reals,bounds=(0,None),initialize=0) m.x259 = Var(within=Reals,bounds=(0,None),initialize=0)", "== 0) m.c325 = Constraint(expr= m.x76 - m.x352 - m.x355", "Constraint(expr= m.x557 + 15*m.b680 <= 15) m.c888 = Constraint(expr= m.x558", "- m.x554 - m.x557 == 0) m.c879 = Constraint(expr= m.x189", "Constraint(expr= m.x276 + 2.54515263975353*m.b618 <= 2.54515263975353) m.c277 = Constraint(expr= m.x277", "m.x324 = Var(within=Reals,bounds=(0,None),initialize=0) m.x325 = Var(within=Reals,bounds=(0,None),initialize=0) m.x326 = Var(within=Reals,bounds=(0,None),initialize=0) m.x327", "1.26558121681553*m.b638 <= 1.26558121681553) m.c489 = Constraint(expr= m.x372 + 1.26558121681553*m.b639 <=", "0) m.c566 = Constraint(expr= m.x101 - m.x398 - m.x401 ==", "Var(within=Reals,bounds=(None,None),initialize=0) m.x779 = Var(within=Reals,bounds=(None,None),initialize=0) m.x780 = Var(within=Reals,bounds=(None,None),initialize=0) m.x781 = Var(within=Reals,bounds=(None,None),initialize=0)", "<= 4.45628648004517) m.c105 = Constraint(expr= m.x234 + 4.45628648004517*m.b600 <= 4.45628648004517)", "Var(within=Reals,bounds=(None,None),initialize=0) m.x803 = Var(within=Reals,bounds=(None,None),initialize=0) m.x804 = 
Var(within=Reals,bounds=(None,None),initialize=0) m.x805 = Var(within=Reals,bounds=(None,None),initialize=0)", "0.690184503917672*m.b677 <= 0.690184503917672) m.c867 = Constraint(expr= m.x582 + 0.690184503917672*m.b678 <=", "<= 0) m.c1024 = Constraint(expr= m.b606 - m.b607 <= 0)", "m.c167 = Constraint(expr= m.x251 == 0) m.c168 = Constraint(expr= m.x252", "- 15*m.b669 <= 0) m.c784 = Constraint(expr= m.x538 - 15*m.b670", "m.x581 == 0) m.c855 = Constraint(expr= m.x204 - m.x579 -", "0) m.c1092 = Constraint(expr= m.b674 - m.b676 <= 0) m.c1093", "+ 3.04984759446376*m.b650 <= 3.04984759446376) m.c603 = Constraint(expr= m.x408 + 3.04984759446376*m.b651", "40*m.b599 <= 0) m.c96 = Constraint(expr= m.x219 - 40*m.b600 <=", "Constraint(expr= m.x412 - 3.04984759446376*m.b655 <= 0) m.c629 = Constraint(expr= m.x413", "<= 0) m.c1019 = Constraint(expr= m.b602 - m.b603 <= 0)", "= Constraint(expr= m.b669 - m.b684 >= 0) m.c1486 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x118 = Var(within=Reals,bounds=(0,None),initialize=0) m.x119 = Var(within=Reals,bounds=(0,None),initialize=0) m.x120 = Var(within=Reals,bounds=(0,None),initialize=0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x20 = Var(within=Reals,bounds=(0,None),initialize=0) m.x21 = Var(within=Reals,bounds=(0,None),initialize=0) m.x22 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x479 = Var(within=Reals,bounds=(0,None),initialize=0) m.x480 = Var(within=Reals,bounds=(0,None),initialize=0) m.x481 =", "m.x296 - m.x299 == 0) m.c294 = Constraint(expr= m.x54 -", "Constraint(expr= m.x72 - m.x345 - m.x348 == 0) m.c298 =", "= Constraint(expr= m.b617 - m.b619 <= 0) m.c1036 = Constraint(expr=", "m.c1358 = Constraint(expr= m.b671 - m.b761 <= 0) m.c1359 =", "m.x359 == 0) m.c351 = Constraint(expr= m.x78 - m.x357 -", "Constraint(expr= m.x342 + 1.26558121681553*m.b618 <= 1.26558121681553) m.c283 = Constraint(expr= m.x343", "0) m.c1037 = Constraint(expr= 
m.b620 - m.b621 <= 0) m.c1038", "- m.b667 <= 0) m.c1085 = Constraint(expr= m.b668 - m.b669", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x105 = Var(within=Reals,bounds=(0,None),initialize=0) m.x106 = Var(within=Reals,bounds=(0,None),initialize=0) m.x107 =", "4.45628648004517*m.b602 <= 4.45628648004517) m.c126 = Constraint(expr= m.x240 + 4.45628648004517*m.b603 <=", "= Constraint(expr= m.x268 - 2.54515263975353*m.b607 <= 0) m.c158 = Constraint(expr=", "= Constraint(expr= - m.b605 + m.b617 >= 0) m.c1386 =", "+ m.x793 == 0) m.c941 = Constraint(expr= 4*m.b704 + m.x794", "0) m.c1342 = Constraint(expr= - m.b653 - m.b654 + m.b655", "- m.x486 == 0) m.c652 = Constraint(expr= m.x148 - m.x484", "+ m.b628 >= 0) m.c1394 = Constraint(expr= - m.b620 +", "- m.x186 == 0) m.c49 = Constraint(expr= m.x175 - m.x184", "0) m.c993 = Constraint(expr= 3*m.b756 + m.x846 == 0) m.c994", "= Constraint(expr= m.x539 == 0) m.c759 = Constraint(expr= m.x540 ==", "m.x355 + 9*m.b625 <= 9) m.c338 = Constraint(expr=(m.x356/(0.001 + 0.999*m.b626)", "= Constraint(expr= m.b686 + m.b687 <= 1) m.c1106 = Constraint(expr=", "m.x328 = Var(within=Reals,bounds=(0,None),initialize=0) m.x329 = Var(within=Reals,bounds=(0,None),initialize=0) m.x330 = Var(within=Reals,bounds=(0,None),initialize=0) m.x331", "Var(within=Reals,bounds=(0,None),initialize=0) m.x474 = Var(within=Reals,bounds=(0,None),initialize=0) m.x475 = Var(within=Reals,bounds=(0,None),initialize=0) m.x476 = Var(within=Reals,bounds=(0,None),initialize=0)", "<= 0) m.c1085 = Constraint(expr= m.b668 - m.b669 <= 0)", "0) m.c185 = Constraint(expr= m.x248 - 4.45628648004517*m.b608 <= 0) m.c186", "- m.b636 + m.b637 - m.b727 <= 0) m.c1325 =", "= Constraint(expr= m.b607 - m.b619 >= 0) m.c1421 = Constraint(expr=", "m.x192 - m.x561 - m.x564 == 0) m.c907 = Constraint(expr=", "+ m.x834 == 0) m.c982 = Constraint(expr= m.b745 + m.x835", "= Constraint(expr= m.b690 + m.b691 <= 1) m.c1113 = Constraint(expr=", "- m.b653 - m.b654 + m.b655 - m.b745 <= 0)", "m.x72 - 
m.x90 + m.x93 == 0) m.c25 = Constraint(expr=", "0) m.c84 = Constraint(expr= m.x222 == 0) m.c85 = Constraint(expr=", "= Constraint(expr= - m.b680 - m.b681 + m.b682 - m.b772", "= Constraint(expr= m.x266 - 2.54515263975353*m.b605 <= 0) m.c156 = Constraint(expr=", "6*m.b688 + m.x778 == 0) m.c926 = Constraint(expr= 8*m.b689 +", "= Constraint(expr= 7*m.b764 + m.x854 == 0) m.c1002 = Constraint(expr=", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b608 = Var(within=Binary,bounds=(0,1),initialize=0) m.b609 = Var(within=Binary,bounds=(0,1),initialize=0) m.b610 =", "m.x44 - m.x278 - m.x281 == 0) m.c183 = Constraint(expr=", "m.x438 = Var(within=Reals,bounds=(0,None),initialize=0) m.x439 = Var(within=Reals,bounds=(0,None),initialize=0) m.x440 = Var(within=Reals,bounds=(0,None),initialize=0) m.x441", "m.x807 == 0) m.c955 = Constraint(expr= 3*m.b718 + m.x808 ==", "m.x509 == 0) m.c762 = Constraint(expr= m.x168 - m.x507 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x157 = Var(within=Reals,bounds=(0,None),initialize=0) m.x158 = Var(within=Reals,bounds=(0,None),initialize=0) m.x159 = Var(within=Reals,bounds=(0,None),initialize=0)", "+ 33.5*m.b639 <= 33.5) m.c496 = Constraint(expr= m.x385 + 33.5*m.b640", "= Constraint(expr= m.x153 - m.x156 - m.x159 == 0) m.c43", "= Constraint(expr= - m.b614 - m.b615 + m.b616 - m.b706", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x489 = Var(within=Reals,bounds=(0,None),initialize=0) m.x490 = Var(within=Reals,bounds=(0,None),initialize=0) m.x491 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x331 = Var(within=Reals,bounds=(0,None),initialize=0) m.x332 = Var(within=Reals,bounds=(0,None),initialize=0) m.x333 =", "0) m.c45 = Constraint(expr= m.x159 - m.x162 - m.x165 -", "Constraint(expr= m.x529 - 0.994083415506506*m.b679 <= 0) m.c860 = Constraint(expr= m.x533", "- m.x140 - m.x143 == 0) m.c36 = Constraint(expr= m.x138", "- 0.9*log(1 + m.x548/(0.001 + 0.999*m.b674)))*(0.001 + 0.999* m.b674) <=", "Constraint(expr= - m.b650 + 
m.b651 - m.b741 <= 0) m.c1339", "9*m.b642 <= 9) m.c529 = Constraint(expr= m.x445 + 9*m.b643 <=", "Constraint(expr= m.b729 + m.b730 <= 1) m.c1193 = Constraint(expr= m.b731", "= Constraint(expr= m.x97 - m.x388 - m.x391 == 0) m.c515", "m.c877 = Constraint(expr= m.x589 == 0) m.c878 = Constraint(expr= m.x188", "- m.b616 >= 0) m.c1418 = Constraint(expr= m.b605 - m.b617", "m.c1268 = Constraint(expr= m.b768 + m.b769 <= 1) m.c1269 =", "Var(within=Binary,bounds=(0,1),initialize=0) m.b750 = Var(within=Binary,bounds=(0,1),initialize=0) m.b751 = Var(within=Binary,bounds=(0,1),initialize=0) m.b752 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.b730 <= 1) m.c1193 = Constraint(expr= m.b731 + m.b732 <=", "m.b613 + m.b616 >= 0) m.c1379 = Constraint(expr= - m.b611", "m.x84 = Var(within=Reals,bounds=(0,None),initialize=0) m.x85 = Var(within=Reals,bounds=(0,None),initialize=0) m.x86 = Var(within=Reals,bounds=(0,20),initialize=0) m.x87", "= Constraint(expr= m.x277 == 0) m.c263 = Constraint(expr= m.x341 ==", "+ m.x808 == 0) m.c956 = Constraint(expr= 7*m.b719 + m.x809", "m.x525 - 0.994083415506506*m.b666 <= 0) m.c742 = Constraint(expr= m.x526 -", "<= 15) m.c788 = Constraint(expr=(m.x566/(0.001 + 0.999*m.b671) - 1.25*log(1 +", "Var(within=Binary,bounds=(0,1),initialize=0) m.b668 = Var(within=Binary,bounds=(0,1),initialize=0) m.b669 = Var(within=Binary,bounds=(0,1),initialize=0) m.b670 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.b614 + m.b615 - m.b705 <= 0) m.c1303 = Constraint(expr=", "= Constraint(expr= m.b683 - m.b685 <= 0) m.c1102 = Constraint(expr=", "m.x592 - m.x595 == 0) m.c911 = Constraint(expr= m.x560 -", "# Variable counts # x b i s1s s2s sc", "m.b654) <= 0) m.c613 = Constraint(expr=(m.x466/(0.001 + 0.999*m.b655) - 0.85*log(1", "Constraint(expr= - m.b665 + m.b677 >= 0) m.c1464 = Constraint(expr=", "m.b772 <= 1) m.c1275 = Constraint(expr= m.b770 + m.b772 <=", "0.9*m.x318 + m.x417 == 0) m.c367 = Constraint(expr= - 0.9*m.x319", "1.18887736200171*m.b654 <= 
1.18887736200171) m.c637 = Constraint(expr= m.x469 + 1.18887736200171*m.b655 <=", "m.x73 = Var(within=Reals,bounds=(0,None),initialize=0) m.x74 = Var(within=Reals,bounds=(0,None),initialize=0) m.x75 = Var(within=Reals,bounds=(0,None),initialize=0) m.x76", "Var(within=Reals,bounds=(None,None),initialize=0) m.x791 = Var(within=Reals,bounds=(None,None),initialize=0) m.x792 = Var(within=Reals,bounds=(None,None),initialize=0) m.x793 = Var(within=Reals,bounds=(None,None),initialize=0)", "Constraint(expr= m.b650 - m.b651 <= 0) m.c1068 = Constraint(expr= m.b650", "0) m.c1044 = Constraint(expr= m.b626 - m.b628 <= 0) m.c1045", "- 5*m.b686 - 4*m.b687 - 6*m.b688 - 8*m.b689 - 7*m.b690", "m.x23 = Var(within=Reals,bounds=(0,None),initialize=0) m.x24 = Var(within=Reals,bounds=(0,None),initialize=0) m.x25 = Var(within=Reals,bounds=(0,None),initialize=0) m.x26", "= Constraint(expr= m.x67 - m.x331 - m.x337 == 0) m.c419", "m.c1262 = Constraint(expr= m.b765 + m.b766 <= 1) m.c1263 =", "- m.b650 + m.b651 - m.b741 <= 0) m.c1339 =", "m.c63 = Constraint(expr= m.x6 - m.x213 - m.x216 == 0)", "m.c320 = Constraint(expr= m.x56 - m.x302 - m.x305 == 0)", "- m.x102 - m.x105 - m.x108 == 0) m.c31 =", "Constraint(expr= 4*m.b767 + m.x857 == 0) m.c1005 = Constraint(expr= 8*m.b768", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x237 = Var(within=Reals,bounds=(0,None),initialize=0) m.x238 = Var(within=Reals,bounds=(0,None),initialize=0) m.x239 =", "1.26558121681553*m.b639 <= 1.26558121681553) m.c490 = Constraint(expr= m.x373 + 1.26558121681553*m.b640 <=", "= Constraint(expr= m.b596 + m.b599 - m.b602 >= 0) m.c1404", "Constraint(expr= m.x520 - 0.705049913072943*m.b664 <= 0) m.c716 = Constraint(expr= m.x521", "m.x563 + 15*m.b683 <= 15) m.c915 = Constraint(expr= m.x564 +", "m.c801 = Constraint(expr= m.x198 - m.x567 - m.x570 == 0)", "Constraint(expr= - m.b656 + m.b657 - m.b747 <= 0) m.c1345", "= Constraint(expr= m.b650 - m.b652 <= 0) m.c1069 = Constraint(expr=", "- m.x350 - m.x353 == 0) m.c324 = 
Constraint(expr= m.x75", "<= 0) m.c233 = Constraint(expr= m.x293 == 0) m.c234 =", "- 15*m.b626 <= 0) m.c354 = Constraint(expr= m.x309 - 15*m.b627", "<= 0) m.c722 = Constraint(expr= m.x503 == 0) m.c723 =", "3*m.b727 - 4*m.b728 - 8*m.b729 - m.b730 - 2*m.b731 -", "3.71357206670431*m.b598 <= 0) m.c77 = Constraint(expr= m.x227 + 3.71357206670431*m.b596 <=", "- 0.6*m.x562 + m.x592 == 0) m.c899 = Constraint(expr= m.x563", "= Constraint(expr= m.b693 + m.b694 <= 1) m.c1119 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x243 = Var(within=Reals,bounds=(0,None),initialize=0) m.x244 = Var(within=Reals,bounds=(0,None),initialize=0) m.x245 = Var(within=Reals,bounds=(0,None),initialize=0)", "log(1 + m.x529/(0.001 + 0.999*m.b679)))*(0.001 + 0.999*m.b679) <= 0) m.c845", "m.b696 + m.b697 <= 1) m.c1125 = Constraint(expr= m.b695 +", "<= 0) m.c784 = Constraint(expr= m.x538 - 15*m.b670 <= 0)", "m.b661 - m.b751 <= 0) m.c1349 = Constraint(expr= m.b662 -", "= Constraint(expr=(m.x459/(0.001 + 0.999*m.b651) - 0.8*log(1 + m.x405/(0.001 + 0.999*m.b651)))*(0.001", "m.b688 <= 1) m.c1109 = Constraint(expr= m.b689 + m.b690 <=", "m.c383 = Constraint(expr= m.x86 - m.x374 - m.x377 == 0)", "m.x557 + 15*m.b680 <= 15) m.c888 = Constraint(expr= m.x558 +", "m.x115 = Var(within=Reals,bounds=(0,None),initialize=0) m.x116 = Var(within=Reals,bounds=(0,None),initialize=0) m.x117 = Var(within=Reals,bounds=(0,None),initialize=0) m.x118", "- m.b620 + m.b621 - m.b711 <= 0) m.c1309 =", "+ 0.690184503917672*m.b679 <= 0.690184503917672) m.c869 = Constraint(expr= - 0.9*m.x554 +", "m.b770 = Var(within=Binary,bounds=(0,1),initialize=0) m.b771 = Var(within=Binary,bounds=(0,1),initialize=0) m.b772 = Var(within=Binary,bounds=(0,1),initialize=0) m.b773", "m.b605 - m.b606 + m.b607 - m.b697 <= 0) m.c1295", "Var(within=Reals,bounds=(0,None),initialize=0) m.x66 = Var(within=Reals,bounds=(0,None),initialize=0) m.x67 = Var(within=Reals,bounds=(0,None),initialize=0) m.x68 = 
Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x93 = Var(within=Reals,bounds=(0,None),initialize=0) m.x94 = Var(within=Reals,bounds=(0,None),initialize=0) m.x95 =", "m.b745 = Var(within=Binary,bounds=(0,1),initialize=0) m.b746 = Var(within=Binary,bounds=(0,1),initialize=0) m.b747 = Var(within=Binary,bounds=(0,1),initialize=0) m.b748", "- m.x54 - m.x57 - m.x60 == 0) m.c19 =", "Constraint(expr= m.x15 - m.x231 - m.x234 == 0) m.c94 =", "m.x149 - m.x488 - m.x491 == 0) m.c678 = Constraint(expr=", "m.b663 = Var(within=Binary,bounds=(0,1),initialize=0) m.b664 = Var(within=Binary,bounds=(0,1),initialize=0) m.b665 = Var(within=Binary,bounds=(0,1),initialize=0) m.b666", "- m.x582 == 0) m.c856 = Constraint(expr= m.x205 - m.x580", "0) m.c860 = Constraint(expr= m.x533 + 0.994083415506506*m.b677 <= 0.994083415506506) m.c861", "m.x808 == 0) m.c956 = Constraint(expr= 7*m.b719 + m.x809 ==", "== 0) m.c512 = Constraint(expr= m.x95 - m.x386 - m.x389", "m.x134 - m.x464 - m.x467 == 0) m.c624 = Constraint(expr=", "m.x313 == 0) m.c344 = Constraint(expr= m.x359 == 0) m.c345", "0) m.c766 = Constraint(expr= m.x172 - m.x514 - m.x517 ==", "m.x97 - m.x100 == 0) m.c29 = Constraint(expr= m.x77 -", "Constraint(expr= m.b659 - m.b749 <= 0) m.c1347 = Constraint(expr= -", "m.b773 + m.b774 <= 1) m.c1280 = Constraint(expr= m.b774 +", "m.b744 + m.b745 <= 1) m.c1221 = Constraint(expr= m.b743 +", "= Constraint(expr= m.x365 + 1.26558121681553*m.b635 <= 1.26558121681553) m.c453 = Constraint(expr=", "- m.x436 - m.x439 == 0) m.c485 = Constraint(expr= m.x368", "0) m.c978 = Constraint(expr= 8*m.b741 + m.x831 == 0) m.c979", "Constraint(expr= m.x34 - m.x250 - m.x253 == 0) m.c179 =", "0.940066550763924*m.b661 <= 0) m.c689 = Constraint(expr= m.x491 + 0.940066550763924*m.b659 <=", "= Constraint(expr= - 0.9*m.x298 + m.x346 == 0) m.c287 =", "m.x593 = Var(within=Reals,bounds=(0,None),initialize=0) m.x594 = Var(within=Reals,bounds=(0,None),initialize=0) m.x595 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.b596", "m.x572 - m.x575 == 0) m.c828 = Constraint(expr= m.x201 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x200 = Var(within=Reals,bounds=(0,None),initialize=0) m.x201 = Var(within=Reals,bounds=(0,None),initialize=0) m.x202 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x266 - 2.54515263975353*m.b605 <= 0) m.c156 = Constraint(expr= m.x267", "Var(within=Reals,bounds=(0,None),initialize=0) m.x117 = Var(within=Reals,bounds=(0,None),initialize=0) m.x118 = Var(within=Reals,bounds=(0,None),initialize=0) m.x119 = Var(within=Reals,bounds=(0,None),initialize=0)", "- 2.54515263975353*m.b606 <= 0) m.c157 = Constraint(expr= m.x268 - 2.54515263975353*m.b607", "m.x127 - m.x448 - m.x451 == 0) m.c545 = Constraint(expr=", "- m.b612 <= 0) m.c1029 = Constraint(expr= m.b611 - m.b613", "- 3.71357206670431*m.b596 <= 0) m.c75 = Constraint(expr= m.x225 - 3.71357206670431*m.b597", "Constraint(expr= m.b647 - m.b648 <= 0) m.c1065 = Constraint(expr= m.b647", "Var(within=Reals,bounds=(0,None),initialize=0) m.x300 = Var(within=Reals,bounds=(0,None),initialize=0) m.x301 = Var(within=Reals,bounds=(0,None),initialize=0) m.x302 = Var(within=Reals,bounds=(0,None),initialize=0)", ">= 0) m.c1400 = Constraint(expr= - m.b626 + m.b647 +", "= Constraint(expr= m.b662 - m.b664 <= 0) m.c1081 = Constraint(expr=", "0) m.c349 = Constraint(expr= m.x61 - m.x310 - m.x313 ==", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x272 = Var(within=Reals,bounds=(0,None),initialize=0) m.x273 = Var(within=Reals,bounds=(0,None),initialize=0) m.x274 =", "+ m.x802 == 0) m.c950 = Constraint(expr= 4*m.b713 + m.x803", "Var(within=Binary,bounds=(0,1),initialize=0) m.b603 = Var(within=Binary,bounds=(0,1),initialize=0) m.b604 = Var(within=Binary,bounds=(0,1),initialize=0) m.b605 = Var(within=Binary,bounds=(0,1),initialize=0)", "3.34221486003388) m.c134 = Constraint(expr=(m.x266/(0.001 + 0.999*m.b605) - 1.5*log(1 + m.x242/(0.001", "0) m.c1049 = 
Constraint(expr= m.b632 - m.b633 <= 0) m.c1050", "= Var(within=Reals,bounds=(0,30),initialize=0) m.x37 = Var(within=Reals,bounds=(0,30),initialize=0) m.x38 = Var(within=Reals,bounds=(0,None),initialize=0) m.x39 =", "Var(within=Binary,bounds=(0,1),initialize=0) m.b698 = Var(within=Binary,bounds=(0,1),initialize=0) m.b699 = Var(within=Binary,bounds=(0,1),initialize=0) m.b700 = Var(within=Binary,bounds=(0,1),initialize=0)", "0) m.c972 = Constraint(expr= 4*m.b735 + m.x825 == 0) m.c973", "m.x279 == 0) m.c163 = Constraint(expr= - m.x250 + m.x280", "== 0) m.c148 = Constraint(expr= m.x43 - m.x268 - m.x274", "- m.x107 == 0) m.c30 = Constraint(expr= m.x78 - m.x102", "+ m.b712 <= 1) m.c1157 = Constraint(expr= m.b713 + m.b714", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b665 = Var(within=Binary,bounds=(0,1),initialize=0) m.b666 = Var(within=Binary,bounds=(0,1),initialize=0) m.b667 =", "m.b620 - m.b710 <= 0) m.c1308 = Constraint(expr= - m.b620", "0) m.c1022 = Constraint(expr= m.b605 - m.b606 <= 0) m.c1023", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x855 = Var(within=Reals,bounds=(None,None),initialize=0) m.x856 = Var(within=Reals,bounds=(None,None),initialize=0) m.x857 =", "m.b666 - m.b667 <= 0) m.c1085 = Constraint(expr= m.b668 -", "+ m.b703 <= 1) m.c1135 = Constraint(expr= m.b701 + m.b702", "Constraint(expr= m.x385 + 33.5*m.b640 <= 33.5) m.c497 = Constraint(expr= m.x434", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x106 = Var(within=Reals,bounds=(0,None),initialize=0) m.x107 = Var(within=Reals,bounds=(0,None),initialize=0) m.x108 =", "m.x267 - 2.54515263975353*m.b606 <= 0) m.c157 = Constraint(expr= m.x268 -", "0.999*m.b611) - 1.25*log(1 + m.x284/(0.001 + 0.999*m.b611)))*(0.001 + 0.999* m.b611)", "0) m.c81 = Constraint(expr=(m.x231/(0.001 + 0.999*m.b600) - 1.2*log(1 + m.x219/(0.001", "Constraint(expr=(m.x328/(0.001 + 0.999*m.b616) - 0.9*log(1 + m.x292/(0.001 + 0.999*m.b616)))*(0.001 +", "m.b675 - m.b765 <= 0) m.c1363 = Constraint(expr= - m.b674", "- 
m.x454 - m.x457 == 0) m.c572 = Constraint(expr= m.x398", "== 0) m.c643 = Constraint(expr= m.x475 == 0) m.c644 =", "m.c359 = Constraint(expr= m.x356 - 3.04984759446376*m.b626 <= 0) m.c360 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x382 = Var(within=Reals,bounds=(0,None),initialize=0) m.x383 = Var(within=Reals,bounds=(0,None),initialize=0) m.x384 =", "m.x331 - m.x337 == 0) m.c419 = Constraint(expr= m.x113 -", "Constraint(expr= m.x97 - m.x388 - m.x391 == 0) m.c515 =", "m.b643 <= 0) m.c1060 = Constraint(expr= m.b642 - m.b643 <=", "m.x335 = Var(within=Reals,bounds=(0,None),initialize=0) m.x336 = Var(within=Reals,bounds=(0,None),initialize=0) m.x337 = Var(within=Reals,bounds=(0,None),initialize=0) m.x338", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x452 = Var(within=Reals,bounds=(0,None),initialize=0) m.x453 = Var(within=Reals,bounds=(0,None),initialize=0) m.x454 =", "Constraint(expr= m.x281 == 0) m.c174 = Constraint(expr= m.x282 == 0)", "0) m.c218 = Constraint(expr= m.x284 - 3.34221486003388*m.b611 <= 0) m.c219", "0.999*m.b626) - 1.1*log(1 + m.x308/(0.001 + 0.999*m.b626)))*(0.001 + 0.999* m.b626)", "Var(within=Reals,bounds=(None,None),initialize=0) m.x795 = Var(within=Reals,bounds=(None,None),initialize=0) m.x796 = Var(within=Reals,bounds=(None,None),initialize=0) m.x797 = Var(within=Reals,bounds=(None,None),initialize=0)", "<= 1.83548069293539) m.c395 = Constraint(expr= m.x374 - 20*m.b629 <= 0)", "+ m.x538 == 0) m.c749 = Constraint(expr= - 0.5*m.x512 +", "m.c771 = Constraint(expr= m.x507 - 0.940066550763924*m.b669 <= 0) m.c772 =", "Constraint(expr= m.x488 - 0.940066550763924*m.b659 <= 0) m.c687 = Constraint(expr= m.x489", "<= 1) m.c1266 = Constraint(expr= m.b767 + m.b769 <= 1)", "m.c1445 = Constraint(expr= m.b623 - m.b644 >= 0) m.c1446 =", "m.x833 == 0) m.c981 = Constraint(expr= 4*m.b744 + m.x834 ==", "m.x372 = Var(within=Reals,bounds=(0,None),initialize=0) m.x373 = Var(within=Reals,bounds=(0,None),initialize=0) m.x374 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x375", "250*m.x210 + 300*m.x211 - 5*m.b686 - 4*m.b687 - 6*m.b688 -", "Constraint(expr= m.x588 + 13.5*m.b681 <= 13.5) m.c895 = Constraint(expr= m.x589", "== 0) m.c597 = Constraint(expr= m.x132 - m.x459 - m.x462", "m.c650 = Constraint(expr= m.x146 - m.x482 - m.x485 == 0)", "m.c868 = Constraint(expr= m.x583 + 0.690184503917672*m.b679 <= 0.690184503917672) m.c869 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x93 = Var(within=Reals,bounds=(0,None),initialize=0) m.x94 = Var(within=Reals,bounds=(0,None),initialize=0) m.x95 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c10 = Constraint(expr= m.x19 - m.x22 - m.x25 ==", "= Constraint(expr= m.x44 - m.x53 - m.x56 - m.x59 ==", "m.c1098 = Constraint(expr= m.b680 - m.b682 <= 0) m.c1099 =", "<= 30) m.c195 = Constraint(expr= m.x258 + 30*m.b609 <= 30)", "<= 0) m.c193 = Constraint(expr= m.x256 - 30*m.b610 <= 0)", "- m.x332 == 0) m.c243 = Constraint(expr= m.x66 - m.x327", "m.b672 + m.b673 - m.b763 <= 0) m.c1361 = Constraint(expr=", "+ m.b648 - m.b738 <= 0) m.c1336 = Constraint(expr= -", ">= 0) m.c1462 = Constraint(expr= - m.b664 + m.b673 +", "= Constraint(expr= m.x516 == 0) m.c757 = Constraint(expr= m.x517 ==", "+ m.b619 >= 0) m.c1388 = Constraint(expr= - m.b617 +", "m.x499 + 0.940066550763924*m.b664 <= 0.940066550763924) m.c713 = Constraint(expr= m.x518 -", "m.x281 + 15*m.b608 <= 15) m.c201 = Constraint(expr= m.x282 +", "Constraint(expr= m.x391 + 9*m.b643 <= 9) m.c524 = Constraint(expr= m.x440", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x185 = Var(within=Reals,bounds=(0,None),initialize=0) m.x186 = Var(within=Reals,bounds=(0,None),initialize=0) m.x187 =", "== 0) m.c570 = Constraint(expr= m.x129 - m.x453 - m.x456", "== 0) m.c853 = Constraint(expr= m.x178 - m.x529 - m.x535", "+ m.x789 == 0) m.c937 = Constraint(expr= 6*m.b700 + m.x790", "30*m.x126 + 35*m.x127 + 25*m.x128 + 50*m.x129 + 10*m.x130 +", "= Constraint(expr= m.b664 - m.b676 >= 0) m.c1478 = 
Constraint(expr=", "= Constraint(expr= m.x115 - m.x424 - m.x427 == 0) m.c422", "m.b706 <= 1) m.c1144 = Constraint(expr= m.b705 + m.b706 <=", "m.c1400 = Constraint(expr= - m.b626 + m.b647 + m.b650 +", "- m.x191 - m.x194 == 0) m.c51 = Constraint(expr= m.x180", "- 3.34221486003388*m.b612 <= 0) m.c220 = Constraint(expr= m.x286 - 3.34221486003388*m.b613", "+ 0.999* m.b661) <= 0) m.c668 = Constraint(expr= m.x479 ==", "- 4.45628648004517*m.b604 <= 0) m.c125 = Constraint(expr= m.x239 + 4.45628648004517*m.b602", "0) m.c504 = Constraint(expr= - m.x387 + m.x441 == 0)", "= Constraint(expr= m.x322 == 0) m.c212 = Constraint(expr= m.x47 -", "m.c922 = Constraint(expr= m.x595 + 9*m.b685 <= 9) m.c923 =", "m.x151 = Var(within=Reals,bounds=(0,None),initialize=0) m.x152 = Var(within=Reals,bounds=(0,None),initialize=0) m.x153 = Var(within=Reals,bounds=(0,None),initialize=0) m.x154", "m.b739 <= 1) m.c1210 = Constraint(expr= m.b738 + m.b739 <=", "Constraint(expr= m.b740 + m.b742 <= 1) m.c1216 = Constraint(expr= m.b741", "0.78338879230327*m.b656 <= 0.78338879230327) m.c663 = Constraint(expr= m.x486 + 0.78338879230327*m.b657 <=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x186 = Var(within=Reals,bounds=(0,None),initialize=0) m.x187 = Var(within=Reals,bounds=(0,None),initialize=0) m.x188 = Var(within=Reals,bounds=(0,None),initialize=0)", "0.999*m.b601)))*(0.001 + 0.999* m.b601) <= 0) m.c83 = Constraint(expr= m.x221", "== 0) m.c999 = Constraint(expr= 8*m.b762 + m.x852 == 0)", "Constraint(expr= 3*m.b772 + m.x862 == 0) m.c1010 = Constraint(expr= 8*m.b773", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x492 = Var(within=Reals,bounds=(0,None),initialize=0) m.x493 = Var(within=Reals,bounds=(0,None),initialize=0) m.x494 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x44 = Var(within=Reals,bounds=(0,None),initialize=0) m.x45 = Var(within=Reals,bounds=(0,None),initialize=0) m.x46 =", "m.b761 + m.b762 <= 1) m.c1256 = Constraint(expr= m.b762 +", "0) m.c1341 = Constraint(expr= - 
m.b653 + m.b654 - m.b744", "m.b769 <= 1) m.c1267 = Constraint(expr= m.b767 + m.b768 <=", "= Constraint(expr= m.x378 == 0) m.c376 = Constraint(expr= m.x379 ==", "Constraint(expr= m.x443 == 0) m.c510 = Constraint(expr= m.x444 == 0)", "3373 3193 180 0 # # Reformulation has removed 1", "Var(within=Reals,bounds=(0,None),initialize=0) m.x255 = Var(within=Reals,bounds=(0,None),initialize=0) m.x256 = Var(within=Reals,bounds=(0,None),initialize=0) m.x257 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x19 == 0) m.c8 = Constraint(expr= m.x17 - m.x20 -", "<= 1) m.c1133 = Constraint(expr= m.b701 + m.b702 <= 1)", "Constraint(expr= m.x365 + 1.26558121681553*m.b635 <= 1.26558121681553) m.c453 = Constraint(expr= m.x366", "<= 1) m.c1237 = Constraint(expr= m.b752 + m.b753 <= 1)", "m.b762 <= 0) m.c1360 = Constraint(expr= - m.b671 - m.b672", "0) m.c985 = Constraint(expr= 2*m.b748 + m.x838 == 0) m.c986", "m.x53 - m.x56 - m.x59 == 0) m.c18 = Constraint(expr=", "Constraint(expr= m.b629 - m.b631 <= 0) m.c1048 = Constraint(expr= m.b630", "3.34221486003388) m.c223 = Constraint(expr= m.x289 + 3.34221486003388*m.b613 <= 3.34221486003388) m.c224", "= Constraint(expr= m.x462 == 0) m.c592 = Constraint(expr= m.x463 ==", "= Constraint(expr= m.b636 - m.b637 <= 0) m.c1055 = Constraint(expr=", "= Constraint(expr= - m.b662 + m.b663 - m.b753 <= 0)", "== 0) m.c770 = Constraint(expr= m.x506 - 0.940066550763924*m.b668 <= 0)", "== 0) m.c950 = Constraint(expr= 4*m.b713 + m.x803 == 0)", "Constraint(expr= - m.b662 - m.b663 + m.b664 - m.b754 <=", "m.b657 - m.b658 <= 0) m.c1076 = Constraint(expr= m.b659 -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x297 = Var(within=Reals,bounds=(0,None),initialize=0) m.x298 = Var(within=Reals,bounds=(0,None),initialize=0) m.x299 =", "+ m.x827 == 0) m.c975 = Constraint(expr= 7*m.b738 + m.x828", "- m.b644 + m.b645 - m.b735 <= 0) m.c1333 =", "m.b758 <= 0) m.c1356 = Constraint(expr= - m.b668 + m.b669", "Var(within=Reals,bounds=(0,None),initialize=0) m.x162 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x163 = Var(within=Reals,bounds=(0,None),initialize=0) m.x164 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.b611 - m.b613 <= 0) m.c1030 = Constraint(expr= m.b612", "3.04984759446376*m.b626 <= 0) m.c360 = Constraint(expr= m.x357 - 3.04984759446376*m.b627 <=", "m.b708 + m.b709 <= 1) m.c1151 = Constraint(expr= m.b710 +", "<= 0) m.c1330 = Constraint(expr= - m.b641 - m.b642 +", "m.x239 == 0) m.c117 = Constraint(expr= m.x27 - m.x237 -", "= Constraint(expr= m.x413 == 0) m.c615 = Constraint(expr= m.x414 ==", "Var(within=Reals,bounds=(0,None),initialize=0) m.x428 = Var(within=Reals,bounds=(0,None),initialize=0) m.x429 = Var(within=Reals,bounds=(0,None),initialize=0) m.x430 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c927 = Constraint(expr= 7*m.b690 + m.x780 == 0) m.c928 =", "= Constraint(expr= 4*m.b735 + m.x825 == 0) m.c973 = Constraint(expr=", ">= 0) m.c1452 = Constraint(expr= m.b627 - m.b651 >= 0)", "m.c198 = Constraint(expr= m.x279 - 15*m.b609 <= 0) m.c199 =", "m.x223 == 0) m.c92 = Constraint(expr= m.x14 - m.x230 -", "m.x290 - 3.34221486003388*m.b614 <= 0) m.c246 = Constraint(expr= m.x291 -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x108 = Var(within=Reals,bounds=(0,None),initialize=0) m.x109 = Var(within=Reals,bounds=(0,None),initialize=0) m.x110 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x153 = Var(within=Reals,bounds=(0,None),initialize=0) m.x154 = Var(within=Reals,bounds=(0,None),initialize=0) m.x155 =", "m.c119 = Constraint(expr= m.x38 - m.x260 - m.x263 == 0)", "Constraint(expr= - m.b653 + m.b656 + m.b659 >= 0) m.c1458", "- m.x289 == 0) m.c215 = Constraint(expr= m.x62 - m.x314", "Var(within=Reals,bounds=(0,None),initialize=0) m.x280 = Var(within=Reals,bounds=(0,None),initialize=0) m.x281 = Var(within=Reals,bounds=(0,None),initialize=0) m.x282 = Var(within=Reals,bounds=(0,None),initialize=0)", "<= 0) m.c773 = Constraint(expr= m.x509 + 0.940066550763924*m.b668 <= 
0.940066550763924)", "= Constraint(expr= m.x445 == 0) m.c512 = Constraint(expr= m.x95 -", ">= 0) m.c1403 = Constraint(expr= m.b596 + m.b599 - m.b602", "Var(within=Reals,bounds=(0,None),initialize=0) m.x283 = Var(within=Reals,bounds=(0,None),initialize=0) m.x284 = Var(within=Reals,bounds=(0,None),initialize=0) m.x285 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x521 + 0.705049913072943*m.b662 <= 0.705049913072943) m.c717 = Constraint(expr= m.x522 +", "+ m.x242/(0.001 + 0.999*m.b605)))*(0.001 + 0.999* m.b605) <= 0) m.c135", "9) m.c530 = Constraint(expr= - m.x392 + m.x446 == 0)", "Var(within=Reals,bounds=(None,None),initialize=0) m.x815 = Var(within=Reals,bounds=(None,None),initialize=0) m.x816 = Var(within=Reals,bounds=(None,None),initialize=0) m.x817 = Var(within=Reals,bounds=(None,None),initialize=0)", "- 4.45628648004517*m.b603 <= 0) m.c124 = Constraint(expr= m.x238 - 4.45628648004517*m.b604", "0) m.c579 = Constraint(expr= m.x453 - 1.04900943706034*m.b648 <= 0) m.c580", "Var(within=Binary,bounds=(0,1),initialize=0) m.b695 = Var(within=Binary,bounds=(0,1),initialize=0) m.b696 = Var(within=Binary,bounds=(0,1),initialize=0) m.b697 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.x398 = Var(within=Reals,bounds=(0,None),initialize=0) m.x399 = Var(within=Reals,bounds=(0,None),initialize=0) m.x400 = Var(within=Reals,bounds=(0,None),initialize=0) m.x401", "<= 1) m.c1242 = Constraint(expr= m.b755 + m.b757 <= 1)", "# # Reformulation has removed 1 variable and 1 equation", "0) m.c629 = Constraint(expr= m.x413 + 3.04984759446376*m.b653 <= 3.04984759446376) m.c630", "6*m.b739 + m.x829 == 0) m.c977 = Constraint(expr= 2*m.b740 +", "Constraint(expr= m.b752 + m.b754 <= 1) m.c1237 = Constraint(expr= m.b752", "+ m.b726 <= 1) m.c1184 = Constraint(expr= m.b726 + m.b727", "0) m.c307 = Constraint(expr= m.x346 - 13.5*m.b622 <= 0) m.c308", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x537 = Var(within=Reals,bounds=(0,None),initialize=0) m.x538 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x539 =", "1.32154609891348*m.b632 <= 1.32154609891348) m.c426 = Constraint(expr= m.x336 + 1.32154609891348*m.b633 <=", "m.b627 - m.b628 <= 0) m.c1046 = Constraint(expr= m.b629 -", "m.b701 + m.b702 <= 1) m.c1136 = Constraint(expr= m.b702 +", "Constraint(expr= - m.b602 - m.b603 + m.b604 - m.b694 <=", "m.x304 - m.x307 == 0) m.c323 = Constraint(expr= m.x74 -", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b750 = Var(within=Binary,bounds=(0,1),initialize=0) m.b751 = Var(within=Binary,bounds=(0,1),initialize=0) m.b752 =", ">= 0) m.c1402 = Constraint(expr= - m.b628 + m.b649 +", "m.b642 + m.b643 - m.b733 <= 0) m.c1331 = Constraint(expr=", "<= 1) m.c1230 = Constraint(expr= m.b749 + m.b751 <= 1)", "m.x802 == 0) m.c950 = Constraint(expr= 4*m.b713 + m.x803 ==", "m.x375 = Var(within=Reals,bounds=(0,None),initialize=0) m.x376 = Var(within=Reals,bounds=(0,None),initialize=0) m.x377 = Var(within=Reals,bounds=(0,None),initialize=0) m.x378", "- m.b683 + m.b684 - m.b774 <= 0) m.c1372 =", "= Constraint(expr= m.x323 == 0) m.c372 = Constraint(expr= m.x324 ==", "Var(within=Binary,bounds=(0,1),initialize=0) m.b773 = Var(within=Binary,bounds=(0,1),initialize=0) m.b774 = Var(within=Binary,bounds=(0,1),initialize=0) m.b775 = Var(within=Binary,bounds=(0,1),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x159 = Var(within=Reals,bounds=(0,None),initialize=0) m.x160 = Var(within=Reals,bounds=(0,None),initialize=0) m.x161 =", "= Constraint(expr= m.x19 - m.x22 - m.x25 == 0) m.c11", "m.b687 + m.b688 <= 1) m.c1109 = Constraint(expr= m.b689 +", "+ 0.999*m.b598)))*(0.001 + 0.999*m.b598) <= 0) m.c56 = Constraint(expr= m.x215", "m.c314 = Constraint(expr= m.x305 == 0) m.c315 = Constraint(expr= m.x306", "m.x551 == 0) m.c819 = Constraint(expr= m.x552 == 0) m.c820", "35*m.x122 + 50*m.x123 + 20*m.x124 + 20*m.x125 + 30*m.x126 +", "Var(within=Reals,bounds=(0,None),initialize=0) m.x70 = Var(within=Reals,bounds=(0,None),initialize=0) m.x71 
= Var(within=Reals,bounds=(0,None),initialize=0) m.x72 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c326 = Constraint(expr= m.x302 - 15*m.b623 <= 0) m.c327 =", "m.b707 + m.b709 <= 1) m.c1150 = Constraint(expr= m.b708 +", "m.x845 == 0) m.c993 = Constraint(expr= 3*m.b756 + m.x846 ==", "m.c210 = Constraint(expr= m.x321 == 0) m.c211 = Constraint(expr= m.x322", "m.b615 + m.b616 - m.b706 <= 0) m.c1304 = Constraint(expr=", "5*m.b697 + m.x787 == 0) m.c935 = Constraint(expr= 6*m.b698 +", "Var(within=Reals,bounds=(0,None),initialize=0) m.x113 = Var(within=Reals,bounds=(0,None),initialize=0) m.x114 = Var(within=Reals,bounds=(0,None),initialize=0) m.x115 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x539 == 0) m.c768 = Constraint(expr= m.x180 - m.x537 -", "m.b646 <= 0) m.c1064 = Constraint(expr= m.b647 - m.b648 <=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x78 = Var(within=Reals,bounds=(0,None),initialize=0) m.x79 = Var(within=Reals,bounds=(0,None),initialize=0) m.x80 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x490 = Var(within=Reals,bounds=(0,None),initialize=0) m.x491 = Var(within=Reals,bounds=(0,None),initialize=0) m.x492 = Var(within=Reals,bounds=(0,None),initialize=0) m.x493", "m.x564 + 15*m.b684 <= 15) m.c916 = Constraint(expr= m.x565 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x400 = Var(within=Reals,bounds=(0,None),initialize=0) m.x401 = Var(within=Reals,bounds=(0,None),initialize=0) m.x402 =", "0) m.c1080 = Constraint(expr= m.b662 - m.b664 <= 0) m.c1081", "m.c775 = Constraint(expr= m.x511 + 0.940066550763924*m.b670 <= 0.940066550763924) m.c776 =", "= Constraint(expr= 6*m.b700 + m.x790 == 0) m.c938 = Constraint(expr=", "m.b644 >= 0) m.c1398 = Constraint(expr= - m.b624 + m.b642", "Constraint(expr=(m.x572/(0.001 + 0.999*m.b674) - 0.9*log(1 + m.x548/(0.001 + 0.999*m.b674)))*(0.001 +", "0.705049913072943*m.b676 <= 0) m.c833 = Constraint(expr= m.x551 + 0.705049913072943*m.b674 <=", "m.x829 == 0) m.c977 = Constraint(expr= 
2*m.b740 + m.x830 ==", "= Constraint(expr= 8*m.b729 + m.x819 == 0) m.c967 = Constraint(expr=", "= Constraint(expr= m.x469 + 1.18887736200171*m.b655 <= 1.18887736200171) m.c638 = Constraint(expr=(m.x482/(0.001", "Constraint(expr= m.b705 + m.b706 <= 1) m.c1145 = Constraint(expr= m.b707", "Constraint(expr= m.x463 == 0) m.c593 = Constraint(expr= m.x104 - m.x404", "m.x160 - m.x163 - m.x166 - m.x169 == 0) m.c47", "Constraint(expr= - m.b606 + m.b618 >= 0) m.c1387 = Constraint(expr=", "m.c1051 = Constraint(expr= m.b633 - m.b634 <= 0) m.c1052 =", "- 10*m.b695 - 9*m.b696 - 5*m.b697 - 6*m.b698 - 10*m.b699", "Variable counts # x b i s1s s2s sc si", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x554 = Var(within=Reals,bounds=(0,None),initialize=0) m.x555 = Var(within=Reals,bounds=(0,None),initialize=0) m.x556 =", "- m.b622 <= 0) m.c1040 = Constraint(expr= m.b623 - m.b624", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b745 = Var(within=Binary,bounds=(0,1),initialize=0) m.b746 = Var(within=Binary,bounds=(0,1),initialize=0) m.b747 =", "m.x261 = Var(within=Reals,bounds=(0,None),initialize=0) m.x262 = Var(within=Reals,bounds=(0,None),initialize=0) m.x263 = Var(within=Reals,bounds=(0,None),initialize=0) m.x264", "= Constraint(expr= m.x251 + 4.45628648004517*m.b608 <= 4.45628648004517) m.c189 = Constraint(expr=", "Constraint(expr= m.x469 + 1.18887736200171*m.b655 <= 1.18887736200171) m.c638 = Constraint(expr=(m.x482/(0.001 +", "- m.b771 - 3*m.b772 - 8*m.b773 - 3*m.b774 - 4*m.b775,", "1.83548069293539*m.b613 <= 0) m.c227 = Constraint(expr= m.x320 + 1.83548069293539*m.b611 <=", "- m.x73 - m.x91 + m.x94 == 0) m.c26 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x232 = Var(within=Reals,bounds=(0,None),initialize=0) m.x233 = Var(within=Reals,bounds=(0,None),initialize=0) m.x234 = Var(within=Reals,bounds=(0,None),initialize=0)", "ConcreteModel() m.x2 = Var(within=Reals,bounds=(0,40),initialize=0) m.x3 = Var(within=Reals,bounds=(0,40),initialize=0) m.x4 = 
Var(within=Reals,bounds=(0,40),initialize=0)", "Constraint(expr= m.b626 - m.b627 <= 0) m.c1044 = Constraint(expr= m.b626", "m.b757 <= 1) m.c1245 = Constraint(expr= m.b755 + m.b757 <=", "Constraint(expr= m.x326 - 1.32154609891348*m.b614 <= 0) m.c252 = Constraint(expr= m.x327", "<= 9) m.c337 = Constraint(expr= m.x355 + 9*m.b625 <= 9)", "== 0) m.c829 = Constraint(expr= m.x202 - m.x574 - m.x577", "Var(within=Reals,bounds=(0,None),initialize=0) m.x533 = Var(within=Reals,bounds=(0,None),initialize=0) m.x534 = Var(within=Reals,bounds=(0,None),initialize=0) m.x535 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= 3*m.b727 + m.x817 == 0) m.c965 = Constraint(expr=", "m.c523 = Constraint(expr= m.x391 + 9*m.b643 <= 9) m.c524 =", "0) m.c322 = Constraint(expr= m.x58 - m.x304 - m.x307 ==", "m.x488 - 0.940066550763924*m.b659 <= 0) m.c687 = Constraint(expr= m.x489 -", "m.b667 <= 0) m.c1084 = Constraint(expr= m.b666 - m.b667 <=", "= Constraint(expr= - m.b614 + m.b632 >= 0) m.c1383 =", "m.b765 = Var(within=Binary,bounds=(0,1),initialize=0) m.b766 = Var(within=Binary,bounds=(0,1),initialize=0) m.b767 = Var(within=Binary,bounds=(0,1),initialize=0) m.b768", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b667 = Var(within=Binary,bounds=(0,1),initialize=0) m.b668 = Var(within=Binary,bounds=(0,1),initialize=0) m.b669 =", "<= 0) m.c641 = Constraint(expr= m.x473 == 0) m.c642 =", "Constraint(expr= m.x181 - m.x190 - m.x193 - m.x196 == 0)", "- m.x182 - m.x185 == 0) m.c48 = Constraint(expr= m.x174", "m.x50 = Var(within=Reals,bounds=(0,None),initialize=0) m.x51 = Var(within=Reals,bounds=(0,None),initialize=0) m.x52 = Var(within=Reals,bounds=(0,None),initialize=0) m.x53", "Constraint(expr= m.b771 + m.b772 <= 1) m.c1277 = Constraint(expr= m.b773", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x834 = Var(within=Reals,bounds=(None,None),initialize=0) m.x835 = Var(within=Reals,bounds=(None,None),initialize=0) m.x836 =", "= Constraint(expr= m.b620 - m.b622 <= 0) m.c1039 
= Constraint(expr=", "m.b774 <= 1) m.c1280 = Constraint(expr= m.b774 + m.b775 <=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x346 = Var(within=Reals,bounds=(0,None),initialize=0) m.x347 = Var(within=Reals,bounds=(0,None),initialize=0) m.x348 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x535 = Var(within=Reals,bounds=(0,None),initialize=0) m.x536 = Var(within=Reals,bounds=(0,None),initialize=0) m.x537 = Var(within=Reals,bounds=(0,None),initialize=0) m.x538", "15*m.b620 <= 15) m.c303 = Constraint(expr= m.x300 + 15*m.b621 <=", "m.c121 = Constraint(expr= m.x40 - m.x262 - m.x265 == 0)", "= Constraint(expr= m.b707 + m.b709 <= 1) m.c1150 = Constraint(expr=", "m.c593 = Constraint(expr= m.x104 - m.x404 - m.x407 == 0)", "<= 0) m.c893 = Constraint(expr= m.x587 + 13.5*m.b680 <= 13.5)", "Constraint(expr= m.x42 - m.x270 - m.x276 == 0) m.c268 =", "= Constraint(expr= m.b732 + m.b733 <= 1) m.c1199 = Constraint(expr=", "m.c1320 = Constraint(expr= - m.b632 + m.b633 - m.b723 <=", "Constraint(expr= m.b664 - m.b673 >= 0) m.c1475 = Constraint(expr= m.b662", "m.x11 = Var(within=Reals,bounds=(0,None),initialize=0) m.x12 = Var(within=Reals,bounds=(0,None),initialize=0) m.x13 = Var(within=Reals,bounds=(0,None),initialize=0) m.x14", "- m.b680 >= 0) m.c1482 = Constraint(expr= m.b669 - m.b681", "= Constraint(expr= m.b663 - m.b664 <= 0) m.c1082 = Constraint(expr=", "m.c162 = Constraint(expr= - m.x249 + m.x279 == 0) m.c163", "+ 0.999*m.b647) - 0.75*log(1 + m.x398/(0.001 + 0.999*m.b647)))*(0.001 + 0.999*", "m.c789 = Constraint(expr=(m.x567/(0.001 + 0.999*m.b672) - 1.25*log(1 + m.x543/(0.001 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x310 = Var(within=Reals,bounds=(0,None),initialize=0) m.x311 = Var(within=Reals,bounds=(0,None),initialize=0) m.x312 =", "m.c753 = Constraint(expr= m.x510 == 0) m.c754 = Constraint(expr= m.x511", "Constraint(expr= m.x248 - 4.45628648004517*m.b608 <= 0) m.c186 = Constraint(expr= m.x249", "m.b707 + m.b708 <= 1) m.c1146 = Constraint(expr= 
m.b707 +", "Constraint(expr= m.b623 - m.b641 >= 0) m.c1443 = Constraint(expr= m.b624", "m.b605 + m.b617 >= 0) m.c1386 = Constraint(expr= - m.b606", "Var(within=Reals,bounds=(0,None),initialize=0) m.x479 = Var(within=Reals,bounds=(0,None),initialize=0) m.x480 = Var(within=Reals,bounds=(0,None),initialize=0) m.x481 = Var(within=Reals,bounds=(0,None),initialize=0)", "+ 0.999*m.b658)))*(0.001 + 0.999*m.b658) <= 0) m.c641 = Constraint(expr= m.x473", "m.x230 - 4.45628648004517*m.b599 <= 0) m.c102 = Constraint(expr= m.x231 -", "Constraint(expr= m.x70 - m.x340 - m.x343 == 0) m.c272 =", "m.x164 = Var(within=Reals,bounds=(0,None),initialize=0) m.x165 = Var(within=Reals,bounds=(0,None),initialize=0) m.x166 = Var(within=Reals,bounds=(0,None),initialize=0) m.x167", "0.666992981045719*m.b671 <= 0) m.c810 = Constraint(expr= m.x567 - 0.666992981045719*m.b672 <=", "= Constraint(expr= m.x235 == 0) m.c89 = Constraint(expr= m.x8 -", "= Constraint(expr= m.x209 - m.x590 - m.x593 == 0) m.c909", "9*m.b693 - 4*m.b694 - 10*m.b695 - 9*m.b696 - 5*m.b697 -", "m.x431 == 0) m.c441 = Constraint(expr= m.x432 == 0) m.c442", "m.b750 <= 1) m.c1230 = Constraint(expr= m.b749 + m.b751 <=", "+ m.b735 <= 1) m.c1200 = Constraint(expr= m.b734 + m.b736", "0) m.c964 = Constraint(expr= 3*m.b727 + m.x817 == 0) m.c965", "m.x841 = Var(within=Reals,bounds=(None,None),initialize=0) m.x842 = Var(within=Reals,bounds=(None,None),initialize=0) m.x843 = Var(within=Reals,bounds=(None,None),initialize=0) m.x844", "= Constraint(expr= m.x509 + 0.940066550763924*m.b668 <= 0.940066550763924) m.c774 = Constraint(expr=", "Constraint(expr= m.b738 + m.b739 <= 1) m.c1211 = Constraint(expr= m.b740", "Constraint(expr= m.x521 + 0.705049913072943*m.b662 <= 0.705049913072943) m.c717 = Constraint(expr= m.x522", "Var(within=Reals,bounds=(None,None),initialize=0) m.x806 = Var(within=Reals,bounds=(None,None),initialize=0) m.x807 = Var(within=Reals,bounds=(None,None),initialize=0) m.x808 = Var(within=Reals,bounds=(None,None),initialize=0)", 
"Var(within=Reals,bounds=(0,None),initialize=0) m.x271 = Var(within=Reals,bounds=(0,None),initialize=0) m.x272 = Var(within=Reals,bounds=(0,None),initialize=0) m.x273 = Var(within=Reals,bounds=(0,None),initialize=0)", "+ 0.999*m.b675)))*(0.001 + 0.999* m.b675) <= 0) m.c817 = Constraint(expr=(m.x574/(0.001", "m.b745 + m.x835 == 0) m.c983 = Constraint(expr= 2*m.b746 +", "m.x2 - m.x5 - m.x8 == 0) m.c3 = Constraint(expr=", "- m.x315 - m.x321 == 0) m.c217 = Constraint(expr= m.x64", "m.x166 - m.x169 == 0) m.c47 = Constraint(expr= m.x173 -", "m.x309/(0.001 + 0.999*m.b627)))*(0.001 + 0.999* m.b627) <= 0) m.c340 =", "0.705049913072943*m.b673 <= 0) m.c806 = Constraint(expr= m.x545 + 0.705049913072943*m.b671 <=", "m.x438 + 2.30162356062425*m.b639 <= 2.30162356062425) m.c502 = Constraint(expr= m.x439 +", "Constraint(expr= m.x589 + 13.5*m.b682 <= 13.5) m.c896 = Constraint(expr= -", "m.b601 - m.b610 >= 0) m.c1412 = Constraint(expr= m.b602 -", "865 685 180 0 0 0 0 0 # FX", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x74 = Var(within=Reals,bounds=(0,None),initialize=0) m.x75 = Var(within=Reals,bounds=(0,None),initialize=0) m.x76 =", "m.x337 = Var(within=Reals,bounds=(0,None),initialize=0) m.x338 = Var(within=Reals,bounds=(0,None),initialize=0) m.x339 = Var(within=Reals,bounds=(0,None),initialize=0) m.x340", "= Constraint(expr= m.x304 - 15*m.b625 <= 0) m.c329 = Constraint(expr=", "m.b599 - m.b689 <= 0) m.c1287 = Constraint(expr= - m.b599", "m.c979 = Constraint(expr= 4*m.b742 + m.x832 == 0) m.c980 =", "- 7*m.b714 - 4*m.b715 - 3*m.b716 - 9*m.b717 - 3*m.b718", "m.c605 = Constraint(expr= m.x458 - 1.11894339953103*m.b650 <= 0) m.c606 =", "Constraint(expr= m.x387 - 9*m.b642 <= 0) m.c520 = Constraint(expr= m.x388", "- m.b633 + m.b634 - m.b724 <= 0) m.c1322 =", "m.x404 - 3.04984759446376*m.b650 <= 0) m.c600 = Constraint(expr= m.x405 -", "m.x141 - m.x471 - m.x474 == 0) m.c649 = Constraint(expr=", "= Constraint(expr= m.b731 + m.b733 <= 1) m.c1198 = Constraint(expr=", "- m.x445 == 0) 
m.c518 = Constraint(expr= m.x386 - 9*m.b641", "= Constraint(expr= m.x575 + 0.480234946352917*m.b674 <= 0.480234946352917) m.c840 = Constraint(expr=", "Constraint(expr= m.b696 + m.b697 <= 1) m.c1127 = Constraint(expr= m.b698", "m.c1292 = Constraint(expr= m.b605 - m.b695 <= 0) m.c1293 =", "3*m.b765 - 9*m.b766 - 4*m.b767 - 8*m.b768 - 6*m.b769 -", ">= 0) m.c1397 = Constraint(expr= - m.b623 + m.b641 +", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b718 = Var(within=Binary,bounds=(0,1),initialize=0) m.b719 = Var(within=Binary,bounds=(0,1),initialize=0) m.b720 =", "m.x151 - m.x490 - m.x493 == 0) m.c680 = Constraint(expr=", "m.b664 <= 0) m.c1082 = Constraint(expr= m.b665 - m.b666 <=", "- m.x7 - m.x10 == 0) m.c5 = Constraint(expr= -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x141 = Var(within=Reals,bounds=(0,None),initialize=0) m.x142 = Var(within=Reals,bounds=(0,None),initialize=0) m.x143 =", ">= 0) m.c1483 = Constraint(expr= m.b670 - m.b682 >= 0)", "Constraint(expr= - m.b617 - m.b618 + m.b619 - m.b709 <=", "m.x538 == 0) m.c752 = Constraint(expr= m.x509 == 0) m.c753", "m.b635 = Var(within=Binary,bounds=(0,1),initialize=0) m.b636 = Var(within=Binary,bounds=(0,1),initialize=0) m.b637 = Var(within=Binary,bounds=(0,1),initialize=0) m.b638", "Constraint(expr= m.x390 == 0) m.c508 = Constraint(expr= m.x391 == 0)", "<= 0) m.c735 = Constraint(expr= m.x501 - 0.940066550763924*m.b666 <= 0)", "= Constraint(expr= m.b771 + m.x861 == 0) m.c1009 = Constraint(expr=", "+ m.b634 >= 0) m.c1385 = Constraint(expr= - m.b605 +", "m.b626 >= 0) m.c1392 = Constraint(expr= - m.b609 + m.b621", "m.c1192 = Constraint(expr= m.b729 + m.b730 <= 1) m.c1193 =", "m.b702 = Var(within=Binary,bounds=(0,1),initialize=0) m.b703 = Var(within=Binary,bounds=(0,1),initialize=0) m.b704 = Var(within=Binary,bounds=(0,1),initialize=0) m.b705", "- 3.34221486003388*m.b611 <= 0) m.c219 = Constraint(expr= m.x285 - 3.34221486003388*m.b612", "== 0) m.c796 = Constraint(expr= m.x571 == 0) m.c797 =", "m.x132 - 
m.x459 - m.x462 == 0) m.c598 = Constraint(expr=", "Constraint(expr= m.x534 + 0.994083415506506*m.b678 <= 0.994083415506506) m.c862 = Constraint(expr= m.x535", "Var(within=Reals,bounds=(0,None),initialize=0) m.x575 = Var(within=Reals,bounds=(0,None),initialize=0) m.x576 = Var(within=Reals,bounds=(0,None),initialize=0) m.x577 = Var(within=Reals,bounds=(0,None),initialize=0)", ">= 0) m.c1426 = Constraint(expr= m.b610 - m.b625 >= 0)", "Constraint(expr= m.x555 - 15*m.b681 <= 0) m.c886 = Constraint(expr= m.x556", "+ 0.999*m.b619) <= 0) m.c260 = Constraint(expr= m.x275 == 0)", "== 0) m.c44 = Constraint(expr= m.x158 - m.x161 - m.x164", "m.c1442 = Constraint(expr= m.b623 - m.b641 >= 0) m.c1443 =", "0) m.c1094 = Constraint(expr= m.b677 - m.b678 <= 0) m.c1095", "0.940066550763924) m.c738 = Constraint(expr= m.x504 + 0.940066550763924*m.b666 <= 0.940066550763924) m.c739", "+ 0.999*m.b632) <= 0) m.c408 = Constraint(expr=(m.x423/(0.001 + 0.999*m.b633) -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x186 = Var(within=Reals,bounds=(0,None),initialize=0) m.x187 = Var(within=Reals,bounds=(0,None),initialize=0) m.x188 =", "m.c137 = Constraint(expr= m.x245 == 0) m.c138 = Constraint(expr= m.x246", "m.b752 = Var(within=Binary,bounds=(0,1),initialize=0) m.b753 = Var(within=Binary,bounds=(0,1),initialize=0) m.b754 = Var(within=Binary,bounds=(0,1),initialize=0) m.b755", "m.x271 - 2.54515263975353*m.b619 <= 0) m.c275 = Constraint(expr= m.x275 +", "0) m.c1089 = Constraint(expr= m.b671 - m.b673 <= 0) m.c1090", "Constraint(expr= m.b617 - m.b638 >= 0) m.c1440 = Constraint(expr= m.b618", "- m.x459 - m.x462 == 0) m.c598 = Constraint(expr= m.x133", "- m.b643 >= 0) m.c1445 = Constraint(expr= m.b623 - m.b644", "Constraint(expr= m.b669 - m.b670 <= 0) m.c1088 = Constraint(expr= m.b671", "<= 0) m.c83 = Constraint(expr= m.x221 == 0) m.c84 =", "- 4*m.b694 - 10*m.b695 - 9*m.b696 - 5*m.b697 - 6*m.b698", "Var(within=Reals,bounds=(0,None),initialize=0) m.x199 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x200 = Var(within=Reals,bounds=(0,None),initialize=0) m.x201 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= m.x239 == 0) m.c111 = Constraint(expr= m.x240 ==", "<= 0.666992981045719) m.c813 = Constraint(expr= m.x570 + 0.666992981045719*m.b672 <= 0.666992981045719)", "3.04984759446376) m.c364 = Constraint(expr= m.x361 + 3.04984759446376*m.b628 <= 3.04984759446376) m.c365", "0) m.c1461 = Constraint(expr= - m.b663 + m.b672 + m.b675", "0) m.c1079 = Constraint(expr= m.b662 - m.b663 <= 0) m.c1080", "= Constraint(expr= m.x181 - m.x190 - m.x193 - m.x196 ==", "m.c579 = Constraint(expr= m.x453 - 1.04900943706034*m.b648 <= 0) m.c580 =", "m.c1123 = Constraint(expr= m.b695 + m.b696 <= 1) m.c1124 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x481 = Var(within=Reals,bounds=(0,None),initialize=0) m.x482 = Var(within=Reals,bounds=(0,None),initialize=0) m.x483 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x524 = Var(within=Reals,bounds=(0,None),initialize=0) m.x525 = Var(within=Reals,bounds=(0,None),initialize=0) m.x526 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c237 = Constraint(expr= m.x333 == 0) m.c238 = Constraint(expr= m.x334", "1) m.c1222 = Constraint(expr= m.b744 + m.b745 <= 1) m.c1223", "Constraint(expr= m.x519 - 0.705049913072943*m.b663 <= 0) m.c715 = Constraint(expr= m.x520", "m.c455 = Constraint(expr= m.x428 - 0.572481933717686*m.b635 <= 0) m.c456 =", "30*m.b608 <= 0) m.c192 = Constraint(expr= m.x255 - 30*m.b609 <=", "m.b659) <= 0) m.c666 = Constraint(expr=(m.x489/(0.001 + 0.999*m.b660) - 1.2*log(1", "- m.x380 - m.x383 == 0) m.c480 = Constraint(expr= m.x93", "0) m.c1404 = Constraint(expr= m.b597 + m.b600 - m.b603 >=", "0) m.c216 = Constraint(expr= m.x63 - m.x315 - m.x321 ==", "<= 0) m.c1023 = Constraint(expr= m.b605 - m.b607 <= 0)", "m.c1147 = Constraint(expr= m.b707 + m.b708 <= 1) m.c1148 =", "m.x813 = Var(within=Reals,bounds=(None,None),initialize=0) m.x814 = 
Var(within=Reals,bounds=(None,None),initialize=0) m.x815 = Var(within=Reals,bounds=(None,None),initialize=0) m.x816", "m.x233 == 0) m.c93 = Constraint(expr= m.x15 - m.x231 -", "Constraint(expr= m.b662 - m.b663 <= 0) m.c1080 = Constraint(expr= m.b662", "0) m.c1307 = Constraint(expr= m.b620 - m.b710 <= 0) m.c1308", "m.b599 - m.b608 >= 0) m.c1410 = Constraint(expr= m.b597 +", "<= 0.940066550763924) m.c775 = Constraint(expr= m.x511 + 0.940066550763924*m.b670 <= 0.940066550763924)", "0) m.c465 = Constraint(expr=(m.x435/(0.001 + 0.999*m.b639) - 0.65*log(1 + m.x381/(0.001", "0) m.c866 = Constraint(expr= m.x581 + 0.690184503917672*m.b677 <= 0.690184503917672) m.c867", "Constraint(expr= m.x135 - m.x138 == 0) m.c34 = Constraint(expr= m.x136", "m.x302 - m.x305 == 0) m.c321 = Constraint(expr= m.x57 -", "m.c274 = Constraint(expr= m.x271 - 2.54515263975353*m.b619 <= 0) m.c275 =", "- m.b673 >= 0) m.c1475 = Constraint(expr= m.b662 - m.b674", "m.b602 - m.b692 <= 0) m.c1290 = Constraint(expr= - m.b602", "== 0) m.c379 = Constraint(expr= m.x421 == 0) m.c380 =", "- 0.705049913072943*m.b671 <= 0) m.c804 = Constraint(expr= m.x543 - 0.705049913072943*m.b672", "m.x398 - m.x401 == 0) m.c567 = Constraint(expr= m.x102 -", "m.x74 = Var(within=Reals,bounds=(0,None),initialize=0) m.x75 = Var(within=Reals,bounds=(0,None),initialize=0) m.x76 = Var(within=Reals,bounds=(0,None),initialize=0) m.x77", "= Constraint(expr= m.b765 + m.b766 <= 1) m.c1265 = Constraint(expr=", "Var(within=Binary,bounds=(0,1),initialize=0) m.b629 = Var(within=Binary,bounds=(0,1),initialize=0) m.b630 = Var(within=Binary,bounds=(0,1),initialize=0) m.b631 = Var(within=Binary,bounds=(0,1),initialize=0)", "- m.b682 <= 0) m.c1099 = Constraint(expr= m.b681 - m.b682", "m.b681 - m.b682 <= 0) m.c1100 = Constraint(expr= m.b683 -", "0) m.c963 = Constraint(expr= 6*m.b726 + m.x816 == 0) m.c964", "m.x362 = Var(within=Reals,bounds=(0,None),initialize=0) m.x363 = Var(within=Reals,bounds=(0,None),initialize=0) m.x364 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x365", "== 0) m.c168 = Constraint(expr= m.x252 == 0) m.c169 =", "m.c1122 = Constraint(expr= m.b695 + m.b697 <= 1) m.c1123 =", "== 0) m.c932 = Constraint(expr= 10*m.b695 + m.x785 == 0)", "Constraint(expr= - 0.6*m.x560 + m.x590 == 0) m.c897 = Constraint(expr=", "m.b608 - m.b626 >= 0) m.c1428 = Constraint(expr= m.b609 -", "Constraint(expr= - m.x374 + m.x416 == 0) m.c369 = Constraint(expr=", "0) m.c17 = Constraint(expr= m.x44 - m.x53 - m.x56 -", "= Constraint(expr= m.x394 - 9*m.b646 <= 0) m.c548 = Constraint(expr=", "<= 0) m.c1095 = Constraint(expr= m.b677 - m.b679 <= 0)", "40*m.b599 <= 40) m.c99 = Constraint(expr= m.x222 + 40*m.b600 <=", "= Constraint(expr= 8*m.b757 + m.x847 == 0) m.c995 = Constraint(expr=", "15*m.b685 <= 15) m.c917 = Constraint(expr= m.x590 - 9*m.b683 <=", "1) m.c1264 = Constraint(expr= m.b765 + m.b766 <= 1) m.c1265", "Var(within=Reals,bounds=(0,None),initialize=0) m.x324 = Var(within=Reals,bounds=(0,None),initialize=0) m.x325 = Var(within=Reals,bounds=(0,None),initialize=0) m.x326 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x454 = Var(within=Reals,bounds=(0,None),initialize=0) m.x455 = Var(within=Reals,bounds=(0,None),initialize=0) m.x456 = Var(within=Reals,bounds=(0,None),initialize=0) m.x457", "m.b667 = Var(within=Binary,bounds=(0,1),initialize=0) m.b668 = Var(within=Binary,bounds=(0,1),initialize=0) m.b669 = Var(within=Binary,bounds=(0,1),initialize=0) m.b670", "Constraint(expr= m.b602 - m.b611 >= 0) m.c1413 = Constraint(expr= m.b603", "m.c1110 = Constraint(expr= m.b689 + m.b691 <= 1) m.c1111 =", "+ m.b631 >= 0) m.c1382 = Constraint(expr= - m.b614 +", "== 0) m.c182 = Constraint(expr= m.x44 - m.x278 - m.x281", "Var(within=Reals,bounds=(0,None),initialize=0) m.x276 = Var(within=Reals,bounds=(0,None),initialize=0) m.x277 = Var(within=Reals,bounds=(0,None),initialize=0) m.x278 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= m.x265 + 3.34221486003388*m.b604 <= 
3.34221486003388) m.c134 = Constraint(expr=(m.x266/(0.001", "= Constraint(expr= m.x79 - m.x103 - m.x106 - m.x109 ==", "- m.b597 <= 0) m.c1014 = Constraint(expr= m.b596 - m.b598", "m.c296 = Constraint(expr= m.x71 - m.x344 - m.x347 == 0)", "m.b723 = Var(within=Binary,bounds=(0,1),initialize=0) m.b724 = Var(within=Binary,bounds=(0,1),initialize=0) m.b725 = Var(within=Binary,bounds=(0,1),initialize=0) m.b726", "0) m.c1287 = Constraint(expr= - m.b599 + m.b600 - m.b690", "Constraint(expr= m.b603 - m.b615 >= 0) m.c1417 = Constraint(expr= m.b604", "Var(within=Reals,bounds=(0,None),initialize=0) m.x69 = Var(within=Reals,bounds=(0,None),initialize=0) m.x70 = Var(within=Reals,bounds=(0,None),initialize=0) m.x71 = Var(within=Reals,bounds=(0,None),initialize=0)", "+ m.b694 <= 1) m.c1117 = Constraint(expr= m.b692 + m.b693", "m.b698 = Var(within=Binary,bounds=(0,1),initialize=0) m.b699 = Var(within=Binary,bounds=(0,1),initialize=0) m.b700 = Var(within=Binary,bounds=(0,1),initialize=0) m.b701", "Constraint(expr= - m.b677 + m.b678 - m.b768 <= 0) m.c1366", "Constraint(expr= - m.b605 - m.b606 + m.b607 - m.b697 <=", "- m.x58 - m.x61 == 0) m.c20 = Constraint(expr= m.x68", "+ 0.999*m.b611) - 1.25*log(1 + m.x284/(0.001 + 0.999*m.b611)))*(0.001 + 0.999*", "15) m.c889 = Constraint(expr= m.x559 + 15*m.b682 <= 15) m.c890", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b687 = Var(within=Binary,bounds=(0,1),initialize=0) m.b688 = Var(within=Binary,bounds=(0,1),initialize=0) m.b689 =", "- m.x395 == 0) m.c540 = Constraint(expr= m.x99 - m.x393", "m.c939 = Constraint(expr= 7*m.b702 + m.x792 == 0) m.c940 =", "- m.b692 <= 0) m.c1290 = Constraint(expr= - m.b602 +", "m.c738 = Constraint(expr= m.x504 + 0.940066550763924*m.b666 <= 0.940066550763924) m.c739 =", "= Constraint(expr= m.b623 - m.b713 <= 0) m.c1311 = Constraint(expr=", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b666 = Var(within=Binary,bounds=(0,1),initialize=0) m.b667 = Var(within=Binary,bounds=(0,1),initialize=0) m.b668 =", "= 
Constraint(expr= m.x531 + 0.994083415506506*m.b666 <= 0.994083415506506) m.c745 = Constraint(expr=", "m.c1448 = Constraint(expr= m.b626 - m.b647 >= 0) m.c1449 =", "+ m.b650 + m.b653 >= 0) m.c1401 = Constraint(expr= -", "Constraint(expr= m.x176 - m.x527 - m.x533 == 0) m.c852 =", "+ m.b687 <= 1) m.c1104 = Constraint(expr= m.b686 + m.b688", "= Constraint(expr= - m.b641 - m.b642 + m.b643 - m.b733", "+ m.b694 <= 1) m.c1121 = Constraint(expr= m.b695 + m.b696", "<= 0) m.c273 = Constraint(expr= m.x270 - 2.54515263975353*m.b618 <= 0)", "Constraint(expr= m.x497 == 0) m.c696 = Constraint(expr= m.x498 == 0)", "0) m.c1396 = Constraint(expr= - m.b622 + m.b640 >= 0)", "0) m.c488 = Constraint(expr= m.x371 + 1.26558121681553*m.b638 <= 1.26558121681553) m.c489", "m.b729 <= 0) m.c1327 = Constraint(expr= - m.b638 - m.b639", "0) m.c1340 = Constraint(expr= m.b653 - m.b743 <= 0) m.c1341", "= Constraint(expr=(m.x430/(0.001 + 0.999*m.b637) - 0.7*log(1 + m.x364/(0.001 + 0.999*m.b637)))*(0.001", "Var(within=Reals,bounds=(0,None),initialize=0) m.x251 = Var(within=Reals,bounds=(0,None),initialize=0) m.x252 = Var(within=Reals,bounds=(0,None),initialize=0) m.x253 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c348 = Constraint(expr= m.x60 - m.x309 - m.x312 == 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x313 = Var(within=Reals,bounds=(0,None),initialize=0) m.x314 = Var(within=Reals,bounds=(0,None),initialize=0) m.x315 =", "= Constraint(expr= m.b710 + m.b711 <= 1) m.c1152 = Constraint(expr=", "0) m.c1347 = Constraint(expr= - m.b659 + m.b660 - m.b750", "= Constraint(expr= 5*m.b686 + m.x776 == 0) m.c924 = Constraint(expr=", "0) m.c699 = Constraint(expr= m.x522 == 0) m.c700 = Constraint(expr=", "<= 0) m.c1069 = Constraint(expr= m.b651 - m.b652 <= 0)", "m.x394 + m.x448 == 0) m.c533 = Constraint(expr= m.x395 ==", "<= 0) m.c1315 = Constraint(expr= - m.b626 - m.b627 +", "- m.b625 + m.b643 + m.b646 >= 0) m.c1400 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x126 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x127 = Var(within=Reals,bounds=(0,None),initialize=0) m.x128 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x329 = Var(within=Reals,bounds=(0,None),initialize=0) m.x330 = Var(within=Reals,bounds=(0,None),initialize=0) m.x331 =", "m.x476/(0.001 + 0.999*m.b659)))*(0.001 + 0.999* m.b659) <= 0) m.c666 =", "- m.x545 == 0) m.c798 = Constraint(expr= m.x183 - m.x543", "m.x490 - m.x493 == 0) m.c680 = Constraint(expr= m.x476 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x536 = Var(within=Reals,bounds=(0,None),initialize=0) m.x537 = Var(within=Reals,bounds=(0,None),initialize=0) m.x538 = Var(within=Reals,bounds=(0,None),initialize=0)", "- m.x520 - m.x523 == 0) m.c707 = Constraint(expr= m.x494", "1) m.c1135 = Constraint(expr= m.b701 + m.b702 <= 1) m.c1136", "m.b757 = Var(within=Binary,bounds=(0,1),initialize=0) m.b758 = Var(within=Binary,bounds=(0,1),initialize=0) m.b759 = Var(within=Binary,bounds=(0,1),initialize=0) m.b760", "m.x147 - m.x483 - m.x486 == 0) m.c652 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x589 = Var(within=Reals,bounds=(0,None),initialize=0) m.x590 = Var(within=Reals,bounds=(0,None),initialize=0) m.x591 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c60 = Constraint(expr= m.x228 == 0) m.c61 = Constraint(expr= m.x229", ">= 0) m.c1390 = Constraint(expr= - m.b619 + m.b637 +", "= Constraint(expr= m.x199 - m.x568 - m.x571 == 0) m.c803", "0) m.c1339 = Constraint(expr= - m.b650 - m.b651 + m.b652", "Var(within=Reals,bounds=(0,None),initialize=0) m.x372 = Var(within=Reals,bounds=(0,None),initialize=0) m.x373 = Var(within=Reals,bounds=(0,None),initialize=0) m.x374 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c73 = Constraint(expr= m.x217 + 40*m.b598 <= 40) m.c74 =", "<= 0) m.c1035 = Constraint(expr= m.b617 - m.b619 <= 0)", "m.c520 = Constraint(expr= m.x388 - 9*m.b643 <= 0) m.c521 =", "- m.b683 >= 0) m.c1485 = Constraint(expr= m.b669 - m.b684", "0) m.c435 = 
Constraint(expr=(m.x429/(0.001 + 0.999*m.b636) - 0.7*log(1 + m.x363/(0.001", "+ 1.32154609891348*m.b632 <= 1.32154609891348) m.c426 = Constraint(expr= m.x336 + 1.32154609891348*m.b633", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b621 = Var(within=Binary,bounds=(0,1),initialize=0) m.b622 = Var(within=Binary,bounds=(0,1),initialize=0) m.b623 =", "- m.x566 - m.x569 == 0) m.c801 = Constraint(expr= m.x198", "= Constraint(expr= m.b740 + m.b742 <= 1) m.c1216 = Constraint(expr=", "0) m.c95 = Constraint(expr= m.x218 - 40*m.b599 <= 0) m.c96", "m.c465 = Constraint(expr=(m.x435/(0.001 + 0.999*m.b639) - 0.65*log(1 + m.x381/(0.001 +", "Var(within=Reals,bounds=(0,None),initialize=0) m.x217 = Var(within=Reals,bounds=(0,None),initialize=0) m.x218 = Var(within=Reals,bounds=(0,None),initialize=0) m.x219 = Var(within=Reals,bounds=(0,None),initialize=0)", "2.54515263975353*m.b618 <= 0) m.c274 = Constraint(expr= m.x271 - 2.54515263975353*m.b619 <=", "- m.x441 - m.x444 == 0) m.c517 = Constraint(expr= m.x124", "== 0) m.c184 = Constraint(expr= m.x46 - m.x280 - m.x283", "== 0) m.c846 = Constraint(expr= m.x534 == 0) m.c847 =", "<= 0) m.c280 = Constraint(expr= m.x340 - 1.26558121681553*m.b619 <= 0)", "= Constraint(expr= m.b674 - m.b764 <= 0) m.c1362 = Constraint(expr=", "0) m.c258 = Constraint(expr=(m.x339/(0.001 + 0.999*m.b618) - log(1 + m.x270/(0.001", "= Constraint(expr= m.x203 - m.x578 - m.x581 == 0) m.c855", "Constraint(expr= m.x38 - m.x47 - m.x50 == 0) m.c15 =", "Constraint(expr= m.x426 + 0.842233385663186*m.b633 <= 0.842233385663186) m.c433 = Constraint(expr= m.x427", "m.b664 <= 0) m.c1081 = Constraint(expr= m.b663 - m.b664 <=", ">= 0) m.c1453 = Constraint(expr= m.b628 - m.b652 >= 0)", "== 0) m.c670 = Constraint(expr= m.x481 == 0) m.c671 =", "m.b771 <= 0) m.c1369 = Constraint(expr= - m.b680 - m.b681", "m.c374 = Constraint(expr= m.x377 == 0) m.c375 = Constraint(expr= m.x378", "m.c1031 = Constraint(expr= m.b614 - m.b615 <= 0) m.c1032 =", "m.x289 + 3.34221486003388*m.b613 <= 
3.34221486003388) m.c224 = Constraint(expr= m.x314 -", "= Constraint(expr= m.x288 == 0) m.c208 = Constraint(expr= m.x289 ==", "3.34221486003388*m.b603 <= 0) m.c130 = Constraint(expr= m.x262 - 3.34221486003388*m.b604 <=", ">= 0) m.c1409 = Constraint(expr= m.b596 + m.b599 - m.b608", "0) m.c415 = Constraint(expr= m.x427 == 0) m.c416 = Constraint(expr=", "= Constraint(expr= m.x540 + 15*m.b669 <= 15) m.c787 = Constraint(expr=", "= Constraint(expr= m.x527 - 0.994083415506506*m.b677 <= 0) m.c858 = Constraint(expr=", "m.x46 - m.x55 - m.x58 - m.x61 == 0) m.c20", "m.c522 = Constraint(expr= m.x390 + 9*m.b642 <= 9) m.c523 =", "- log(1 + m.x212/(0.001 + 0.999*m.b596)))*(0.001 + 0.999*m.b596) <= 0)", "0.999*m.b667)))*(0.001 + 0.999* m.b667) <= 0) m.c722 = Constraint(expr= m.x503", "Var(within=Binary,bounds=(0,1),initialize=0) m.b618 = Var(within=Binary,bounds=(0,1),initialize=0) m.b619 = Var(within=Binary,bounds=(0,1),initialize=0) m.b620 = Var(within=Binary,bounds=(0,1),initialize=0)", "0) m.c543 = Constraint(expr= m.x126 - m.x447 - m.x450 ==", "Var(within=Reals,bounds=(0,None),initialize=0) m.x286 = Var(within=Reals,bounds=(0,None),initialize=0) m.x287 = Var(within=Reals,bounds=(0,None),initialize=0) m.x288 = Var(within=Reals,bounds=(0,None),initialize=0)", "Var(within=Binary,bounds=(0,1),initialize=0) m.b619 = Var(within=Binary,bounds=(0,1),initialize=0) m.b620 = Var(within=Binary,bounds=(0,1),initialize=0) m.b621 = Var(within=Binary,bounds=(0,1),initialize=0)", "= Constraint(expr=(m.x327/(0.001 + 0.999*m.b615) - 0.9*log(1 + m.x291/(0.001 + 0.999*m.b615)))*(0.001", "m.c96 = Constraint(expr= m.x219 - 40*m.b600 <= 0) m.c97 =", "m.x475 + 1.18887736200171*m.b658 <= 1.18887736200171) m.c659 = Constraint(expr= m.x482 -", "m.b738 <= 1) m.c1208 = Constraint(expr= m.b738 + m.b739 <=", "m.x264 == 0) m.c115 = Constraint(expr= m.x265 == 0) m.c116", "m.c313 = Constraint(expr= - 0.6*m.x304 + m.x352 == 0) m.c314", "m.b773 = Var(within=Binary,bounds=(0,1),initialize=0) m.b774 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b775 = Var(within=Binary,bounds=(0,1),initialize=0) m.x776", "m.c59 = Constraint(expr= m.x227 == 0) m.c60 = Constraint(expr= m.x228", "== 0) m.c911 = Constraint(expr= m.x560 - 15*m.b683 <= 0)", "<= 15) m.c357 = Constraint(expr= m.x312 + 15*m.b627 <= 15)", "m.x820 = Var(within=Reals,bounds=(None,None),initialize=0) m.x821 = Var(within=Reals,bounds=(None,None),initialize=0) m.x822 = Var(within=Reals,bounds=(None,None),initialize=0) m.x823", "1.26558121681553*m.b619 <= 1.26558121681553) m.c284 = Constraint(expr= - 0.9*m.x296 + m.x344", "m.c1211 = Constraint(expr= m.b740 + m.b741 <= 1) m.c1212 =", "m.b609 + m.b621 + m.b624 + m.b627 >= 0) m.c1393", "m.x59 = Var(within=Reals,bounds=(0,None),initialize=0) m.x60 = Var(within=Reals,bounds=(0,None),initialize=0) m.x61 = Var(within=Reals,bounds=(0,None),initialize=0) m.x62", "0) m.c937 = Constraint(expr= 6*m.b700 + m.x790 == 0) m.c938", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x271 = Var(within=Reals,bounds=(0,None),initialize=0) m.x272 = Var(within=Reals,bounds=(0,None),initialize=0) m.x273 =", "m.c416 = Constraint(expr= m.x65 - m.x329 - m.x335 == 0)", "- m.x373 == 0) m.c479 = Constraint(expr= m.x92 - m.x380", "Constraint(expr= - m.b653 + m.b654 - m.b744 <= 0) m.c1342", "and 1 equation from pyomo.environ import * model = m", "m.x418 - m.x421 == 0) m.c389 = Constraint(expr= m.x317 -", ">= 0) m.c1427 = Constraint(expr= m.b608 - m.b626 >= 0)", "- m.b633 >= 0) m.c1435 = Constraint(expr= m.b616 - m.b634", "Constraint(expr= m.x494 - 0.940066550763924*m.b662 <= 0) m.c708 = Constraint(expr= m.x495", "Constraint(expr= m.b677 - m.b679 <= 0) m.c1096 = Constraint(expr= m.b678", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x196 = Var(within=Reals,bounds=(0,None),initialize=0) m.x197 = Var(within=Reals,bounds=(0,None),initialize=0) m.x198 =", "m.b648 - m.b649 <= 0) m.c1067 = Constraint(expr= m.b650 -", "Constraint(expr= m.x518 - 0.705049913072943*m.b662 <= 0) m.c714 = 
Constraint(expr= m.x519", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x184 = Var(within=Reals,bounds=(0,None),initialize=0) m.x185 = Var(within=Reals,bounds=(0,None),initialize=0) m.x186 =", "0) m.c1066 = Constraint(expr= m.b648 - m.b649 <= 0) m.c1067", ">= 0) m.c1445 = Constraint(expr= m.b623 - m.b644 >= 0)", "m.c866 = Constraint(expr= m.x581 + 0.690184503917672*m.b677 <= 0.690184503917672) m.c867 =", "= Constraint(expr= - m.b622 + m.b640 >= 0) m.c1397 =", "= Constraint(expr= m.x179 - m.x536 - m.x539 == 0) m.c768", "m.b655 = Var(within=Binary,bounds=(0,1),initialize=0) m.b656 = Var(within=Binary,bounds=(0,1),initialize=0) m.b657 = Var(within=Binary,bounds=(0,1),initialize=0) m.b658", "= Constraint(expr= m.x233 + 4.45628648004517*m.b599 <= 4.45628648004517) m.c105 = Constraint(expr=", "m.b717 + m.b718 <= 1) m.c1167 = Constraint(expr= m.b716 +", "m.c547 = Constraint(expr= m.x394 - 9*m.b646 <= 0) m.c548 =", "- m.b613 >= 0) m.c1415 = Constraint(expr= m.b602 - m.b614", "<= 0) m.c660 = Constraint(expr= m.x483 - 0.78338879230327*m.b657 <= 0)", "Constraint(expr= - m.b603 + m.b612 + m.b615 >= 0) m.c1378", "m.x94 = Var(within=Reals,bounds=(0,None),initialize=0) m.x95 = Var(within=Reals,bounds=(0,None),initialize=0) m.x96 = Var(within=Reals,bounds=(0,None),initialize=0) m.x97", "<= 1) m.c1195 = Constraint(expr= m.b731 + m.b732 <= 1)", "m.x589 = Var(within=Reals,bounds=(0,None),initialize=0) m.x590 = Var(within=Reals,bounds=(0,None),initialize=0) m.x591 = Var(within=Reals,bounds=(0,None),initialize=0) m.x592", "m.b661 <= 0) m.c1078 = Constraint(expr= m.b660 - m.b661 <=", "m.b704 + m.b706 <= 1) m.c1144 = Constraint(expr= m.b705 +", "m.b597 + m.b600 - m.b606 >= 0) m.c1408 = Constraint(expr=", "Constraint(expr= - m.b647 + m.b648 - m.b738 <= 0) m.c1336", "- m.x55 - m.x58 - m.x61 == 0) m.c20 =", "m.x121 - m.x436 - m.x439 == 0) m.c485 = Constraint(expr=", "= Constraint(expr= 7*m.b719 + m.x809 == 0) m.c957 = Constraint(expr=", "m.b604 <= 0) m.c1022 = Constraint(expr= m.b605 
- m.b606 <=", "= Constraint(expr= m.x334 + 1.32154609891348*m.b616 <= 1.32154609891348) m.c257 = Constraint(expr=(m.x338/(0.001", "20) m.c407 = Constraint(expr=(m.x422/(0.001 + 0.999*m.b632) - log(1 + m.x329/(0.001", "= Constraint(expr= m.x401 == 0) m.c561 = Constraint(expr= m.x402 ==", "m.x405 - 3.04984759446376*m.b651 <= 0) m.c601 = Constraint(expr= m.x406 -", "0) m.c701 = Constraint(expr= m.x161 - m.x494 - m.x497 ==", "m.b601 = Var(within=Binary,bounds=(0,1),initialize=0) m.b602 = Var(within=Binary,bounds=(0,1),initialize=0) m.b603 = Var(within=Binary,bounds=(0,1),initialize=0) m.b604", "m.x61 = Var(within=Reals,bounds=(0,None),initialize=0) m.x62 = Var(within=Reals,bounds=(0,None),initialize=0) m.x63 = Var(within=Reals,bounds=(0,None),initialize=0) m.x64", "+ 0.940066550763924*m.b661 <= 0.940066550763924) m.c692 = Constraint(expr= - 0.75*m.x494 +", "+ 0.999*m.b596)))*(0.001 + 0.999*m.b596) <= 0) m.c54 = Constraint(expr=(m.x225/(0.001 +", "= Constraint(expr= m.x519 - 0.705049913072943*m.b663 <= 0) m.c715 = Constraint(expr=", "m.b698 <= 0) m.c1296 = Constraint(expr= - m.b608 + m.b609", "m.x45 - m.x54 - m.x57 - m.x60 == 0) m.c19", "== 0) m.c624 = Constraint(expr= m.x135 - m.x465 - m.x468", "<= 1) m.c1256 = Constraint(expr= m.b762 + m.b763 <= 1)", "- m.b730 - 2*m.b731 - 5*m.b732 - 2*m.b733 - 3*m.b734", "- 7*m.b709 - 2*m.b710 - 5*m.b711 - 2*m.b712 - 4*m.b713", "= Constraint(expr= m.x121 - m.x436 - m.x439 == 0) m.c485", "m.b718 <= 1) m.c1167 = Constraint(expr= m.b716 + m.b718 <=", "- 0.75*m.x238 + m.x262 == 0) m.c110 = Constraint(expr= m.x239", "Constraint(expr= m.b723 + m.x813 == 0) m.c961 = Constraint(expr= 9*m.b724", "m.b757 <= 1) m.c1247 = Constraint(expr= m.b758 + m.b759 <=", "+ m.b769 <= 1) m.c1270 = Constraint(expr= m.b768 + m.b769", "m.x489 = Var(within=Reals,bounds=(0,None),initialize=0) m.x490 = Var(within=Reals,bounds=(0,None),initialize=0) m.x491 = Var(within=Reals,bounds=(0,None),initialize=0) m.x492", "m.x102 - m.x399 - m.x402 == 0) m.c568 = 
Constraint(expr=", "m.x852 == 0) m.c1000 = Constraint(expr= 7*m.b763 + m.x853 ==", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x814 = Var(within=Reals,bounds=(None,None),initialize=0) m.x815 = Var(within=Reals,bounds=(None,None),initialize=0) m.x816 =", "+ 15*m.b680 <= 15) m.c888 = Constraint(expr= m.x558 + 15*m.b681", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x438 = Var(within=Reals,bounds=(0,None),initialize=0) m.x439 = Var(within=Reals,bounds=(0,None),initialize=0) m.x440 =", "m.x243 - m.x246 == 0) m.c145 = Constraint(expr= m.x31 -", "<= 0) m.c403 = Constraint(expr= m.x418 - 20*m.b631 <= 0)", "Constraint(expr= - m.b671 + m.b672 - m.b762 <= 0) m.c1360", "Constraint(expr= m.x418 - 20*m.b631 <= 0) m.c404 = Constraint(expr= m.x419", "m.c328 = Constraint(expr= m.x304 - 15*m.b625 <= 0) m.c329 =", "m.c1169 = Constraint(expr= m.b719 + m.b720 <= 1) m.c1170 =", "1.83548069293539*m.b612 <= 0) m.c226 = Constraint(expr= m.x316 - 1.83548069293539*m.b613 <=", "2.54515263975353*m.b606 <= 2.54515263975353) m.c160 = Constraint(expr= m.x274 + 2.54515263975353*m.b607 <=", "m.b666 + m.b678 >= 0) m.c1465 = Constraint(expr= - m.b667", "m.c110 = Constraint(expr= m.x239 == 0) m.c111 = Constraint(expr= m.x240", "m.c865 = Constraint(expr= m.x580 - 0.690184503917672*m.b679 <= 0) m.c866 =", "Constraint(expr= m.x433 + 0.572481933717686*m.b637 <= 0.572481933717686) m.c461 = Constraint(expr=(m.x434/(0.001 +", "= Constraint(expr= 3*m.b716 + m.x806 == 0) m.c954 = Constraint(expr=", "m.x213 = Var(within=Reals,bounds=(0,None),initialize=0) m.x214 = Var(within=Reals,bounds=(0,None),initialize=0) m.x215 = Var(within=Reals,bounds=(0,None),initialize=0) m.x216", "Var(within=Reals,bounds=(0,None),initialize=0) m.x73 = Var(within=Reals,bounds=(0,None),initialize=0) m.x74 = Var(within=Reals,bounds=(0,None),initialize=0) m.x75 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b710 + m.b711 <= 1) m.c1154 = Constraint(expr= m.b711 +", "= 
Var(within=Reals,bounds=(0,None),initialize=0) m.x547 = Var(within=Reals,bounds=(0,None),initialize=0) m.x548 = Var(within=Reals,bounds=(0,None),initialize=0) m.x549 =", "m.x329 - 1.32154609891348*m.b632 <= 0) m.c423 = Constraint(expr= m.x330 -", "m.x535 == 0) m.c854 = Constraint(expr= m.x203 - m.x578 -", "+ m.b635 + m.b638 >= 0) m.c1389 = Constraint(expr= -", "1) m.c1169 = Constraint(expr= m.b719 + m.b720 <= 1) m.c1170", "- m.x492 == 0) m.c679 = Constraint(expr= m.x151 - m.x490", "0.999*m.b596) - log(1 + m.x212/(0.001 + 0.999*m.b596)))*(0.001 + 0.999*m.b596) <=", "m.x221 == 0) m.c90 = Constraint(expr= m.x9 - m.x219 -", "m.x278 = Var(within=Reals,bounds=(0,None),initialize=0) m.x279 = Var(within=Reals,bounds=(0,None),initialize=0) m.x280 = Var(within=Reals,bounds=(0,None),initialize=0) m.x281", "9*m.b642 <= 9) m.c523 = Constraint(expr= m.x391 + 9*m.b643 <=", "== 0) m.c828 = Constraint(expr= m.x201 - m.x573 - m.x576", "m.c721 = Constraint(expr=(m.x526/(0.001 + 0.999*m.b667) - 1.5*log(1 + m.x502/(0.001 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x578 = Var(within=Reals,bounds=(0,None),initialize=0) m.x579 = Var(within=Reals,bounds=(0,None),initialize=0) m.x580 =", "- m.x297 - m.x300 == 0) m.c295 = Constraint(expr= m.x55", "Constraint(expr= - m.b635 - m.b636 + m.b637 - m.b727 <=", "m.x454 - m.x457 == 0) m.c572 = Constraint(expr= m.x398 -", "0.999*m.b637) - 0.7*log(1 + m.x364/(0.001 + 0.999*m.b637)))*(0.001 + 0.999* m.b637)", "= Constraint(expr= m.b759 + m.b760 <= 1) m.c1251 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x106 = Var(within=Reals,bounds=(0,None),initialize=0) m.x107 = Var(within=Reals,bounds=(0,None),initialize=0) m.x108 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c889 = Constraint(expr= m.x559 + 15*m.b682 <= 15) m.c890 =", "- m.x468 == 0) m.c625 = Constraint(expr= m.x136 - m.x466", "1) m.c1128 = Constraint(expr= m.b698 + m.b700 <= 1) m.c1129", "Constraint(expr= m.b726 + m.b727 <= 1) m.c1187 = 
Constraint(expr= m.b728", "m.x310 = Var(within=Reals,bounds=(0,None),initialize=0) m.x311 = Var(within=Reals,bounds=(0,None),initialize=0) m.x312 = Var(within=Reals,bounds=(0,None),initialize=0) m.x313", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x434 = Var(within=Reals,bounds=(0,None),initialize=0) m.x435 = Var(within=Reals,bounds=(0,None),initialize=0) m.x436 =", "m.x91 = Var(within=Reals,bounds=(0,20),initialize=0) m.x92 = Var(within=Reals,bounds=(0,None),initialize=0) m.x93 = Var(within=Reals,bounds=(0,None),initialize=0) m.x94", "+ m.x369/(0.001 + 0.999*m.b639)))*(0.001 + 0.999* m.b639) <= 0) m.c463", "m.x490 - 0.940066550763924*m.b661 <= 0) m.c689 = Constraint(expr= m.x491 +", "m.c1207 = Constraint(expr= m.b737 + m.b738 <= 1) m.c1208 =", "1.25*log(1 + m.x543/(0.001 + 0.999*m.b672)))*(0.001 + 0.999* m.b672) <= 0)", "m.x838 == 0) m.c986 = Constraint(expr= 9*m.b749 + m.x839 ==", "m.c473 = Constraint(expr= m.x437 == 0) m.c474 = Constraint(expr= m.x438", "= Constraint(expr= m.x28 - m.x238 - m.x241 == 0) m.c119", "- m.x446 - m.x449 == 0) m.c543 = Constraint(expr= m.x126", "Var(within=Reals,bounds=(0,None),initialize=0) m.x399 = Var(within=Reals,bounds=(0,None),initialize=0) m.x400 = Var(within=Reals,bounds=(0,None),initialize=0) m.x401 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= m.x78 - m.x357 - m.x360 == 0) m.c352", "m.c1481 = Constraint(expr= m.b668 - m.b680 >= 0) m.c1482 =", "+ 0.999*m.b665) - 1.5*log(1 + m.x500/(0.001 + 0.999*m.b665)))*(0.001 + 0.999*", "Var(within=Reals,bounds=(None,None),initialize=0) m.x833 = Var(within=Reals,bounds=(None,None),initialize=0) m.x834 = Var(within=Reals,bounds=(None,None),initialize=0) m.x835 = Var(within=Reals,bounds=(None,None),initialize=0)", "0.999*m.b638)))*(0.001 + 0.999* m.b638) <= 0) m.c465 = Constraint(expr=(m.x435/(0.001 +", "0.999*m.b678) <= 0) m.c844 = Constraint(expr=(m.x580/(0.001 + 0.999*m.b679) - log(1", "- m.x570 == 0) m.c802 = Constraint(expr= m.x199 - m.x568", "<= 0) 
m.c1348 = Constraint(expr= - m.b659 - m.b660 +", "m.c294 = Constraint(expr= m.x54 - m.x297 - m.x300 == 0)", "m.b727 <= 1) m.c1187 = Constraint(expr= m.b728 + m.b729 <=", "Constraint(expr= m.x461 == 0) m.c591 = Constraint(expr= m.x462 == 0)", "m.b653 - m.b656 >= 0) m.c1467 = Constraint(expr= m.b654 -", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b698 = Var(within=Binary,bounds=(0,1),initialize=0) m.b699 = Var(within=Binary,bounds=(0,1),initialize=0) m.b700 =", "<= 0) m.c1317 = Constraint(expr= - m.b629 + m.b630 -", "m.x513 - 30*m.b669 <= 0) m.c778 = Constraint(expr= m.x514 -", "m.c216 = Constraint(expr= m.x63 - m.x315 - m.x321 == 0)", "== 0) m.c992 = Constraint(expr= 2*m.b755 + m.x845 == 0)", "0) m.c221 = Constraint(expr= m.x287 + 3.34221486003388*m.b611 <= 3.34221486003388) m.c222", "0) m.c675 = Constraint(expr= m.x144 - m.x477 - m.x480 ==", "m.b747 <= 1) m.c1224 = Constraint(expr= m.b746 + m.b748 <=", "<= 1) m.c1265 = Constraint(expr= m.b767 + m.b768 <= 1)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x483 = Var(within=Reals,bounds=(0,None),initialize=0) m.x484 = Var(within=Reals,bounds=(0,None),initialize=0) m.x485 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.b734 + m.b736 <= 1) m.c1201 = Constraint(expr= m.b734", "Constraint(expr= - m.b620 + m.b621 - m.b711 <= 0) m.c1309", "Var(within=Reals,bounds=(0,None),initialize=0) m.x294 = Var(within=Reals,bounds=(0,None),initialize=0) m.x295 = Var(within=Reals,bounds=(0,None),initialize=0) m.x296 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c240 = Constraint(expr= m.x51 - m.x291 - m.x294 ==", "Constraint(expr= m.x585 - 13.5*m.b681 <= 0) m.c892 = Constraint(expr= m.x586", "== 0) m.c964 = Constraint(expr= 3*m.b727 + m.x817 == 0)", "- 0.6*m.x302 + m.x350 == 0) m.c312 = Constraint(expr= -", "= Constraint(expr= - m.b617 + m.b618 - m.b708 <= 0)", "15) m.c203 = Constraint(expr=(m.x314/(0.001 + 0.999*m.b611) - 1.25*log(1 + m.x284/(0.001", "m.b602 >= 0) m.c1404 = Constraint(expr= 
m.b597 + m.b600 -", "300*m.x207 + 240*m.x208 + 350*m.x209 + 250*m.x210 + 300*m.x211 -", "== 0) m.c960 = Constraint(expr= m.b723 + m.x813 == 0)", "= Constraint(expr= - m.b599 + m.b600 - m.b690 <= 0)", "m.x271/(0.001 + 0.999*m.b619)))*(0.001 + 0.999*m.b619) <= 0) m.c260 = Constraint(expr=", "m.b643 = Var(within=Binary,bounds=(0,1),initialize=0) m.b644 = Var(within=Binary,bounds=(0,1),initialize=0) m.b645 = Var(within=Binary,bounds=(0,1),initialize=0) m.b646", "m.c355 = Constraint(expr= m.x310 - 15*m.b628 <= 0) m.c356 =", "<= 1) m.c1200 = Constraint(expr= m.b734 + m.b736 <= 1)", "0) m.c708 = Constraint(expr= m.x495 - 0.940066550763924*m.b663 <= 0) m.c709", "- m.b755 <= 0) m.c1353 = Constraint(expr= - m.b665 +", "Constraint(expr= m.x263 + 3.34221486003388*m.b602 <= 3.34221486003388) m.c132 = Constraint(expr= m.x264", "m.b627) <= 0) m.c340 = Constraint(expr=(m.x358/(0.001 + 0.999*m.b628) - 1.1*log(1", "<= 0) m.c158 = Constraint(expr= m.x272 + 2.54515263975353*m.b605 <= 2.54515263975353)", "- m.b640 <= 0) m.c1058 = Constraint(expr= m.b641 - m.b642", "= Constraint(expr= m.x137 - m.x140 - m.x143 == 0) m.c36", "Constraint(expr= m.x350 - 9*m.b623 <= 0) m.c333 = Constraint(expr= m.x351", "== 0) m.c43 = Constraint(expr= m.x154 - m.x157 - m.x160", "= Constraint(expr=(m.x452/(0.001 + 0.999*m.b647) - 0.75*log(1 + m.x398/(0.001 + 0.999*m.b647)))*(0.001", "= Constraint(expr= m.x286 - 3.34221486003388*m.b613 <= 0) m.c221 = Constraint(expr=", "Constraint(expr= m.x540 == 0) m.c760 = Constraint(expr= m.x541 == 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x41 = Var(within=Reals,bounds=(0,None),initialize=0) m.x42 = Var(within=Reals,bounds=(0,None),initialize=0) m.x43 =", "m.x189 - m.x192 - m.x195 == 0) m.c52 = Constraint(expr=", "m.c482 = Constraint(expr= m.x119 - m.x434 - m.x437 == 0)", "+ m.x417 == 0) m.c367 = Constraint(expr= - 0.9*m.x319 +", "- m.x450 == 0) m.c544 = Constraint(expr= m.x127 - m.x448", "Var(within=Reals,bounds=(0,None),initialize=0) m.x167 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x168 = Var(within=Reals,bounds=(0,None),initialize=0) m.x169 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x797 = Var(within=Reals,bounds=(None,None),initialize=0) m.x798 = Var(within=Reals,bounds=(None,None),initialize=0) m.x799 = Var(within=Reals,bounds=(None,None),initialize=0) m.x800", "m.x188 - m.x191 - m.x194 == 0) m.c51 = Constraint(expr=", "<= 4.45628648004517) m.c127 = Constraint(expr= m.x241 + 4.45628648004517*m.b604 <= 4.45628648004517)", "- m.b618 <= 0) m.c1035 = Constraint(expr= m.b617 - m.b619", "0) m.c943 = Constraint(expr= 2*m.b706 + m.x796 == 0) m.c944", "== 0) m.c978 = Constraint(expr= 8*m.b741 + m.x831 == 0)", "m.x76 = Var(within=Reals,bounds=(0,None),initialize=0) m.x77 = Var(within=Reals,bounds=(0,None),initialize=0) m.x78 = Var(within=Reals,bounds=(0,None),initialize=0) m.x79", "Var(within=Reals,bounds=(0,None),initialize=0) m.x134 = Var(within=Reals,bounds=(0,None),initialize=0) m.x135 = Var(within=Reals,bounds=(0,None),initialize=0) m.x136 = Var(within=Reals,bounds=(0,None),initialize=0)", "- 2*m.b733 - 3*m.b734 - 4*m.b735 - 3*m.b736 - 5*m.b737", "+ 0.999*m.b605)))*(0.001 + 0.999* m.b605) <= 0) m.c135 = Constraint(expr=(m.x267/(0.001", "Var(within=Reals,bounds=(0,None),initialize=0) m.x393 = Var(within=Reals,bounds=(0,None),initialize=0) m.x394 = Var(within=Reals,bounds=(0,None),initialize=0) m.x395 = Var(within=Reals,bounds=(0,None),initialize=0)", "<= 0) m.c226 = Constraint(expr= m.x316 - 1.83548069293539*m.b613 <= 0)", "m.x312 + 15*m.b627 <= 15) m.c358 = Constraint(expr= m.x313 +", "+ 0.842233385663186*m.b634 <= 0.842233385663186) m.c434 = Constraint(expr=(m.x428/(0.001 + 0.999*m.b635) -", "m.x478 - 1.18887736200171*m.b661 <= 0) m.c683 = Constraint(expr= m.x479 +", "== 0) m.c651 = Constraint(expr= m.x147 - m.x483 - m.x486", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x213 = Var(within=Reals,bounds=(0,None),initialize=0) m.x214 = Var(within=Reals,bounds=(0,None),initialize=0) 
m.x215 =", "0.999*m.b679)))*(0.001 + 0.999*m.b679) <= 0) m.c845 = Constraint(expr= m.x533 ==", "<= 1) m.c1150 = Constraint(expr= m.b708 + m.b709 <= 1)", "m.x337 + 1.32154609891348*m.b634 <= 1.32154609891348) m.c428 = Constraint(expr= m.x422 -", "= Constraint(expr=(m.x453/(0.001 + 0.999*m.b648) - 0.75*log(1 + m.x399/(0.001 + 0.999*m.b648)))*(0.001", "0) m.c449 = Constraint(expr= m.x362 - 1.26558121681553*m.b635 <= 0) m.c450", "Constraint(expr= m.x438 + 2.30162356062425*m.b639 <= 2.30162356062425) m.c502 = Constraint(expr= m.x439", "m.c155 = Constraint(expr= m.x266 - 2.54515263975353*m.b605 <= 0) m.c156 =", "0) m.c346 = Constraint(expr= m.x361 == 0) m.c347 = Constraint(expr=", "Constraint(expr= m.x211 - m.x592 - m.x595 == 0) m.c911 =", "30*m.x117 + 40*m.x118 + 30*m.x119 + 20*m.x120 + 20*m.x121 +", "m.c792 = Constraint(expr= m.x546 == 0) m.c793 = Constraint(expr= m.x547", "1.32154609891348) m.c255 = Constraint(expr= m.x333 + 1.32154609891348*m.b615 <= 1.32154609891348) m.c256", "m.x462 = Var(within=Reals,bounds=(0,None),initialize=0) m.x463 = Var(within=Reals,bounds=(0,None),initialize=0) m.x464 = Var(within=Reals,bounds=(0,None),initialize=0) m.x465", "0) m.c1308 = Constraint(expr= - m.b620 + m.b621 - m.b711", "m.c1342 = Constraint(expr= - m.b653 - m.b654 + m.b655 -", "+ 9*m.b646 <= 9) m.c551 = Constraint(expr= m.x446 - 9*m.b644", "- 9*m.b751 - 5*m.b752 - 8*m.b753 - 4*m.b754 - 2*m.b755", "0) m.c97 = Constraint(expr= m.x220 - 40*m.b601 <= 0) m.c98", "0) m.c83 = Constraint(expr= m.x221 == 0) m.c84 = Constraint(expr=", "m.c1297 = Constraint(expr= - m.b608 - m.b609 + m.b610 -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x59 = Var(within=Reals,bounds=(0,None),initialize=0) m.x60 = Var(within=Reals,bounds=(0,None),initialize=0) m.x61 =", "m.c602 = Constraint(expr= m.x407 + 3.04984759446376*m.b650 <= 3.04984759446376) m.c603 =", "= Constraint(expr= m.x455 + 1.04900943706034*m.b647 <= 1.04900943706034) m.c582 = Constraint(expr=", "Constraint(expr= m.x348 == 0) 
m.c292 = Constraint(expr= m.x349 == 0)", "Constraint(expr= m.x590 - 9*m.b683 <= 0) m.c918 = Constraint(expr= m.x591", "Var(within=Binary,bounds=(0,1),initialize=0) m.b701 = Var(within=Binary,bounds=(0,1),initialize=0) m.b702 = Var(within=Binary,bounds=(0,1),initialize=0) m.b703 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.x333 + 1.32154609891348*m.b615 <= 1.32154609891348) m.c256 = Constraint(expr= m.x334 +", "m.b669 = Var(within=Binary,bounds=(0,1),initialize=0) m.b670 = Var(within=Binary,bounds=(0,1),initialize=0) m.b671 = Var(within=Binary,bounds=(0,1),initialize=0) m.b672", "+ m.x785 == 0) m.c933 = Constraint(expr= 9*m.b696 + m.x786", "Constraint(expr= m.x540 + 15*m.b669 <= 15) m.c787 = Constraint(expr= m.x541", "- m.x480 == 0) m.c676 = Constraint(expr= m.x145 - m.x478", "m.c556 = Constraint(expr= m.x451 + 9*m.b646 <= 9) m.c557 =", "= Constraint(expr= m.b653 - m.b659 >= 0) m.c1470 = Constraint(expr=", "Constraint(expr= m.b653 - m.b659 >= 0) m.c1470 = Constraint(expr= m.b654", "- 6*m.b691 - 6*m.b692 - 9*m.b693 - 4*m.b694 - 10*m.b695", "m.x370/(0.001 + 0.999*m.b640)))*(0.001 + 0.999* m.b640) <= 0) m.c464 =", "m.x372 == 0) m.c469 = Constraint(expr= m.x373 == 0) m.c470", "3.34221486003388*m.b616 <= 0) m.c248 = Constraint(expr= m.x293 + 3.34221486003388*m.b614 <=", "m.x10 == 0) m.c5 = Constraint(expr= - m.x11 - m.x14", "= Constraint(expr= m.x399 - 3.04984759446376*m.b648 <= 0) m.c574 = Constraint(expr=", "- m.x151 + m.x154 == 0) m.c41 = Constraint(expr= m.x152", "m.b597 + m.b598 - m.b688 <= 0) m.c1286 = Constraint(expr=", "6*m.b688 - 8*m.b689 - 7*m.b690 - 6*m.b691 - 6*m.b692 -", "9*m.b696 - 5*m.b697 - 6*m.b698 - 10*m.b699 - 6*m.b700 -", "= Constraint(expr= m.x423 - 0.842233385663186*m.b633 <= 0) m.c430 = Constraint(expr=", "7*m.b738 - 6*m.b739 - 2*m.b740 - 8*m.b741 - 4*m.b742 -", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b704 = Var(within=Binary,bounds=(0,1),initialize=0) m.b705 = Var(within=Binary,bounds=(0,1),initialize=0) m.b706 =", 
"3.04984759446376*m.b628 <= 3.04984759446376) m.c365 = Constraint(expr= - 0.9*m.x317 + m.x416", "Var(within=Reals,bounds=(0,None),initialize=0) m.x182 = Var(within=Reals,bounds=(0,None),initialize=0) m.x183 = Var(within=Reals,bounds=(0,None),initialize=0) m.x184 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x537 = Var(within=Reals,bounds=(0,None),initialize=0) m.x538 = Var(within=Reals,bounds=(0,None),initialize=0) m.x539 = Var(within=Reals,bounds=(0,None),initialize=0) m.x540", "m.b665 + m.b666 - m.b756 <= 0) m.c1354 = Constraint(expr=", "= Constraint(expr= m.b610 - m.b622 >= 0) m.c1424 = Constraint(expr=", "m.b663 + m.b672 + m.b675 >= 0) m.c1462 = Constraint(expr=", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b613 = Var(within=Binary,bounds=(0,1),initialize=0) m.b614 = Var(within=Binary,bounds=(0,1),initialize=0) m.b615 =", "0) m.c1075 = Constraint(expr= m.b657 - m.b658 <= 0) m.c1076", "0) m.c1351 = Constraint(expr= - m.b662 - m.b663 + m.b664", "Var(within=Reals,bounds=(0,None),initialize=0) m.x17 = Var(within=Reals,bounds=(0,None),initialize=0) m.x18 = Var(within=Reals,bounds=(0,None),initialize=0) m.x19 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c247 = Constraint(expr= m.x292 - 3.34221486003388*m.b616 <= 0) m.c248", "m.b629 = Var(within=Binary,bounds=(0,1),initialize=0) m.b630 = Var(within=Binary,bounds=(0,1),initialize=0) m.b631 = Var(within=Binary,bounds=(0,1),initialize=0) m.b632", "= Constraint(expr= m.x175 - m.x520 - m.x523 == 0) m.c707", "0) m.c703 = Constraint(expr= m.x163 - m.x496 - m.x499 ==", "0.940066550763924) m.c775 = Constraint(expr= m.x511 + 0.940066550763924*m.b670 <= 0.940066550763924) m.c776", "m.c1116 = Constraint(expr= m.b692 + m.b694 <= 1) m.c1117 =", "<= 1) m.c1235 = Constraint(expr= m.b752 + m.b753 <= 1)", "0) m.c859 = Constraint(expr= m.x529 - 0.994083415506506*m.b679 <= 0) m.c860", "m.b647 - m.b737 <= 0) m.c1335 = Constraint(expr= - m.b647", "0) m.c33 = Constraint(expr= m.x135 - m.x138 == 0) m.c34", "m.b629 
>= 0) m.c1380 = Constraint(expr= - m.b612 + m.b630", "0) m.c791 = Constraint(expr= m.x545 == 0) m.c792 = Constraint(expr=", "0) m.c376 = Constraint(expr= m.x379 == 0) m.c377 = Constraint(expr=", "1) m.c1269 = Constraint(expr= m.b767 + m.b769 <= 1) m.c1270", "m.x589 + 13.5*m.b682 <= 13.5) m.c896 = Constraint(expr= - 0.6*m.x560", "= Constraint(expr= m.x52 - m.x292 - m.x295 == 0) m.c242", "Var(within=Reals,bounds=(0,None),initialize=0) m.x376 = Var(within=Reals,bounds=(0,None),initialize=0) m.x377 = Var(within=Reals,bounds=(0,None),initialize=0) m.x378 = Var(within=Reals,bounds=(0,None),initialize=0)", "1) m.c1220 = Constraint(expr= m.b744 + m.b745 <= 1) m.c1221", "- 1.2*log(1 + m.x478/(0.001 + 0.999*m.b661)))*(0.001 + 0.999* m.b661) <=", "m.x159 = Var(within=Reals,bounds=(0,None),initialize=0) m.x160 = Var(within=Reals,bounds=(0,None),initialize=0) m.x161 = Var(within=Reals,bounds=(0,None),initialize=0) m.x162", "- 1.11894339953103*m.b650 <= 0) m.c606 = Constraint(expr= m.x459 - 1.11894339953103*m.b651", "m.b718 <= 1) m.c1165 = Constraint(expr= m.b716 + m.b717 <=", "0) m.c1432 = Constraint(expr= m.b613 - m.b631 >= 0) m.c1433", "- m.x251 == 0) m.c177 = Constraint(expr= m.x33 - m.x249", "9*m.b643 <= 0) m.c527 = Constraint(expr= m.x443 + 9*m.b641 <=", "= Constraint(expr= m.x467 == 0) m.c618 = Constraint(expr= m.x468 ==", "+ m.b705 <= 1) m.c1140 = Constraint(expr= m.b704 + m.b706", "m.b596 - m.b597 + m.b598 - m.b688 <= 0) m.c1286", "Constraint(expr= m.b720 + m.b721 <= 1) m.c1175 = Constraint(expr= m.b722", "m.x594 = Var(within=Reals,bounds=(0,None),initialize=0) m.x595 = Var(within=Reals,bounds=(0,None),initialize=0) m.b596 = Var(within=Binary,bounds=(0,1),initialize=0) m.b597", "m.c971 = Constraint(expr= 3*m.b734 + m.x824 == 0) m.c972 =", "m.b728 <= 0) m.c1326 = Constraint(expr= - m.b638 + m.b639", "0 0 0 0 0 # FX 0 0 0", "- 3.04984759446376*m.b650 <= 0) m.c600 = Constraint(expr= m.x405 - 3.04984759446376*m.b651", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b657 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b658 = Var(within=Binary,bounds=(0,1),initialize=0) m.b659 =", "- m.x393 - m.x396 == 0) m.c541 = Constraint(expr= m.x100", "- m.x48 - m.x51 == 0) m.c16 = Constraint(expr= m.x40", "Constraint(expr= m.x538 - 15*m.b670 <= 0) m.c785 = Constraint(expr= m.x539", "m.b764 = Var(within=Binary,bounds=(0,1),initialize=0) m.b765 = Var(within=Binary,bounds=(0,1),initialize=0) m.b766 = Var(within=Binary,bounds=(0,1),initialize=0) m.b767", "<= 0) m.c1044 = Constraint(expr= m.b626 - m.b628 <= 0)", "m.b744 <= 1) m.c1220 = Constraint(expr= m.b744 + m.b745 <=", "m.c1399 = Constraint(expr= - m.b625 + m.b643 + m.b646 >=", "0 0 0 0 # FX 0 0 0 0", "15) m.c787 = Constraint(expr= m.x541 + 15*m.b670 <= 15) m.c788", "<= 1) m.c1273 = Constraint(expr= m.b770 + m.b771 <= 1)", "+ 3.34221486003388*m.b613 <= 3.34221486003388) m.c224 = Constraint(expr= m.x314 - 1.83548069293539*m.b611", "+ m.b632 >= 0) m.c1383 = Constraint(expr= - m.b615 +", "= Constraint(expr=(m.x460/(0.001 + 0.999*m.b652) - 0.8*log(1 + m.x406/(0.001 + 0.999*m.b652)))*(0.001", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x233 = Var(within=Reals,bounds=(0,None),initialize=0) m.x234 = Var(within=Reals,bounds=(0,None),initialize=0) m.x235 =", "m.x102 = Var(within=Reals,bounds=(0,None),initialize=0) m.x103 = Var(within=Reals,bounds=(0,None),initialize=0) m.x104 = Var(within=Reals,bounds=(0,None),initialize=0) m.x105", "5*m.b686 + m.x776 == 0) m.c924 = Constraint(expr= 4*m.b687 +", "m.x442 == 0) m.c506 = Constraint(expr= m.x389 == 0) m.c507", "m.b691 <= 1) m.c1114 = Constraint(expr= m.b690 + m.b691 <=", "20) m.c406 = Constraint(expr= m.x421 + 20*m.b631 <= 20) m.c407", "Constraint(expr= m.b665 - m.b677 >= 0) m.c1479 = Constraint(expr= m.b666", "m.b627 + m.b628 - m.b718 <= 0) m.c1316 = Constraint(expr=", "0) m.c1013 = Constraint(expr= m.b596 - m.b597 <= 0) m.c1014", "m.b708 + m.b709 <= 1) m.c1149 = Constraint(expr= m.b707 +", "m.x71 - m.x89 + m.x92 == 0) m.c24 = Constraint(expr=", 
"m.x389 == 0) m.c513 = Constraint(expr= m.x96 - m.x387 -", "== 0) m.c827 = Constraint(expr= m.x200 - m.x572 - m.x575", "Constraint(expr= m.x297 - 15*m.b621 <= 0) m.c301 = Constraint(expr= m.x298", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x291 = Var(within=Reals,bounds=(0,None),initialize=0) m.x292 = Var(within=Reals,bounds=(0,None),initialize=0) m.x293 =", "m.b664 = Var(within=Binary,bounds=(0,1),initialize=0) m.b665 = Var(within=Binary,bounds=(0,1),initialize=0) m.b666 = Var(within=Binary,bounds=(0,1),initialize=0) m.b667", "m.b637 <= 0) m.c1055 = Constraint(expr= m.b638 - m.b639 <=", "Constraint(expr= m.b608 - m.b609 <= 0) m.c1026 = Constraint(expr= m.b608", "m.x480 = Var(within=Reals,bounds=(0,None),initialize=0) m.x481 = Var(within=Reals,bounds=(0,None),initialize=0) m.x482 = Var(within=Reals,bounds=(0,None),initialize=0) m.x483", "m.c1201 = Constraint(expr= m.b734 + m.b735 <= 1) m.c1202 =", "m.c1482 = Constraint(expr= m.b669 - m.b681 >= 0) m.c1483 =", "m.b773 + m.b774 <= 1) m.c1278 = Constraint(expr= m.b773 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x346 = Var(within=Reals,bounds=(0,None),initialize=0) m.x347 = Var(within=Reals,bounds=(0,None),initialize=0) m.x348 =", "= Constraint(expr= m.x170 - m.x512 - m.x515 == 0) m.c765", "+ m.b599 - m.b605 >= 0) m.c1407 = Constraint(expr= m.b597", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x355 = Var(within=Reals,bounds=(0,None),initialize=0) m.x356 = Var(within=Reals,bounds=(0,None),initialize=0) m.x357 =", "log(1 + m.x527/(0.001 + 0.999*m.b677)))*(0.001 + 0.999*m.b677) <= 0) m.c843", "m.b636 + m.b637 - m.b727 <= 0) m.c1325 = Constraint(expr=", "= Constraint(expr= m.x182 - m.x542 - m.x545 == 0) m.c798", "Constraint(expr= m.x439 + 2.30162356062425*m.b640 <= 2.30162356062425) m.c503 = Constraint(expr= -", "m.x505 = Var(within=Reals,bounds=(0,None),initialize=0) m.x506 = Var(within=Reals,bounds=(0,None),initialize=0) m.x507 = Var(within=Reals,bounds=(0,None),initialize=0) m.x508", "= 
Var(within=Reals,bounds=(0,None),initialize=0) m.x487 = Var(within=Reals,bounds=(0,None),initialize=0) m.x488 = Var(within=Reals,bounds=(0,None),initialize=0) m.x489 =", "Constraint(expr= m.b737 + m.b739 <= 1) m.c1210 = Constraint(expr= m.b738", "m.b684 - m.b774 <= 0) m.c1372 = Constraint(expr= - m.b683", "<= 0) m.c1084 = Constraint(expr= m.b666 - m.b667 <= 0)", "m.b600 - m.b603 >= 0) m.c1405 = Constraint(expr= m.b598 +", "0.994083415506506*m.b677 <= 0.994083415506506) m.c861 = Constraint(expr= m.x534 + 0.994083415506506*m.b678 <=", "m.b599 - m.b600 <= 0) m.c1017 = Constraint(expr= m.b599 -", "+ 0.999*m.b639) - 0.65*log(1 + m.x369/(0.001 + 0.999*m.b639)))*(0.001 + 0.999*", "0.999* m.b675) <= 0) m.c817 = Constraint(expr=(m.x574/(0.001 + 0.999*m.b676) -", "m.x802 = Var(within=Reals,bounds=(None,None),initialize=0) m.x803 = Var(within=Reals,bounds=(None,None),initialize=0) m.x804 = Var(within=Reals,bounds=(None,None),initialize=0) m.x805", "+ m.x344 == 0) m.c285 = Constraint(expr= - 0.9*m.x297 +", "Constraint(expr= m.x487 == 0) m.c647 = Constraint(expr= m.x140 - m.x470", "m.x211 - m.x592 - m.x595 == 0) m.c911 = Constraint(expr=", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b747 = Var(within=Binary,bounds=(0,1),initialize=0) m.b748 = Var(within=Binary,bounds=(0,1),initialize=0) m.b749 =", "m.b648) <= 0) m.c559 = Constraint(expr=(m.x454/(0.001 + 0.999*m.b649) - 0.75*log(1", "- 7*m.b701 - 7*m.b702 - 4*m.b703 - 4*m.b704 - 3*m.b705", "Var(within=Reals,bounds=(None,None),initialize=0) m.x808 = Var(within=Reals,bounds=(None,None),initialize=0) m.x809 = Var(within=Reals,bounds=(None,None),initialize=0) m.x810 = Var(within=Reals,bounds=(None,None),initialize=0)", "Constraint(expr= m.x123 - m.x441 - m.x444 == 0) m.c517 =", "33.5*m.b638 <= 0) m.c492 = Constraint(expr= m.x381 - 33.5*m.b639 <=", "m.x547 = Var(within=Reals,bounds=(0,None),initialize=0) m.x548 = Var(within=Reals,bounds=(0,None),initialize=0) m.x549 = Var(within=Reals,bounds=(0,None),initialize=0) m.x550", "- 
m.x84 == 0) m.c22 = Constraint(expr= m.x70 - m.x82", "0) m.c206 = Constraint(expr= m.x287 == 0) m.c207 = Constraint(expr=", "1.1*log(1 + m.x310/(0.001 + 0.999*m.b628)))*(0.001 + 0.999* m.b628) <= 0)", "= Constraint(expr= m.b641 - m.b731 <= 0) m.c1329 = Constraint(expr=", "- 30*m.b610 <= 0) m.c194 = Constraint(expr= m.x257 + 30*m.b608", "- 20*m.b631 <= 0) m.c404 = Constraint(expr= m.x419 + 20*m.b629", "- 15*m.b620 <= 0) m.c300 = Constraint(expr= m.x297 - 15*m.b621", "- m.x296 - m.x299 == 0) m.c294 = Constraint(expr= m.x54", "0.940066550763924*m.b669 <= 0) m.c772 = Constraint(expr= m.x508 - 0.940066550763924*m.b670 <=", "Constraint(expr= m.x193 - m.x562 - m.x565 == 0) m.c908 =", "0.6*m.x561 + m.x591 == 0) m.c898 = Constraint(expr= - 0.6*m.x562", "<= 0) m.c1060 = Constraint(expr= m.b642 - m.b643 <= 0)", "3.34221486003388*m.b613 <= 0) m.c221 = Constraint(expr= m.x287 + 3.34221486003388*m.b611 <=", "0.999*m.b618)))*(0.001 + 0.999*m.b618) <= 0) m.c259 = Constraint(expr=(m.x340/(0.001 + 0.999*m.b619)", "0) m.c42 = Constraint(expr= m.x153 - m.x156 - m.x159 ==", "- m.x10 == 0) m.c5 = Constraint(expr= - m.x11 -", "= Constraint(expr= m.x385 + 33.5*m.b640 <= 33.5) m.c497 = Constraint(expr=", "m.x176 = Var(within=Reals,bounds=(0,None),initialize=0) m.x177 = Var(within=Reals,bounds=(0,None),initialize=0) m.x178 = Var(within=Reals,bounds=(0,None),initialize=0) m.x179", "0) m.c421 = Constraint(expr= m.x115 - m.x424 - m.x427 ==", "- m.x279 - m.x282 == 0) m.c184 = Constraint(expr= m.x46", "+ m.b694 <= 1) m.c1120 = Constraint(expr= m.b693 + m.b694", "0) m.c1442 = Constraint(expr= m.b623 - m.b641 >= 0) m.c1443", "m.c153 = Constraint(expr= m.x246 + 4.45628648004517*m.b606 <= 4.45628648004517) m.c154 =", "= Constraint(expr= - m.b674 - m.b675 + m.b676 - m.b766", "Constraint(expr= m.x216 + 40*m.b597 <= 40) m.c73 = Constraint(expr= m.x217", "+ 20*m.b630 <= 20) m.c406 = Constraint(expr= m.x421 + 20*m.b631", "Constraint(expr= m.b678 - m.b679 <= 0) m.c1097 = Constraint(expr= m.b680", "m.b716 
<= 0) m.c1314 = Constraint(expr= - m.b626 + m.b627", "Var(within=Reals,bounds=(0,None),initialize=0) m.x331 = Var(within=Reals,bounds=(0,None),initialize=0) m.x332 = Var(within=Reals,bounds=(0,None),initialize=0) m.x333 = Var(within=Reals,bounds=(0,None),initialize=0)", "+ m.x795 == 0) m.c943 = Constraint(expr= 2*m.b706 + m.x796", "<= 0) m.c1082 = Constraint(expr= m.b665 - m.b666 <= 0)", "+ 0.999*m.b677)))*(0.001 + 0.999*m.b677) <= 0) m.c843 = Constraint(expr=(m.x579/(0.001 +", "0) m.c1354 = Constraint(expr= - m.b665 - m.b666 + m.b667", "- m.x287 == 0) m.c213 = Constraint(expr= m.x48 - m.x285", "0) m.c527 = Constraint(expr= m.x443 + 9*m.b641 <= 9) m.c528", "m.c846 = Constraint(expr= m.x534 == 0) m.c847 = Constraint(expr= m.x535", "2*m.b706 + m.x796 == 0) m.c944 = Constraint(expr= 5*m.b707 +", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b709 = Var(within=Binary,bounds=(0,1),initialize=0) m.b710 = Var(within=Binary,bounds=(0,1),initialize=0) m.b711 =", "m.x296 - 15*m.b620 <= 0) m.c300 = Constraint(expr= m.x297 -", "= Constraint(expr= - 0.6*m.x302 + m.x350 == 0) m.c312 =", "Constraint(expr= m.x96 - m.x387 - m.x390 == 0) m.c514 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x148 = Var(within=Reals,bounds=(0,None),initialize=0) m.x149 = Var(within=Reals,bounds=(0,None),initialize=0) m.x150 =", "m.c1237 = Constraint(expr= m.b752 + m.b753 <= 1) m.c1238 =", "<= 0) m.c200 = Constraint(expr= m.x281 + 15*m.b608 <= 15)", "<= 0) m.c891 = Constraint(expr= m.x585 - 13.5*m.b681 <= 0)", "0.690184503917672) m.c867 = Constraint(expr= m.x582 + 0.690184503917672*m.b678 <= 0.690184503917672) m.c868", "+ m.b765 <= 1) m.c1262 = Constraint(expr= m.b765 + m.b766", "m.c1455 = Constraint(expr= m.b627 - m.b654 >= 0) m.c1456 =", "= Constraint(expr= m.x342 == 0) m.c265 = Constraint(expr= m.x343 ==", "m.x253 = Var(within=Reals,bounds=(0,None),initialize=0) m.x254 = Var(within=Reals,bounds=(0,None),initialize=0) m.x255 = Var(within=Reals,bounds=(0,None),initialize=0) m.x256", 
"m.b640 <= 0) m.c1057 = Constraint(expr= m.b639 - m.b640 <=", "1.25*log(1 + m.x284/(0.001 + 0.999*m.b611)))*(0.001 + 0.999* m.b611) <= 0)", "- m.b609 <= 0) m.c1026 = Constraint(expr= m.b608 - m.b610", "m.b683 = Var(within=Binary,bounds=(0,1),initialize=0) m.b684 = Var(within=Binary,bounds=(0,1),initialize=0) m.b685 = Var(within=Binary,bounds=(0,1),initialize=0) m.b686", "m.c727 = Constraint(expr= m.x532 == 0) m.c728 = Constraint(expr= m.x164", "m.c506 = Constraint(expr= m.x389 == 0) m.c507 = Constraint(expr= m.x390", "3.34221486003388*m.b615 <= 3.34221486003388) m.c250 = Constraint(expr= m.x295 + 3.34221486003388*m.b616 <=", "+ 0.999*m.b673) - 1.25*log(1 + m.x544/(0.001 + 0.999*m.b673)))*(0.001 + 0.999*", "m.c972 = Constraint(expr= 4*m.b735 + m.x825 == 0) m.c973 =", ">= 0) m.c1429 = Constraint(expr= m.b610 - m.b628 >= 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x318 = Var(within=Reals,bounds=(0,None),initialize=0) m.x319 = Var(within=Reals,bounds=(0,None),initialize=0) m.x320 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x81 - m.x84 == 0) m.c22 = Constraint(expr= m.x70 -", "- 15*m.b670 <= 0) m.c785 = Constraint(expr= m.x539 + 15*m.b668", "m.c1061 = Constraint(expr= m.b644 - m.b645 <= 0) m.c1062 =", "0) m.c1333 = Constraint(expr= - m.b644 - m.b645 + m.b646", "0 # # Nonzero counts # Total const NL DLL", "5*m.b732 - 2*m.b733 - 3*m.b734 - 4*m.b735 - 3*m.b736 -", "m.x83 - m.x368 - m.x371 == 0) m.c477 = Constraint(expr=", "m.x428 - 0.572481933717686*m.b635 <= 0) m.c456 = Constraint(expr= m.x429 -", "m.c1355 = Constraint(expr= m.b668 - m.b758 <= 0) m.c1356 =", "- m.x264 == 0) m.c121 = Constraint(expr= m.x40 - m.x262", "m.c562 = Constraint(expr= m.x403 == 0) m.c563 = Constraint(expr= m.x455", "Var(within=Reals,bounds=(0,None),initialize=0) m.x258 = Var(within=Reals,bounds=(0,None),initialize=0) m.x259 = Var(within=Reals,bounds=(0,None),initialize=0) m.x260 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c363 = Constraint(expr= m.x360 + 
3.04984759446376*m.b627 <= 3.04984759446376) m.c364 =", "m.b687 <= 0) m.c1285 = Constraint(expr= - m.b596 - m.b597", "0) m.c800 = Constraint(expr= m.x197 - m.x566 - m.x569 ==", "= Constraint(expr= m.x54 - m.x297 - m.x300 == 0) m.c295", "m.b763 <= 1) m.c1258 = Constraint(expr= m.b762 + m.b763 <=", "0) m.c1348 = Constraint(expr= - m.b659 - m.b660 + m.b661", "Var(within=Reals,bounds=(0,None),initialize=0) m.x138 = Var(within=Reals,bounds=(0,None),initialize=0) m.x139 = Var(within=Reals,bounds=(0,None),initialize=0) m.x140 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c40 = Constraint(expr= - m.x148 - m.x151 + m.x154 ==", "Constraint(expr= m.x56 - m.x302 - m.x305 == 0) m.c321 =", "<= 13.5) m.c311 = Constraint(expr= - 0.6*m.x302 + m.x350 ==", "<= 3.04984759446376) m.c604 = Constraint(expr= m.x409 + 3.04984759446376*m.b652 <= 3.04984759446376)", "m.c594 = Constraint(expr= m.x105 - m.x405 - m.x408 == 0)", "= Constraint(expr= m.b638 - m.b639 <= 0) m.c1056 = Constraint(expr=", "+ m.x243/(0.001 + 0.999*m.b606)))*(0.001 + 0.999* m.b606) <= 0) m.c136", "- 1.25*log(1 + m.x544/(0.001 + 0.999*m.b673)))*(0.001 + 0.999* m.b673) <=", "m.x459 = Var(within=Reals,bounds=(0,None),initialize=0) m.x460 = Var(within=Reals,bounds=(0,None),initialize=0) m.x461 = Var(within=Reals,bounds=(0,None),initialize=0) m.x462", "= Constraint(expr= - m.x71 - m.x89 + m.x92 == 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x525 = Var(within=Reals,bounds=(0,None),initialize=0) m.x526 = Var(within=Reals,bounds=(0,None),initialize=0) m.x527 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c829 = Constraint(expr= m.x202 - m.x574 - m.x577 ==", "= Constraint(expr= m.x415 == 0) m.c617 = Constraint(expr= m.x467 ==", "m.x291 - 3.34221486003388*m.b615 <= 0) m.c247 = Constraint(expr= m.x292 -", "0) m.c1302 = Constraint(expr= - m.b614 + m.b615 - m.b705", "0) m.c379 = Constraint(expr= m.x421 == 0) m.c380 = Constraint(expr=", "- m.b739 <= 0) m.c1337 = Constraint(expr= m.b650 - m.b740", "0) 
m.c1394 = Constraint(expr= - m.b620 + m.b638 >= 0)", "m.x169 = Var(within=Reals,bounds=(0,None),initialize=0) m.x170 = Var(within=Reals,bounds=(0,30),initialize=0) m.x171 = Var(within=Reals,bounds=(0,30),initialize=0) m.x172", "m.x433 + 0.572481933717686*m.b637 <= 0.572481933717686) m.c461 = Constraint(expr=(m.x434/(0.001 + 0.999*m.b638)", "m.x276 == 0) m.c268 = Constraint(expr= m.x43 - m.x271 -", "<= 0) m.c777 = Constraint(expr= m.x513 - 30*m.b669 <= 0)", "= Constraint(expr= m.b758 + m.b759 <= 1) m.c1248 = Constraint(expr=", "= Constraint(expr= m.x287 == 0) m.c207 = Constraint(expr= m.x288 ==", "Var(within=Reals,bounds=(0,None),initialize=0) m.x8 = Var(within=Reals,bounds=(0,None),initialize=0) m.x9 = Var(within=Reals,bounds=(0,None),initialize=0) m.x10 = Var(within=Reals,bounds=(0,None),initialize=0)", "- m.x21 - m.x24 == 0) m.c10 = Constraint(expr= m.x19", "0) m.c1019 = Constraint(expr= m.b602 - m.b603 <= 0) m.c1020", "1) m.c1253 = Constraint(expr= m.b761 + m.b762 <= 1) m.c1254", "Var(within=Reals,bounds=(0,None),initialize=0) m.x503 = Var(within=Reals,bounds=(0,None),initialize=0) m.x504 = Var(within=Reals,bounds=(0,None),initialize=0) m.x505 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x514 - 30*m.b670 <= 0) m.c779 = Constraint(expr= m.x515 +", "0) m.c8 = Constraint(expr= m.x17 - m.x20 - m.x23 ==", "= Constraint(expr= m.x419 == 0) m.c378 = Constraint(expr= m.x420 ==", "Var(within=Reals,bounds=(0,None),initialize=0) m.x402 = Var(within=Reals,bounds=(0,None),initialize=0) m.x403 = Var(within=Reals,bounds=(0,None),initialize=0) m.x404 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x2 - m.x3 - m.x4 + 5*m.x20 + 10*m.x21 +", "- m.x463 == 0) m.c599 = Constraint(expr= m.x404 - 3.04984759446376*m.b650", "+ 0.999*m.b640)))*(0.001 + 0.999* m.b640) <= 0) m.c467 = Constraint(expr=", "+ 0.480234946352917*m.b675 <= 0.480234946352917) m.c841 = Constraint(expr= m.x577 + 0.480234946352917*m.b676", "Var(within=Binary,bounds=(0,1),initialize=0) m.b617 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b618 = Var(within=Binary,bounds=(0,1),initialize=0) m.b619 = Var(within=Binary,bounds=(0,1),initialize=0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x489 = Var(within=Reals,bounds=(0,None),initialize=0) m.x490 = Var(within=Reals,bounds=(0,None),initialize=0) m.x491 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x524 - 0.994083415506506*m.b665 <= 0) m.c741 = Constraint(expr= m.x525 -", "= Constraint(expr= m.b635 - m.b636 <= 0) m.c1053 = Constraint(expr=", "= Constraint(expr= 2*m.b720 + m.x810 == 0) m.c958 = Constraint(expr=", "m.c1354 = Constraint(expr= - m.b665 - m.b666 + m.b667 -", "m.c693 = Constraint(expr= - 0.75*m.x495 + m.x519 == 0) m.c694", "Constraint(expr= m.x479 + 1.18887736200171*m.b659 <= 1.18887736200171) m.c684 = Constraint(expr= m.x480", "m.c234 = Constraint(expr= m.x294 == 0) m.c235 = Constraint(expr= m.x295", "= Constraint(expr= m.b626 - m.b628 <= 0) m.c1045 = Constraint(expr=", "<= 0.572481933717686) m.c461 = Constraint(expr=(m.x434/(0.001 + 0.999*m.b638) - 0.65*log(1 +", "Constraint(expr= - m.b683 - m.b684 + m.b685 - m.b775 <=", "0.940066550763924*m.b668 <= 0) m.c771 = Constraint(expr= m.x507 - 0.940066550763924*m.b669 <=", "m.x336 == 0) m.c412 = Constraint(expr= m.x337 == 0) m.c413", "m.x184 - m.x187 == 0) m.c50 = Constraint(expr= m.x179 -", "m.c546 = Constraint(expr= m.x393 - 9*m.b645 <= 0) m.c547 =", "m.x420 + 20*m.b630 <= 20) m.c406 = Constraint(expr= m.x421 +", "Var(within=Reals,bounds=(0,None),initialize=0) m.x83 = Var(within=Reals,bounds=(0,None),initialize=0) m.x84 = Var(within=Reals,bounds=(0,None),initialize=0) m.x85 = Var(within=Reals,bounds=(0,None),initialize=0)", "Var(within=Reals,bounds=(None,None),initialize=0) m.x860 = Var(within=Reals,bounds=(None,None),initialize=0) m.x861 = Var(within=Reals,bounds=(None,None),initialize=0) m.x862 = Var(within=Reals,bounds=(None,None),initialize=0)", "- m.x359 == 0) m.c351 = Constraint(expr= m.x78 - m.x357", "m.x104 - m.x404 - m.x407 
== 0) m.c594 = Constraint(expr=", "m.b653 - m.b743 <= 0) m.c1341 = Constraint(expr= - m.b653", "- m.b627 >= 0) m.c1429 = Constraint(expr= m.b610 - m.b628", "= Var(within=Reals,bounds=(0,30),initialize=0) m.x36 = Var(within=Reals,bounds=(0,30),initialize=0) m.x37 = Var(within=Reals,bounds=(0,30),initialize=0) m.x38 =", "<= 0) m.c198 = Constraint(expr= m.x279 - 15*m.b609 <= 0)", "= Constraint(expr= m.b770 + m.b772 <= 1) m.c1273 = Constraint(expr=", "- m.x285 - m.x288 == 0) m.c214 = Constraint(expr= m.x49", "0) m.c992 = Constraint(expr= 2*m.b755 + m.x845 == 0) m.c993", "m.x282 + 15*m.b609 <= 15) m.c202 = Constraint(expr= m.x283 +", "0.572481933717686*m.b636 <= 0.572481933717686) m.c460 = Constraint(expr= m.x433 + 0.572481933717686*m.b637 <=", "m.x832 = Var(within=Reals,bounds=(None,None),initialize=0) m.x833 = Var(within=Reals,bounds=(None,None),initialize=0) m.x834 = Var(within=Reals,bounds=(None,None),initialize=0) m.x835", "== 0) m.c679 = Constraint(expr= m.x151 - m.x490 - m.x493", "Var(within=Reals,bounds=(None,None),initialize=0) m.x797 = Var(within=Reals,bounds=(None,None),initialize=0) m.x798 = Var(within=Reals,bounds=(None,None),initialize=0) m.x799 = Var(within=Reals,bounds=(None,None),initialize=0)", "m.b633 - m.b723 <= 0) m.c1321 = Constraint(expr= - m.b632", "Constraint(expr= 4*m.b704 + m.x794 == 0) m.c942 = Constraint(expr= 3*m.b705", "<= 0) m.c431 = Constraint(expr= m.x425 + 0.842233385663186*m.b632 <= 0.842233385663186)", "0.690184503917672) m.c868 = Constraint(expr= m.x583 + 0.690184503917672*m.b679 <= 0.690184503917672) m.c869", "01/15/21 11:37:33 # # Equation counts # Total E G", "1.32154609891348*m.b616 <= 1.32154609891348) m.c257 = Constraint(expr=(m.x338/(0.001 + 0.999*m.b617) - log(1", "0) m.c761 = Constraint(expr= m.x167 - m.x506 - m.x509 ==", "1) m.c1119 = Constraint(expr= m.b692 + m.b694 <= 1) m.c1120", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x812 = Var(within=Reals,bounds=(None,None),initialize=0) m.x813 = 
Var(within=Reals,bounds=(None,None),initialize=0) m.x814 =", "m.x231 - m.x234 == 0) m.c94 = Constraint(expr= m.x16 -", "m.x283 = Var(within=Reals,bounds=(0,None),initialize=0) m.x284 = Var(within=Reals,bounds=(0,None),initialize=0) m.x285 = Var(within=Reals,bounds=(0,None),initialize=0) m.x286", "Constraint(expr= m.x189 - m.x555 - m.x558 == 0) m.c880 =", "Constraint(expr= - 0.9*m.x554 + m.x584 == 0) m.c870 = Constraint(expr=", "m.x186 = Var(within=Reals,bounds=(0,None),initialize=0) m.x187 = Var(within=Reals,bounds=(0,None),initialize=0) m.x188 = Var(within=Reals,bounds=(0,None),initialize=0) m.x189", "+ 0.999*m.b640)))*(0.001 + 0.999* m.b640) <= 0) m.c464 = Constraint(expr=(m.x434/(0.001", "= Constraint(expr= - m.b626 - m.b627 + m.b628 - m.b718", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x573 = Var(within=Reals,bounds=(0,None),initialize=0) m.x574 = Var(within=Reals,bounds=(0,None),initialize=0) m.x575 =", "m.b647 = Var(within=Binary,bounds=(0,1),initialize=0) m.b648 = Var(within=Binary,bounds=(0,1),initialize=0) m.b649 = Var(within=Binary,bounds=(0,1),initialize=0) m.b650", "m.x305 = Var(within=Reals,bounds=(0,None),initialize=0) m.x306 = Var(within=Reals,bounds=(0,None),initialize=0) m.x307 = Var(within=Reals,bounds=(0,None),initialize=0) m.x308", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x51 = Var(within=Reals,bounds=(0,None),initialize=0) m.x52 = Var(within=Reals,bounds=(0,None),initialize=0) m.x53 =", "Var(within=Binary,bounds=(0,1),initialize=0) m.b719 = Var(within=Binary,bounds=(0,1),initialize=0) m.b720 = Var(within=Binary,bounds=(0,1),initialize=0) m.b721 = Var(within=Binary,bounds=(0,1),initialize=0)", "1) m.c1163 = Constraint(expr= m.b716 + m.b717 <= 1) m.c1164", "m.x189 - m.x555 - m.x558 == 0) m.c880 = Constraint(expr=", "0) m.c825 = Constraint(expr= m.x186 - m.x549 - m.x552 ==", "m.b747 <= 1) m.c1226 = Constraint(expr= m.b747 + m.b748 <=", "Constraint(expr= m.b627 - m.b648 >= 0) m.c1450 = Constraint(expr= m.b628", "m.b628 - m.b649 
>= 0) m.c1451 = Constraint(expr= m.b626 -", "m.b620 + m.b623 + m.b626 >= 0) m.c1392 = Constraint(expr=", "1) m.c1152 = Constraint(expr= m.b710 + m.b712 <= 1) m.c1153", "m.b618 - m.b639 >= 0) m.c1441 = Constraint(expr= m.b619 -", "m.c295 = Constraint(expr= m.x55 - m.x298 - m.x301 == 0)", "1) m.c1198 = Constraint(expr= m.b732 + m.b733 <= 1) m.c1199", "<= 0) m.c361 = Constraint(expr= m.x358 - 3.04984759446376*m.b628 <= 0)", "m.x427 + 0.842233385663186*m.b634 <= 0.842233385663186) m.c434 = Constraint(expr=(m.x428/(0.001 + 0.999*m.b635)", "== 0) m.c646 = Constraint(expr= m.x487 == 0) m.c647 =", "m.b689 <= 0) m.c1287 = Constraint(expr= - m.b599 + m.b600", "- m.x161 - m.x164 - m.x167 == 0) m.c45 =", "- m.x22 - m.x25 == 0) m.c11 = Constraint(expr= m.x23", "m.c232 = Constraint(expr=(m.x328/(0.001 + 0.999*m.b616) - 0.9*log(1 + m.x292/(0.001 +", "= Constraint(expr= - m.x386 + m.x440 == 0) m.c504 =", "m.x248 - 4.45628648004517*m.b608 <= 0) m.c186 = Constraint(expr= m.x249 -", "- m.x423 - m.x426 == 0) m.c421 = Constraint(expr= m.x115", "= Constraint(expr= - m.x393 + m.x447 == 0) m.c532 =", "0) m.c1021 = Constraint(expr= m.b603 - m.b604 <= 0) m.c1022", "m.b624 >= 0) m.c1426 = Constraint(expr= m.b610 - m.b625 >=", "Constraint(expr= m.x363 - 1.26558121681553*m.b636 <= 0) m.c451 = Constraint(expr= m.x364", "- m.b607 <= 0) m.c1024 = Constraint(expr= m.b606 - m.b607", "m.x159 == 0) m.c43 = Constraint(expr= m.x154 - m.x157 -", "m.c268 = Constraint(expr= m.x43 - m.x271 - m.x277 == 0)", "+ m.b748 <= 1) m.c1227 = Constraint(expr= m.b746 + m.b748", "- m.x587 == 0) m.c882 = Constraint(expr= m.x207 - m.x585", "0.999*m.b599) - 1.2*log(1 + m.x218/(0.001 + 0.999*m.b599)))*(0.001 + 0.999* m.b599)", "m.x369/(0.001 + 0.999*m.b639)))*(0.001 + 0.999* m.b639) <= 0) m.c463 =", "0.65*log(1 + m.x370/(0.001 + 0.999*m.b640)))*(0.001 + 0.999* m.b640) <= 0)", "0) m.c1397 = Constraint(expr= - m.b623 + m.b641 + m.b644", "Var(within=Reals,bounds=(0,None),initialize=0) m.x201 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x202 = Var(within=Reals,bounds=(0,None),initialize=0) m.x203 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x66 - m.x327 - m.x333 == 0) m.c244 =", "m.x850 = Var(within=Reals,bounds=(None,None),initialize=0) m.x851 = Var(within=Reals,bounds=(None,None),initialize=0) m.x852 = Var(within=Reals,bounds=(None,None),initialize=0) m.x853", "Constraint(expr= m.x169 - m.x508 - m.x511 == 0) m.c764 =", "1) m.c1194 = Constraint(expr= m.b731 + m.b733 <= 1) m.c1195", "m.b655 >= 0) m.c1457 = Constraint(expr= - m.b653 + m.b656", "Constraint(expr= m.x39 - m.x48 - m.x51 == 0) m.c16 =", "Constraint(expr= m.b728 + m.b729 <= 1) m.c1190 = Constraint(expr= m.b729", "= Constraint(expr= m.b614 - m.b616 <= 0) m.c1033 = Constraint(expr=", "0) m.c688 = Constraint(expr= m.x490 - 0.940066550763924*m.b661 <= 0) m.c689", "Constraint(expr= m.x163 - m.x496 - m.x499 == 0) m.c704 =", "m.c732 = Constraint(expr= m.x177 - m.x525 - m.x531 == 0)", "+ m.b768 <= 1) m.c1266 = Constraint(expr= m.b767 + m.b769", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x442 = Var(within=Reals,bounds=(0,None),initialize=0) m.x443 = Var(within=Reals,bounds=(0,None),initialize=0) m.x444 =", "0.999*m.b679) <= 0) m.c845 = Constraint(expr= m.x533 == 0) m.c846", "= Constraint(expr= m.x79 - m.x358 - m.x361 == 0) m.c353", "0) m.c666 = Constraint(expr=(m.x489/(0.001 + 0.999*m.b660) - 1.2*log(1 + m.x477/(0.001", "m.x364/(0.001 + 0.999*m.b637)))*(0.001 + 0.999* m.b637) <= 0) m.c437 =", "m.c695 = Constraint(expr= m.x497 == 0) m.c696 = Constraint(expr= m.x498", "<= 1) m.c1174 = Constraint(expr= m.b720 + m.b721 <= 1)", "+ m.b706 <= 1) m.c1144 = Constraint(expr= m.b705 + m.b706", "= Constraint(expr= m.b665 - m.b755 <= 0) m.c1353 = Constraint(expr=", "0) m.c1400 = Constraint(expr= - m.b626 + m.b647 + m.b650", "cont binary integer sos1 sos2 scont sint # 865 685", "+ m.x214/(0.001 + 0.999*m.b598)))*(0.001 + 0.999*m.b598) <= 0) m.c56 =", "Constraint(expr= m.b701 + 
m.b702 <= 1) m.c1136 = Constraint(expr= m.b702", "m.x156 = Var(within=Reals,bounds=(0,None),initialize=0) m.x157 = Var(within=Reals,bounds=(0,None),initialize=0) m.x158 = Var(within=Reals,bounds=(0,None),initialize=0) m.x159", "0.994083415506506*m.b667 <= 0.994083415506506) m.c746 = Constraint(expr= - m.x506 + m.x536", "m.x593 + 9*m.b683 <= 9) m.c921 = Constraint(expr= m.x594 +", "m.x88 = Var(within=Reals,bounds=(0,20),initialize=0) m.x89 = Var(within=Reals,bounds=(0,20),initialize=0) m.x90 = Var(within=Reals,bounds=(0,20),initialize=0) m.x91", "4.45628648004517) m.c106 = Constraint(expr= m.x235 + 4.45628648004517*m.b601 <= 4.45628648004517) m.c107", "m.c1426 = Constraint(expr= m.b610 - m.b625 >= 0) m.c1427 =", "m.b638 >= 0) m.c1395 = Constraint(expr= - m.b621 + m.b639", "+ m.x862 == 0) m.c1010 = Constraint(expr= 8*m.b773 + m.x863", "m.c1370 = Constraint(expr= m.b683 - m.b773 <= 0) m.c1371 =", "- 1.2*log(1 + m.x218/(0.001 + 0.999*m.b599)))*(0.001 + 0.999* m.b599) <=", "<= 0) m.c1286 = Constraint(expr= m.b599 - m.b689 <= 0)", "Var(within=Reals,bounds=(None,None),initialize=0) m.x777 = Var(within=Reals,bounds=(None,None),initialize=0) m.x778 = Var(within=Reals,bounds=(None,None),initialize=0) m.x779 = Var(within=Reals,bounds=(None,None),initialize=0)", "== 0) m.c114 = Constraint(expr= m.x264 == 0) m.c115 =", "m.x569 == 0) m.c795 = Constraint(expr= m.x570 == 0) m.c796", "Constraint(expr= 6*m.b700 + m.x790 == 0) m.c938 = Constraint(expr= 7*m.b701", "+ m.b733 <= 1) m.c1199 = Constraint(expr= m.b734 + m.b735", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b654 = Var(within=Binary,bounds=(0,1),initialize=0) m.b655 = Var(within=Binary,bounds=(0,1),initialize=0) m.b656 =", "Constraint(expr= m.b752 + m.b753 <= 1) m.c1238 = Constraint(expr= m.b753", "= Constraint(expr= m.x435 - 2.30162356062425*m.b639 <= 0) m.c499 = Constraint(expr=", "+ m.x550/(0.001 + 0.999*m.b676)))*(0.001 + 0.999* m.b676) <= 0) m.c818", "m.x321 + 1.83548069293539*m.b612 <= 1.83548069293539) m.c229 = 
Constraint(expr= m.x322 +", "Var(within=Reals,bounds=(0,None),initialize=0) m.x498 = Var(within=Reals,bounds=(0,None),initialize=0) m.x499 = Var(within=Reals,bounds=(0,None),initialize=0) m.x500 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x248 = Var(within=Reals,bounds=(0,None),initialize=0) m.x249 = Var(within=Reals,bounds=(0,None),initialize=0) m.x250 =", "3.04984759446376*m.b653 <= 0) m.c627 = Constraint(expr= m.x411 - 3.04984759446376*m.b654 <=", "m.c599 = Constraint(expr= m.x404 - 3.04984759446376*m.b650 <= 0) m.c600 =", "+ m.b723 <= 1) m.c1176 = Constraint(expr= m.b722 + m.b724", "== 0) m.c505 = Constraint(expr= - m.x388 + m.x442 ==", "= Constraint(expr= m.x381 - 33.5*m.b639 <= 0) m.c493 = Constraint(expr=", "Constraint(expr= m.b609 - m.b621 >= 0) m.c1423 = Constraint(expr= m.b610", "m.c1461 = Constraint(expr= - m.b663 + m.b672 + m.b675 >=", "0) m.c828 = Constraint(expr= m.x201 - m.x573 - m.x576 ==", "1.18887736200171*m.b655 <= 1.18887736200171) m.c638 = Constraint(expr=(m.x482/(0.001 + 0.999*m.b656) - log(1", "<= 1) m.c1253 = Constraint(expr= m.b761 + m.b762 <= 1)", "m.x529 - 0.994083415506506*m.b679 <= 0) m.c860 = Constraint(expr= m.x533 +", "m.x13 = Var(within=Reals,bounds=(0,None),initialize=0) m.x14 = Var(within=Reals,bounds=(0,None),initialize=0) m.x15 = Var(within=Reals,bounds=(0,None),initialize=0) m.x16", "- m.b662 + m.b671 + m.b674 >= 0) m.c1461 =", "m.x484 - m.x487 == 0) m.c653 = Constraint(expr= m.x470 -", "+ m.b613 + m.b616 >= 0) m.c1379 = Constraint(expr= -", "= Constraint(expr= m.x490 - 0.940066550763924*m.b661 <= 0) m.c689 = Constraint(expr=", "0) m.c269 = Constraint(expr= m.x68 - m.x338 - m.x341 ==", "Constraint(expr= m.x142 - m.x472 - m.x475 == 0) m.c650 =", "20) m.c399 = Constraint(expr= m.x378 + 20*m.b630 <= 20) m.c400", "+ m.b727 <= 1) m.c1185 = Constraint(expr= m.b725 + m.b727", "1.26558121681553*m.b618 <= 1.26558121681553) m.c283 = Constraint(expr= m.x343 + 
1.26558121681553*m.b619 <=", "== 0) m.c418 = Constraint(expr= m.x67 - m.x331 - m.x337", "10*m.b695 - 9*m.b696 - 5*m.b697 - 6*m.b698 - 10*m.b699 -", "0) m.c121 = Constraint(expr= m.x40 - m.x262 - m.x265 ==", "0) m.c1067 = Constraint(expr= m.b650 - m.b651 <= 0) m.c1068", "0) m.c1304 = Constraint(expr= m.b617 - m.b707 <= 0) m.c1305", "- m.x326 - m.x332 == 0) m.c243 = Constraint(expr= m.x66", "m.b721 <= 1) m.c1175 = Constraint(expr= m.b722 + m.b723 <=", "Constraint(expr= m.b749 + m.b750 <= 1) m.c1232 = Constraint(expr= m.b750", "= Constraint(expr= - m.b683 + m.b684 - m.b774 <= 0)", "m.c370 = Constraint(expr= - m.x376 + m.x418 == 0) m.c371", "m.c569 = Constraint(expr= m.x128 - m.x452 - m.x455 == 0)", "m.x307 == 0) m.c323 = Constraint(expr= m.x74 - m.x350 -", "<= 1) m.c1280 = Constraint(expr= m.b774 + m.b775 <= 1)", "m.b658 + m.b661 >= 0) m.c1460 = Constraint(expr= - m.b662", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x543 = Var(within=Reals,bounds=(0,None),initialize=0) m.x544 = Var(within=Reals,bounds=(0,None),initialize=0) m.x545 =", "- m.x250 - m.x253 == 0) m.c179 = Constraint(expr= m.x35", "0) m.c53 = Constraint(expr=(m.x224/(0.001 + 0.999*m.b596) - log(1 + m.x212/(0.001", "= Constraint(expr= m.x533 + 0.994083415506506*m.b677 <= 0.994083415506506) m.c861 = Constraint(expr=", "0.999* m.b672) <= 0) m.c790 = Constraint(expr=(m.x568/(0.001 + 0.999*m.b673) -", "- m.b671 >= 0) m.c1473 = Constraint(expr= m.b663 - m.b672", "m.c335 = Constraint(expr= m.x353 + 9*m.b623 <= 9) m.c336 =", "Constraint(expr= - m.b621 + m.b639 >= 0) m.c1396 = Constraint(expr=", "= Constraint(expr= m.x224 - 3.71357206670431*m.b596 <= 0) m.c75 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x123 = Var(within=Reals,bounds=(0,None),initialize=0) m.x124 = Var(within=Reals,bounds=(0,None),initialize=0) m.x125 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x115 = Var(within=Reals,bounds=(0,None),initialize=0) m.x116 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x117 =", "m.x348 = Var(within=Reals,bounds=(0,None),initialize=0) m.x349 = Var(within=Reals,bounds=(0,None),initialize=0) m.x350 = Var(within=Reals,bounds=(0,None),initialize=0) m.x351", "+ 1.18887736200171*m.b656 <= 1.18887736200171) m.c657 = Constraint(expr= m.x474 + 1.18887736200171*m.b657", "= Constraint(expr= m.x543 - 0.705049913072943*m.b672 <= 0) m.c805 = Constraint(expr=", "m.x205 - m.x580 - m.x583 == 0) m.c857 = Constraint(expr=", "400*m.x204 + 430*m.x205 + 290*m.x206 + 300*m.x207 + 240*m.x208 +", "m.c871 = Constraint(expr= - 0.9*m.x556 + m.x586 == 0) m.c872", "m.x135 - m.x138 == 0) m.c34 = Constraint(expr= m.x136 -", "== 0) m.c754 = Constraint(expr= m.x511 == 0) m.c755 =", "Var(within=Binary,bounds=(0,1),initialize=0) m.b686 = Var(within=Binary,bounds=(0,1),initialize=0) m.b687 = Var(within=Binary,bounds=(0,1),initialize=0) m.b688 = Var(within=Binary,bounds=(0,1),initialize=0)", "Constraint(expr= m.x231 - 4.45628648004517*m.b600 <= 0) m.c103 = Constraint(expr= m.x232", "== 0) m.c297 = Constraint(expr= m.x72 - m.x345 - m.x348", "m.c925 = Constraint(expr= 6*m.b688 + m.x778 == 0) m.c926 =", "m.b616 - m.b634 >= 0) m.c1436 = Constraint(expr= m.b617 -", "1 variable and 1 equation from pyomo.environ import * model", "m.b605 - m.b695 <= 0) m.c1293 = Constraint(expr= - m.b605", "<= 0) m.c1045 = Constraint(expr= m.b627 - m.b628 <= 0)", "= Constraint(expr= m.x255 - 30*m.b609 <= 0) m.c193 = Constraint(expr=", "Constraint(expr= m.b770 + m.b772 <= 1) m.c1273 = Constraint(expr= m.b770", "Constraint(expr= m.x595 + 9*m.b685 <= 9) m.c923 = Constraint(expr= 5*m.b686", "== 0) m.c933 = Constraint(expr= 9*m.b696 + m.x786 == 0)", "m.x249 - 4.45628648004517*m.b609 <= 0) m.c187 = Constraint(expr= m.x250 -", "+ m.x368/(0.001 + 0.999*m.b638)))*(0.001 + 0.999* m.b638) <= 0) m.c462", "0) m.c32 = Constraint(expr= m.x134 - m.x137 == 0) m.c33", "- m.x465 - m.x468 == 0) m.c625 = Constraint(expr= m.x136", "m.b699 + m.b700 <= 1) m.c1131 = 
Constraint(expr= m.b698 +", "m.b723 + m.b724 <= 1) m.c1179 = Constraint(expr= m.b722 +", "= Constraint(expr= m.x240 == 0) m.c112 = Constraint(expr= m.x241 ==", "0 0 0 0 0 # # Nonzero counts #", "+ m.x19 == 0) m.c8 = Constraint(expr= m.x17 - m.x20", "- m.b616 <= 0) m.c1033 = Constraint(expr= m.b615 - m.b616", "Constraint(expr= m.x61 - m.x310 - m.x313 == 0) m.c350 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x232 = Var(within=Reals,bounds=(0,None),initialize=0) m.x233 = Var(within=Reals,bounds=(0,None),initialize=0) m.x234 =", "0.999* m.b659) <= 0) m.c666 = Constraint(expr=(m.x489/(0.001 + 0.999*m.b660) -", "== 0) m.c951 = Constraint(expr= 7*m.b714 + m.x804 == 0)", "Constraint(expr= m.b621 - m.b622 <= 0) m.c1040 = Constraint(expr= m.b623", "Constraint(expr=(m.x225/(0.001 + 0.999*m.b597) - log(1 + m.x213/(0.001 + 0.999*m.b597)))*(0.001 +", "m.b774 = Var(within=Binary,bounds=(0,1),initialize=0) m.b775 = Var(within=Binary,bounds=(0,1),initialize=0) m.x776 = Var(within=Reals,bounds=(None,None),initialize=0) m.x777", "0) m.c1064 = Constraint(expr= m.b647 - m.b648 <= 0) m.c1065", "- 4*m.b713 - 7*m.b714 - 4*m.b715 - 3*m.b716 - 9*m.b717", "m.x322 = Var(within=Reals,bounds=(0,None),initialize=0) m.x323 = Var(within=Reals,bounds=(0,None),initialize=0) m.x324 = Var(within=Reals,bounds=(0,None),initialize=0) m.x325", "== 0) m.c543 = Constraint(expr= m.x126 - m.x447 - m.x450", "m.b769 = Var(within=Binary,bounds=(0,1),initialize=0) m.b770 = Var(within=Binary,bounds=(0,1),initialize=0) m.b771 = Var(within=Binary,bounds=(0,1),initialize=0) m.b772", "9) m.c557 = Constraint(expr=(m.x452/(0.001 + 0.999*m.b647) - 0.75*log(1 + m.x398/(0.001", "m.x461 == 0) m.c591 = Constraint(expr= m.x462 == 0) m.c592", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x16 = Var(within=Reals,bounds=(0,None),initialize=0) m.x17 = Var(within=Reals,bounds=(0,None),initialize=0) m.x18 =", "Constraint(expr= 7*m.b702 + m.x792 == 0) m.c940 = Constraint(expr= 4*m.b703", "1.25*log(1 + m.x285/(0.001 + 
0.999*m.b612)))*(0.001 + 0.999* m.b612) <= 0)", "<= 0) m.c334 = Constraint(expr= m.x352 - 9*m.b625 <= 0)", "Constraint(expr= m.x289 == 0) m.c209 = Constraint(expr= m.x320 == 0)", "m.x310 - m.x313 == 0) m.c350 = Constraint(expr= m.x77 -", "0) m.c168 = Constraint(expr= m.x252 == 0) m.c169 = Constraint(expr=", "= Constraint(expr= - m.b647 - m.b648 + m.b649 - m.b739", "m.c585 = Constraint(expr=(m.x459/(0.001 + 0.999*m.b651) - 0.8*log(1 + m.x405/(0.001 +", "- m.x330 - m.x336 == 0) m.c418 = Constraint(expr= m.x67", "<= 15) m.c359 = Constraint(expr= m.x356 - 3.04984759446376*m.b626 <= 0)", "0) m.c1431 = Constraint(expr= m.b612 - m.b630 >= 0) m.c1432", "Var(within=Reals,bounds=(0,None),initialize=0) m.x531 = Var(within=Reals,bounds=(0,None),initialize=0) m.x532 = Var(within=Reals,bounds=(0,None),initialize=0) m.x533 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x840 = Var(within=Reals,bounds=(None,None),initialize=0) m.x841 = Var(within=Reals,bounds=(None,None),initialize=0) m.x842 =", "0) m.c893 = Constraint(expr= m.x587 + 13.5*m.b680 <= 13.5) m.c894", ">= 0) m.c1473 = Constraint(expr= m.b663 - m.b672 >= 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x99 = Var(within=Reals,bounds=(0,None),initialize=0) m.x100 = Var(within=Reals,bounds=(0,None),initialize=0) m.x101 =", "m.x400/(0.001 + 0.999*m.b649)))*(0.001 + 0.999* m.b649) <= 0) m.c560 =", "0) m.c1436 = Constraint(expr= m.b617 - m.b635 >= 0) m.c1437", "1) m.c1213 = Constraint(expr= m.b740 + m.b741 <= 1) m.c1214", "m.x327 - 1.32154609891348*m.b615 <= 0) m.c253 = Constraint(expr= m.x328 -", "- m.x344 - m.x347 == 0) m.c297 = Constraint(expr= m.x72", "== 0) m.c961 = Constraint(expr= 9*m.b724 + m.x814 == 0)", "m.x6 - m.x213 - m.x216 == 0) m.c64 = Constraint(expr=", "= Constraint(expr= m.b605 - m.b606 <= 0) m.c1023 = Constraint(expr=", "- 2.54515263975353*m.b619 <= 0) m.c275 = Constraint(expr= m.x275 + 2.54515263975353*m.b617", 
"Var(within=Reals,bounds=(0,None),initialize=0) m.x203 = Var(within=Reals,bounds=(0,None),initialize=0) m.x204 = Var(within=Reals,bounds=(0,None),initialize=0) m.x205 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b662 = Var(within=Binary,bounds=(0,1),initialize=0) m.b663 = Var(within=Binary,bounds=(0,1),initialize=0) m.b664 = Var(within=Binary,bounds=(0,1),initialize=0) m.b665", "20*m.x120 + 20*m.x121 + 35*m.x122 + 50*m.x123 + 20*m.x124 +", "m.b668 - m.b680 >= 0) m.c1482 = Constraint(expr= m.b669 -", "Constraint(expr= m.x541 + 15*m.b670 <= 15) m.c788 = Constraint(expr=(m.x566/(0.001 +", "Constraint(expr= m.x284 - 3.34221486003388*m.b611 <= 0) m.c219 = Constraint(expr= m.x285", "m.x542 = Var(within=Reals,bounds=(0,None),initialize=0) m.x543 = Var(within=Reals,bounds=(0,None),initialize=0) m.x544 = Var(within=Reals,bounds=(0,None),initialize=0) m.x545", "0.9*log(1 + m.x292/(0.001 + 0.999*m.b616)))*(0.001 + 0.999* m.b616) <= 0)", "m.x589 == 0) m.c884 = Constraint(expr= m.x554 - 15*m.b680 <=", "m.x516 + 30*m.b669 <= 30) m.c781 = Constraint(expr= m.x517 +", "<= 0) m.c103 = Constraint(expr= m.x232 - 4.45628648004517*m.b601 <= 0)", "1.11894339953103*m.b650 <= 1.11894339953103) m.c609 = Constraint(expr= m.x462 + 1.11894339953103*m.b651 <=", "m.c33 = Constraint(expr= m.x135 - m.x138 == 0) m.c34 =", "m.b712 = Var(within=Binary,bounds=(0,1),initialize=0) m.b713 = Var(within=Binary,bounds=(0,1),initialize=0) m.b714 = Var(within=Binary,bounds=(0,1),initialize=0) m.b715", "Constraint(expr= m.x184 - m.x544 - m.x547 == 0) m.c800 =", "+ 0.999*m.b636) - 0.7*log(1 + m.x363/(0.001 + 0.999*m.b636)))*(0.001 + 0.999*", "15) m.c888 = Constraint(expr= m.x558 + 15*m.b681 <= 15) m.c889", "3*m.b718 + m.x808 == 0) m.c956 = Constraint(expr= 7*m.b719 +", "m.c674 = Constraint(expr= m.x143 - m.x476 - m.x479 == 0)", "0) m.c587 = Constraint(expr= m.x407 == 0) m.c588 = Constraint(expr=", "Constraint(expr= m.x496 - 0.940066550763924*m.b664 <= 0) m.c710 = Constraint(expr= m.x497", 
"Constraint(expr= m.x311 + 15*m.b626 <= 15) m.c357 = Constraint(expr= m.x312", "Constraint(expr= m.x471 - 1.18887736200171*m.b657 <= 0) m.c655 = Constraint(expr= m.x472", "+ m.x817 == 0) m.c965 = Constraint(expr= 4*m.b728 + m.x818", "m.b674 >= 0) m.c1476 = Constraint(expr= m.b663 - m.b675 >=", "0) m.c63 = Constraint(expr= m.x6 - m.x213 - m.x216 ==", "m.x840 = Var(within=Reals,bounds=(None,None),initialize=0) m.x841 = Var(within=Reals,bounds=(None,None),initialize=0) m.x842 = Var(within=Reals,bounds=(None,None),initialize=0) m.x843", "9*m.b749 + m.x839 == 0) m.c987 = Constraint(expr= 2*m.b750 +", "Constraint(expr= m.x459 - 1.11894339953103*m.b651 <= 0) m.c607 = Constraint(expr= m.x460", "Constraint(expr= m.x54 - m.x297 - m.x300 == 0) m.c295 =", "- m.b722 <= 0) m.c1320 = Constraint(expr= - m.b632 +", "0.75*m.x496 + m.x520 == 0) m.c695 = Constraint(expr= m.x497 ==", "m.c468 = Constraint(expr= m.x372 == 0) m.c469 = Constraint(expr= m.x373", "- m.b621 + m.b622 - m.b712 <= 0) m.c1310 =", "m.b628 >= 0) m.c1430 = Constraint(expr= m.b611 - m.b629 >=", "Constraint(expr= - 0.6*m.x303 + m.x351 == 0) m.c313 = Constraint(expr=", "<= 0) m.c219 = Constraint(expr= m.x285 - 3.34221486003388*m.b612 <= 0)", "0.999* m.b636) <= 0) m.c436 = Constraint(expr=(m.x430/(0.001 + 0.999*m.b637) -", "== 0) m.c411 = Constraint(expr= m.x336 == 0) m.c412 =", "= Var(within=Reals,bounds=(0,20),initialize=0) m.x92 = Var(within=Reals,bounds=(0,None),initialize=0) m.x93 = Var(within=Reals,bounds=(0,None),initialize=0) m.x94 =", "== 0) m.c91 = Constraint(expr= m.x10 - m.x220 - m.x223", "0) m.c158 = Constraint(expr= m.x272 + 2.54515263975353*m.b605 <= 2.54515263975353) m.c159", "Constraint(expr= m.x367 + 1.26558121681553*m.b637 <= 1.26558121681553) m.c455 = Constraint(expr= m.x428", "40) m.c100 = Constraint(expr= m.x223 + 40*m.b601 <= 40) m.c101", "m.c413 = Constraint(expr= m.x425 == 0) m.c414 = Constraint(expr= m.x426", "0) m.c682 = Constraint(expr= m.x478 - 1.18887736200171*m.b661 <= 0) m.c683", "+ 
0.999*m.b617) - log(1 + m.x269/(0.001 + 0.999*m.b617)))*(0.001 + 0.999*m.b617)", "- m.x478 - m.x481 == 0) m.c677 = Constraint(expr= m.x149", "<= 1) m.c1167 = Constraint(expr= m.b716 + m.b718 <= 1)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x344 = Var(within=Reals,bounds=(0,None),initialize=0) m.x345 = Var(within=Reals,bounds=(0,None),initialize=0) m.x346 =", "= Constraint(expr= m.x346 - 13.5*m.b622 <= 0) m.c308 = Constraint(expr=", "- m.b705 <= 0) m.c1303 = Constraint(expr= - m.b614 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x461 = Var(within=Reals,bounds=(0,None),initialize=0) m.x462 = Var(within=Reals,bounds=(0,None),initialize=0) m.x463 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c1041 = Constraint(expr= m.b623 - m.b625 <= 0) m.c1042 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x511 = Var(within=Reals,bounds=(0,None),initialize=0) m.x512 = Var(within=Reals,bounds=(0,None),initialize=0) m.x513 =", "= Constraint(expr= m.x511 + 0.940066550763924*m.b670 <= 0.940066550763924) m.c776 = Constraint(expr=", "m.c1315 = Constraint(expr= - m.b626 - m.b627 + m.b628 -", "m.c97 = Constraint(expr= m.x220 - 40*m.b601 <= 0) m.c98 =", "m.x487 == 0) m.c647 = Constraint(expr= m.x140 - m.x470 -", "Constraint(expr= - m.b632 + m.b633 - m.b723 <= 0) m.c1321", "m.b617 - m.b618 <= 0) m.c1035 = Constraint(expr= m.b617 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x467 = Var(within=Reals,bounds=(0,None),initialize=0) m.x468 = Var(within=Reals,bounds=(0,None),initialize=0) m.x469 = Var(within=Reals,bounds=(0,None),initialize=0)", "Var(within=Binary,bounds=(0,1),initialize=0) m.b680 = Var(within=Binary,bounds=(0,1),initialize=0) m.b681 = Var(within=Binary,bounds=(0,1),initialize=0) m.b682 = Var(within=Binary,bounds=(0,1),initialize=0)", "Constraint(expr= m.b600 - m.b601 <= 0) m.c1019 = Constraint(expr= m.b602", ">= 0) m.c1381 = Constraint(expr= - m.b613 + m.b631 >=", "- 0.65*log(1 + m.x381/(0.001 + 0.999*m.b639)))*(0.001 + 0.999* m.b639) <=", 
"m.x412 - m.x415 == 0) m.c623 = Constraint(expr= m.x134 -", "<= 0) m.c844 = Constraint(expr=(m.x580/(0.001 + 0.999*m.b679) - log(1 +", "m.c1121 = Constraint(expr= m.b695 + m.b696 <= 1) m.c1122 =", "2*m.b725 - 6*m.b726 - 3*m.b727 - 4*m.b728 - 8*m.b729 -", "9*m.b684 <= 0) m.c919 = Constraint(expr= m.x592 - 9*m.b685 <=", "= Constraint(expr= m.b612 - m.b630 >= 0) m.c1432 = Constraint(expr=", "1) m.c1207 = Constraint(expr= m.b737 + m.b738 <= 1) m.c1208", "m.c1281 = Constraint(expr= m.b773 + m.b775 <= 1) m.c1282 =", "+ 0.999*m.b639)))*(0.001 + 0.999* m.b639) <= 0) m.c466 = Constraint(expr=(m.x436/(0.001", "0) m.c987 = Constraint(expr= 2*m.b750 + m.x840 == 0) m.c988", "m.c981 = Constraint(expr= 4*m.b744 + m.x834 == 0) m.c982 =", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b754 = Var(within=Binary,bounds=(0,1),initialize=0) m.b755 = Var(within=Binary,bounds=(0,1),initialize=0) m.b756 =", "m.c1056 = Constraint(expr= m.b638 - m.b640 <= 0) m.c1057 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x262 = Var(within=Reals,bounds=(0,None),initialize=0) m.x263 = Var(within=Reals,bounds=(0,None),initialize=0) m.x264 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x181 = Var(within=Reals,bounds=(0,None),initialize=0) m.x182 = Var(within=Reals,bounds=(0,None),initialize=0) m.x183 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= m.b642 - m.b643 <= 0) m.c1061 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x347 = Var(within=Reals,bounds=(0,None),initialize=0) m.x348 = Var(within=Reals,bounds=(0,None),initialize=0) m.x349 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x439 == 0) m.c476 = Constraint(expr= m.x83 - m.x368 -", "- m.b670 <= 0) m.c1087 = Constraint(expr= m.b669 - m.b670", "m.b629 + m.b630 - m.b720 <= 0) m.c1318 = Constraint(expr=", "m.x314 = Var(within=Reals,bounds=(0,None),initialize=0) m.x315 = Var(within=Reals,bounds=(0,None),initialize=0) m.x316 = Var(within=Reals,bounds=(0,None),initialize=0) m.x317", 
"Var(within=Reals,bounds=(0,None),initialize=0) m.x379 = Var(within=Reals,bounds=(0,None),initialize=0) m.x380 = Var(within=Reals,bounds=(0,None),initialize=0) m.x381 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c604 = Constraint(expr= m.x409 + 3.04984759446376*m.b652 <= 3.04984759446376) m.c605 =", "= Constraint(expr= - m.b599 - m.b600 + m.b601 - m.b691", "+ m.b614 >= 0) m.c1377 = Constraint(expr= - m.b603 +", "m.c1462 = Constraint(expr= - m.b664 + m.b673 + m.b676 >=", "+ m.b745 <= 1) m.c1219 = Constraint(expr= m.b743 + m.b744", "m.b764 + m.b766 <= 1) m.c1264 = Constraint(expr= m.b765 +", "m.c147 = Constraint(expr= m.x42 - m.x267 - m.x273 == 0)", "+ 0.999*m.b638) - 0.65*log(1 + m.x368/(0.001 + 0.999*m.b638)))*(0.001 + 0.999*", "= Constraint(expr= m.x48 - m.x285 - m.x288 == 0) m.c214", "= Constraint(expr= m.x105 - m.x405 - m.x408 == 0) m.c595", "m.x533 == 0) m.c852 = Constraint(expr= m.x177 - m.x528 -", "1) m.c1202 = Constraint(expr= m.b735 + m.b736 <= 1) m.c1203", "m.b625 - m.b715 <= 0) m.c1313 = Constraint(expr= m.b626 -", "m.x164 - m.x500 - m.x503 == 0) m.c729 = Constraint(expr=", "m.x547 == 0) m.c794 = Constraint(expr= m.x569 == 0) m.c795", "- m.x309 - m.x312 == 0) m.c349 = Constraint(expr= m.x61", "m.x128 = Var(within=Reals,bounds=(0,None),initialize=0) m.x129 = Var(within=Reals,bounds=(0,None),initialize=0) m.x130 = Var(within=Reals,bounds=(0,None),initialize=0) m.x131", "40*m.b597 <= 0) m.c70 = Constraint(expr= m.x214 - 40*m.b598 <=", "Constraint(expr=(m.x358/(0.001 + 0.999*m.b628) - 1.1*log(1 + m.x310/(0.001 + 0.999*m.b628)))*(0.001 +", "= Constraint(expr=(m.x573/(0.001 + 0.999*m.b675) - 0.9*log(1 + m.x549/(0.001 + 0.999*m.b675)))*(0.001", "+ m.x816 == 0) m.c964 = Constraint(expr= 3*m.b727 + m.x817", "== 0) m.c239 = Constraint(expr= m.x50 - m.x290 - m.x293", "m.c1103 = Constraint(expr= m.b686 + m.b687 <= 1) m.c1104 =", "m.b631 <= 0) m.c1048 = Constraint(expr= m.b630 - m.b631 <=", "0) m.c402 = Constraint(expr= m.x417 - 20*m.b630 <= 0) m.c403", "+ 
m.b639 - m.b729 <= 0) m.c1327 = Constraint(expr= -", "m.c953 = Constraint(expr= 3*m.b716 + m.x806 == 0) m.c954 =", "m.c1007 = Constraint(expr= 2*m.b770 + m.x860 == 0) m.c1008 =", "<= 0) m.c102 = Constraint(expr= m.x231 - 4.45628648004517*m.b600 <= 0)", "0) m.c362 = Constraint(expr= m.x359 + 3.04984759446376*m.b626 <= 3.04984759446376) m.c363", "Constraint(expr= m.b758 + m.b760 <= 1) m.c1249 = Constraint(expr= m.b758", "- 13.5*m.b682 <= 0) m.c893 = Constraint(expr= m.x587 + 13.5*m.b680", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x559 = Var(within=Reals,bounds=(0,None),initialize=0) m.x560 = Var(within=Reals,bounds=(0,None),initialize=0) m.x561 =", "m.b603 - m.b604 <= 0) m.c1022 = Constraint(expr= m.b605 -", "= Constraint(expr= m.x183 - m.x543 - m.x546 == 0) m.c799", "+ m.b714 <= 1) m.c1160 = Constraint(expr= m.b714 + m.b715", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x456 = Var(within=Reals,bounds=(0,None),initialize=0) m.x457 = Var(within=Reals,bounds=(0,None),initialize=0) m.x458 =", "0.999* m.b638) <= 0) m.c465 = Constraint(expr=(m.x435/(0.001 + 0.999*m.b639) -", "m.x783 == 0) m.c931 = Constraint(expr= 4*m.b694 + m.x784 ==", "== 0) m.c589 = Constraint(expr= m.x409 == 0) m.c590 =", "= Constraint(expr= m.x538 - 15*m.b670 <= 0) m.c785 = Constraint(expr=", "Constraint(expr= m.x223 == 0) m.c86 = Constraint(expr= m.x233 == 0)", "m.x500 - m.x503 == 0) m.c729 = Constraint(expr= m.x165 -", "0.999*m.b632) - log(1 + m.x329/(0.001 + 0.999*m.b632)))*(0.001 + 0.999*m.b632) <=", "m.c1291 = Constraint(expr= - m.b602 - m.b603 + m.b604 -", "Constraint(expr= m.x290 - 3.34221486003388*m.b614 <= 0) m.c246 = Constraint(expr= m.x291", "m.c779 = Constraint(expr= m.x515 + 30*m.b668 <= 30) m.c780 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x510 = Var(within=Reals,bounds=(0,None),initialize=0) m.x511 = Var(within=Reals,bounds=(0,None),initialize=0) m.x512 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c575 = Constraint(expr= m.x401 + 
3.04984759446376*m.b647 <= 3.04984759446376) m.c576 =", "Constraint(expr=(m.x567/(0.001 + 0.999*m.b672) - 1.25*log(1 + m.x543/(0.001 + 0.999*m.b672)))*(0.001 +", "m.c451 = Constraint(expr= m.x364 - 1.26558121681553*m.b637 <= 0) m.c452 =", "0) m.c969 = Constraint(expr= 5*m.b732 + m.x822 == 0) m.c970", "m.x264 = Var(within=Reals,bounds=(0,None),initialize=0) m.x265 = Var(within=Reals,bounds=(0,None),initialize=0) m.x266 = Var(within=Reals,bounds=(0,None),initialize=0) m.x267", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x299 = Var(within=Reals,bounds=(0,None),initialize=0) m.x300 = Var(within=Reals,bounds=(0,None),initialize=0) m.x301 =", "= Constraint(expr= m.b598 + m.b601 == 1) m.c1376 = Constraint(expr=", "m.b618 - m.b708 <= 0) m.c1306 = Constraint(expr= - m.b617", "= Constraint(expr= m.x281 + 15*m.b608 <= 15) m.c201 = Constraint(expr=", "m.x421 == 0) m.c380 = Constraint(expr= m.x62 - m.x317 -", "m.c141 = Constraint(expr= m.x273 == 0) m.c142 = Constraint(expr= m.x274", "- m.b629 + m.b630 - m.b720 <= 0) m.c1318 =", "+ 0.999*m.b611)))*(0.001 + 0.999* m.b611) <= 0) m.c204 = Constraint(expr=(m.x315/(0.001", "m.x547 == 0) m.c800 = Constraint(expr= m.x197 - m.x566 -", "Constraint(expr= m.x37 - m.x256 - m.x259 == 0) m.c182 =", "- 4*m.b754 - 2*m.b755 - 3*m.b756 - 8*m.b757 - 10*m.b758", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x127 = Var(within=Reals,bounds=(0,None),initialize=0) m.x128 = Var(within=Reals,bounds=(0,None),initialize=0) m.x129 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x265 = Var(within=Reals,bounds=(0,None),initialize=0) m.x266 = Var(within=Reals,bounds=(0,None),initialize=0) m.x267 =", "<= 0.572481933717686) m.c459 = Constraint(expr= m.x432 + 0.572481933717686*m.b636 <= 0.572481933717686)", "- m.x474 == 0) m.c649 = Constraint(expr= m.x142 - m.x472", "Constraint(expr= m.x240 + 4.45628648004517*m.b603 <= 4.45628648004517) m.c127 = Constraint(expr= m.x241", "m.x376 - 20*m.b631 <= 0) m.c398 = Constraint(expr= m.x377 +", "m.x795 
== 0) m.c943 = Constraint(expr= 2*m.b706 + m.x796 ==", "Constraint(expr= m.b611 - m.b612 <= 0) m.c1029 = Constraint(expr= m.b611", "m.x379 = Var(within=Reals,bounds=(0,None),initialize=0) m.x380 = Var(within=Reals,bounds=(0,None),initialize=0) m.x381 = Var(within=Reals,bounds=(0,None),initialize=0) m.x382", ">= 0) m.c1447 = Constraint(expr= m.b625 - m.b646 >= 0)", "Var(within=Binary,bounds=(0,1),initialize=0) m.b620 = Var(within=Binary,bounds=(0,1),initialize=0) m.b621 = Var(within=Binary,bounds=(0,1),initialize=0) m.b622 = Var(within=Binary,bounds=(0,1),initialize=0)", "Var(within=Binary,bounds=(0,1),initialize=0) m.b648 = Var(within=Binary,bounds=(0,1),initialize=0) m.b649 = Var(within=Binary,bounds=(0,1),initialize=0) m.b650 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.c947 = Constraint(expr= 2*m.b710 + m.x800 == 0) m.c948 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x265 = Var(within=Reals,bounds=(0,None),initialize=0) m.x266 = Var(within=Reals,bounds=(0,None),initialize=0) m.x267 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b653 + m.b656 + m.b659 >= 0) m.c1458 = Constraint(expr=", "0) m.c87 = Constraint(expr= m.x234 == 0) m.c88 = Constraint(expr=", "m.c1228 = Constraint(expr= m.b747 + m.b748 <= 1) m.c1229 =", "m.c1173 = Constraint(expr= m.b719 + m.b721 <= 1) m.c1174 =", "m.b675 = Var(within=Binary,bounds=(0,1),initialize=0) m.b676 = Var(within=Binary,bounds=(0,1),initialize=0) m.b677 = Var(within=Binary,bounds=(0,1),initialize=0) m.b678", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x7 = Var(within=Reals,bounds=(0,None),initialize=0) m.x8 = Var(within=Reals,bounds=(0,None),initialize=0) m.x9 =", "9) m.c523 = Constraint(expr= m.x391 + 9*m.b643 <= 9) m.c524", "0) m.c320 = Constraint(expr= m.x56 - m.x302 - m.x305 ==", "4.45628648004517) m.c189 = Constraint(expr= m.x252 + 4.45628648004517*m.b609 <= 4.45628648004517) m.c190", "0) m.c794 = Constraint(expr= m.x569 == 0) m.c795 = Constraint(expr=", "= Constraint(expr= m.x293 + 
3.34221486003388*m.b614 <= 3.34221486003388) m.c249 = Constraint(expr=", "= Constraint(expr= m.x428 - 0.572481933717686*m.b635 <= 0) m.c456 = Constraint(expr=", "= Constraint(expr= m.x213 - 40*m.b597 <= 0) m.c70 = Constraint(expr=", "0) m.c478 = Constraint(expr= m.x85 - m.x370 - m.x373 ==", "2*m.b750 + m.x840 == 0) m.c988 = Constraint(expr= 9*m.b751 +", "<= 1) m.c1271 = Constraint(expr= m.b770 + m.b771 <= 1)", "0) m.c156 = Constraint(expr= m.x267 - 2.54515263975353*m.b606 <= 0) m.c157", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x856 = Var(within=Reals,bounds=(None,None),initialize=0) m.x857 = Var(within=Reals,bounds=(None,None),initialize=0) m.x858 =", "= Constraint(expr= - m.x250 + m.x280 == 0) m.c164 =", "0) m.c848 = Constraint(expr= m.x581 == 0) m.c849 = Constraint(expr=", "m.c479 = Constraint(expr= m.x92 - m.x380 - m.x383 == 0)", "- m.x499 == 0) m.c704 = Constraint(expr= m.x173 - m.x518", "0) m.c812 = Constraint(expr= m.x569 + 0.666992981045719*m.b671 <= 0.666992981045719) m.c813", "= Constraint(expr= m.x587 + 13.5*m.b680 <= 13.5) m.c894 = Constraint(expr=", "m.b655 - m.b658 >= 0) m.c1469 = Constraint(expr= m.b653 -", "0.994083415506506*m.b666 <= 0.994083415506506) m.c745 = Constraint(expr= m.x532 + 0.994083415506506*m.b667 <=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x269 = Var(within=Reals,bounds=(0,None),initialize=0) m.x270 = Var(within=Reals,bounds=(0,None),initialize=0) m.x271 =", "Constraint(expr= m.x6 - m.x213 - m.x216 == 0) m.c64 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x512 = Var(within=Reals,bounds=(0,None),initialize=0) m.x513 = Var(within=Reals,bounds=(0,None),initialize=0) m.x514 = Var(within=Reals,bounds=(0,None),initialize=0)", "+ 0.994083415506506*m.b666 <= 0.994083415506506) m.c745 = Constraint(expr= m.x532 + 0.994083415506506*m.b667", "Constraint(expr= m.x569 + 0.666992981045719*m.b671 <= 0.666992981045719) m.c813 = Constraint(expr= m.x570", "= Constraint(expr= 3*m.b760 + m.x850 == 0) m.c998 = 
Constraint(expr=", "+ m.b745 <= 1) m.c1221 = Constraint(expr= m.b743 + m.b745", "<= 0) m.c1022 = Constraint(expr= m.b605 - m.b606 <= 0)", "+ m.b709 <= 1) m.c1150 = Constraint(expr= m.b708 + m.b709", "m.x238 = Var(within=Reals,bounds=(0,None),initialize=0) m.x239 = Var(within=Reals,bounds=(0,None),initialize=0) m.x240 = Var(within=Reals,bounds=(0,None),initialize=0) m.x241", "m.c267 = Constraint(expr= m.x42 - m.x270 - m.x276 == 0)", "= Constraint(expr= m.x67 - m.x328 - m.x334 == 0) m.c245", "m.b731 <= 0) m.c1329 = Constraint(expr= - m.b641 + m.b642", "m.x29 - m.x32 == 0) m.c12 = Constraint(expr= m.x24 -", "<= 1) m.c1170 = Constraint(expr= m.b719 + m.b721 <= 1)", "Constraint(expr= m.x324 == 0) m.c373 = Constraint(expr= m.x325 == 0)", "- 6*m.b688 - 8*m.b689 - 7*m.b690 - 6*m.b691 - 6*m.b692", "0) m.c150 = Constraint(expr= m.x243 - 4.45628648004517*m.b606 <= 0) m.c151", "+ 1.83548069293539*m.b612 <= 1.83548069293539) m.c229 = Constraint(expr= m.x322 + 1.83548069293539*m.b613", "== 0) m.c294 = Constraint(expr= m.x54 - m.x297 - m.x300", "m.x85 - m.x370 - m.x373 == 0) m.c479 = Constraint(expr=", "m.x182 - m.x185 == 0) m.c48 = Constraint(expr= m.x174 -", "m.c489 = Constraint(expr= m.x372 + 1.26558121681553*m.b639 <= 1.26558121681553) m.c490 =", "== 0) m.c510 = Constraint(expr= m.x444 == 0) m.c511 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x46 = Var(within=Reals,bounds=(0,None),initialize=0) m.x47 = Var(within=Reals,bounds=(0,None),initialize=0) m.x48 =", "m.c564 = Constraint(expr= m.x456 == 0) m.c565 = Constraint(expr= m.x457", "0) m.c885 = Constraint(expr= m.x555 - 15*m.b681 <= 0) m.c886", "== 0) m.c995 = Constraint(expr= 10*m.b758 + m.x848 == 0)", "0) m.c1048 = Constraint(expr= m.b630 - m.b631 <= 0) m.c1049", "30*m.b609 <= 0) m.c193 = Constraint(expr= m.x256 - 30*m.b610 <=", "m.b670 <= 0) m.c1087 = Constraint(expr= m.b669 - m.b670 <=", "6*m.b739 - 2*m.b740 - 8*m.b741 - 4*m.b742 - m.b743 -", "1.26558121681553*m.b635 <= 0) m.c450 = Constraint(expr= m.x363 - 
1.26558121681553*m.b636 <=", "= Constraint(expr= m.b623 - m.b641 >= 0) m.c1443 = Constraint(expr=", "m.x268 - m.x274 == 0) m.c149 = Constraint(expr= m.x242 -", "- m.x329 - m.x335 == 0) m.c417 = Constraint(expr= m.x66", "m.b643 >= 0) m.c1445 = Constraint(expr= m.b623 - m.b644 >=", "9*m.b751 - 5*m.b752 - 8*m.b753 - 4*m.b754 - 2*m.b755 -", "- m.x170 - m.x171 - m.x172 + 80*m.x194 + 90*m.x195", "= Constraint(expr= m.x74 - m.x350 - m.x353 == 0) m.c324", "8*m.b689 - 7*m.b690 - 6*m.b691 - 6*m.b692 - 9*m.b693 -", "m.c515 = Constraint(expr= m.x122 - m.x440 - m.x443 == 0)", "+ m.b700 <= 1) m.c1129 = Constraint(expr= m.b698 + m.b699", "m.x218 - 40*m.b599 <= 0) m.c96 = Constraint(expr= m.x219 -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x385 = Var(within=Reals,bounds=(0,None),initialize=0) m.x386 = Var(within=Reals,bounds=(0,None),initialize=0) m.x387 =", "0.9*m.x317 + m.x416 == 0) m.c366 = Constraint(expr= - 0.9*m.x318", ">= 0) m.c1399 = Constraint(expr= - m.b625 + m.b643 +", "== 0) m.c19 = Constraint(expr= m.x46 - m.x55 - m.x58", "Var(within=Reals,bounds=(None,None),initialize=0) m.x785 = Var(within=Reals,bounds=(None,None),initialize=0) m.x786 = Var(within=Reals,bounds=(None,None),initialize=0) m.x787 = Var(within=Reals,bounds=(None,None),initialize=0)", "= Constraint(expr= m.x16 - m.x232 - m.x235 == 0) m.c95", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x47 = Var(within=Reals,bounds=(0,None),initialize=0) m.x48 = Var(within=Reals,bounds=(0,None),initialize=0) m.x49 =", "m.c1439 = Constraint(expr= m.b617 - m.b638 >= 0) m.c1440 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x80 = Var(within=Reals,bounds=(0,None),initialize=0) m.x81 = Var(within=Reals,bounds=(0,None),initialize=0) m.x82 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x40 = Var(within=Reals,bounds=(0,None),initialize=0) m.x41 = Var(within=Reals,bounds=(0,None),initialize=0) m.x42 =", "1) m.c1235 = Constraint(expr= m.b752 + m.b753 
<= 1) m.c1236", "Var(within=Reals,bounds=(0,None),initialize=0) m.x564 = Var(within=Reals,bounds=(0,None),initialize=0) m.x565 = Var(within=Reals,bounds=(0,None),initialize=0) m.x566 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b622 = Var(within=Binary,bounds=(0,1),initialize=0) m.b623 = Var(within=Binary,bounds=(0,1),initialize=0) m.b624 =", "= Constraint(expr= m.x546 == 0) m.c793 = Constraint(expr= m.x547 ==", "= Constraint(expr= m.x102 - m.x399 - m.x402 == 0) m.c568", "Constraint(expr= m.x462 + 1.11894339953103*m.b651 <= 1.11894339953103) m.c610 = Constraint(expr= m.x463", "+ m.b733 <= 1) m.c1197 = Constraint(expr= m.b731 + m.b733", "0) m.c431 = Constraint(expr= m.x425 + 0.842233385663186*m.b632 <= 0.842233385663186) m.c432", "- 1.5*log(1 + m.x500/(0.001 + 0.999*m.b665)))*(0.001 + 0.999* m.b665) <=", "m.b617 >= 0) m.c1386 = Constraint(expr= - m.b606 + m.b618", "= Constraint(expr= m.x272 == 0) m.c141 = Constraint(expr= m.x273 ==", "m.b615 - m.b705 <= 0) m.c1303 = Constraint(expr= - m.b614", "0) m.c49 = Constraint(expr= m.x175 - m.x184 - m.x187 ==", "m.x186 == 0) m.c49 = Constraint(expr= m.x175 - m.x184 -", "m.b764 + m.b765 <= 1) m.c1262 = Constraint(expr= m.b765 +", "m.x199 = Var(within=Reals,bounds=(0,None),initialize=0) m.x200 = Var(within=Reals,bounds=(0,None),initialize=0) m.x201 = Var(within=Reals,bounds=(0,None),initialize=0) m.x202", "Constraint(expr= m.x411 - 3.04984759446376*m.b654 <= 0) m.c628 = Constraint(expr= m.x412", "== 0) m.c953 = Constraint(expr= 3*m.b716 + m.x806 == 0)", "Constraint(expr= m.x277 + 2.54515263975353*m.b619 <= 2.54515263975353) m.c278 = Constraint(expr= m.x338", "0) m.c179 = Constraint(expr= m.x35 - m.x254 - m.x257 ==", "0.940066550763924*m.b664 <= 0) m.c710 = Constraint(expr= m.x497 + 0.940066550763924*m.b662 <=", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b720 = Var(within=Binary,bounds=(0,1),initialize=0) m.b721 = Var(within=Binary,bounds=(0,1),initialize=0) m.b722 
=", "Constraint(expr= m.x509 + 0.940066550763924*m.b668 <= 0.940066550763924) m.c774 = Constraint(expr= m.x510", "m.c258 = Constraint(expr=(m.x339/(0.001 + 0.999*m.b618) - log(1 + m.x270/(0.001 +", "Constraint(expr= m.x191 - m.x560 - m.x563 == 0) m.c906 =", "<= 1) m.c1193 = Constraint(expr= m.b731 + m.b732 <= 1)", "3.34221486003388) m.c250 = Constraint(expr= m.x295 + 3.34221486003388*m.b616 <= 3.34221486003388) m.c251", "1) m.c1165 = Constraint(expr= m.b716 + m.b717 <= 1) m.c1166", "= Constraint(expr= - m.b602 + m.b603 - m.b693 <= 0)", "0) m.c595 = Constraint(expr= m.x106 - m.x406 - m.x409 ==", "m.x247 = Var(within=Reals,bounds=(0,None),initialize=0) m.x248 = Var(within=Reals,bounds=(0,None),initialize=0) m.x249 = Var(within=Reals,bounds=(0,None),initialize=0) m.x250", "m.c899 = Constraint(expr= m.x563 == 0) m.c900 = Constraint(expr= m.x564", "- m.b651 >= 0) m.c1453 = Constraint(expr= m.b628 - m.b652", "m.c46 = Constraint(expr= m.x160 - m.x163 - m.x166 - m.x169", "+ m.b647 + m.b650 + m.b653 >= 0) m.c1401 =", "Constraint(expr= m.x25 - m.x28 - m.x31 - m.x34 == 0)", "<= 0) m.c1373 = Constraint(expr= m.b596 + m.b599 == 1)", "m.c17 = Constraint(expr= m.x44 - m.x53 - m.x56 - m.x59", "= Constraint(expr= m.x561 - 15*m.b684 <= 0) m.c913 = Constraint(expr=", "Var(within=Reals,bounds=(None,None),initialize=0) m.x844 = Var(within=Reals,bounds=(None,None),initialize=0) m.x845 = Var(within=Reals,bounds=(None,None),initialize=0) m.x846 = Var(within=Reals,bounds=(None,None),initialize=0)", "= Constraint(expr= m.x389 + 9*m.b641 <= 9) m.c522 = Constraint(expr=", "30*m.x119 + 20*m.x120 + 20*m.x121 + 35*m.x122 + 50*m.x123 +", "= Constraint(expr= m.x162 - m.x495 - m.x498 == 0) m.c703", ">= 0) m.c1386 = Constraint(expr= - m.b606 + m.b618 >=", "0) m.c517 = Constraint(expr= m.x124 - m.x442 - m.x445 ==", "= Constraint(expr= m.b728 + m.b729 <= 1) m.c1190 = Constraint(expr=", "m.c776 = Constraint(expr= m.x512 - 30*m.b668 <= 0) m.c777 =", "0 # # Reformulation has removed 1 variable and 1", 
"Var(within=Reals,bounds=(None,None),initialize=0) m.x804 = Var(within=Reals,bounds=(None,None),initialize=0) m.x805 = Var(within=Reals,bounds=(None,None),initialize=0) m.x806 = Var(within=Reals,bounds=(None,None),initialize=0)", "0) m.c333 = Constraint(expr= m.x351 - 9*m.b624 <= 0) m.c334", "m.x367 + 1.26558121681553*m.b637 <= 1.26558121681553) m.c455 = Constraint(expr= m.x428 -", "m.x411 - m.x414 == 0) m.c622 = Constraint(expr= m.x109 -", "- m.b659 - m.b660 + m.b661 - m.b751 <= 0)", "+ m.b649 + m.b652 + m.b655 >= 0) m.c1403 =", "m.c640 = Constraint(expr=(m.x484/(0.001 + 0.999*m.b658) - log(1 + m.x472/(0.001 +", "<= 15) m.c332 = Constraint(expr= m.x350 - 9*m.b623 <= 0)", "+ 3.04984759446376*m.b626 <= 3.04984759446376) m.c363 = Constraint(expr= m.x360 + 3.04984759446376*m.b627", "0) m.c628 = Constraint(expr= m.x412 - 3.04984759446376*m.b655 <= 0) m.c629", "Var(within=Reals,bounds=(0,30),initialize=0) m.x172 = Var(within=Reals,bounds=(0,30),initialize=0) m.x173 = Var(within=Reals,bounds=(0,None),initialize=0) m.x174 = Var(within=Reals,bounds=(0,None),initialize=0)", "+ 0.999*m.b612)))*(0.001 + 0.999* m.b612) <= 0) m.c205 = Constraint(expr=(m.x316/(0.001", "= Constraint(expr= m.x557 + 15*m.b680 <= 15) m.c888 = Constraint(expr=", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b685 = Var(within=Binary,bounds=(0,1),initialize=0) m.b686 = Var(within=Binary,bounds=(0,1),initialize=0) m.b687 =", "= Constraint(expr= m.b722 + m.b724 <= 1) m.c1177 = Constraint(expr=", "m.b642 >= 0) m.c1444 = Constraint(expr= m.b625 - m.b643 >=", "Constraint(expr= m.b655 - m.b661 >= 0) m.c1472 = Constraint(expr= m.b662", "= Constraint(expr= m.x316 - 1.83548069293539*m.b613 <= 0) m.c227 = Constraint(expr=", "+ m.b699 <= 1) m.c1130 = Constraint(expr= m.b699 + m.b700", "- m.x106 - m.x109 == 0) m.c32 = Constraint(expr= m.x134", "m.b691 <= 1) m.c1111 = Constraint(expr= m.b689 + m.b690 <=", "m.c379 = Constraint(expr= m.x421 == 0) m.c380 = Constraint(expr= m.x62", "= Constraint(expr= m.x205 - 
m.x580 - m.x583 == 0) m.c857", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x60 = Var(within=Reals,bounds=(0,None),initialize=0) m.x61 = Var(within=Reals,bounds=(0,None),initialize=0) m.x62 =", "m.x55 - m.x58 - m.x61 == 0) m.c20 = Constraint(expr=", "<= 1.26558121681553) m.c491 = Constraint(expr= m.x380 - 33.5*m.b638 <= 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x30 = Var(within=Reals,bounds=(0,None),initialize=0) m.x31 = Var(within=Reals,bounds=(0,None),initialize=0) m.x32 = Var(within=Reals,bounds=(0,None),initialize=0)", "Var(within=Binary,bounds=(0,1),initialize=0) m.b608 = Var(within=Binary,bounds=(0,1),initialize=0) m.b609 = Var(within=Binary,bounds=(0,1),initialize=0) m.b610 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.c456 = Constraint(expr= m.x429 - 0.572481933717686*m.b636 <= 0) m.c457 =", "m.x522 + 0.705049913072943*m.b663 <= 0.705049913072943) m.c718 = Constraint(expr= m.x523 +", "= Constraint(expr=(m.x566/(0.001 + 0.999*m.b671) - 1.25*log(1 + m.x542/(0.001 + 0.999*m.b671)))*(0.001", "= Constraint(expr= m.x198 - m.x567 - m.x570 == 0) m.c802", "0) m.c1086 = Constraint(expr= m.b668 - m.b670 <= 0) m.c1087", "m.c1337 = Constraint(expr= m.b650 - m.b740 <= 0) m.c1338 =", "m.x351 == 0) m.c313 = Constraint(expr= - 0.6*m.x304 + m.x352", "1) m.c1157 = Constraint(expr= m.b713 + m.b714 <= 1) m.c1158", "m.c291 = Constraint(expr= m.x348 == 0) m.c292 = Constraint(expr= m.x349", "= Constraint(expr= m.x579 - 0.690184503917672*m.b678 <= 0) m.c865 = Constraint(expr=", "- m.x489 - m.x492 == 0) m.c679 = Constraint(expr= m.x151", "= Constraint(expr= m.b627 - m.b628 <= 0) m.c1046 = Constraint(expr=", "<= 0) m.c1355 = Constraint(expr= m.b668 - m.b758 <= 0)", "30*m.b669 <= 30) m.c781 = Constraint(expr= m.x517 + 30*m.b670 <=", "- m.x56 - m.x59 == 0) m.c18 = Constraint(expr= m.x45", "m.b637 <= 0) m.c1054 = Constraint(expr= m.b636 - m.b637 <=", "m.c1141 = Constraint(expr= m.b704 + m.b705 <= 1) m.c1142 =", "+ 40*m.b601 <= 40) m.c101 = Constraint(expr= 
m.x230 - 4.45628648004517*m.b599", "m.c492 = Constraint(expr= m.x381 - 33.5*m.b639 <= 0) m.c493 =", "0.999*m.b651) - 0.8*log(1 + m.x405/(0.001 + 0.999*m.b651)))*(0.001 + 0.999* m.b651)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x61 = Var(within=Reals,bounds=(0,None),initialize=0) m.x62 = Var(within=Reals,bounds=(0,None),initialize=0) m.x63 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x473 = Var(within=Reals,bounds=(0,None),initialize=0) m.x474 = Var(within=Reals,bounds=(0,None),initialize=0) m.x475 =", "<= 0) m.c467 = Constraint(expr= m.x371 == 0) m.c468 =", "Constraint(expr= 9*m.b717 + m.x807 == 0) m.c955 = Constraint(expr= 3*m.b718", "m.c1250 = Constraint(expr= m.b759 + m.b760 <= 1) m.c1251 =", "m.b749 + m.b751 <= 1) m.c1234 = Constraint(expr= m.b750 +", "- 9*m.b623 <= 0) m.c333 = Constraint(expr= m.x351 - 9*m.b624", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x161 = Var(within=Reals,bounds=(0,None),initialize=0) m.x162 = Var(within=Reals,bounds=(0,None),initialize=0) m.x163 =", "m.b598 - m.b688 <= 0) m.c1286 = Constraint(expr= m.b599 -", "Constraint(expr= m.x568 - 0.666992981045719*m.b673 <= 0) m.c812 = Constraint(expr= m.x569", "m.c1336 = Constraint(expr= - m.b647 - m.b648 + m.b649 -", "0) m.c810 = Constraint(expr= m.x567 - 0.666992981045719*m.b672 <= 0) m.c811", "+ m.x529/(0.001 + 0.999*m.b679)))*(0.001 + 0.999*m.b679) <= 0) m.c845 =", "<= 0) m.c1066 = Constraint(expr= m.b648 - m.b649 <= 0)", "== 0) m.c472 = Constraint(expr= m.x385 == 0) m.c473 =", "13.5) m.c894 = Constraint(expr= m.x588 + 13.5*m.b681 <= 13.5) m.c895", "= Constraint(expr=(m.x268/(0.001 + 0.999*m.b607) - 1.5*log(1 + m.x244/(0.001 + 0.999*m.b607)))*(0.001", "m.c1142 = Constraint(expr= m.b705 + m.b706 <= 1) m.c1143 =", "+ m.b627 >= 0) m.c1393 = Constraint(expr= - m.b610 +", "0) m.c58 = Constraint(expr= m.x217 == 0) m.c59 = Constraint(expr=", "- 10*m.b699 - 6*m.b700 - 7*m.b701 - 7*m.b702 - 4*m.b703", 
"Var(within=Reals,bounds=(0,None),initialize=0) m.x158 = Var(within=Reals,bounds=(0,None),initialize=0) m.x159 = Var(within=Reals,bounds=(0,None),initialize=0) m.x160 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x483 - 0.78338879230327*m.b657 <= 0) m.c661 = Constraint(expr= m.x484 -", "0) m.c18 = Constraint(expr= m.x45 - m.x54 - m.x57 -", "m.c557 = Constraint(expr=(m.x452/(0.001 + 0.999*m.b647) - 0.75*log(1 + m.x398/(0.001 +", "== 0) m.c443 = Constraint(expr= m.x80 - m.x362 - m.x365", "== 0) m.c9 = Constraint(expr= m.x18 - m.x21 - m.x24", "0) m.c308 = Constraint(expr= m.x347 + 13.5*m.b620 <= 13.5) m.c309", "NL DLL # 3373 3193 180 0 # # Reformulation", "Constraint(expr= m.x270 - 2.54515263975353*m.b618 <= 0) m.c274 = Constraint(expr= m.x271", "Var(within=Reals,bounds=(0,None),initialize=0) m.x259 = Var(within=Reals,bounds=(0,None),initialize=0) m.x260 = Var(within=Reals,bounds=(0,None),initialize=0) m.x261 = Var(within=Reals,bounds=(0,None),initialize=0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x464 = Var(within=Reals,bounds=(0,None),initialize=0) m.x465 = Var(within=Reals,bounds=(0,None),initialize=0) m.x466 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x457 == 0) m.c572 = Constraint(expr= m.x398 - 3.04984759446376*m.b647 <=", "m.x523 == 0) m.c701 = Constraint(expr= m.x161 - m.x494 -", "Constraint(expr= m.b617 - m.b618 <= 0) m.c1035 = Constraint(expr= m.b617", "m.x38 = Var(within=Reals,bounds=(0,None),initialize=0) m.x39 = Var(within=Reals,bounds=(0,None),initialize=0) m.x40 = Var(within=Reals,bounds=(0,None),initialize=0) m.x41", "m.b632 + m.b633 - m.b723 <= 0) m.c1321 = Constraint(expr=", "Constraint(expr= m.x575 + 0.480234946352917*m.b674 <= 0.480234946352917) m.c840 = Constraint(expr= m.x576", "m.c1094 = Constraint(expr= m.b677 - m.b678 <= 0) m.c1095 =", "m.x43 = Var(within=Reals,bounds=(0,None),initialize=0) m.x44 = Var(within=Reals,bounds=(0,None),initialize=0) m.x45 = Var(within=Reals,bounds=(0,None),initialize=0) m.x46", "+ 
m.b642 + m.b645 >= 0) m.c1399 = Constraint(expr= -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x103 = Var(within=Reals,bounds=(0,None),initialize=0) m.x104 = Var(within=Reals,bounds=(0,None),initialize=0) m.x105 =", "= Constraint(expr= m.b719 + m.b720 <= 1) m.c1172 = Constraint(expr=", "Constraint(expr= m.x178 - m.x526 - m.x532 == 0) m.c734 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x326 = Var(within=Reals,bounds=(0,None),initialize=0) m.x327 = Var(within=Reals,bounds=(0,None),initialize=0) m.x328 = Var(within=Reals,bounds=(0,None),initialize=0)", "0) m.c176 = Constraint(expr= m.x32 - m.x248 - m.x251 ==", "Constraint(expr= m.b669 - m.b681 >= 0) m.c1483 = Constraint(expr= m.b670", "m.b735 = Var(within=Binary,bounds=(0,1),initialize=0) m.b736 = Var(within=Binary,bounds=(0,1),initialize=0) m.b737 = Var(within=Binary,bounds=(0,1),initialize=0) m.b738", "m.x797 == 0) m.c945 = Constraint(expr= 6*m.b708 + m.x798 ==", "m.x245 + 4.45628648004517*m.b605 <= 4.45628648004517) m.c153 = Constraint(expr= m.x246 +", "Var(within=Reals,bounds=(0,None),initialize=0) m.x161 = Var(within=Reals,bounds=(0,None),initialize=0) m.x162 = Var(within=Reals,bounds=(0,None),initialize=0) m.x163 = Var(within=Reals,bounds=(0,None),initialize=0)", "- log(1 + m.x472/(0.001 + 0.999*m.b658)))*(0.001 + 0.999*m.b658) <= 0)", "m.x503 = Var(within=Reals,bounds=(0,None),initialize=0) m.x504 = Var(within=Reals,bounds=(0,None),initialize=0) m.x505 = Var(within=Reals,bounds=(0,None),initialize=0) m.x506", "m.x571 = Var(within=Reals,bounds=(0,None),initialize=0) m.x572 = Var(within=Reals,bounds=(0,None),initialize=0) m.x573 = Var(within=Reals,bounds=(0,None),initialize=0) m.x574", "Constraint(expr= m.x480 == 0) m.c670 = Constraint(expr= m.x481 == 0)", "<= 0.994083415506506) m.c862 = Constraint(expr= m.x535 + 0.994083415506506*m.b679 <= 0.994083415506506)", "m.b611 - m.b612 <= 0) m.c1029 = Constraint(expr= m.b611 -", ">= 0) m.c1476 = Constraint(expr= m.b663 - m.b675 >= 0)", 
"Constraint(expr= m.x497 + 0.940066550763924*m.b662 <= 0.940066550763924) m.c711 = Constraint(expr= m.x498", "m.x85 == 0) m.c23 = Constraint(expr= - m.x71 - m.x89", "m.c884 = Constraint(expr= m.x554 - 15*m.b680 <= 0) m.c885 =", "<= 0) m.c199 = Constraint(expr= m.x280 - 15*m.b610 <= 0)", "= Constraint(expr= m.x66 - m.x327 - m.x333 == 0) m.c244", "0) m.c626 = Constraint(expr= m.x410 - 3.04984759446376*m.b653 <= 0) m.c627", "m.x486 == 0) m.c652 = Constraint(expr= m.x148 - m.x484 -", "= Constraint(expr= m.b735 + m.b736 <= 1) m.c1205 = Constraint(expr=", "<= 0) m.c424 = Constraint(expr= m.x331 - 1.32154609891348*m.b634 <= 0)", "m.b608 >= 0) m.c1410 = Constraint(expr= m.b597 + m.b600 -", "<= 0) m.c1291 = Constraint(expr= - m.b602 - m.b603 +", "m.b646 >= 0) m.c1448 = Constraint(expr= m.b626 - m.b647 >=", "= Constraint(expr= m.x355 == 0) m.c320 = Constraint(expr= m.x56 -", "m.x219/(0.001 + 0.999*m.b600)))*(0.001 + 0.999* m.b600) <= 0) m.c82 =", "1) m.c1136 = Constraint(expr= m.b702 + m.b703 <= 1) m.c1137", "Var(within=Reals,bounds=(0,None),initialize=0) m.x189 = Var(within=Reals,bounds=(0,None),initialize=0) m.x190 = Var(within=Reals,bounds=(0,None),initialize=0) m.x191 = Var(within=Reals,bounds=(0,None),initialize=0)", "1) m.c1255 = Constraint(expr= m.b761 + m.b762 <= 1) m.c1256", "m.b703 <= 0) m.c1301 = Constraint(expr= m.b614 - m.b704 <=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x433 = Var(within=Reals,bounds=(0,None),initialize=0) m.x434 = Var(within=Reals,bounds=(0,None),initialize=0) m.x435 =", "- 9*m.b646 <= 0) m.c554 = Constraint(expr= m.x449 + 9*m.b644", "m.b753 + m.b754 <= 1) m.c1241 = Constraint(expr= m.b755 +", "9*m.b625 <= 9) m.c338 = Constraint(expr=(m.x356/(0.001 + 0.999*m.b626) - 1.1*log(1", "m.x374 - 20*m.b629 <= 0) m.c396 = Constraint(expr= m.x375 -", "m.x839 == 0) m.c987 = Constraint(expr= 2*m.b750 + m.x840 ==", "== 0) m.c763 = Constraint(expr= m.x169 - m.x508 - m.x511", "Var(within=Binary,bounds=(0,1),initialize=0) m.b707 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b708 = Var(within=Binary,bounds=(0,1),initialize=0) m.b709 = Var(within=Binary,bounds=(0,1),initialize=0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x221 = Var(within=Reals,bounds=(0,None),initialize=0) m.x222 = Var(within=Reals,bounds=(0,None),initialize=0) m.x223 =", "0) m.c348 = Constraint(expr= m.x60 - m.x309 - m.x312 ==", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x281 = Var(within=Reals,bounds=(0,None),initialize=0) m.x282 = Var(within=Reals,bounds=(0,None),initialize=0) m.x283 =", "m.b673 + m.b676 >= 0) m.c1463 = Constraint(expr= - m.b665", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x317 = Var(within=Reals,bounds=(0,None),initialize=0) m.x318 = Var(within=Reals,bounds=(0,None),initialize=0) m.x319 =", "- 1.83548069293539*m.b631 <= 0) m.c392 = Constraint(expr= m.x323 + 1.83548069293539*m.b629", "m.x31 = Var(within=Reals,bounds=(0,None),initialize=0) m.x32 = Var(within=Reals,bounds=(0,None),initialize=0) m.x33 = Var(within=Reals,bounds=(0,None),initialize=0) m.x34", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x111 = Var(within=Reals,bounds=(0,None),initialize=0) m.x112 = Var(within=Reals,bounds=(0,None),initialize=0) m.x113 =", "0) m.c1015 = Constraint(expr= m.b597 - m.b598 <= 0) m.c1016", "m.x7 - m.x10 == 0) m.c5 = Constraint(expr= - m.x11", "m.x565 = Var(within=Reals,bounds=(0,None),initialize=0) m.x566 = Var(within=Reals,bounds=(0,None),initialize=0) m.x567 = Var(within=Reals,bounds=(0,None),initialize=0) m.x568", "- m.x399 - m.x402 == 0) m.c568 = Constraint(expr= m.x103", "+ 1.04900943706034*m.b648 <= 1.04900943706034) m.c583 = Constraint(expr= m.x457 + 1.04900943706034*m.b649", "m.x119 - m.x434 - m.x437 == 0) m.c483 = Constraint(expr=", "m.c822 = Constraint(expr= m.x576 == 0) m.c823 = Constraint(expr= m.x577", "Constraint(expr= 4*m.b728 + m.x818 == 0) m.c966 = Constraint(expr= 8*m.b729", "Var(within=Reals,bounds=(0,None),initialize=0) m.x151 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x152 = Var(within=Reals,bounds=(0,None),initialize=0) m.x153 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x32 - m.x248 - m.x251 == 0) m.c177 = Constraint(expr=", "0) m.c548 = Constraint(expr= m.x395 + 9*m.b644 <= 9) m.c549", "Constraint(expr= m.b610 - m.b622 >= 0) m.c1424 = Constraint(expr= m.b608", "<= 0) m.c1289 = Constraint(expr= m.b602 - m.b692 <= 0)", "m.x365 + 1.26558121681553*m.b635 <= 1.26558121681553) m.c453 = Constraint(expr= m.x366 +", "<= 0) m.c1370 = Constraint(expr= m.b683 - m.b773 <= 0)", "m.x530 == 0) m.c726 = Constraint(expr= m.x531 == 0) m.c727", "+ m.b760 <= 1) m.c1252 = Constraint(expr= m.b759 + m.b760", "m.x419 = Var(within=Reals,bounds=(0,None),initialize=0) m.x420 = Var(within=Reals,bounds=(0,None),initialize=0) m.x421 = Var(within=Reals,bounds=(0,None),initialize=0) m.x422", "= Constraint(expr= m.x592 - 9*m.b685 <= 0) m.c920 = Constraint(expr=", "m.c1364 = Constraint(expr= m.b677 - m.b767 <= 0) m.c1365 =", "Constraint(expr= m.x409 + 3.04984759446376*m.b652 <= 3.04984759446376) m.c605 = Constraint(expr= m.x458", "m.b682 <= 0) m.c1099 = Constraint(expr= m.b681 - m.b682 <=", "m.x517 == 0) m.c767 = Constraint(expr= m.x179 - m.x536 -", "== 0) m.c1013 = Constraint(expr= m.b596 - m.b597 <= 0)", "<= 0) m.c275 = Constraint(expr= m.x275 + 2.54515263975353*m.b617 <= 2.54515263975353)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x14 = Var(within=Reals,bounds=(0,None),initialize=0) m.x15 = Var(within=Reals,bounds=(0,None),initialize=0) m.x16 =", "m.x240 + 4.45628648004517*m.b603 <= 4.45628648004517) m.c127 = Constraint(expr= m.x241 +", "685 180 0 0 0 0 0 # FX 0", "Constraint(expr= m.b767 + m.b769 <= 1) m.c1267 = Constraint(expr= m.b767", "Var(within=Binary,bounds=(0,1),initialize=0) m.b717 = Var(within=Binary,bounds=(0,1),initialize=0) m.b718 = Var(within=Binary,bounds=(0,1),initialize=0) m.b719 = Var(within=Binary,bounds=(0,1),initialize=0)", "<= 0) m.c408 = 
Constraint(expr=(m.x423/(0.001 + 0.999*m.b633) - log(1 +", "m.b675) <= 0) m.c817 = Constraint(expr=(m.x574/(0.001 + 0.999*m.b676) - 0.9*log(1", "m.c1093 = Constraint(expr= m.b675 - m.b676 <= 0) m.c1094 =", "= Constraint(expr= m.b617 - m.b707 <= 0) m.c1305 = Constraint(expr=", "m.c1397 = Constraint(expr= - m.b623 + m.b641 + m.b644 >=", "0) m.c1406 = Constraint(expr= m.b596 + m.b599 - m.b605 >=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x270 = Var(within=Reals,bounds=(0,None),initialize=0) m.x271 = Var(within=Reals,bounds=(0,None),initialize=0) m.x272 = Var(within=Reals,bounds=(0,None),initialize=0)", "+ m.x446 == 0) m.c531 = Constraint(expr= - m.x393 +", "m.x788 == 0) m.c936 = Constraint(expr= 10*m.b699 + m.x789 ==", "m.c1040 = Constraint(expr= m.b623 - m.b624 <= 0) m.c1041 =", "1.18887736200171) m.c637 = Constraint(expr= m.x469 + 1.18887736200171*m.b655 <= 1.18887736200171) m.c638", "m.b686 = Var(within=Binary,bounds=(0,1),initialize=0) m.b687 = Var(within=Binary,bounds=(0,1),initialize=0) m.b688 = Var(within=Binary,bounds=(0,1),initialize=0) m.b689", "== 0) m.c298 = Constraint(expr= m.x73 - m.x346 - m.x349", "- m.x9 == 0) m.c4 = Constraint(expr= m.x4 - m.x7", "= Constraint(expr= m.x135 - m.x465 - m.x468 == 0) m.c625", "= Constraint(expr= m.x545 + 0.705049913072943*m.b671 <= 0.705049913072943) m.c807 = Constraint(expr=", "- m.x372 == 0) m.c478 = Constraint(expr= m.x85 - m.x370", "Var(within=Reals,bounds=(0,None),initialize=0) m.x277 = Var(within=Reals,bounds=(0,None),initialize=0) m.x278 = Var(within=Reals,bounds=(0,None),initialize=0) m.x279 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x307 == 0) m.c317 = Constraint(expr= m.x353 == 0)", "m.x367 == 0) m.c446 = Constraint(expr= m.x116 - m.x428 -", "Constraint(expr= m.x440 - 9*m.b641 <= 0) m.c525 = Constraint(expr= m.x441", "m.b708 <= 1) m.c1146 = Constraint(expr= m.b707 + m.b709 <=", "Constraint(expr= m.b725 + m.b726 <= 1) m.c1182 = Constraint(expr= m.b725", 
"Var(within=Binary,bounds=(0,1),initialize=0) m.b634 = Var(within=Binary,bounds=(0,1),initialize=0) m.b635 = Var(within=Binary,bounds=(0,1),initialize=0) m.b636 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.c1012 = Constraint(expr= 4*m.b775 + m.x865 == 0) m.c1013 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x519 = Var(within=Reals,bounds=(0,None),initialize=0) m.x520 = Var(within=Reals,bounds=(0,None),initialize=0) m.x521 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x440 = Var(within=Reals,bounds=(0,None),initialize=0) m.x441 = Var(within=Reals,bounds=(0,None),initialize=0) m.x442 = Var(within=Reals,bounds=(0,None),initialize=0) m.x443", "m.b632 >= 0) m.c1434 = Constraint(expr= m.b615 - m.b633 >=", "<= 0) m.c274 = Constraint(expr= m.x271 - 2.54515263975353*m.b619 <= 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x366 = Var(within=Reals,bounds=(0,None),initialize=0) m.x367 = Var(within=Reals,bounds=(0,None),initialize=0) m.x368 =", "== 0) m.c49 = Constraint(expr= m.x175 - m.x184 - m.x187", "<= 1) m.c1212 = Constraint(expr= m.b740 + m.b742 <= 1)", "= Constraint(expr= m.b665 - m.b677 >= 0) m.c1479 = Constraint(expr=", "m.b640) <= 0) m.c467 = Constraint(expr= m.x371 == 0) m.c468", "m.x123 = Var(within=Reals,bounds=(0,None),initialize=0) m.x124 = Var(within=Reals,bounds=(0,None),initialize=0) m.x125 = Var(within=Reals,bounds=(0,None),initialize=0) m.x126", "m.x556 = Var(within=Reals,bounds=(0,None),initialize=0) m.x557 = Var(within=Reals,bounds=(0,None),initialize=0) m.x558 = Var(within=Reals,bounds=(0,None),initialize=0) m.x559", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x293 = Var(within=Reals,bounds=(0,None),initialize=0) m.x294 = Var(within=Reals,bounds=(0,None),initialize=0) m.x295 =", "m.c168 = Constraint(expr= m.x252 == 0) m.c169 = Constraint(expr= m.x253", "0) m.c518 = Constraint(expr= m.x386 - 9*m.b641 <= 0) m.c519", "<= 20) m.c399 = Constraint(expr= m.x378 + 20*m.b630 <= 20)", "m.x292 - 3.34221486003388*m.b616 <= 0) 
m.c248 = Constraint(expr= m.x293 +", "m.x520 = Var(within=Reals,bounds=(0,None),initialize=0) m.x521 = Var(within=Reals,bounds=(0,None),initialize=0) m.x522 = Var(within=Reals,bounds=(0,None),initialize=0) m.x523", "m.b679 <= 0) m.c1096 = Constraint(expr= m.b678 - m.b679 <=", "m.x247 == 0) m.c146 = Constraint(expr= m.x41 - m.x266 -", "+ 0.999* m.b651) <= 0) m.c586 = Constraint(expr=(m.x460/(0.001 + 0.999*m.b652)", "m.b675 >= 0) m.c1462 = Constraint(expr= - m.b664 + m.b673", "Constraint(expr= m.x173 - m.x182 - m.x185 == 0) m.c48 =", "- 5*m.b697 - 6*m.b698 - 10*m.b699 - 6*m.b700 - 7*m.b701", "Constraint(expr= m.x433 == 0) m.c443 = Constraint(expr= m.x80 - m.x362", "m.c424 = Constraint(expr= m.x331 - 1.32154609891348*m.b634 <= 0) m.c425 =", "0.999*m.b617) <= 0) m.c258 = Constraint(expr=(m.x339/(0.001 + 0.999*m.b618) - log(1", "m.x138 = Var(within=Reals,bounds=(0,None),initialize=0) m.x139 = Var(within=Reals,bounds=(0,None),initialize=0) m.x140 = Var(within=Reals,bounds=(0,None),initialize=0) m.x141", "- m.x169 == 0) m.c47 = Constraint(expr= m.x173 - m.x182", "+ 4.45628648004517*m.b603 <= 4.45628648004517) m.c127 = Constraint(expr= m.x241 + 4.45628648004517*m.b604", "0) m.c1074 = Constraint(expr= m.b656 - m.b658 <= 0) m.c1075", "m.b748 <= 1) m.c1228 = Constraint(expr= m.b747 + m.b748 <=", "0 0 0 0 # # Variable counts # x", "m.c1076 = Constraint(expr= m.b659 - m.b660 <= 0) m.c1077 =", "m.b671 - m.b672 + m.b673 - m.b763 <= 0) m.c1361", "- m.b767 <= 0) m.c1365 = Constraint(expr= - m.b677 +", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x249 = Var(within=Reals,bounds=(0,None),initialize=0) m.x250 = Var(within=Reals,bounds=(0,None),initialize=0) m.x251 =", "m.b627 - m.b717 <= 0) m.c1315 = Constraint(expr= - m.b626", "m.x863 = Var(within=Reals,bounds=(None,None),initialize=0) m.x864 = Var(within=Reals,bounds=(None,None),initialize=0) m.x865 = Var(within=Reals,bounds=(None,None),initialize=0) m.obj", "Constraint(expr= m.x421 + 20*m.b631 <= 20) m.c407 = 
Constraint(expr=(m.x422/(0.001 +", "Constraint(expr= m.x546 == 0) m.c793 = Constraint(expr= m.x547 == 0)", "0) m.c1398 = Constraint(expr= - m.b624 + m.b642 + m.b645", "Constraint(expr= m.x528 - 0.994083415506506*m.b678 <= 0) m.c859 = Constraint(expr= m.x529", "- m.b714 <= 0) m.c1312 = Constraint(expr= - m.b623 -", "m.x72 - m.x345 - m.x348 == 0) m.c298 = Constraint(expr=", "Constraint(expr= m.x372 == 0) m.c469 = Constraint(expr= m.x373 == 0)", "+ m.b748 <= 1) m.c1229 = Constraint(expr= m.b749 + m.b750", "Var(within=Reals,bounds=(None,None),initialize=0) m.x856 = Var(within=Reals,bounds=(None,None),initialize=0) m.x857 = Var(within=Reals,bounds=(None,None),initialize=0) m.x858 = Var(within=Reals,bounds=(None,None),initialize=0)", "- m.b637 <= 0) m.c1055 = Constraint(expr= m.b638 - m.b639", "Var(within=Reals,bounds=(0,None),initialize=0) m.x95 = Var(within=Reals,bounds=(0,None),initialize=0) m.x96 = Var(within=Reals,bounds=(0,None),initialize=0) m.x97 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x41 - m.x269 - m.x275 == 0) m.c267 = Constraint(expr=", "0) m.c660 = Constraint(expr= m.x483 - 0.78338879230327*m.b657 <= 0) m.c661", "m.b658 = Var(within=Binary,bounds=(0,1),initialize=0) m.b659 = Var(within=Binary,bounds=(0,1),initialize=0) m.b660 = Var(within=Binary,bounds=(0,1),initialize=0) m.b661", "m.c2 = Constraint(expr= m.x2 - m.x5 - m.x8 == 0)", "1) m.c1244 = Constraint(expr= m.b756 + m.b757 <= 1) m.c1245", "Constraint(expr= m.x55 - m.x298 - m.x301 == 0) m.c296 =", "m.c1232 = Constraint(expr= m.b750 + m.b751 <= 1) m.c1233 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x594 = Var(within=Reals,bounds=(0,None),initialize=0) m.x595 = Var(within=Reals,bounds=(0,None),initialize=0) m.b596 = Var(within=Binary,bounds=(0,1),initialize=0)", "+ 15*m.b684 <= 15) m.c916 = Constraint(expr= m.x565 + 15*m.b685", "m.b722 <= 0) m.c1320 = Constraint(expr= - m.b632 + m.b633", "m.x553 = Var(within=Reals,bounds=(0,None),initialize=0) m.x554 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x555 = Var(within=Reals,bounds=(0,None),initialize=0) m.x556", "m.b668 - m.b670 <= 0) m.c1087 = Constraint(expr= m.b669 -", "m.c1022 = Constraint(expr= m.b605 - m.b606 <= 0) m.c1023 =", "<= 1) m.c1244 = Constraint(expr= m.b756 + m.b757 <= 1)", "7*m.b701 + m.x791 == 0) m.c939 = Constraint(expr= 7*m.b702 +", "m.x819 = Var(within=Reals,bounds=(None,None),initialize=0) m.x820 = Var(within=Reals,bounds=(None,None),initialize=0) m.x821 = Var(within=Reals,bounds=(None,None),initialize=0) m.x822", "m.x392 = Var(within=Reals,bounds=(0,None),initialize=0) m.x393 = Var(within=Reals,bounds=(0,None),initialize=0) m.x394 = Var(within=Reals,bounds=(0,None),initialize=0) m.x395", "Constraint(expr= m.x119 - m.x434 - m.x437 == 0) m.c483 =", "0) m.c846 = Constraint(expr= m.x534 == 0) m.c847 = Constraint(expr=", "m.b605 + m.b606 - m.b696 <= 0) m.c1294 = Constraint(expr=", "+ 1.18887736200171*m.b661 <= 1.18887736200171) m.c686 = Constraint(expr= m.x488 - 0.940066550763924*m.b659", "m.c82 = Constraint(expr=(m.x232/(0.001 + 0.999*m.b601) - 1.2*log(1 + m.x220/(0.001 +", "= Constraint(expr= - m.x374 + m.x416 == 0) m.c369 =", "m.x531 == 0) m.c733 = Constraint(expr= m.x178 - m.x526 -", "== 0) m.c372 = Constraint(expr= m.x324 == 0) m.c373 =", "0) m.c831 = Constraint(expr= m.x549 - 0.705049913072943*m.b675 <= 0) m.c832", "- m.b733 <= 0) m.c1331 = Constraint(expr= m.b644 - m.b734", "+ 13.5*m.b622 <= 13.5) m.c311 = Constraint(expr= - 0.6*m.x302 +", "- 0.994083415506506*m.b665 <= 0) m.c741 = Constraint(expr= m.x525 - 0.994083415506506*m.b666", "m.x160 = Var(within=Reals,bounds=(0,None),initialize=0) m.x161 = Var(within=Reals,bounds=(0,None),initialize=0) m.x162 = Var(within=Reals,bounds=(0,None),initialize=0) m.x163", "m.x341 + 1.26558121681553*m.b617 <= 1.26558121681553) m.c282 = Constraint(expr= m.x342 +", "m.x549/(0.001 + 0.999*m.b675)))*(0.001 + 0.999* m.b675) <= 0) m.c817 =", "= Constraint(expr= m.x204 - m.x579 - m.x582 == 0) m.c856", "m.c883 
= Constraint(expr= m.x208 - m.x586 - m.x589 == 0)", "m.x203 - m.x578 - m.x581 == 0) m.c855 = Constraint(expr=", "m.x529 = Var(within=Reals,bounds=(0,None),initialize=0) m.x530 = Var(within=Reals,bounds=(0,None),initialize=0) m.x531 = Var(within=Reals,bounds=(0,None),initialize=0) m.x532", "+ 0.999*m.b678) <= 0) m.c844 = Constraint(expr=(m.x580/(0.001 + 0.999*m.b679) -", "m.b767 = Var(within=Binary,bounds=(0,1),initialize=0) m.b768 = Var(within=Binary,bounds=(0,1),initialize=0) m.b769 = Var(within=Binary,bounds=(0,1),initialize=0) m.b770", "m.x356 - m.x359 == 0) m.c351 = Constraint(expr= m.x78 -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x469 = Var(within=Reals,bounds=(0,None),initialize=0) m.x470 = Var(within=Reals,bounds=(0,None),initialize=0) m.x471 =", "<= 1.18887736200171) m.c658 = Constraint(expr= m.x475 + 1.18887736200171*m.b658 <= 1.18887736200171)", "4.45628648004517) m.c154 = Constraint(expr= m.x247 + 4.45628648004517*m.b607 <= 4.45628648004517) m.c155", "Var(within=Reals,bounds=(0,None),initialize=0) m.x35 = Var(within=Reals,bounds=(0,30),initialize=0) m.x36 = Var(within=Reals,bounds=(0,30),initialize=0) m.x37 = Var(within=Reals,bounds=(0,30),initialize=0)", "- m.b621 <= 0) m.c1038 = Constraint(expr= m.b620 - m.b622", "m.x391 == 0) m.c509 = Constraint(expr= m.x443 == 0) m.c510", "0) m.c54 = Constraint(expr=(m.x225/(0.001 + 0.999*m.b597) - log(1 + m.x213/(0.001", "<= 0) m.c1068 = Constraint(expr= m.b650 - m.b652 <= 0)", "<= 0) m.c1295 = Constraint(expr= m.b608 - m.b698 <= 0)", "= Constraint(expr= m.b728 + m.b729 <= 1) m.c1188 = Constraint(expr=", "m.x153 = Var(within=Reals,bounds=(0,None),initialize=0) m.x154 = Var(within=Reals,bounds=(0,None),initialize=0) m.x155 = Var(within=Reals,bounds=(0,None),initialize=0) m.x156", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x828 = Var(within=Reals,bounds=(None,None),initialize=0) m.x829 = Var(within=Reals,bounds=(None,None),initialize=0) m.x830 =", "+ 15*m.x112 + 15*m.x113 + 20*m.x114 + 
25*m.x115 + 10*m.x116", "0) m.c1069 = Constraint(expr= m.b651 - m.b652 <= 0) m.c1070", "Constraint(expr= m.x561 - 15*m.b684 <= 0) m.c913 = Constraint(expr= m.x562", "m.x34 = Var(within=Reals,bounds=(0,None),initialize=0) m.x35 = Var(within=Reals,bounds=(0,30),initialize=0) m.x36 = Var(within=Reals,bounds=(0,30),initialize=0) m.x37", "- 9*m.b724 - 2*m.b725 - 6*m.b726 - 3*m.b727 - 4*m.b728", "0.999*m.b653)))*(0.001 + 0.999* m.b653) <= 0) m.c612 = Constraint(expr=(m.x465/(0.001 +", "Constraint(expr= m.x257 == 0) m.c171 = Constraint(expr= m.x258 == 0)", "+ 0.78338879230327*m.b657 <= 0.78338879230327) m.c664 = Constraint(expr= m.x487 + 0.78338879230327*m.b658", "Constraint(expr= 8*m.b773 + m.x863 == 0) m.c1011 = Constraint(expr= 3*m.b774", "m.x81 - m.x363 - m.x366 == 0) m.c445 = Constraint(expr=", "- m.b660 + m.b661 - m.b751 <= 0) m.c1349 =", "m.x194 == 0) m.c51 = Constraint(expr= m.x180 - m.x189 -", "m.x214 - 40*m.b598 <= 0) m.c71 = Constraint(expr= m.x215 +", "= Constraint(expr= - m.b604 + m.b613 + m.b616 >= 0)", "m.x441 - 9*m.b642 <= 0) m.c526 = Constraint(expr= m.x442 -", "Constraint(expr= m.x200 - m.x572 - m.x575 == 0) m.c828 =", "Constraint(expr= 7*m.b764 + m.x854 == 0) m.c1002 = Constraint(expr= 3*m.b765", "m.b719 <= 0) m.c1317 = Constraint(expr= - m.b629 + m.b630", "Constraint(expr= m.x365 == 0) m.c438 = Constraint(expr= m.x366 == 0)", "+ m.x364/(0.001 + 0.999*m.b637)))*(0.001 + 0.999* m.b637) <= 0) m.c437", "9*m.b693 + m.x783 == 0) m.c931 = Constraint(expr= 4*m.b694 +", "0) m.c936 = Constraint(expr= 10*m.b699 + m.x789 == 0) m.c937", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x276 = Var(within=Reals,bounds=(0,None),initialize=0) m.x277 = Var(within=Reals,bounds=(0,None),initialize=0) m.x278 =", "log(1 + m.x271/(0.001 + 0.999*m.b619)))*(0.001 + 0.999*m.b619) <= 0) m.c260", "0.999*m.b605)))*(0.001 + 0.999* m.b605) <= 0) m.c135 = Constraint(expr=(m.x267/(0.001 +", "m.x818 == 0) m.c966 = Constraint(expr= 8*m.b729 + m.x819 ==", "m.x183 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x184 = Var(within=Reals,bounds=(0,None),initialize=0) m.x185 = Var(within=Reals,bounds=(0,None),initialize=0) m.x186", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x505 = Var(within=Reals,bounds=(0,None),initialize=0) m.x506 = Var(within=Reals,bounds=(0,None),initialize=0) m.x507 =", "Constraint(expr= m.b701 + m.b703 <= 1) m.c1135 = Constraint(expr= m.b701", "m.c430 = Constraint(expr= m.x424 - 0.842233385663186*m.b634 <= 0) m.c431 =", "<= 0) m.c804 = Constraint(expr= m.x543 - 0.705049913072943*m.b672 <= 0)", "= Constraint(expr= m.x313 + 15*m.b628 <= 15) m.c359 = Constraint(expr=", "m.b643 <= 0) m.c1061 = Constraint(expr= m.b644 - m.b645 <=", "Constraint(expr= m.x562 - 15*m.b685 <= 0) m.c914 = Constraint(expr= m.x563", "= Constraint(expr= m.x403 + 3.04984759446376*m.b649 <= 3.04984759446376) m.c578 = Constraint(expr=", "= Constraint(expr= m.x256 - 30*m.b610 <= 0) m.c194 = Constraint(expr=", "= Constraint(expr= m.x377 == 0) m.c375 = Constraint(expr= m.x378 ==", "m.b751 <= 1) m.c1235 = Constraint(expr= m.b752 + m.b753 <=", ">= 0) m.c1470 = Constraint(expr= m.b654 - m.b660 >= 0)", "- m.x212 - m.x215 == 0) m.c63 = Constraint(expr= m.x6", "m.c1311 = Constraint(expr= - m.b623 + m.b624 - m.b714 <=", "180 0 # # Reformulation has removed 1 variable and", "m.x317 = Var(within=Reals,bounds=(0,None),initialize=0) m.x318 = Var(within=Reals,bounds=(0,None),initialize=0) m.x319 = Var(within=Reals,bounds=(0,None),initialize=0) m.x320", "+ 13.5*m.b681 <= 13.5) m.c895 = Constraint(expr= m.x589 + 13.5*m.b682", "m.x420 = Var(within=Reals,bounds=(0,None),initialize=0) m.x421 = Var(within=Reals,bounds=(0,None),initialize=0) m.x422 = Var(within=Reals,bounds=(0,None),initialize=0) m.x423", "m.x377 = Var(within=Reals,bounds=(0,None),initialize=0) m.x378 = Var(within=Reals,bounds=(0,None),initialize=0) m.x379 = Var(within=Reals,bounds=(0,None),initialize=0) m.x380", "m.c709 = Constraint(expr= m.x496 - 0.940066550763924*m.b664 <= 0) 
m.c710 =", "m.x308 - 15*m.b626 <= 0) m.c354 = Constraint(expr= m.x309 -", "Constraint(expr= m.b738 + m.b739 <= 1) m.c1209 = Constraint(expr= m.b737", "Constraint(expr= m.x514 - 30*m.b670 <= 0) m.c779 = Constraint(expr= m.x515", "m.c1069 = Constraint(expr= m.b651 - m.b652 <= 0) m.c1070 =", "m.x479 == 0) m.c669 = Constraint(expr= m.x480 == 0) m.c670", "Var(within=Reals,bounds=(0,None),initialize=0) m.x47 = Var(within=Reals,bounds=(0,None),initialize=0) m.x48 = Var(within=Reals,bounds=(0,None),initialize=0) m.x49 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x257 + 30*m.b608 <= 30) m.c195 = Constraint(expr= m.x258 +", "m.x467 == 0) m.c624 = Constraint(expr= m.x135 - m.x465 -", "0) m.c1331 = Constraint(expr= m.b644 - m.b734 <= 0) m.c1332", "== 0) m.c979 = Constraint(expr= 4*m.b742 + m.x832 == 0)", "+ m.x803 == 0) m.c951 = Constraint(expr= 7*m.b714 + m.x804", "m.x217 = Var(within=Reals,bounds=(0,None),initialize=0) m.x218 = Var(within=Reals,bounds=(0,None),initialize=0) m.x219 = Var(within=Reals,bounds=(0,None),initialize=0) m.x220", "+ 4.45628648004517*m.b608 <= 4.45628648004517) m.c189 = Constraint(expr= m.x252 + 4.45628648004517*m.b609", "Constraint(expr= m.b762 + m.b763 <= 1) m.c1259 = Constraint(expr= m.b764", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x506 = Var(within=Reals,bounds=(0,None),initialize=0) m.x507 = Var(within=Reals,bounds=(0,None),initialize=0) m.x508 =", "- m.b672 >= 0) m.c1474 = Constraint(expr= m.b664 - m.b673", "= Constraint(expr= m.x143 - m.x476 - m.x479 == 0) m.c675", "m.x41 = Var(within=Reals,bounds=(0,None),initialize=0) m.x42 = Var(within=Reals,bounds=(0,None),initialize=0) m.x43 = Var(within=Reals,bounds=(0,None),initialize=0) m.x44", "Constraint(expr= m.x522 == 0) m.c700 = Constraint(expr= m.x523 == 0)", "m.c1035 = Constraint(expr= m.b617 - m.b619 <= 0) m.c1036 =", "m.b745 <= 0) m.c1343 = Constraint(expr= m.b656 - m.b746 <=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x428 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x429 = Var(within=Reals,bounds=(0,None),initialize=0) m.x430 =", "m.x240 = Var(within=Reals,bounds=(0,None),initialize=0) m.x241 = Var(within=Reals,bounds=(0,None),initialize=0) m.x242 = Var(within=Reals,bounds=(0,None),initialize=0) m.x243", "+ m.b742 <= 1) m.c1213 = Constraint(expr= m.b740 + m.b741", "Var(within=Binary,bounds=(0,1),initialize=0) m.b715 = Var(within=Binary,bounds=(0,1),initialize=0) m.b716 = Var(within=Binary,bounds=(0,1),initialize=0) m.b717 = Var(within=Binary,bounds=(0,1),initialize=0)", "= Constraint(expr= m.x509 == 0) m.c753 = Constraint(expr= m.x510 ==", "== 0) m.c141 = Constraint(expr= m.x273 == 0) m.c142 =", "m.x99 = Var(within=Reals,bounds=(0,None),initialize=0) m.x100 = Var(within=Reals,bounds=(0,None),initialize=0) m.x101 = Var(within=Reals,bounds=(0,None),initialize=0) m.x102", ">= 0) m.c1391 = Constraint(expr= - m.b608 + m.b620 +", "m.c165 = Constraint(expr= - 0.5*m.x255 + m.x279 == 0) m.c166", "== 0) m.c516 = Constraint(expr= m.x123 - m.x441 - m.x444", "- 8*m.b753 - 4*m.b754 - 2*m.b755 - 3*m.b756 - 8*m.b757", "m.c807 = Constraint(expr= m.x546 + 0.705049913072943*m.b672 <= 0.705049913072943) m.c808 =", "m.c1196 = Constraint(expr= m.b732 + m.b733 <= 1) m.c1197 =", "+ m.b618 >= 0) m.c1387 = Constraint(expr= - m.b607 +", "Var(within=Reals,bounds=(0,None),initialize=0) m.x274 = Var(within=Reals,bounds=(0,None),initialize=0) m.x275 = Var(within=Reals,bounds=(0,None),initialize=0) m.x276 = Var(within=Reals,bounds=(0,None),initialize=0)", "+ 15*m.b683 <= 15) m.c915 = Constraint(expr= m.x564 + 15*m.b684", "3.71357206670431) m.c78 = Constraint(expr= m.x228 + 3.71357206670431*m.b597 <= 3.71357206670431) m.c79", "Var(within=Reals,bounds=(0,None),initialize=0) m.x563 = Var(within=Reals,bounds=(0,None),initialize=0) m.x564 = Var(within=Reals,bounds=(0,None),initialize=0) m.x565 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c829 = Constraint(expr= m.x202 - m.x574 - m.x577 == 0)", 
"3.71357206670431) m.c79 = Constraint(expr= m.x229 + 3.71357206670431*m.b598 <= 3.71357206670431) m.c80", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x338 = Var(within=Reals,bounds=(0,None),initialize=0) m.x339 = Var(within=Reals,bounds=(0,None),initialize=0) m.x340 =", "m.x441 = Var(within=Reals,bounds=(0,None),initialize=0) m.x442 = Var(within=Reals,bounds=(0,None),initialize=0) m.x443 = Var(within=Reals,bounds=(0,None),initialize=0) m.x444", "Constraint(expr= m.x331 - 1.32154609891348*m.b634 <= 0) m.c425 = Constraint(expr= m.x335", "Constraint(expr= 2*m.b746 + m.x836 == 0) m.c984 = Constraint(expr= 5*m.b747", "Constraint(expr= m.b653 - m.b656 >= 0) m.c1467 = Constraint(expr= m.b654", "m.x333 = Var(within=Reals,bounds=(0,None),initialize=0) m.x334 = Var(within=Reals,bounds=(0,None),initialize=0) m.x335 = Var(within=Reals,bounds=(0,None),initialize=0) m.x336", "0.842233385663186) m.c433 = Constraint(expr= m.x427 + 0.842233385663186*m.b634 <= 0.842233385663186) m.c434", "- 0.705049913072943*m.b676 <= 0) m.c833 = Constraint(expr= m.x551 + 0.705049913072943*m.b674", "m.x216 == 0) m.c64 = Constraint(expr= m.x7 - m.x214 -", "Constraint(expr= m.x35 - m.x254 - m.x257 == 0) m.c180 =", "m.c1057 = Constraint(expr= m.b639 - m.b640 <= 0) m.c1058 =", "m.c1440 = Constraint(expr= m.b618 - m.b639 >= 0) m.c1441 =", "1.04900943706034*m.b648 <= 1.04900943706034) m.c583 = Constraint(expr= m.x457 + 1.04900943706034*m.b649 <=", "Constraint(expr= m.x510 == 0) m.c754 = Constraint(expr= m.x511 == 0)", "m.c658 = Constraint(expr= m.x475 + 1.18887736200171*m.b658 <= 1.18887736200171) m.c659 =", "- 0.75*m.x494 + m.x518 == 0) m.c693 = Constraint(expr= -", "== 0) m.c872 = Constraint(expr= m.x557 == 0) m.c873 =", "4.45628648004517*m.b599 <= 4.45628648004517) m.c105 = Constraint(expr= m.x234 + 4.45628648004517*m.b600 <=", "m.x235 + 4.45628648004517*m.b601 <= 4.45628648004517) m.c107 = Constraint(expr= - 0.75*m.x236", "4*m.b775, sense=maximize) m.c2 = Constraint(expr= m.x2 - m.x5 - m.x8", 
"Var(within=Reals,bounds=(0,None),initialize=0) m.b596 = Var(within=Binary,bounds=(0,1),initialize=0) m.b597 = Var(within=Binary,bounds=(0,1),initialize=0) m.b598 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.b750 <= 0) m.c1348 = Constraint(expr= - m.b659 - m.b660", "0) m.c288 = Constraint(expr= m.x300 == 0) m.c289 = Constraint(expr=", "m.x168 == 0) m.c46 = Constraint(expr= m.x160 - m.x163 -", "- 0.705049913072943*m.b673 <= 0) m.c806 = Constraint(expr= m.x545 + 0.705049913072943*m.b671", "= Constraint(expr= m.x249 - 4.45628648004517*m.b609 <= 0) m.c187 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x32 = Var(within=Reals,bounds=(0,None),initialize=0) m.x33 = Var(within=Reals,bounds=(0,None),initialize=0) m.x34 =", "+ 4.45628648004517*m.b607 <= 4.45628648004517) m.c155 = Constraint(expr= m.x266 - 2.54515263975353*m.b605", "m.x453 - m.x456 == 0) m.c571 = Constraint(expr= m.x130 -", "0) m.c1056 = Constraint(expr= m.b638 - m.b640 <= 0) m.c1057", "m.x536 == 0) m.c747 = Constraint(expr= - m.x507 + m.x537", "m.x57 = Var(within=Reals,bounds=(0,None),initialize=0) m.x58 = Var(within=Reals,bounds=(0,None),initialize=0) m.x59 = Var(within=Reals,bounds=(0,None),initialize=0) m.x60", "m.x17 = Var(within=Reals,bounds=(0,None),initialize=0) m.x18 = Var(within=Reals,bounds=(0,None),initialize=0) m.x19 = Var(within=Reals,bounds=(0,None),initialize=0) m.x20", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x245 = Var(within=Reals,bounds=(0,None),initialize=0) m.x246 = Var(within=Reals,bounds=(0,None),initialize=0) m.x247 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x284 = Var(within=Reals,bounds=(0,None),initialize=0) m.x285 = Var(within=Reals,bounds=(0,None),initialize=0) m.x286 =", "m.x326 - 1.32154609891348*m.b614 <= 0) m.c252 = Constraint(expr= m.x327 -", "<= 1.83548069293539) m.c230 = Constraint(expr=(m.x326/(0.001 + 0.999*m.b614) - 0.9*log(1 +", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b722 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b723 = Var(within=Binary,bounds=(0,1),initialize=0) m.b724 =", "m.x546 + 0.705049913072943*m.b672 <= 0.705049913072943) m.c808 = Constraint(expr= m.x547 +", "- m.b649 <= 0) m.c1067 = Constraint(expr= m.b650 - m.b651", "Constraint(expr= m.x237 - 4.45628648004517*m.b603 <= 0) m.c124 = Constraint(expr= m.x238", "- 5*m.x87 - 5*m.x88 - 5*m.x89 - 5*m.x90 - 5*m.x91", "- m.x581 == 0) m.c855 = Constraint(expr= m.x204 - m.x579", "m.c1180 = Constraint(expr= m.b723 + m.b724 <= 1) m.c1181 =", "0) m.c302 = Constraint(expr= m.x299 + 15*m.b620 <= 15) m.c303", "Var(within=Reals,bounds=(0,None),initialize=0) m.x412 = Var(within=Reals,bounds=(0,None),initialize=0) m.x413 = Var(within=Reals,bounds=(0,None),initialize=0) m.x414 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x823 = Var(within=Reals,bounds=(None,None),initialize=0) m.x824 = Var(within=Reals,bounds=(None,None),initialize=0) m.x825 =", "= Constraint(expr= m.x530 == 0) m.c726 = Constraint(expr= m.x531 ==", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b762 = Var(within=Binary,bounds=(0,1),initialize=0) m.b763 = Var(within=Binary,bounds=(0,1),initialize=0) m.b764 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x227 = Var(within=Reals,bounds=(0,None),initialize=0) m.x228 = Var(within=Reals,bounds=(0,None),initialize=0) m.x229 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c344 = Constraint(expr= m.x359 == 0) m.c345 = Constraint(expr= m.x360", "m.x197 - m.x566 - m.x569 == 0) m.c801 = Constraint(expr=", "m.c1195 = Constraint(expr= m.b731 + m.b732 <= 1) m.c1196 =", "Var(within=Binary,bounds=(0,1),initialize=0) m.b735 = Var(within=Binary,bounds=(0,1),initialize=0) m.b736 = Var(within=Binary,bounds=(0,1),initialize=0) m.b737 = Var(within=Binary,bounds=(0,1),initialize=0)", "0.705049913072943*m.b673 <= 0.705049913072943) m.c809 = Constraint(expr= m.x566 - 0.666992981045719*m.b671 <=", 
"Var(within=Reals,bounds=(None,None),initialize=0) m.x851 = Var(within=Reals,bounds=(None,None),initialize=0) m.x852 = Var(within=Reals,bounds=(None,None),initialize=0) m.x853 = Var(within=Reals,bounds=(None,None),initialize=0)", "<= 0) m.c816 = Constraint(expr=(m.x573/(0.001 + 0.999*m.b675) - 0.9*log(1 +", "0) m.c542 = Constraint(expr= m.x125 - m.x446 - m.x449 ==", "9*m.b724 + m.x814 == 0) m.c962 = Constraint(expr= 2*m.b725 +", "Var(within=Binary,bounds=(0,1),initialize=0) m.b638 = Var(within=Binary,bounds=(0,1),initialize=0) m.b639 = Var(within=Binary,bounds=(0,1),initialize=0) m.b640 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.x264 == 0) m.c121 = Constraint(expr= m.x40 - m.x262 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x266 = Var(within=Reals,bounds=(0,None),initialize=0) m.x267 = Var(within=Reals,bounds=(0,None),initialize=0) m.x268 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c617 = Constraint(expr= m.x467 == 0) m.c618 = Constraint(expr= m.x468", "Var(within=Reals,bounds=(0,None),initialize=0) m.x482 = Var(within=Reals,bounds=(0,None),initialize=0) m.x483 = Var(within=Reals,bounds=(0,None),initialize=0) m.x484 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c552 = Constraint(expr= m.x447 - 9*m.b645 <= 0) m.c553 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x507 = Var(within=Reals,bounds=(0,None),initialize=0) m.x508 = Var(within=Reals,bounds=(0,None),initialize=0) m.x509 =", "+ 0.999*m.b675) - 0.9*log(1 + m.x549/(0.001 + 0.999*m.b675)))*(0.001 + 0.999*", "1.18887736200171) m.c658 = Constraint(expr= m.x475 + 1.18887736200171*m.b658 <= 1.18887736200171) m.c659", "= Constraint(expr= m.x461 + 1.11894339953103*m.b650 <= 1.11894339953103) m.c609 = Constraint(expr=", "Var(within=Reals,bounds=(0,None),initialize=0) m.x188 = Var(within=Reals,bounds=(0,None),initialize=0) m.x189 = Var(within=Reals,bounds=(0,None),initialize=0) m.x190 = Var(within=Reals,bounds=(0,None),initialize=0)", "= 
Var(within=Reals,bounds=(0,None),initialize=0) m.x94 = Var(within=Reals,bounds=(0,None),initialize=0) m.x95 = Var(within=Reals,bounds=(0,None),initialize=0) m.x96 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x318 = Var(within=Reals,bounds=(0,None),initialize=0) m.x319 = Var(within=Reals,bounds=(0,None),initialize=0) m.x320 =", "m.x562 - m.x565 == 0) m.c908 = Constraint(expr= m.x209 -", "= Constraint(expr= m.b628 - m.b652 >= 0) m.c1454 = Constraint(expr=", "- 1.18887736200171*m.b660 <= 0) m.c682 = Constraint(expr= m.x478 - 1.18887736200171*m.b661", "m.b634 <= 0) m.c1052 = Constraint(expr= m.b635 - m.b636 <=", "1) m.c1239 = Constraint(expr= m.b752 + m.b754 <= 1) m.c1240", "Constraint(expr= m.b758 + m.b759 <= 1) m.c1248 = Constraint(expr= m.b758", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x229 = Var(within=Reals,bounds=(0,None),initialize=0) m.x230 = Var(within=Reals,bounds=(0,None),initialize=0) m.x231 =", "= Var(within=Reals,bounds=(0,20),initialize=0) m.x87 = Var(within=Reals,bounds=(0,20),initialize=0) m.x88 = Var(within=Reals,bounds=(0,20),initialize=0) m.x89 =", "== 0) m.c870 = Constraint(expr= - 0.9*m.x555 + m.x585 ==", "m.b610 - m.b700 <= 0) m.c1298 = Constraint(expr= m.b611 -", "- m.b634 >= 0) m.c1436 = Constraint(expr= m.b617 - m.b635", "== 0) m.c34 = Constraint(expr= m.x136 - m.x139 == 0)", "m.c414 = Constraint(expr= m.x426 == 0) m.c415 = Constraint(expr= m.x427", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x285 = Var(within=Reals,bounds=(0,None),initialize=0) m.x286 = Var(within=Reals,bounds=(0,None),initialize=0) m.x287 =", "== 0) m.c24 = Constraint(expr= - m.x72 - m.x90 +", "m.x393 = Var(within=Reals,bounds=(0,None),initialize=0) m.x394 = Var(within=Reals,bounds=(0,None),initialize=0) m.x395 = Var(within=Reals,bounds=(0,None),initialize=0) m.x396", "4.45628648004517) m.c191 = Constraint(expr= m.x254 - 30*m.b608 <= 0) m.c192", "m.c192 = Constraint(expr= m.x255 - 30*m.b609 <= 0) m.c193 =", "== 0) m.c619 = Constraint(expr= 
m.x469 == 0) m.c620 =", "m.x265 == 0) m.c122 = Constraint(expr= m.x236 - 4.45628648004517*m.b602 <=", "m.x255 = Var(within=Reals,bounds=(0,None),initialize=0) m.x256 = Var(within=Reals,bounds=(0,None),initialize=0) m.x257 = Var(within=Reals,bounds=(0,None),initialize=0) m.x258", "m.b679 >= 0) m.c1466 = Constraint(expr= m.b653 - m.b656 >=", "Var(within=Binary,bounds=(0,1),initialize=0) m.b635 = Var(within=Binary,bounds=(0,1),initialize=0) m.b636 = Var(within=Binary,bounds=(0,1),initialize=0) m.b637 = Var(within=Binary,bounds=(0,1),initialize=0)", "0) m.c676 = Constraint(expr= m.x145 - m.x478 - m.x481 ==", "binary integer sos1 sos2 scont sint # 865 685 180", "Var(within=Binary,bounds=(0,1),initialize=0) m.b756 = Var(within=Binary,bounds=(0,1),initialize=0) m.b757 = Var(within=Binary,bounds=(0,1),initialize=0) m.b758 = Var(within=Binary,bounds=(0,1),initialize=0)", "Constraint(expr= m.x118 - m.x430 - m.x433 == 0) m.c449 =", "0) m.c178 = Constraint(expr= m.x34 - m.x250 - m.x253 ==", "0.8*log(1 + m.x404/(0.001 + 0.999*m.b650)))*(0.001 + 0.999* m.b650) <= 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x222 = Var(within=Reals,bounds=(0,None),initialize=0) m.x223 = Var(within=Reals,bounds=(0,None),initialize=0) m.x224 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c95 = Constraint(expr= m.x218 - 40*m.b599 <= 0) m.c96 =", "- m.b613 <= 0) m.c1030 = Constraint(expr= m.b612 - m.b613", "m.b741 <= 1) m.c1212 = Constraint(expr= m.b740 + m.b742 <=", "m.x474 = Var(within=Reals,bounds=(0,None),initialize=0) m.x475 = Var(within=Reals,bounds=(0,None),initialize=0) m.x476 = Var(within=Reals,bounds=(0,None),initialize=0) m.x477", "0) m.c623 = Constraint(expr= m.x134 - m.x464 - m.x467 ==", "- m.x419 == 0) m.c387 = Constraint(expr= m.x111 - m.x417", "0) m.c1349 = Constraint(expr= m.b662 - m.b752 <= 0) m.c1350", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x215 = Var(within=Reals,bounds=(0,None),initialize=0) m.x216 = Var(within=Reals,bounds=(0,None),initialize=0) 
m.x217 =", "- m.x231 - m.x234 == 0) m.c94 = Constraint(expr= m.x16", "- m.x234 == 0) m.c94 = Constraint(expr= m.x16 - m.x232", "- m.x422 - m.x425 == 0) m.c420 = Constraint(expr= m.x114", "Var(within=Reals,bounds=(0,None),initialize=0) m.x74 = Var(within=Reals,bounds=(0,None),initialize=0) m.x75 = Var(within=Reals,bounds=(0,None),initialize=0) m.x76 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x559 == 0) m.c875 = Constraint(expr= m.x587 == 0)", "+ 20*m.x121 + 35*m.x122 + 50*m.x123 + 20*m.x124 + 20*m.x125", "- m.x147 - m.x150 + m.x153 == 0) m.c40 =", "- 3.71357206670431*m.b598 <= 0) m.c77 = Constraint(expr= m.x227 + 3.71357206670431*m.b596", "Constraint(expr= m.x36 - m.x255 - m.x258 == 0) m.c181 =", "m.c748 = Constraint(expr= - m.x508 + m.x538 == 0) m.c749", "m.c1152 = Constraint(expr= m.b710 + m.b712 <= 1) m.c1153 =", "1.25*log(1 + m.x286/(0.001 + 0.999*m.b613)))*(0.001 + 0.999* m.b613) <= 0)", "m.b738 <= 0) m.c1336 = Constraint(expr= - m.b647 - m.b648", "- m.b629 >= 0) m.c1431 = Constraint(expr= m.b612 - m.b630", "1.18887736200171) m.c657 = Constraint(expr= m.x474 + 1.18887736200171*m.b657 <= 1.18887736200171) m.c658", "0) m.c387 = Constraint(expr= m.x111 - m.x417 - m.x420 ==", "m.c752 = Constraint(expr= m.x509 == 0) m.c753 = Constraint(expr= m.x510", "= Constraint(expr= m.x565 == 0) m.c902 = Constraint(expr= m.x593 ==", "- m.b601 <= 0) m.c1018 = Constraint(expr= m.b600 - m.b601", "- m.x249 + m.x279 == 0) m.c163 = Constraint(expr= -", ">= 0) m.c1434 = Constraint(expr= m.b615 - m.b633 >= 0)", "Constraint(expr= m.x213 - 40*m.b597 <= 0) m.c70 = Constraint(expr= m.x214", "= Constraint(expr= m.x337 == 0) m.c413 = Constraint(expr= m.x425 ==", "= Constraint(expr= m.x472 - 1.18887736200171*m.b658 <= 0) m.c656 = Constraint(expr=", "- m.x579 - m.x582 == 0) m.c856 = Constraint(expr= m.x205", "m.x832 == 0) m.c980 = Constraint(expr= m.b743 + m.x833 ==", "= Constraint(expr= m.x388 - 9*m.b643 <= 0) m.c521 = Constraint(expr=", "2*m.b720 + m.x810 == 0) 
m.c958 = Constraint(expr= 9*m.b721 +", "m.b766 <= 0) m.c1364 = Constraint(expr= m.b677 - m.b767 <=", "m.c464 = Constraint(expr=(m.x434/(0.001 + 0.999*m.b638) - 0.65*log(1 + m.x380/(0.001 +", "m.c667 = Constraint(expr=(m.x490/(0.001 + 0.999*m.b661) - 1.2*log(1 + m.x478/(0.001 +", "m.b741 <= 0) m.c1339 = Constraint(expr= - m.b650 - m.b651", "Var(within=Reals,bounds=(0,None),initialize=0) m.x424 = Var(within=Reals,bounds=(0,None),initialize=0) m.x425 = Var(within=Reals,bounds=(0,None),initialize=0) m.x426 = Var(within=Reals,bounds=(0,None),initialize=0)", "20) m.c400 = Constraint(expr= m.x379 + 20*m.b631 <= 20) m.c401", "Constraint(expr= m.x505 + 0.940066550763924*m.b667 <= 0.940066550763924) m.c740 = Constraint(expr= m.x524", "# # Variable counts # x b i s1s s2s", "0) m.c872 = Constraint(expr= m.x557 == 0) m.c873 = Constraint(expr=", "0) m.c882 = Constraint(expr= m.x207 - m.x585 - m.x588 ==", "m.c1330 = Constraint(expr= - m.b641 - m.b642 + m.b643 -", "Constraint(expr= - m.b662 + m.b663 - m.b753 <= 0) m.c1351", "Var(within=Reals,bounds=(0,None),initialize=0) m.x120 = Var(within=Reals,bounds=(0,None),initialize=0) m.x121 = Var(within=Reals,bounds=(0,None),initialize=0) m.x122 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c619 = Constraint(expr= m.x469 == 0) m.c620 = Constraint(expr= m.x107", "= Constraint(expr= m.x566 - 0.666992981045719*m.b671 <= 0) m.c810 = Constraint(expr=", "4.45628648004517*m.b606 <= 4.45628648004517) m.c154 = Constraint(expr= m.x247 + 4.45628648004517*m.b607 <=", "Constraint(expr= m.x313 == 0) m.c344 = Constraint(expr= m.x359 == 0)", "b i s1s s2s sc si # Total cont binary", "m.b698 + m.b699 <= 1) m.c1130 = Constraint(expr= m.b699 +", "m.b608 - m.b620 >= 0) m.c1422 = Constraint(expr= m.b609 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x67 = Var(within=Reals,bounds=(0,None),initialize=0) m.x68 = Var(within=Reals,bounds=(0,None),initialize=0) m.x69 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x43 - m.x268 - 
m.x274 == 0) m.c149 =", "- m.b606 >= 0) m.c1408 = Constraint(expr= m.b598 + m.b601", "m.c923 = Constraint(expr= 5*m.b686 + m.x776 == 0) m.c924 =", "m.c1387 = Constraint(expr= - m.b607 + m.b619 >= 0) m.c1388", "= Constraint(expr= m.x181 - m.x538 - m.x541 == 0) m.c770", "- m.b672 <= 0) m.c1089 = Constraint(expr= m.b671 - m.b673", "= Constraint(expr= m.b695 + m.b697 <= 1) m.c1126 = Constraint(expr=", "9*m.b624 <= 9) m.c337 = Constraint(expr= m.x355 + 9*m.b625 <=", "0.940066550763924*m.b665 <= 0.940066550763924) m.c738 = Constraint(expr= m.x504 + 0.940066550763924*m.b666 <=", "= Constraint(expr= m.x371 + 1.26558121681553*m.b638 <= 1.26558121681553) m.c489 = Constraint(expr=", "0.999*m.b678)))*(0.001 + 0.999*m.b678) <= 0) m.c844 = Constraint(expr=(m.x580/(0.001 + 0.999*m.b679)", "+ m.b706 <= 1) m.c1145 = Constraint(expr= m.b707 + m.b708", "2*m.b748 - 9*m.b749 - 2*m.b750 - 9*m.b751 - 5*m.b752 -", "+ 0.572481933717686*m.b637 <= 0.572481933717686) m.c461 = Constraint(expr=(m.x434/(0.001 + 0.999*m.b638) -", "m.x781 = Var(within=Reals,bounds=(None,None),initialize=0) m.x782 = Var(within=Reals,bounds=(None,None),initialize=0) m.x783 = Var(within=Reals,bounds=(None,None),initialize=0) m.x784", "= Constraint(expr= m.x229 + 3.71357206670431*m.b598 <= 3.71357206670431) m.c80 = Constraint(expr=(m.x230/(0.001", "m.x30 - m.x33 == 0) m.c13 = Constraint(expr= m.x25 -", "<= 1.18887736200171) m.c657 = Constraint(expr= m.x474 + 1.18887736200171*m.b657 <= 1.18887736200171)", "m.x332 == 0) m.c243 = Constraint(expr= m.x66 - m.x327 -", "+ 30*m.b668 <= 30) m.c780 = Constraint(expr= m.x516 + 30*m.b669", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x393 = Var(within=Reals,bounds=(0,None),initialize=0) m.x394 = Var(within=Reals,bounds=(0,None),initialize=0) m.x395 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x541 = Var(within=Reals,bounds=(0,None),initialize=0) m.x542 = Var(within=Reals,bounds=(0,None),initialize=0) m.x543 = Var(within=Reals,bounds=(0,None),initialize=0)", "== 0) 
m.c440 = Constraint(expr= m.x431 == 0) m.c441 =", "m.x562 = Var(within=Reals,bounds=(0,None),initialize=0) m.x563 = Var(within=Reals,bounds=(0,None),initialize=0) m.x564 = Var(within=Reals,bounds=(0,None),initialize=0) m.x565", "+ 1.32154609891348*m.b614 <= 1.32154609891348) m.c255 = Constraint(expr= m.x333 + 1.32154609891348*m.b615", "<= 1) m.c1120 = Constraint(expr= m.b693 + m.b694 <= 1)", "+ m.x843 == 0) m.c991 = Constraint(expr= 4*m.b754 + m.x844", "Constraint(expr= 6*m.b692 + m.x782 == 0) m.c930 = Constraint(expr= 9*m.b693", "m.x440 == 0) m.c504 = Constraint(expr= - m.x387 + m.x441", "<= 1) m.c1250 = Constraint(expr= m.b759 + m.b760 <= 1)", "3.04984759446376*m.b648 <= 0) m.c574 = Constraint(expr= m.x400 - 3.04984759446376*m.b649 <=", "m.x144 = Var(within=Reals,bounds=(0,None),initialize=0) m.x145 = Var(within=Reals,bounds=(0,None),initialize=0) m.x146 = Var(within=Reals,bounds=(0,None),initialize=0) m.x147", "- m.b607 >= 0) m.c1409 = Constraint(expr= m.b596 + m.b599", "4*m.b754 + m.x844 == 0) m.c992 = Constraint(expr= 2*m.b755 +", "Var(within=Reals,bounds=(0,None),initialize=0) m.x588 = Var(within=Reals,bounds=(0,None),initialize=0) m.x589 = Var(within=Reals,bounds=(0,None),initialize=0) m.x590 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c1044 = Constraint(expr= m.b626 - m.b628 <= 0) m.c1045 =", "<= 0) m.c1065 = Constraint(expr= m.b647 - m.b649 <= 0)", "m.x232 - m.x235 == 0) m.c95 = Constraint(expr= m.x218 -", "+ m.b619 - m.b709 <= 0) m.c1307 = Constraint(expr= m.b620", "- m.b621 + m.b639 >= 0) m.c1396 = Constraint(expr= -", "m.x848 == 0) m.c996 = Constraint(expr= 6*m.b759 + m.x849 ==", "Constraint(expr= m.b686 + m.b688 <= 1) m.c1108 = Constraint(expr= m.b687", "m.b743 + m.b744 <= 1) m.c1218 = Constraint(expr= m.b743 +", "- 9*m.b643 <= 0) m.c521 = Constraint(expr= m.x389 + 9*m.b641", "m.c1128 = Constraint(expr= m.b698 + m.b700 <= 1) m.c1129 =", "- m.b656 - m.b657 + m.b658 - m.b748 <= 0)", "= Constraint(expr= m.b620 - m.b710 <= 0) m.c1308 = 
Constraint(expr=", "m.x350 - m.x353 == 0) m.c324 = Constraint(expr= m.x75 -", "= Constraint(expr= 4*m.b715 + m.x805 == 0) m.c953 = Constraint(expr=", "+ m.b679 >= 0) m.c1466 = Constraint(expr= m.b653 - m.b656", "m.x527/(0.001 + 0.999*m.b677)))*(0.001 + 0.999*m.b677) <= 0) m.c843 = Constraint(expr=(m.x579/(0.001", "m.x163 = Var(within=Reals,bounds=(0,None),initialize=0) m.x164 = Var(within=Reals,bounds=(0,None),initialize=0) m.x165 = Var(within=Reals,bounds=(0,None),initialize=0) m.x166", "+ m.x835 == 0) m.c983 = Constraint(expr= 2*m.b746 + m.x836", "Constraint(expr= m.b720 + m.b721 <= 1) m.c1173 = Constraint(expr= m.b719", "- m.b674 - m.b675 + m.b676 - m.b766 <= 0)", "m.x257 == 0) m.c171 = Constraint(expr= m.x258 == 0) m.c172", "m.c1396 = Constraint(expr= - m.b622 + m.b640 >= 0) m.c1397", "Constraint(expr= m.x212 - 40*m.b596 <= 0) m.c69 = Constraint(expr= m.x213", "m.x477 = Var(within=Reals,bounds=(0,None),initialize=0) m.x478 = Var(within=Reals,bounds=(0,None),initialize=0) m.x479 = Var(within=Reals,bounds=(0,None),initialize=0) m.x480", "- m.x404 - m.x407 == 0) m.c594 = Constraint(expr= m.x105", "= Constraint(expr= - m.b608 + m.b609 - m.b699 <= 0)", "Constraint(expr= m.x550 - 0.705049913072943*m.b676 <= 0) m.c833 = Constraint(expr= m.x551", "m.x251 == 0) m.c177 = Constraint(expr= m.x33 - m.x249 -", "m.b737 + m.b738 <= 1) m.c1206 = Constraint(expr= m.b737 +", "m.x3 = Var(within=Reals,bounds=(0,40),initialize=0) m.x4 = Var(within=Reals,bounds=(0,40),initialize=0) m.x5 = Var(within=Reals,bounds=(0,None),initialize=0) m.x6", "m.x56 = Var(within=Reals,bounds=(0,None),initialize=0) m.x57 = Var(within=Reals,bounds=(0,None),initialize=0) m.x58 = Var(within=Reals,bounds=(0,None),initialize=0) m.x59", "0) m.c874 = Constraint(expr= m.x559 == 0) m.c875 = Constraint(expr=", "m.x407 == 0) m.c594 = Constraint(expr= m.x105 - m.x405 -", "m.x174 - m.x519 - m.x522 == 0) m.c706 = Constraint(expr=", "m.b625 + m.b643 + m.b646 >= 0) m.c1400 = Constraint(expr=", "0) m.c29 = 
Constraint(expr= m.x77 - m.x101 - m.x104 -", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b740 = Var(within=Binary,bounds=(0,1),initialize=0) m.b741 = Var(within=Binary,bounds=(0,1),initialize=0) m.b742 =", "- m.x452 - m.x455 == 0) m.c570 = Constraint(expr= m.x129", "0) m.c1317 = Constraint(expr= - m.b629 + m.b630 - m.b720", "+ m.b601 - m.b604 >= 0) m.c1406 = Constraint(expr= m.b596", "Constraint(expr= m.x165 - m.x501 - m.x504 == 0) m.c730 =", "- m.b672 + m.b673 - m.b763 <= 0) m.c1361 =", "= Constraint(expr= m.b690 + m.b691 <= 1) m.c1115 = Constraint(expr=", "- 5*m.x91 + 40*m.x110 + 30*m.x111 + 15*m.x112 + 15*m.x113", "m.b620 + m.b621 - m.b711 <= 0) m.c1309 = Constraint(expr=", "<= 1) m.c1109 = Constraint(expr= m.b689 + m.b690 <= 1)", "0) m.c1034 = Constraint(expr= m.b617 - m.b618 <= 0) m.c1035", "m.c1483 = Constraint(expr= m.b670 - m.b682 >= 0) m.c1484 =", "+ m.b741 <= 1) m.c1214 = Constraint(expr= m.b741 + m.b742", "= Constraint(expr= m.x163 - m.x496 - m.x499 == 0) m.c704", "m.x128 - m.x452 - m.x455 == 0) m.c570 = Constraint(expr=", "4*m.b735 + m.x825 == 0) m.c973 = Constraint(expr= 3*m.b736 +", "0.9*m.x319 + m.x418 == 0) m.c368 = Constraint(expr= - m.x374", "Constraint(expr= m.x480 + 1.18887736200171*m.b660 <= 1.18887736200171) m.c685 = Constraint(expr= m.x481", "<= 0.480234946352917) m.c840 = Constraint(expr= m.x576 + 0.480234946352917*m.b675 <= 0.480234946352917)", "- m.x506 - m.x509 == 0) m.c762 = Constraint(expr= m.x168", "0) m.c845 = Constraint(expr= m.x533 == 0) m.c846 = Constraint(expr=", "- 0.85*log(1 + m.x412/(0.001 + 0.999*m.b655)))*(0.001 + 0.999* m.b655) <=", "m.x462 == 0) m.c598 = Constraint(expr= m.x133 - m.x460 -", "<= 0) m.c628 = Constraint(expr= m.x412 - 3.04984759446376*m.b655 <= 0)", "5*m.b707 + m.x797 == 0) m.c945 = Constraint(expr= 6*m.b708 +", "Constraint(expr= m.b716 + m.b717 <= 1) m.c1164 = Constraint(expr= m.b716", "m.b668 - m.b758 <= 0) m.c1356 = Constraint(expr= - m.b668", "m.x162 - m.x165 - m.x168 == 0) m.c46 = Constraint(expr=", 
"= Constraint(expr= 6*m.b692 + m.x782 == 0) m.c930 = Constraint(expr=", "m.c802 = Constraint(expr= m.x199 - m.x568 - m.x571 == 0)", "0) m.c389 = Constraint(expr= m.x317 - 1.83548069293539*m.b629 <= 0) m.c390", "m.c573 = Constraint(expr= m.x399 - 3.04984759446376*m.b648 <= 0) m.c574 =", "Constraint(expr= m.x587 + 13.5*m.b680 <= 13.5) m.c894 = Constraint(expr= m.x588", "m.x785 = Var(within=Reals,bounds=(None,None),initialize=0) m.x786 = Var(within=Reals,bounds=(None,None),initialize=0) m.x787 = Var(within=Reals,bounds=(None,None),initialize=0) m.x788", "m.x577 == 0) m.c830 = Constraint(expr= m.x548 - 0.705049913072943*m.b674 <=", "m.b665) <= 0) m.c720 = Constraint(expr=(m.x525/(0.001 + 0.999*m.b666) - 1.5*log(1", "m.x476 - m.x479 == 0) m.c675 = Constraint(expr= m.x144 -", "m.b680 - m.b681 <= 0) m.c1098 = Constraint(expr= m.b680 -", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x789 = Var(within=Reals,bounds=(None,None),initialize=0) m.x790 = Var(within=Reals,bounds=(None,None),initialize=0) m.x791 =", "m.b700 <= 1) m.c1132 = Constraint(expr= m.b699 + m.b700 <=", "m.c1171 = Constraint(expr= m.b719 + m.b720 <= 1) m.c1172 =", "1) m.c1174 = Constraint(expr= m.b720 + m.b721 <= 1) m.c1175", "+ m.b721 <= 1) m.c1175 = Constraint(expr= m.b722 + m.b723", "- m.b663 <= 0) m.c1080 = Constraint(expr= m.b662 - m.b664", "+ m.b616 - m.b706 <= 0) m.c1304 = Constraint(expr= m.b617", "- m.x224 - m.x227 == 0) m.c66 = Constraint(expr= m.x12", "m.c1092 = Constraint(expr= m.b674 - m.b676 <= 0) m.c1093 =", "- m.b726 <= 0) m.c1324 = Constraint(expr= - m.b635 -", "m.c1235 = Constraint(expr= m.b752 + m.b753 <= 1) m.c1236 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x140 = Var(within=Reals,bounds=(0,None),initialize=0) m.x141 = Var(within=Reals,bounds=(0,None),initialize=0) m.x142 =", "0.705049913072943) m.c834 = Constraint(expr= m.x552 + 0.705049913072943*m.b675 <= 0.705049913072943) m.c835", "= Constraint(expr= m.b667 - m.b679 >= 0) m.c1481 = Constraint(expr=", "m.b608 - 
m.b698 <= 0) m.c1296 = Constraint(expr= - m.b608", "m.x4 = Var(within=Reals,bounds=(0,40),initialize=0) m.x5 = Var(within=Reals,bounds=(0,None),initialize=0) m.x6 = Var(within=Reals,bounds=(0,None),initialize=0) m.x7", "Var(within=Reals,bounds=(0,None),initialize=0) m.x582 = Var(within=Reals,bounds=(0,None),initialize=0) m.x583 = Var(within=Reals,bounds=(0,None),initialize=0) m.x584 = Var(within=Reals,bounds=(0,None),initialize=0)", "- m.b730 <= 0) m.c1328 = Constraint(expr= m.b641 - m.b731", "= Constraint(expr= - m.b644 - m.b645 + m.b646 - m.b736", "Var(within=Reals,bounds=(0,None),initialize=0) m.x374 = Var(within=Reals,bounds=(0,None),initialize=0) m.x375 = Var(within=Reals,bounds=(0,None),initialize=0) m.x376 = Var(within=Reals,bounds=(0,None),initialize=0)", "== 0) m.c504 = Constraint(expr= - m.x387 + m.x441 ==", "+ m.b774 <= 1) m.c1278 = Constraint(expr= m.b773 + m.b775", "Var(within=Reals,bounds=(0,None),initialize=0) m.x252 = Var(within=Reals,bounds=(0,None),initialize=0) m.x253 = Var(within=Reals,bounds=(0,None),initialize=0) m.x254 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x860 = Var(within=Reals,bounds=(None,None),initialize=0) m.x861 = Var(within=Reals,bounds=(None,None),initialize=0) m.x862 =", "- m.b706 <= 0) m.c1304 = Constraint(expr= m.b617 - m.b707", "+ 4.45628648004517*m.b604 <= 4.45628648004517) m.c128 = Constraint(expr= m.x260 - 3.34221486003388*m.b602", "- m.b606 + m.b607 - m.b697 <= 0) m.c1295 =", "0) m.c64 = Constraint(expr= m.x7 - m.x214 - m.x217 ==", "= Constraint(expr= m.x323 + 1.83548069293539*m.b629 <= 1.83548069293539) m.c393 = Constraint(expr=", "m.b603 = Var(within=Binary,bounds=(0,1),initialize=0) m.b604 = Var(within=Binary,bounds=(0,1),initialize=0) m.b605 = Var(within=Binary,bounds=(0,1),initialize=0) m.b606", "Constraint(expr= - m.b659 - m.b660 + m.b661 - m.b751 <=", "m.b621 = Var(within=Binary,bounds=(0,1),initialize=0) m.b622 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b623 = Var(within=Binary,bounds=(0,1),initialize=0) m.b624", "- 0.9*log(1 + m.x292/(0.001 + 0.999*m.b616)))*(0.001 + 0.999* m.b616) <=", "Var(within=Binary,bounds=(0,1),initialize=0) m.b708 = Var(within=Binary,bounds=(0,1),initialize=0) m.b709 = Var(within=Binary,bounds=(0,1),initialize=0) m.b710 = Var(within=Binary,bounds=(0,1),initialize=0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x325 = Var(within=Reals,bounds=(0,None),initialize=0) m.x326 = Var(within=Reals,bounds=(0,None),initialize=0) m.x327 = Var(within=Reals,bounds=(0,None),initialize=0)", "Var(within=Reals,bounds=(None,None),initialize=0) m.x843 = Var(within=Reals,bounds=(None,None),initialize=0) m.x844 = Var(within=Reals,bounds=(None,None),initialize=0) m.x845 = Var(within=Reals,bounds=(None,None),initialize=0)", "m.x297 - m.x300 == 0) m.c295 = Constraint(expr= m.x55 -", "m.x336 = Var(within=Reals,bounds=(0,None),initialize=0) m.x337 = Var(within=Reals,bounds=(0,None),initialize=0) m.x338 = Var(within=Reals,bounds=(0,None),initialize=0) m.x339", "0) m.c947 = Constraint(expr= 2*m.b710 + m.x800 == 0) m.c948", "m.b664 - m.b676 >= 0) m.c1478 = Constraint(expr= m.b665 -", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x820 = Var(within=Reals,bounds=(None,None),initialize=0) m.x821 = Var(within=Reals,bounds=(None,None),initialize=0) m.x822 =", "<= 3.04984759446376) m.c630 = Constraint(expr= m.x414 + 3.04984759446376*m.b654 <= 3.04984759446376)", "m.x533 = Var(within=Reals,bounds=(0,None),initialize=0) m.x534 = Var(within=Reals,bounds=(0,None),initialize=0) m.x535 = Var(within=Reals,bounds=(0,None),initialize=0) m.x536", "== 0) m.c998 = Constraint(expr= 4*m.b761 + m.x851 == 0)", "<= 0.940066550763924) m.c690 = Constraint(expr= m.x492 + 0.940066550763924*m.b660 <= 0.940066550763924)", "m.x545 == 0) m.c798 = Constraint(expr= m.x183 - m.x543 -", "m.b607 >= 0) m.c1409 = Constraint(expr= m.b596 + m.b599 -", "- m.b624 + m.b642 + m.b645 >= 0) m.c1399 =", 
"m.c1414 = Constraint(expr= m.b604 - m.b613 >= 0) m.c1415 =", "Constraint(expr= m.x567 - 0.666992981045719*m.b672 <= 0) m.c811 = Constraint(expr= m.x568", "m.x206 - m.x584 - m.x587 == 0) m.c882 = Constraint(expr=", "m.x360 == 0) m.c346 = Constraint(expr= m.x361 == 0) m.c347", "0) m.c184 = Constraint(expr= m.x46 - m.x280 - m.x283 ==", "= Constraint(expr= m.b711 + m.b712 <= 1) m.c1157 = Constraint(expr=", "m.b771 <= 1) m.c1274 = Constraint(expr= m.b771 + m.b772 <=", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b738 = Var(within=Binary,bounds=(0,1),initialize=0) m.b739 = Var(within=Binary,bounds=(0,1),initialize=0) m.b740 =", "m.x513 - m.x516 == 0) m.c766 = Constraint(expr= m.x172 -", "= Constraint(expr= m.x420 == 0) m.c379 = Constraint(expr= m.x421 ==", "m.b632 - m.b722 <= 0) m.c1320 = Constraint(expr= - m.b632", "m.b623 - m.b644 >= 0) m.c1446 = Constraint(expr= m.b624 -", "0) m.c327 = Constraint(expr= m.x303 - 15*m.b624 <= 0) m.c328", "m.c1258 = Constraint(expr= m.b762 + m.b763 <= 1) m.c1259 =", "m.b626 - m.b653 >= 0) m.c1455 = Constraint(expr= m.b627 -", "<= 1) m.c1278 = Constraint(expr= m.b773 + m.b775 <= 1)", "m.x322 + 1.83548069293539*m.b613 <= 1.83548069293539) m.c230 = Constraint(expr=(m.x326/(0.001 + 0.999*m.b614)", "== 0) m.c38 = Constraint(expr= - m.x146 - m.x149 +", "+ m.b643 - m.b733 <= 0) m.c1331 = Constraint(expr= m.b644", "0) m.c683 = Constraint(expr= m.x479 + 1.18887736200171*m.b659 <= 1.18887736200171) m.c684", "- m.x302 - m.x305 == 0) m.c321 = Constraint(expr= m.x57", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b758 = Var(within=Binary,bounds=(0,1),initialize=0) m.b759 = Var(within=Binary,bounds=(0,1),initialize=0) m.b760 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x133 = Var(within=Reals,bounds=(0,None),initialize=0) m.x134 = Var(within=Reals,bounds=(0,None),initialize=0) m.x135 =", "m.x154 - m.x157 - m.x160 == 0) m.c44 = Constraint(expr=", "0) m.c355 = Constraint(expr= m.x310 - 15*m.b628 <= 0) m.c356", "m.b722 + m.b723 <= 1) 
m.c1176 = Constraint(expr= m.b722 +", "m.x170 - m.x171 - m.x172 + 80*m.x194 + 90*m.x195 +", "- m.x245 == 0) m.c144 = Constraint(expr= m.x30 - m.x243", "Constraint(expr=(m.x266/(0.001 + 0.999*m.b605) - 1.5*log(1 + m.x242/(0.001 + 0.999*m.b605)))*(0.001 +", "= Constraint(expr= m.x320 + 1.83548069293539*m.b611 <= 1.83548069293539) m.c228 = Constraint(expr=", "0) m.c1043 = Constraint(expr= m.b626 - m.b627 <= 0) m.c1044", "m.c1422 = Constraint(expr= m.b609 - m.b621 >= 0) m.c1423 =", "m.x225 - m.x228 == 0) m.c67 = Constraint(expr= m.x13 -", "Constraint(expr= m.x374 - 20*m.b629 <= 0) m.c396 = Constraint(expr= m.x375", "Constraint(expr= m.x150 - m.x489 - m.x492 == 0) m.c679 =", "m.b675 - m.b676 <= 0) m.c1094 = Constraint(expr= m.b677 -", "m.x388 = Var(within=Reals,bounds=(0,None),initialize=0) m.x389 = Var(within=Reals,bounds=(0,None),initialize=0) m.x390 = Var(within=Reals,bounds=(0,None),initialize=0) m.x391", "m.x550 - 0.705049913072943*m.b676 <= 0) m.c833 = Constraint(expr= m.x551 +", "<= 1.11894339953103) m.c610 = Constraint(expr= m.x463 + 1.11894339953103*m.b652 <= 1.11894339953103)", "m.b708 = Var(within=Binary,bounds=(0,1),initialize=0) m.b709 = Var(within=Binary,bounds=(0,1),initialize=0) m.b710 = Var(within=Binary,bounds=(0,1),initialize=0) m.b711", "+ 0.999*m.b635) - 0.7*log(1 + m.x362/(0.001 + 0.999*m.b635)))*(0.001 + 0.999*", "m.x185 == 0) m.c48 = Constraint(expr= m.x174 - m.x183 -", "m.b716 + m.b717 <= 1) m.c1164 = Constraint(expr= m.b716 +", "m.x424 = Var(within=Reals,bounds=(0,None),initialize=0) m.x425 = Var(within=Reals,bounds=(0,None),initialize=0) m.x426 = Var(within=Reals,bounds=(0,None),initialize=0) m.x427", "m.x418 == 0) m.c371 = Constraint(expr= m.x323 == 0) m.c372", "m.x538 = Var(within=Reals,bounds=(0,None),initialize=0) m.x539 = Var(within=Reals,bounds=(0,None),initialize=0) m.x540 = Var(within=Reals,bounds=(0,None),initialize=0) m.x541", "m.x578 = Var(within=Reals,bounds=(0,None),initialize=0) m.x579 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x580 = Var(within=Reals,bounds=(0,None),initialize=0) m.x581", "= Constraint(expr= m.b597 + m.b600 - m.b606 >= 0) m.c1408", "0) m.c844 = Constraint(expr=(m.x580/(0.001 + 0.999*m.b679) - log(1 + m.x529/(0.001", "m.b743 + m.b745 <= 1) m.c1222 = Constraint(expr= m.b744 +", "Constraint(expr= m.x453 - 1.04900943706034*m.b648 <= 0) m.c580 = Constraint(expr= m.x454", "1.26558121681553) m.c453 = Constraint(expr= m.x366 + 1.26558121681553*m.b636 <= 1.26558121681553) m.c454", "<= 0) m.c1088 = Constraint(expr= m.b671 - m.b672 <= 0)", "m.x156 - m.x159 == 0) m.c43 = Constraint(expr= m.x154 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x413 = Var(within=Reals,bounds=(0,None),initialize=0) m.x414 = Var(within=Reals,bounds=(0,None),initialize=0) m.x415 = Var(within=Reals,bounds=(0,None),initialize=0)", "== 0) m.c366 = Constraint(expr= - 0.9*m.x318 + m.x417 ==", "m.x45 = Var(within=Reals,bounds=(0,None),initialize=0) m.x46 = Var(within=Reals,bounds=(0,None),initialize=0) m.x47 = Var(within=Reals,bounds=(0,None),initialize=0) m.x48", "= Constraint(expr= m.x63 - m.x315 - m.x321 == 0) m.c217", "Var(within=Reals,bounds=(0,None),initialize=0) m.x135 = Var(within=Reals,bounds=(0,None),initialize=0) m.x136 = Var(within=Reals,bounds=(0,None),initialize=0) m.x137 = Var(within=Reals,bounds=(0,None),initialize=0)", "- m.x487 == 0) m.c653 = Constraint(expr= m.x470 - 1.18887736200171*m.b656", "m.b622 >= 0) m.c1424 = Constraint(expr= m.b608 - m.b623 >=", "- m.x290 - m.x293 == 0) m.c240 = Constraint(expr= m.x51", "<= 0) m.c152 = Constraint(expr= m.x245 + 4.45628648004517*m.b605 <= 4.45628648004517)", "= Constraint(expr= m.x576 == 0) m.c823 = Constraint(expr= m.x577 ==", "m.x270 - 2.54515263975353*m.b618 <= 0) m.c274 = Constraint(expr= m.x271 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x357 = Var(within=Reals,bounds=(0,None),initialize=0) m.x358 = Var(within=Reals,bounds=(0,None),initialize=0) m.x359 = 
Var(within=Reals,bounds=(0,None),initialize=0)", "+ 15*m.b681 <= 15) m.c889 = Constraint(expr= m.x559 + 15*m.b682", "m.x493 + 0.940066550763924*m.b661 <= 0.940066550763924) m.c692 = Constraint(expr= - 0.75*m.x494", "m.x539 = Var(within=Reals,bounds=(0,None),initialize=0) m.x540 = Var(within=Reals,bounds=(0,None),initialize=0) m.x541 = Var(within=Reals,bounds=(0,None),initialize=0) m.x542", "Constraint(expr= m.x234 == 0) m.c88 = Constraint(expr= m.x235 == 0)", "- m.x458 - m.x461 == 0) m.c597 = Constraint(expr= m.x132", "+ 0.994083415506506*m.b678 <= 0.994083415506506) m.c862 = Constraint(expr= m.x535 + 0.994083415506506*m.b679", "+ m.b690 <= 1) m.c1110 = Constraint(expr= m.b689 + m.b691", "m.b623 - m.b641 >= 0) m.c1443 = Constraint(expr= m.b624 -", "m.c707 = Constraint(expr= m.x494 - 0.940066550763924*m.b662 <= 0) m.c708 =", "- m.b704 <= 0) m.c1302 = Constraint(expr= - m.b614 +", "0) m.c1323 = Constraint(expr= - m.b635 + m.b636 - m.b726", "Constraint(expr= m.x217 == 0) m.c59 = Constraint(expr= m.x227 == 0)", "m.x167 == 0) m.c45 = Constraint(expr= m.x159 - m.x162 -", "= Constraint(expr= m.x118 - m.x430 - m.x433 == 0) m.c449", "40*m.x156 + 40*m.x157 - m.x170 - m.x171 - m.x172 +", "- 3.04984759446376*m.b652 <= 0) m.c602 = Constraint(expr= m.x407 + 3.04984759446376*m.b650", "Var(within=Reals,bounds=(0,None),initialize=0) m.x75 = Var(within=Reals,bounds=(0,None),initialize=0) m.x76 = Var(within=Reals,bounds=(0,None),initialize=0) m.x77 = Var(within=Reals,bounds=(0,None),initialize=0)", "+ 0.999*m.b607)))*(0.001 + 0.999* m.b607) <= 0) m.c137 = Constraint(expr=", "Var(within=Reals,bounds=(None,None),initialize=0) m.x832 = Var(within=Reals,bounds=(None,None),initialize=0) m.x833 = Var(within=Reals,bounds=(None,None),initialize=0) m.x834 = Var(within=Reals,bounds=(None,None),initialize=0)", "m.x105 - m.x108 == 0) m.c31 = Constraint(expr= m.x79 -", "m.x190 - m.x193 - m.x196 == 0) m.c53 = Constraint(expr=(m.x224/(0.001", "== 0) m.c267 = Constraint(expr= m.x42 - m.x270 - 
m.x276", ">= 0) m.c1475 = Constraint(expr= m.b662 - m.b674 >= 0)", "Constraint(expr= m.x63 - m.x315 - m.x321 == 0) m.c217 =", "+ m.b660 >= 0) m.c1459 = Constraint(expr= - m.b655 +", "+ 0.78338879230327*m.b656 <= 0.78338879230327) m.c663 = Constraint(expr= m.x486 + 0.78338879230327*m.b657", "Constraint(expr= m.x38 - m.x260 - m.x263 == 0) m.c120 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x142 = Var(within=Reals,bounds=(0,None),initialize=0) m.x143 = Var(within=Reals,bounds=(0,None),initialize=0) m.x144 =", "Var(within=Binary,bounds=(0,1),initialize=0) m.b630 = Var(within=Binary,bounds=(0,1),initialize=0) m.b631 = Var(within=Binary,bounds=(0,1),initialize=0) m.b632 = Var(within=Binary,bounds=(0,1),initialize=0)", "0) m.c1099 = Constraint(expr= m.b681 - m.b682 <= 0) m.c1100", "= Constraint(expr= m.x259 + 30*m.b610 <= 30) m.c197 = Constraint(expr=", "m.x784 = Var(within=Reals,bounds=(None,None),initialize=0) m.x785 = Var(within=Reals,bounds=(None,None),initialize=0) m.x786 = Var(within=Reals,bounds=(None,None),initialize=0) m.x787", "log(1 + m.x331/(0.001 + 0.999*m.b634)))*(0.001 + 0.999*m.b634) <= 0) m.c410", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x295 = Var(within=Reals,bounds=(0,None),initialize=0) m.x296 = Var(within=Reals,bounds=(0,None),initialize=0) m.x297 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x435 = Var(within=Reals,bounds=(0,None),initialize=0) m.x436 = Var(within=Reals,bounds=(0,None),initialize=0) m.x437 =", "Constraint(expr= m.b613 - m.b631 >= 0) m.c1433 = Constraint(expr= m.b614", "Constraint(expr= m.x79 - m.x358 - m.x361 == 0) m.c353 =", "m.c132 = Constraint(expr= m.x264 + 3.34221486003388*m.b603 <= 3.34221486003388) m.c133 =", "0) m.c1415 = Constraint(expr= m.b602 - m.b614 >= 0) m.c1416", "0.940066550763924*m.b662 <= 0.940066550763924) m.c711 = Constraint(expr= m.x498 + 0.940066550763924*m.b663 <=", "m.x378 == 0) m.c376 = Constraint(expr= m.x379 == 0) m.c377", "Var(within=Reals,bounds=(None,None),initialize=0) 
m.x865 = Var(within=Reals,bounds=(None,None),initialize=0) m.obj = Objective(expr= - m.x2 -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x533 = Var(within=Reals,bounds=(0,None),initialize=0) m.x534 = Var(within=Reals,bounds=(0,None),initialize=0) m.x535 =", "m.b638) <= 0) m.c462 = Constraint(expr=(m.x435/(0.001 + 0.999*m.b639) - 0.65*log(1", "+ 0.940066550763924*m.b665 <= 0.940066550763924) m.c738 = Constraint(expr= m.x504 + 0.940066550763924*m.b666", "<= 0) m.c1086 = Constraint(expr= m.b668 - m.b670 <= 0)", "= Constraint(expr= m.b728 + m.b730 <= 1) m.c1192 = Constraint(expr=", "Constraint(expr=(m.x526/(0.001 + 0.999*m.b667) - 1.5*log(1 + m.x502/(0.001 + 0.999*m.b667)))*(0.001 +", "m.x556 - m.x559 == 0) m.c881 = Constraint(expr= m.x206 -", "m.x252 == 0) m.c169 = Constraint(expr= m.x253 == 0) m.c170", "m.x138 == 0) m.c34 = Constraint(expr= m.x136 - m.x139 ==", "m.x83 = Var(within=Reals,bounds=(0,None),initialize=0) m.x84 = Var(within=Reals,bounds=(0,None),initialize=0) m.x85 = Var(within=Reals,bounds=(0,None),initialize=0) m.x86", "m.x444 == 0) m.c511 = Constraint(expr= m.x445 == 0) m.c512", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x76 = Var(within=Reals,bounds=(0,None),initialize=0) m.x77 = Var(within=Reals,bounds=(0,None),initialize=0) m.x78 =", "== 0) m.c847 = Constraint(expr= m.x535 == 0) m.c848 =", "- m.b743 - 4*m.b744 - m.b745 - 2*m.b746 - 5*m.b747", "0) m.c954 = Constraint(expr= 9*m.b717 + m.x807 == 0) m.c955", "Constraint(expr= m.x352 - 9*m.b625 <= 0) m.c335 = Constraint(expr= m.x353", "+ m.x820 == 0) m.c968 = Constraint(expr= 2*m.b731 + m.x821", "Constraint(expr= m.b668 - m.b670 <= 0) m.c1087 = Constraint(expr= m.b669", "= Constraint(expr= m.b752 + m.b753 <= 1) m.c1238 = Constraint(expr=", "m.b654 - m.b744 <= 0) m.c1342 = Constraint(expr= - m.b653", "- 2*m.b746 - 5*m.b747 - 2*m.b748 - 9*m.b749 - 2*m.b750", "Constraint(expr= - m.x249 + m.x279 == 0) m.c163 = Constraint(expr=", "== 0) m.c116 = Constraint(expr= m.x26 - m.x236 - m.x239", "- 
m.x420 == 0) m.c388 = Constraint(expr= m.x112 - m.x418", "m.x437 == 0) m.c483 = Constraint(expr= m.x120 - m.x435 -", "m.x371 = Var(within=Reals,bounds=(0,None),initialize=0) m.x372 = Var(within=Reals,bounds=(0,None),initialize=0) m.x373 = Var(within=Reals,bounds=(0,None),initialize=0) m.x374", "Constraint(expr= m.b670 - m.b682 >= 0) m.c1484 = Constraint(expr= m.b668", "m.x69 - m.x339 - m.x342 == 0) m.c271 = Constraint(expr=", "<= 1.18887736200171) m.c659 = Constraint(expr= m.x482 - 0.78338879230327*m.b656 <= 0)", "- m.x542 - m.x545 == 0) m.c798 = Constraint(expr= m.x183", "+ 2.30162356062425*m.b640 <= 2.30162356062425) m.c503 = Constraint(expr= - m.x386 +", "m.x403 == 0) m.c563 = Constraint(expr= m.x455 == 0) m.c564", "m.x66 = Var(within=Reals,bounds=(0,None),initialize=0) m.x67 = Var(within=Reals,bounds=(0,None),initialize=0) m.x68 = Var(within=Reals,bounds=(0,None),initialize=0) m.x69", "m.x504 + 0.940066550763924*m.b666 <= 0.940066550763924) m.c739 = Constraint(expr= m.x505 +", "Var(within=Reals,bounds=(0,None),initialize=0) m.x529 = Var(within=Reals,bounds=(0,None),initialize=0) m.x530 = Var(within=Reals,bounds=(0,None),initialize=0) m.x531 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.b692 + m.b694 <= 1) m.c1120 = Constraint(expr= m.b693 +", "<= 0) m.c1346 = Constraint(expr= m.b659 - m.b749 <= 0)", "0) m.c142 = Constraint(expr= m.x274 == 0) m.c143 = Constraint(expr=", "m.x779 = Var(within=Reals,bounds=(None,None),initialize=0) m.x780 = Var(within=Reals,bounds=(None,None),initialize=0) m.x781 = Var(within=Reals,bounds=(None,None),initialize=0) m.x782", "m.x521 = Var(within=Reals,bounds=(0,None),initialize=0) m.x522 = Var(within=Reals,bounds=(0,None),initialize=0) m.x523 = Var(within=Reals,bounds=(0,None),initialize=0) m.x524", "<= 1) m.c1262 = Constraint(expr= m.b765 + m.b766 <= 1)", "== 0) m.c761 = Constraint(expr= m.x167 - m.x506 - m.x509", "0) m.c1384 = Constraint(expr= - m.b616 + m.b634 >= 0)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x553 
= Var(within=Reals,bounds=(0,None),initialize=0) m.x554 = Var(within=Reals,bounds=(0,None),initialize=0) m.x555 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x313 = Var(within=Reals,bounds=(0,None),initialize=0) m.x314 = Var(within=Reals,bounds=(0,None),initialize=0) m.x315 = Var(within=Reals,bounds=(0,None),initialize=0) m.x316", "m.x409 = Var(within=Reals,bounds=(0,None),initialize=0) m.x410 = Var(within=Reals,bounds=(0,None),initialize=0) m.x411 = Var(within=Reals,bounds=(0,None),initialize=0) m.x412", "<= 0) m.c892 = Constraint(expr= m.x586 - 13.5*m.b682 <= 0)", "m.x98 = Var(within=Reals,bounds=(0,None),initialize=0) m.x99 = Var(within=Reals,bounds=(0,None),initialize=0) m.x100 = Var(within=Reals,bounds=(0,None),initialize=0) m.x101", "Constraint(expr= m.x414 == 0) m.c616 = Constraint(expr= m.x415 == 0)", "== 0) m.c852 = Constraint(expr= m.x177 - m.x528 - m.x534", "Constraint(expr=(m.x483/(0.001 + 0.999*m.b657) - log(1 + m.x471/(0.001 + 0.999*m.b657)))*(0.001 +", "0) m.c306 = Constraint(expr= m.x345 - 13.5*m.b621 <= 0) m.c307", "m.x25 = Var(within=Reals,bounds=(0,None),initialize=0) m.x26 = Var(within=Reals,bounds=(0,None),initialize=0) m.x27 = Var(within=Reals,bounds=(0,None),initialize=0) m.x28", "Constraint(expr= m.x396 + 9*m.b645 <= 9) m.c550 = Constraint(expr= m.x397", "- m.x281 == 0) m.c183 = Constraint(expr= m.x45 - m.x279", "== 0) m.c420 = Constraint(expr= m.x114 - m.x423 - m.x426", "- 0.842233385663186*m.b633 <= 0) m.c430 = Constraint(expr= m.x424 - 0.842233385663186*m.b634", "m.c745 = Constraint(expr= m.x532 + 0.994083415506506*m.b667 <= 0.994083415506506) m.c746 =", "m.c235 = Constraint(expr= m.x295 == 0) m.c236 = Constraint(expr= m.x332", "m.x313 == 0) m.c350 = Constraint(expr= m.x77 - m.x356 -", "m.x15 - m.x231 - m.x234 == 0) m.c94 = Constraint(expr=", "<= 1) m.c1204 = Constraint(expr= m.b735 + m.b736 <= 1)", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x785 = Var(within=Reals,bounds=(None,None),initialize=0) m.x786 = 
Var(within=Reals,bounds=(None,None),initialize=0) m.x787 =", "= Var(within=Reals,bounds=(None,None),initialize=0) m.x786 = Var(within=Reals,bounds=(None,None),initialize=0) m.x787 = Var(within=Reals,bounds=(None,None),initialize=0) m.x788 =", "m.c7 = Constraint(expr= - m.x13 - m.x16 + m.x19 ==", "m.x5 = Var(within=Reals,bounds=(0,None),initialize=0) m.x6 = Var(within=Reals,bounds=(0,None),initialize=0) m.x7 = Var(within=Reals,bounds=(0,None),initialize=0) m.x8", "m.x89 = Var(within=Reals,bounds=(0,20),initialize=0) m.x90 = Var(within=Reals,bounds=(0,20),initialize=0) m.x91 = Var(within=Reals,bounds=(0,20),initialize=0) m.x92", "- 1.18887736200171*m.b654 <= 0) m.c634 = Constraint(expr= m.x466 - 1.18887736200171*m.b655", "+ m.x94 == 0) m.c26 = Constraint(expr= m.x74 - m.x95", "0) m.c1306 = Constraint(expr= - m.b617 - m.b618 + m.b619", "m.c1480 = Constraint(expr= m.b667 - m.b679 >= 0) m.c1481 =", "8*m.b729 + m.x819 == 0) m.c967 = Constraint(expr= m.b730 +", "m.c904 = Constraint(expr= m.x595 == 0) m.c905 = Constraint(expr= m.x191", "m.x807 = Var(within=Reals,bounds=(None,None),initialize=0) m.x808 = Var(within=Reals,bounds=(None,None),initialize=0) m.x809 = Var(within=Reals,bounds=(None,None),initialize=0) m.x810", "m.x80 - m.x83 == 0) m.c21 = Constraint(expr= m.x69 -", "m.x355 == 0) m.c320 = Constraint(expr= m.x56 - m.x302 -", "m.c1159 = Constraint(expr= m.b713 + m.b714 <= 1) m.c1160 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x421 = Var(within=Reals,bounds=(0,None),initialize=0) m.x422 = Var(within=Reals,bounds=(0,None),initialize=0) m.x423 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= - m.b611 - m.b612 + m.b613 - m.b703 <=", "m.c1226 = Constraint(expr= m.b747 + m.b748 <= 1) m.c1227 =", "0.940066550763924) m.c711 = Constraint(expr= m.x498 + 0.940066550763924*m.b663 <= 0.940066550763924) m.c712", "Constraint(expr= m.b707 + m.b709 <= 1) m.c1147 = Constraint(expr= m.b707", "Constraint(expr= m.x87 - m.x375 - m.x378 == 0) m.c385 =", 
"m.c310 = Constraint(expr= m.x349 + 13.5*m.b622 <= 13.5) m.c311 =", "m.x268 - 2.54515263975353*m.b607 <= 0) m.c158 = Constraint(expr= m.x272 +", "m.x142 = Var(within=Reals,bounds=(0,None),initialize=0) m.x143 = Var(within=Reals,bounds=(0,None),initialize=0) m.x144 = Var(within=Reals,bounds=(0,None),initialize=0) m.x145", "Constraint(expr= m.x64 - m.x316 - m.x322 == 0) m.c218 =", "- 0.9*m.x297 + m.x345 == 0) m.c286 = Constraint(expr= -", "Constraint(expr= m.x323 == 0) m.c372 = Constraint(expr= m.x324 == 0)", "<= 0) m.c409 = Constraint(expr=(m.x424/(0.001 + 0.999*m.b634) - log(1 +", "<= 3.04984759446376) m.c576 = Constraint(expr= m.x402 + 3.04984759446376*m.b648 <= 3.04984759446376)", "<= 0) m.c573 = Constraint(expr= m.x399 - 3.04984759446376*m.b648 <= 0)", "Constraint(expr= m.b618 - m.b636 >= 0) m.c1438 = Constraint(expr= m.b619", "Constraint(expr= m.b623 - m.b624 <= 0) m.c1041 = Constraint(expr= m.b623", "m.c848 = Constraint(expr= m.x581 == 0) m.c849 = Constraint(expr= m.x582", "m.x195 = Var(within=Reals,bounds=(0,None),initialize=0) m.x196 = Var(within=Reals,bounds=(0,None),initialize=0) m.x197 = Var(within=Reals,bounds=(0,None),initialize=0) m.x198", "<= 4.45628648004517) m.c153 = Constraint(expr= m.x246 + 4.45628648004517*m.b606 <= 4.45628648004517)", "m.x155 = Var(within=Reals,bounds=(0,None),initialize=0) m.x156 = Var(within=Reals,bounds=(0,None),initialize=0) m.x157 = Var(within=Reals,bounds=(0,None),initialize=0) m.x158", "Constraint(expr= - 0.5*m.x256 + m.x280 == 0) m.c167 = Constraint(expr=", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x212 = Var(within=Reals,bounds=(0,None),initialize=0) m.x213 = Var(within=Reals,bounds=(0,None),initialize=0) m.x214 =", "= Constraint(expr= m.x132 - m.x459 - m.x462 == 0) m.c598", "0.999* m.b661) <= 0) m.c668 = Constraint(expr= m.x479 == 0)", "Constraint(expr= m.b695 + m.b696 <= 1) m.c1124 = Constraint(expr= m.b696", "<= 1) m.c1169 = Constraint(expr= m.b719 + m.b720 <= 1)", "m.x36 - m.x255 - m.x258 == 0) m.c181 = 
Constraint(expr=", "Constraint(expr= m.b761 + m.b762 <= 1) m.c1254 = Constraint(expr= m.b761", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b699 = Var(within=Binary,bounds=(0,1),initialize=0) m.b700 = Var(within=Binary,bounds=(0,1),initialize=0) m.b701 =", "+ 0.999*m.b599) - 1.2*log(1 + m.x218/(0.001 + 0.999*m.b599)))*(0.001 + 0.999*", "Constraint(expr= m.x327 - 1.32154609891348*m.b615 <= 0) m.c253 = Constraint(expr= m.x328", "Var(within=Reals,bounds=(0,None),initialize=0) m.x15 = Var(within=Reals,bounds=(0,None),initialize=0) m.x16 = Var(within=Reals,bounds=(0,None),initialize=0) m.x17 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.c406 = Constraint(expr= m.x421 + 20*m.b631 <= 20) m.c407 =", "Constraint(expr= m.x415 + 3.04984759446376*m.b655 <= 3.04984759446376) m.c632 = Constraint(expr= m.x464", "- m.x357 - m.x360 == 0) m.c352 = Constraint(expr= m.x79", "= Constraint(expr= m.b625 - m.b643 >= 0) m.c1445 = Constraint(expr=", "Var(within=Binary,bounds=(0,1),initialize=0) m.b753 = Var(within=Binary,bounds=(0,1),initialize=0) m.b754 = Var(within=Binary,bounds=(0,1),initialize=0) m.b755 = Var(within=Binary,bounds=(0,1),initialize=0)", "m.x808 = Var(within=Reals,bounds=(None,None),initialize=0) m.x809 = Var(within=Reals,bounds=(None,None),initialize=0) m.x810 = Var(within=Reals,bounds=(None,None),initialize=0) m.x811", "0) m.c898 = Constraint(expr= - 0.6*m.x562 + m.x592 == 0)", "Constraint(expr= m.b617 - m.b707 <= 0) m.c1305 = Constraint(expr= -", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x365 = Var(within=Reals,bounds=(0,None),initialize=0) m.x366 = Var(within=Reals,bounds=(0,None),initialize=0) m.x367 =", "0) m.c816 = Constraint(expr=(m.x573/(0.001 + 0.999*m.b675) - 0.9*log(1 + m.x549/(0.001", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b605 = Var(within=Binary,bounds=(0,1),initialize=0) m.b606 = Var(within=Binary,bounds=(0,1),initialize=0) m.b607 =", "Constraint(expr= m.x299 == 0) m.c288 = Constraint(expr= m.x300 == 0)", "Constraint(expr= 
m.x370 - 1.26558121681553*m.b640 <= 0) m.c488 = Constraint(expr= m.x371", "m.x470 = Var(within=Reals,bounds=(0,None),initialize=0) m.x471 = Var(within=Reals,bounds=(0,None),initialize=0) m.x472 = Var(within=Reals,bounds=(0,None),initialize=0) m.x473", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b712 = Var(within=Binary,bounds=(0,1),initialize=0) m.b713 = Var(within=Binary,bounds=(0,1),initialize=0) m.b714 =", "Var(within=Reals,bounds=(0,None),initialize=0) m.x60 = Var(within=Reals,bounds=(0,None),initialize=0) m.x61 = Var(within=Reals,bounds=(0,None),initialize=0) m.x62 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= m.x140 - m.x470 - m.x473 == 0) m.c648", "m.x595 + 9*m.b685 <= 9) m.c923 = Constraint(expr= 5*m.b686 +", "== 0) m.c931 = Constraint(expr= 4*m.b694 + m.x784 == 0)", "Constraint(expr= m.x582 + 0.690184503917672*m.b678 <= 0.690184503917672) m.c868 = Constraint(expr= m.x583", "m.c346 = Constraint(expr= m.x361 == 0) m.c347 = Constraint(expr= m.x59", "= Var(within=Binary,bounds=(0,1),initialize=0) m.b616 = Var(within=Binary,bounds=(0,1),initialize=0) m.b617 = Var(within=Binary,bounds=(0,1),initialize=0) m.b618 =", "0.999*m.b660)))*(0.001 + 0.999* m.b660) <= 0) m.c667 = Constraint(expr=(m.x490/(0.001 +", "+ 0.999*m.b666)))*(0.001 + 0.999* m.b666) <= 0) m.c721 = Constraint(expr=(m.x526/(0.001", "- 0.6*m.x304 + m.x352 == 0) m.c314 = Constraint(expr= m.x305", "= Constraint(expr= m.x518 - 0.705049913072943*m.b662 <= 0) m.c714 = Constraint(expr=", "+ m.b730 <= 1) m.c1192 = Constraint(expr= m.b729 + m.b730", "m.c913 = Constraint(expr= m.x562 - 15*m.b685 <= 0) m.c914 =", "m.b668 - m.b669 + m.b670 - m.b760 <= 0) m.c1358", "m.x844 == 0) m.c992 = Constraint(expr= 2*m.b755 + m.x845 ==", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x460 = Var(within=Reals,bounds=(0,None),initialize=0) m.x461 = Var(within=Reals,bounds=(0,None),initialize=0) m.x462 =", "m.x67 - m.x331 - m.x337 == 0) m.c419 = Constraint(expr=", "m.b685 <= 0) m.c1103 = 
Constraint(expr= m.b686 + m.b687 <=", "m.b748 <= 1) m.c1225 = Constraint(expr= m.b746 + m.b747 <=", "1.5*log(1 + m.x243/(0.001 + 0.999*m.b606)))*(0.001 + 0.999* m.b606) <= 0)", "m.x153 - m.x156 - m.x159 == 0) m.c43 = Constraint(expr=", "m.c950 = Constraint(expr= 4*m.b713 + m.x803 == 0) m.c951 =", "+ 0.999*m.b672) - 1.25*log(1 + m.x543/(0.001 + 0.999*m.b672)))*(0.001 + 0.999*", "m.b666) <= 0) m.c721 = Constraint(expr=(m.x526/(0.001 + 0.999*m.b667) - 1.5*log(1", "<= 0) m.c98 = Constraint(expr= m.x221 + 40*m.b599 <= 40)", "m.x347 == 0) m.c297 = Constraint(expr= m.x72 - m.x345 -", "m.c312 = Constraint(expr= - 0.6*m.x303 + m.x351 == 0) m.c313", "= Constraint(expr= - 0.9*m.x317 + m.x416 == 0) m.c366 =", "= Constraint(expr= m.x374 - 20*m.b629 <= 0) m.c396 = Constraint(expr=", "= Constraint(expr= m.b731 + m.b732 <= 1) m.c1196 = Constraint(expr=", "+ 0.705049913072943*m.b663 <= 0.705049913072943) m.c718 = Constraint(expr= m.x523 + 0.705049913072943*m.b664", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x389 = Var(within=Reals,bounds=(0,None),initialize=0) m.x390 = Var(within=Reals,bounds=(0,None),initialize=0) m.x391 =", "= Constraint(expr= m.x503 + 0.940066550763924*m.b665 <= 0.940066550763924) m.c738 = Constraint(expr=", "<= 0) m.c741 = Constraint(expr= m.x525 - 0.994083415506506*m.b666 <= 0)", "0.994083415506506) m.c863 = Constraint(expr= m.x578 - 0.690184503917672*m.b677 <= 0) m.c864", "m.x416 == 0) m.c366 = Constraint(expr= - 0.9*m.x318 + m.x417", "9*m.b645 <= 0) m.c547 = Constraint(expr= m.x394 - 9*m.b646 <=", "m.b606 = Var(within=Binary,bounds=(0,1),initialize=0) m.b607 = Var(within=Binary,bounds=(0,1),initialize=0) m.b608 = Var(within=Binary,bounds=(0,1),initialize=0) m.b609", "0) m.c606 = Constraint(expr= m.x459 - 1.11894339953103*m.b651 <= 0) m.c607", "- m.x347 == 0) m.c297 = Constraint(expr= m.x72 - m.x345", "m.c1266 = Constraint(expr= m.b767 + m.b769 <= 1) m.c1267 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x122 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x123 = Var(within=Reals,bounds=(0,None),initialize=0) m.x124 =", "= Constraint(expr= 2*m.b710 + m.x800 == 0) m.c948 = Constraint(expr=", "0) m.c696 = Constraint(expr= m.x498 == 0) m.c697 = Constraint(expr=", "= Constraint(expr= m.b611 - m.b629 >= 0) m.c1431 = Constraint(expr=", "m.x583 == 0) m.c851 = Constraint(expr= m.x176 - m.x527 -", "m.x271 - m.x277 == 0) m.c269 = Constraint(expr= m.x68 -", "m.c449 = Constraint(expr= m.x362 - 1.26558121681553*m.b635 <= 0) m.c450 =", "m.b599 == 1) m.c1374 = Constraint(expr= m.b597 + m.b600 ==", "== 0) m.c704 = Constraint(expr= m.x173 - m.x518 - m.x521", "m.x78 = Var(within=Reals,bounds=(0,None),initialize=0) m.x79 = Var(within=Reals,bounds=(0,None),initialize=0) m.x80 = Var(within=Reals,bounds=(0,None),initialize=0) m.x81", "- m.x53 - m.x56 - m.x59 == 0) m.c18 =", "- m.b736 <= 0) m.c1334 = Constraint(expr= m.b647 - m.b737", "- 0.9*m.x556 + m.x586 == 0) m.c872 = Constraint(expr= m.x557", "- 30*m.b609 <= 0) m.c193 = Constraint(expr= m.x256 - 30*m.b610", "1.26558121681553*m.b640 <= 1.26558121681553) m.c491 = Constraint(expr= m.x380 - 33.5*m.b638 <=", "m.x280 - m.x283 == 0) m.c185 = Constraint(expr= m.x248 -", "Var(within=Reals,bounds=(0,None),initialize=0) m.x141 = Var(within=Reals,bounds=(0,None),initialize=0) m.x142 = Var(within=Reals,bounds=(0,None),initialize=0) m.x143 = Var(within=Reals,bounds=(0,None),initialize=0)", "m.x500 = Var(within=Reals,bounds=(0,None),initialize=0) m.x501 = Var(within=Reals,bounds=(0,None),initialize=0) m.x502 = Var(within=Reals,bounds=(0,None),initialize=0) m.x503", "m.x290 - m.x293 == 0) m.c240 = Constraint(expr= m.x51 -", "0.940066550763924*m.b659 <= 0) m.c687 = Constraint(expr= m.x489 - 0.940066550763924*m.b660 <=", "m.x552 + 0.705049913072943*m.b675 <= 0.705049913072943) m.c835 = Constraint(expr= m.x553 +", "Constraint(expr= m.b665 - m.b667 <= 0) m.c1084 = Constraint(expr= m.b666", "m.x24 == 0) m.c10 = Constraint(expr= m.x19 - m.x22 -", 
"Constraint(expr= m.x552 + 0.705049913072943*m.b675 <= 0.705049913072943) m.c835 = Constraint(expr= m.x553", "Constraint(expr= m.x286 - 3.34221486003388*m.b613 <= 0) m.c221 = Constraint(expr= m.x287", "m.x358 - m.x361 == 0) m.c353 = Constraint(expr= m.x308 -", "15*m.b624 <= 15) m.c331 = Constraint(expr= m.x307 + 15*m.b625 <=", "Constraint(expr= m.x207 - m.x585 - m.x588 == 0) m.c883 =", "m.b651 + m.b652 - m.b742 <= 0) m.c1340 = Constraint(expr=", "m.x188 = Var(within=Reals,bounds=(0,None),initialize=0) m.x189 = Var(within=Reals,bounds=(0,None),initialize=0) m.x190 = Var(within=Reals,bounds=(0,None),initialize=0) m.x191", "m.b599 - m.b600 + m.b601 - m.b691 <= 0) m.c1289", "m.x234 = Var(within=Reals,bounds=(0,None),initialize=0) m.x235 = Var(within=Reals,bounds=(0,None),initialize=0) m.x236 = Var(within=Reals,bounds=(0,None),initialize=0) m.x237", "0) m.c1012 = Constraint(expr= 4*m.b775 + m.x865 == 0) m.c1013", "<= 0) m.c1043 = Constraint(expr= m.b626 - m.b627 <= 0)", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x436 = Var(within=Reals,bounds=(0,None),initialize=0) m.x437 = Var(within=Reals,bounds=(0,None),initialize=0) m.x438 =", "m.x287 == 0) m.c207 = Constraint(expr= m.x288 == 0) m.c208", "Constraint(expr= m.x269 - 2.54515263975353*m.b617 <= 0) m.c273 = Constraint(expr= m.x270", "m.x450 = Var(within=Reals,bounds=(0,None),initialize=0) m.x451 = Var(within=Reals,bounds=(0,None),initialize=0) m.x452 = Var(within=Reals,bounds=(0,None),initialize=0) m.x453", "== 0) m.c416 = Constraint(expr= m.x65 - m.x329 - m.x335", "Constraint(expr= m.b656 - m.b657 <= 0) m.c1074 = Constraint(expr= m.b656", "m.c164 = Constraint(expr= - 0.5*m.x254 + m.x278 == 0) m.c165", "m.x376 - m.x379 == 0) m.c386 = Constraint(expr= m.x110 -", "+ 30*m.b670 <= 30) m.c782 = Constraint(expr= m.x536 - 15*m.b668", "m.b681 - m.b771 <= 0) m.c1369 = Constraint(expr= - m.b680", "1) m.c1168 = Constraint(expr= m.b717 + m.b718 <= 1) m.c1169", "Var(within=Binary,bounds=(0,1),initialize=0) m.b745 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b746 = Var(within=Binary,bounds=(0,1),initialize=0) m.b747 = Var(within=Binary,bounds=(0,1),initialize=0)", "0) m.c512 = Constraint(expr= m.x95 - m.x386 - m.x389 ==", "== 0) m.c144 = Constraint(expr= m.x30 - m.x243 - m.x246", "Constraint(expr= m.x69 - m.x339 - m.x342 == 0) m.c271 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x84 = Var(within=Reals,bounds=(0,None),initialize=0) m.x85 = Var(within=Reals,bounds=(0,None),initialize=0) m.x86 =", "<= 1) m.c1161 = Constraint(expr= m.b713 + m.b715 <= 1)", "m.c1137 = Constraint(expr= m.b701 + m.b703 <= 1) m.c1138 =", "= Constraint(expr= m.x521 == 0) m.c699 = Constraint(expr= m.x522 ==", "m.c744 = Constraint(expr= m.x531 + 0.994083415506506*m.b666 <= 0.994083415506506) m.c745 =", "33.5) m.c496 = Constraint(expr= m.x385 + 33.5*m.b640 <= 33.5) m.c497", "+ m.b658 - m.b748 <= 0) m.c1346 = Constraint(expr= m.b659", "m.b670 = Var(within=Binary,bounds=(0,1),initialize=0) m.b671 = Var(within=Binary,bounds=(0,1),initialize=0) m.b672 = Var(within=Binary,bounds=(0,1),initialize=0) m.b673", "m.b621 - m.b622 <= 0) m.c1040 = Constraint(expr= m.b623 -", "= Constraint(expr= m.b717 + m.b718 <= 1) m.c1167 = Constraint(expr=", "0.480234946352917*m.b674 <= 0) m.c837 = Constraint(expr= m.x573 - 0.480234946352917*m.b675 <=", "m.c1278 = Constraint(expr= m.b773 + m.b775 <= 1) m.c1279 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x386 = Var(within=Reals,bounds=(0,None),initialize=0) m.x387 = Var(within=Reals,bounds=(0,None),initialize=0) m.x388 =", "m.c409 = Constraint(expr=(m.x424/(0.001 + 0.999*m.b634) - log(1 + m.x331/(0.001 +", "0) m.c670 = Constraint(expr= m.x481 == 0) m.c671 = Constraint(expr=", "<= 0) m.c1103 = Constraint(expr= m.b686 + m.b687 <= 1)", "== 0) m.c694 = Constraint(expr= - 0.75*m.x496 + m.x520 ==", "Constraint(expr= - m.x13 - m.x16 + m.x19 == 0) m.c8", "m.b704 + m.b705 <= 1) m.c1140 = Constraint(expr= m.b704 +", "0) m.c1405 = Constraint(expr= m.b598 + m.b601 - 
m.b604 >=", "m.x541 + 15*m.b670 <= 15) m.c788 = Constraint(expr=(m.x566/(0.001 + 0.999*m.b671)", "Constraint(expr= m.b716 + m.b718 <= 1) m.c1168 = Constraint(expr= m.b717", "m.c306 = Constraint(expr= m.x345 - 13.5*m.b621 <= 0) m.c307 =", "0) m.c1378 = Constraint(expr= - m.b604 + m.b613 + m.b616", "3.34221486003388) m.c133 = Constraint(expr= m.x265 + 3.34221486003388*m.b604 <= 3.34221486003388) m.c134", "Var(within=Reals,bounds=(0,None),initialize=0) m.x559 = Var(within=Reals,bounds=(0,None),initialize=0) m.x560 = Var(within=Reals,bounds=(0,None),initialize=0) m.x561 = Var(within=Reals,bounds=(0,None),initialize=0)", "Constraint(expr= m.x83 - m.x368 - m.x371 == 0) m.c477 =", "= Var(within=Reals,bounds=(0,None),initialize=0) m.x548 = Var(within=Reals,bounds=(0,None),initialize=0) m.x549 = Var(within=Reals,bounds=(0,None),initialize=0) m.x550 =", "3.04984759446376) m.c603 = Constraint(expr= m.x408 + 3.04984759446376*m.b651 <= 3.04984759446376) m.c604", "== 0) m.c928 = Constraint(expr= 6*m.b691 + m.x781 == 0)", "m.x595 == 0) m.c911 = Constraint(expr= m.x560 - 15*m.b683 <=", "m.x95 = Var(within=Reals,bounds=(0,None),initialize=0) m.x96 = Var(within=Reals,bounds=(0,None),initialize=0) m.x97 = Var(within=Reals,bounds=(0,None),initialize=0) m.x98", "<= 1) m.c1197 = Constraint(expr= m.b731 + m.b733 <= 1)", "Var(within=Reals,bounds=(0,None),initialize=0) m.x361 = Var(within=Reals,bounds=(0,None),initialize=0) m.x362 = Var(within=Reals,bounds=(0,None),initialize=0) m.x363 = Var(within=Reals,bounds=(0,None),initialize=0)", "= Constraint(expr= m.x290 - 3.34221486003388*m.b614 <= 0) m.c246 = Constraint(expr=", "= Constraint(expr= m.b644 - m.b734 <= 0) m.c1332 = Constraint(expr=", "Constraint(expr= m.x2 - m.x5 - m.x8 == 0) m.c3 =", "= Constraint(expr= m.x430 - 0.572481933717686*m.b637 <= 0) m.c458 = Constraint(expr=", "m.x196 = Var(within=Reals,bounds=(0,None),initialize=0) m.x197 = Var(within=Reals,bounds=(0,None),initialize=0) m.x198 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x199", ">= 0) m.c1474 = Constraint(expr= m.b664 - m.b673 >= 0)", "+ m.x806 == 0) m.c954 = Constraint(expr= 9*m.b717 + m.x807", "0) m.c1381 = Constraint(expr= - m.b613 + m.b631 >= 0)", "== 0) m.c934 = Constraint(expr= 5*m.b697 + m.x787 == 0)", "m.b680 >= 0) m.c1482 = Constraint(expr= m.b669 - m.b681 >=", "m.x501 - m.x504 == 0) m.c730 = Constraint(expr= m.x166 -", "Var(within=Binary,bounds=(0,1),initialize=0) m.b706 = Var(within=Binary,bounds=(0,1),initialize=0) m.b707 = Var(within=Binary,bounds=(0,1),initialize=0) m.b708 = Var(within=Binary,bounds=(0,1),initialize=0)", "- m.x248 - m.x251 == 0) m.c177 = Constraint(expr= m.x33", "- m.b717 <= 0) m.c1315 = Constraint(expr= - m.b626 -", "m.x7 - m.x214 - m.x217 == 0) m.c65 = Constraint(expr=" ]
[ "Great Resource') logs = ResourceChangeLog.query.all() self.assertIsNotNone(logs[-1].resource_id) self.assertIsNotNone(logs[-1].user_id) self.assertEqual(logs[-1].type, 'edit') rv", "self.assertEqual(c.name, response[0][\"category\"][\"name\"]) def test_add_category_to_resource(self): c = self.construct_category() r = self.construct_resource()", "% c.id, content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(1, len(response))", "response[0][\"id\"]) self.assertEqual(c.name, response[0][\"category\"][\"name\"]) def test_add_category_to_resource(self): c = self.construct_category() r =", "self.app.get('api/resource/%i' % r_id, content_type=\"application/json\") self.assert_success(rv) self.construct_admin_note(user=self.construct_user(), resource=r) elastic_index.remove_document(r, 'Resource') rv", "u = self.construct_user(email=\"<EMAIL>\", role=Role.admin) rv = self.app.get('api/resource/%i' % r.id, content_type=\"application/json\")", "1, content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(0, len(response)) def test_resource_change_log_types(self):", "life.') self.assertIsNotNone(response['id']) def test_get_resource_by_category(self): c = self.construct_category() r = self.construct_resource()", "rv = self.app.get('api/resource/%i' % r_id, content_type=\"application/json\") self.assert_success(rv) self.construct_admin_note(user=self.construct_user(), resource=r) elastic_index.remove_document(r,", "self.assertEqual(response[\"id\"], r_id) self.assertEqual(response[\"title\"], 'A+ Resource') self.assertEqual(response[\"description\"], 'A delightful Resource destined", "response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response[-1]['user_id'], u.id) def test_get_resource_change_log_by_user(self): r = self.construct_resource()", "self.construct_category(name=\"c3\") r = 
self.construct_resource() rc_data = [ { \"category_id\": c1.id", "self.assertEqual(len(response), 1) rv = self.app.get('api/resource/covid19/Visual_Aids', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True))", "response[0][\"resource_id\"]) self.assertEqual(2, len(response[0][\"resource\"][\"resource_categories\"])) self.assertEqual( \"c1\", response[0][\"resource\"][\"resource_categories\"][0][\"category\"] [\"name\"]) def test_category_resource_count(self): c", "db.session.add(cr) db.session.commit() rv = self.app.get( '/api/category/%i/resource' % c.id, content_type=\"application/json\", headers=self.logged_in_headers())", "self.assertEqual(response['title'], 'Super Great Resource') logs = ResourceChangeLog.query.all() self.assertIsNotNone(logs[-1].resource_id) self.assertIsNotNone(logs[-1].user_id) self.assertEqual(logs[-1].type,", "r_id, content_type=\"application/json\") self.assertEqual(404, rv.status_code) def test_delete_resource_with_admin_note_and_no_elastic_record(self): r = self.construct_resource() r_id", "and your car.' 
response['website'] = 'http://sartography.com' orig_date = response['last_updated'] rv", "self.assertIsNotNone(logs[-1].resource_id) self.assertIsNotNone(logs[-1].user_id) self.assertEqual(logs[-1].type, 'create') rv = self.app.get('api/resource/%i' % r['id'], content_type=\"application/json\")", "% r_id, content_type=\"application/json\") self.assertEqual(404, rv.status_code) def test_delete_resource_with_admin_note_and_no_elastic_record(self): r = self.construct_resource()", "response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response['title'], 'Edwarardos Lemonade and Oil Change') self.assertEqual(response['description'],", "and Great\", 'description': \"You need this resource in your life.\"}", "len(response)) def test_remove_category_from_resource(self): self.test_add_category_to_resource() rv = self.app.delete('/api/resource_category/%i' % 1) self.assert_success(rv)", "follow_redirects=True, headers=self.logged_in_headers(user=u)) self.assert_success(rv) rv = self.app.get('/api/resource/%i' % r['id'], content_type=\"application/json\") self.assert_success(rv)", "test_delete_resource(self): r = self.construct_resource() r_id = r.id rv = self.app.get('api/resource/%i'", "self.app.get('api/resource/%i' % r_id, content_type=\"application/json\") self.assert_success(rv) rv = self.app.delete('api/resource/%i' % r_id,", "= self.construct_resource() rc_data = [ { \"category_id\": c1.id }, {", "need this resource in your life.') self.assertIsNotNone(response['id']) def test_get_resource_by_category(self): c", "= self.app.get('api/resource/%i' % r_id, content_type=\"application/json\") self.assert_success(rv) rv = self.app.delete('api/resource/%i' %", "rv = self.app.post( '/api/resource_category', data=self.jsonify(rc_data), content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True))", "{ \"category_id\": c1.id }, { \"category_id\": c2.id }, { \"category_id\":", "% r_id, content_type=\"application/json\") 
self.assert_success(rv) self.construct_admin_note(user=self.construct_user(), resource=r) elastic_index.remove_document(r, 'Resource') rv =", "Resource that is Super and Great\", 'description': \"You need this", "content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) rv = self.app.get('api/resource/%i' % r_id, content_type=\"application/json\") self.assertEqual(404,", "self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 4) rv = self.app.get('api/resource/covid19/Free_educational_resources', content_type=\"application/json\")", "r = self.construct_resource() u = self.construct_user(email=\"<EMAIL>\", role=Role.admin) rv = self.app.get('api/resource/%i'", "= json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 1) def test_is_uva_education_content(self): self.construct_resource(is_draft=True, title='Autism at UVA',", "= self.app.get( '/api/category/%i/resource' % c.id, content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) response =", "resource in your life.\", 'organization_name': \"Resource Org\"} rv = self.app.post('api/resource',", "Great\", 'description': \"You need this resource in your life.\"} rv", "'Resource of Resources') self.assertEqual(response['description'], 'You need this resource in your", "\"c1\", response[0][\"resource\"][\"resource_categories\"][0][\"category\"] [\"name\"]) def test_category_resource_count(self): c = self.construct_category() r =", "% r.id, content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response[-1]['user_id'], u.id)", "your life.') self.assertIsNotNone(response['id']) def test_get_resource_by_category(self): c = self.construct_category() r =", "self.assertEqual(response[-1]['resource_id'], r.id) def test_covid19_resource_lists(self): 
self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Free_educational_resources']) self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Free_educational_resources']) self.construct_resource(covid19_categories=['COVID-19_for_Autism',", "cr2]) db.session.commit() rv = self.app.get( '/api/category/%i/resource' % c.id, content_type=\"application/json\", headers=self.logged_in_headers())", "<reponame>sartography/star-drive import unittest from flask import json from tests.base_test import", "{'title': \"Resource of Resources\", 'description': \"You need this resource in", "from app.model.resource import Resource from app.model.resource_category import ResourceCategory from app.model.resource_change_log", "data=self.jsonify(rc_data), content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(1, len(response)) def test_remove_category_from_resource(self):", "content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(c.id, response[\"category_id\"]) self.assertEqual(r.id, response[\"resource_id\"]) def", "self.assert_success(rv) rv = self.app.get('/api/resource/%i' % r_id, content_type=\"application/json\") self.assert_success(rv) response =", "self.construct_resource(is_draft=False, title='Healthy Eating', is_uva_education_content=True) self.construct_resource(is_draft=True, title='Autism and the Arts', is_uva_education_content=False)", "= ResourceCategory(resource=r, category=c, type='resource') db.session.add(cr) db.session.commit() rv = self.app.get( '/api/category/%i'", "Resource destined to create rejoicing') def test_modify_resource_basics(self): self.construct_resource() r =", "r.id, data=self.jsonify(rc_data), content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(3, len(response)) rc_data", "Oil Change') 
self.assertEqual(response['description'], 'Better fluids for you and your car.')", "= self.app.delete('api/resource/%i' % r_id, content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) rv = self.app.get('api/resource/%i'", "= self.app.post( '/api/resource/%i/category' % r.id, data=self.jsonify(rc_data), content_type=\"application/json\") self.assert_success(rv) response =", "self.construct_resource(is_draft=True, title='Autism at UVA', is_uva_education_content=True) self.construct_resource(is_draft=False, title='Healthy Eating', is_uva_education_content=True) self.construct_resource(is_draft=True,", "rv = self.app.post( '/api/resource/%i/category' % r.id, data=self.jsonify(rc_data), content_type=\"application/json\") self.assert_success(rv) response", "import db, elastic_index from app.model.resource import Resource from app.model.resource_category import", "import Resource from app.model.resource_category import ResourceCategory from app.model.resource_change_log import ResourceChangeLog", "db.session.commit() rv = self.app.get( '/api/resource/%i/category' % r.id, content_type=\"application/json\") self.assert_success(rv) response", "content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(r.id, response[0][\"resource_id\"]) self.assertEqual(2, len(response[0][\"resource\"][\"resource_categories\"]))", "= 'Edwarardos Lemonade and Oil Change' response['description'] = 'Better fluids", "% c.id, content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(1, response[\"resource_count\"]) def", "is Super and Great\", 'description': \"You need this resource in", "\"category_id\": c2.id }, { \"category_id\": c3.id }, ] rv =", "self.assertEqual(logs[-1].type, 'delete') def test_get_resource_change_log_by_resource(self): r = self.construct_resource() u = 
self.construct_user(email=\"<EMAIL>\",", "= self.construct_resource() u = self.construct_user(email=\"<EMAIL>\", role=Role.admin) rv = self.app.get('api/resource/%i' %", "self.assertEqual(response[-1]['user_id'], u.id) def test_get_resource_change_log_by_user(self): r = self.construct_resource() u = self.construct_user(email=\"<EMAIL>\",", "test_get_resource_change_log_by_user(self): r = self.construct_resource() u = self.construct_user(email=\"<EMAIL>\", role=Role.admin) rv =", "= self.construct_category(name=\"c1\") c2 = self.construct_category(name=\"c2\") r = self.construct_resource() cr =", "def test_set_all_categories_on_resource(self): c1 = self.construct_category(name=\"c1\") c2 = self.construct_category(name=\"c2\") c3 =", "= self.app.get('api/resource/%i' % r_id, content_type=\"application/json\") self.assertEqual(404, rv.status_code) def test_create_resource(self): resource", "is_uva_education_content=True) self.construct_resource(is_draft=False, title='Healthy Eating', is_uva_education_content=True) self.construct_resource(is_draft=True, title='Autism and the Arts',", "the Arts', is_uva_education_content=False) self.construct_resource(is_draft=False, title='Autism One', is_uva_education_content=True) self.construct_resource(is_draft=False, title='Two', is_uva_education_content=False)", "self.assertEqual(c.id, response[0][\"id\"]) self.assertEqual(c.name, response[0][\"category\"][\"name\"]) def test_add_category_to_resource(self): c = self.construct_category() r", "= self.app.post('api/resource', data=self.jsonify(r), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers()) self.assert_success(rv) logs = ResourceChangeLog.query.all()", "rv = self.app.get('api/resource/covid19/Edu-tainment', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 4)", "= r.id rv = self.app.get('api/resource/%i' % r_id, content_type=\"application/json\") 
self.assert_success(rv) rv", "c.id, content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(1, len(response)) self.assertEqual(r.id,", "self.assertEqual( \"c1\", response[0][\"resource\"][\"resource_categories\"][0][\"category\"] [\"name\"]) def test_category_resource_count(self): c = self.construct_category() r", "logs = ResourceChangeLog.query.all() self.assertIsNotNone(logs[-1].resource_id) self.assertIsNotNone(logs[-1].user_id) self.assertEqual(logs[-1].type, 'delete') def test_get_resource_change_log_by_resource(self): r", "= self.app.get('/api/resource/%i' % r['id'], content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response['title'],", "self.construct_resource(is_draft=False, title='Two', is_uva_education_content=False) rv = self.app.get('api/resource/education', content_type=\"application/json\") self.assert_success(rv) response =", "self.construct_user(email=\"<EMAIL>\", role=Role.admin) r = {'id': 258, 'title': \"A Resource that", "r_id = r.id rv = self.app.get('api/resource/%i' % r_id, content_type=\"application/json\") self.assert_success(rv)", "of Resources\", 'description': \"You need this resource in your life.\",", "test_get_category_by_resource(self): c = self.construct_category() r = self.construct_resource() cr = ResourceCategory(resource=r,", "4) rv = self.app.get('api/resource/covid19/Free_educational_resources', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response),", "rv = self.app.get( '/api/category/%i/resource' % c.id, content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) response", "response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 1) def test_is_uva_education_content(self): 
self.construct_resource(is_draft=True, title='Autism at", "rv = self.app.post('api/resource', data=self.jsonify(resource), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers()) self.assert_success(rv) response =", "{ \"category_id\": c3.id }, ] rv = self.app.post( '/api/resource/%i/category' %", "self.assertEqual(len(response), 2) rv = self.app.get('api/resource/covid19/Supports_with_Living', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True))", "= db.session.query(Resource).first() self.assertIsNotNone(r) r_id = r.id rv = self.app.get('/api/resource/%i' %", "'title': \"A Resource that is Super and Great\", 'description': \"You", "[{\"category_id\": c1.id}] rv = self.app.post( '/api/resource/%i/category' % r.id, data=self.jsonify(rc_data), content_type=\"application/json\")", "response[0][\"category\"][\"name\"]) def test_add_category_to_resource(self): c = self.construct_category() r = self.construct_resource() rc_data", "response = json.loads(rv.get_data(as_text=True)) response['title'] = 'Super Great Resource' rv =", "test_add_category_to_resource(self): c = self.construct_category() r = self.construct_resource() rc_data = {\"resource_id\":", "data=self.jsonify(response), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers(user=u)) self.assert_success(rv) rv = self.app.get('/api/user/%i/resource_change_log' % u.id,", "from tests.base_test import BaseTest from app import db, elastic_index from", "cr = ResourceCategory(resource=r, category=c, type='resource') db.session.add(cr) db.session.commit() rv = self.app.get(", "rv = self.app.get( '/api/resource/%i/category' % 1, content_type=\"application/json\") self.assert_success(rv) response =", "self.assertEqual(2, len(response[0][\"resource\"][\"resource_categories\"])) self.assertEqual( \"c1\", response[0][\"resource\"][\"resource_categories\"][0][\"category\"] [\"name\"]) def 
test_category_resource_count(self): c =", "json.loads(rv.get_data(as_text=True)) self.assertEqual(response['title'], 'Edwarardos Lemonade and Oil Change') self.assertEqual(response['description'], 'Better fluids", "self.app.get('api/resource/covid19/Supports_with_Living', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 1) rv =", "ResourceChangeLog from app.model.user import Role class TestResources(BaseTest, unittest.TestCase): def test_resource_basics(self):", "ResourceCategory(resource=r, category=c2, type='resource') db.session.add_all([cr, cr2]) db.session.commit() rv = self.app.get( '/api/category/%i/resource'", "rc_data = [ { \"category_id\": c1.id }, { \"category_id\": c2.id", "delightful Resource destined to create rejoicing') def test_modify_resource_basics(self): self.construct_resource() r", "json.loads(rv.get_data(as_text=True)) self.assertEqual(0, len(response)) def test_resource_change_log_types(self): u = self.construct_user(email=\"<EMAIL>\", role=Role.admin) r", "r['id'], content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) response['title'] = 'Super Great", "= json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 5) rv = self.app.get('api/resource/covid19/Edu-tainment', content_type=\"application/json\") self.assert_success(rv) response", "content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(1, len(response)) self.assertEqual(r.id, response[0][\"resource_id\"])", "json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 5) rv = self.app.get('api/resource/covid19/Edu-tainment', content_type=\"application/json\") self.assert_success(rv) response =", "'/api/resource/%i/category' % r.id, data=self.jsonify(rc_data), content_type=\"application/json\") self.assert_success(rv) 
response = json.loads(rv.get_data(as_text=True)) self.assertEqual(3,", "r_id = r.id rv = self.app.get('/api/resource/%i' % r_id, content_type=\"application/json\") response", "self.construct_resource() u = self.construct_user(email=\"<EMAIL>\", role=Role.admin) rv = self.app.get('api/resource/%i' % r.id,", "r = self.construct_resource() cr = ResourceCategory(resource=r, category=c, type='resource') db.session.add(cr) db.session.commit()", "Resource') self.assertEqual(response[\"description\"], 'A delightful Resource destined to create rejoicing') def", "that is Super and Great\", 'description': \"You need this resource", "app.model.resource_change_log import ResourceChangeLog from app.model.user import Role class TestResources(BaseTest, unittest.TestCase):", "self.app.post( '/api/resource/%i/category' % r.id, data=self.jsonify(rc_data), content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True))", "headers=self.logged_in_headers()) self.assert_success(rv) logs = ResourceChangeLog.query.all() self.assertIsNotNone(logs[-1].resource_id) self.assertIsNotNone(logs[-1].user_id) self.assertEqual(logs[-1].type, 'delete') def", "this resource in your life.\", 'organization_name': \"Resource Org\"} rv =", "= self.construct_resource() cr = ResourceCategory(resource=r, category=c, type='resource') cr2 = ResourceCategory(resource=r,", "self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Visual_Aids']) self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Health_and_Telehealth']) rv = self.app.get('api/resource/covid19/COVID-19_for_Autism', content_type=\"application/json\")", "2) rv = self.app.get('api/resource/covid19/Supports_with_Living', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response),", "= self.app.get('api/resource/%i' % r_id, content_type=\"application/json\") 
self.assert_success(rv) self.construct_admin_note(user=self.construct_user(), resource=r) elastic_index.remove_document(r, 'Resource')", "self.assertEqual(response['description'], 'You need this resource in your life.') self.assertIsNotNone(response['id']) def", "self.assertEqual(r.id, response[0][\"resource_id\"]) self.assertEqual(r.description, response[0][\"resource\"][\"description\"]) def test_get_resource_by_category_includes_category_details(self): c = self.construct_category(name=\"c1\") c2", "r_id, content_type=\"application/json\") self.assertEqual(404, rv.status_code) def test_create_resource(self): resource = {'title': \"Resource", "UVA', is_uva_education_content=True) self.construct_resource(is_draft=False, title='Healthy Eating', is_uva_education_content=True) self.construct_resource(is_draft=True, title='Autism and the", "ResourceChangeLog.query.all() self.assertIsNotNone(logs[-1].resource_id) self.assertIsNotNone(logs[-1].user_id) self.assertEqual(logs[-1].type, 'delete') def test_get_resource_change_log_by_resource(self): r = self.construct_resource()", "r.id rv = self.app.get('api/resource/%i' % r_id, content_type=\"application/json\") self.assert_success(rv) self.construct_admin_note(user=self.construct_user(), resource=r)", "c3.id }, ] rv = self.app.post( '/api/resource/%i/category' % r.id, data=self.jsonify(rc_data),", "def test_create_resource(self): resource = {'title': \"Resource of Resources\", 'description': \"You", "Oil Change' response['description'] = 'Better fluids for you and your", "self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response[-1]['user_id'], u.id) def test_get_resource_change_log_by_user(self): r =", "r = self.construct_resource() r_id = r.id rv = self.app.get('api/resource/%i' %", "import unittest from flask import json from tests.base_test import BaseTest", "self.construct_resource() r_id = r.id rv = self.app.get('api/resource/%i' % r_id, content_type=\"application/json\")", 
"rv = self.app.get( '/api/resource/%i/category' % r.id, content_type=\"application/json\") self.assert_success(rv) response =", "self.construct_resource() cr = ResourceCategory(resource=r, category=c, type='resource') db.session.add(cr) db.session.commit() rv =", "is_uva_education_content=True) self.construct_resource(is_draft=False, title='Two', is_uva_education_content=False) rv = self.app.get('api/resource/education', content_type=\"application/json\") self.assert_success(rv) response", "self.assertEqual(1, response[\"resource_count\"]) def test_get_category_by_resource(self): c = self.construct_category() r = self.construct_resource()", "unittest from flask import json from tests.base_test import BaseTest from", "len(response)) self.assertEqual(r.id, response[0][\"resource_id\"]) self.assertEqual(r.description, response[0][\"resource\"][\"description\"]) def test_get_resource_by_category_includes_category_details(self): c = self.construct_category(name=\"c1\")", "json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 4) rv = self.app.get('api/resource/covid19/Free_educational_resources', content_type=\"application/json\") self.assert_success(rv) response =", "self.assertEqual(404, rv.status_code) def test_create_resource(self): resource = {'title': \"Resource of Resources\",", "'/api/resource_category', data=self.jsonify(rc_data), content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(c.id, response[\"category_id\"]) self.assertEqual(r.id,", "self.assertEqual(response[\"description\"], 'A delightful Resource destined to create rejoicing') def test_modify_resource_basics(self):", "this resource in your life.\"} rv = self.app.post('api/resource', data=self.jsonify(r), content_type=\"application/json\",", "for you and your car.' 
response['website'] = 'http://sartography.com' orig_date =", "self.assertEqual(response['website'], 'http://sartography.com') self.assertNotEqual(orig_date, response['last_updated']) def test_delete_resource(self): r = self.construct_resource() r_id", "self.app.delete('api/resource/%i' % r_id, content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) rv = self.app.get('api/resource/%i' %", "def test_delete_resource_with_admin_note_and_no_elastic_record(self): r = self.construct_resource() r_id = r.id rv =", "db.session.add(cr) db.session.commit() rv = self.app.get( '/api/resource/%i/category' % r.id, content_type=\"application/json\") self.assert_success(rv)", "content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(1, len(response)) def test_remove_category_from_resource(self): self.test_add_category_to_resource()", "self.app.post('api/resource', data=self.jsonify(resource), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers()) self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response['title'],", "= json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 4) rv = self.app.get('api/resource/covid19/Free_educational_resources', content_type=\"application/json\") self.assert_success(rv) response", "rv = self.app.get('api/resource/covid19/Supports_with_Living', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 1)", "'Supports_with_Living']) self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Visual_Aids']) self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Health_and_Telehealth']) rv = self.app.get('api/resource/covid19/COVID-19_for_Autism',", "= 'Better fluids for you and your car.' 
response['website'] =", "c = self.construct_category(name=\"c1\") c2 = self.construct_category(name=\"c2\") r = self.construct_resource() cr", "test_get_resource_by_category(self): c = self.construct_category() r = self.construct_resource() cr = ResourceCategory(resource=r,", "1) self.assert_success(rv) rv = self.app.get( '/api/resource/%i/category' % 1, content_type=\"application/json\") self.assert_success(rv)", "'organization_name': \"Resource Org\"} rv = self.app.post('api/resource', data=self.jsonify(resource), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers())", "headers=self.logged_in_headers(user=u)) self.assert_success(rv) rv = self.app.get('/api/resource/%i/change_log' % r.id, content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv)", "self.assert_success(rv) rv = self.app.delete('api/resource/%i' % r_id, content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) rv", "'/api/category/%i/resource' % c.id, content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(1,", "{'id': 258, 'title': \"A Resource that is Super and Great\",", "r.id, \"category_id\": c.id} rv = self.app.post( '/api/resource_category', data=self.jsonify(rc_data), content_type=\"application/json\") self.assert_success(rv)", "% r['id'], data=self.jsonify(response), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers(user=u)) self.assert_success(rv) rv = self.app.get('/api/resource/%i'", "rv = self.app.get('/api/resource/%i/change_log' % r.id, content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) response =", "self.construct_resource(is_draft=False, title='Autism One', is_uva_education_content=True) self.construct_resource(is_draft=False, title='Two', is_uva_education_content=False) rv = 
self.app.get('api/resource/education',", "tests.base_test import BaseTest from app import db, elastic_index from app.model.resource", "response = json.loads(rv.get_data(as_text=True)) self.assertEqual(3, len(response)) rc_data = [{\"category_id\": c1.id}] rv", "content_type=\"application/json\") self.assert_success(rv) rv = self.app.delete('api/resource/%i' % r_id, content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv)", "self.assertIsNotNone(logs[-1].user_id) self.assertEqual(logs[-1].type, 'delete') def test_get_resource_change_log_by_resource(self): r = self.construct_resource() u =", "self.app.get('/api/resource/%i' % r_id, follow_redirects=True, content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response[\"id\"],", "self.assertIsNotNone(r) r_id = r.id rv = self.app.get('/api/resource/%i' % r_id, content_type=\"application/json\")", "'Better fluids for you and your car.') self.assertEqual(response['website'], 'http://sartography.com') self.assertNotEqual(orig_date,", "category=c, type='resource') db.session.add(cr) db.session.commit() rv = self.app.get( '/api/resource/%i/category' % r.id,", "= self.app.get('api/resource/%i' % r['id'], content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) response['title']", "self.construct_resource() cr = ResourceCategory(resource=r, category=c, type='resource') cr2 = ResourceCategory(resource=r, category=c2,", "rv = self.app.get('/api/resource/%i' % r['id'], content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True))", "Lemonade and Oil Change') self.assertEqual(response['description'], 'Better fluids for you and", "= 'Super Great Resource' rv = self.app.put('/api/resource/%i' % r['id'], data=self.jsonify(response),", "self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) 
self.assertEqual(response[-1]['resource_id'], r.id) def test_covid19_resource_lists(self): self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Free_educational_resources'])", "json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 1) rv = self.app.get('api/resource/covid19/Health_and_Telehealth', content_type=\"application/json\") self.assert_success(rv) response =", "rv = self.app.get( '/api/category/%i' % c.id, content_type=\"application/json\") self.assert_success(rv) response =", "% r['id'], content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response['title'], 'Super Great", "self.app.get( '/api/resource/%i/category' % 1, content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(0,", "follow_redirects=True, headers=self.logged_in_headers()) self.assert_success(rv) rv = self.app.get('/api/resource/%i' % r_id, content_type=\"application/json\") self.assert_success(rv)", "{\"resource_id\": r.id, \"category_id\": c.id} rv = self.app.post( '/api/resource_category', data=self.jsonify(rc_data), content_type=\"application/json\")", "self.assertEqual(3, len(response)) rc_data = [{\"category_id\": c1.id}] rv = self.app.post( '/api/resource/%i/category'", "'/api/resource/%i/category' % 1, content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(0, len(response))", "self.app.get('api/resource/covid19/COVID-19_for_Autism', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 5) rv =", "self.construct_resource(is_draft=True, title='Autism and the Arts', is_uva_education_content=False) self.construct_resource(is_draft=False, title='Autism One', is_uva_education_content=True)", "json.loads(rv.get_data(as_text=True)) self.assertEqual(3, len(response)) 
rc_data = [{\"category_id\": c1.id}] rv = self.app.post(", "self.app.get( '/api/category/%i' % c.id, content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(1,", "% r.id, content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) response['title'] = 'Super", "'http://sartography.com') self.assertNotEqual(orig_date, response['last_updated']) def test_delete_resource(self): r = self.construct_resource() r_id =", "r.id, data=self.jsonify(response), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers(user=u)) self.assert_success(rv) rv = self.app.get('/api/resource/%i/change_log' %", "follow_redirects=True, headers=self.logged_in_headers()) self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response['title'], 'Resource of Resources')", "content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) response['title'] = 'Super Great Resource'", "json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 2) rv = self.app.get('api/resource', content_type=\"application/json\") self.assert_success(rv) response =", "ResourceCategory(resource=r, category=c, type='resource') db.session.add(cr) db.session.commit() rv = self.app.get( '/api/category/%i/resource' %", "Resources\", 'description': \"You need this resource in your life.\", 'organization_name':", "response[0][\"resource_id\"]) self.assertEqual(r.description, response[0][\"resource\"][\"description\"]) def test_get_resource_by_category_includes_category_details(self): c = self.construct_category(name=\"c1\") c2 =", "= self.app.get('/api/resource/%i/change_log' % r.id, content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) response = json.loads(rv.get_data(as_text=True))", "self.app.put('/api/resource/%i' % r['id'], 
data=self.jsonify(response), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers(user=u)) self.assert_success(rv) rv =", "\"Resource Org\"} rv = self.app.post('api/resource', data=self.jsonify(resource), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers()) self.assert_success(rv)", "json from tests.base_test import BaseTest from app import db, elastic_index", "= self.construct_resource() rc_data = {\"resource_id\": r.id, \"category_id\": c.id} rv =", "to create rejoicing') def test_modify_resource_basics(self): self.construct_resource() r = db.session.query(Resource).first() self.assertIsNotNone(r)", "% r_id, content_type=\"application/json\") self.assert_success(rv) rv = self.app.delete('api/resource/%i' % r_id, content_type=\"application/json\",", "from app import db, elastic_index from app.model.resource import Resource from", "response = json.loads(rv.get_data(as_text=True)) response['title'] = 'Edwarardos Lemonade and Oil Change'", "fluids for you and your car.') self.assertEqual(response['website'], 'http://sartography.com') self.assertNotEqual(orig_date, response['last_updated'])", "self.app.get( '/api/category/%i/resource' % c.id, content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) response = json.loads(rv.get_data(as_text=True))", "r.id) def test_covid19_resource_lists(self): self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Free_educational_resources']) self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Free_educational_resources']) self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment',", "c3 = self.construct_category(name=\"c3\") r = self.construct_resource() rc_data = [ {", "u.id) def test_get_resource_change_log_by_user(self): r = self.construct_resource() u = self.construct_user(email=\"<EMAIL>\", role=Role.admin)", "test_modify_resource_basics(self): 
self.construct_resource() r = db.session.query(Resource).first() self.assertIsNotNone(r) r_id = r.id rv", "= 'http://sartography.com' orig_date = response['last_updated'] rv = self.app.put('/api/resource/%i' % r_id,", "self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(3, len(response)) rc_data = [{\"category_id\": c1.id}]", "r.id, content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) response['title'] = 'Super Great", "'Edu-tainment', 'Visual_Aids']) self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Health_and_Telehealth']) rv = self.app.get('api/resource/covid19/COVID-19_for_Autism', content_type=\"application/json\") self.assert_success(rv)", "= self.app.post( '/api/resource_category', data=self.jsonify(rc_data), content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(c.id,", "in your life.') self.assertIsNotNone(response['id']) def test_get_resource_by_category(self): c = self.construct_category() r", "= self.construct_category() r = self.construct_resource() rc_data = {\"resource_id\": r.id, \"category_id\":", "content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(0, len(response)) def test_resource_change_log_types(self): u", "data=self.jsonify(rc_data), content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(c.id, response[\"category_id\"]) self.assertEqual(r.id, response[\"resource_id\"])", "r = {'id': 258, 'title': \"A Resource that is Super", "title='Autism One', is_uva_education_content=True) self.construct_resource(is_draft=False, title='Two', is_uva_education_content=False) rv = self.app.get('api/resource/education', content_type=\"application/json\")", "your car.' 
response['website'] = 'http://sartography.com' orig_date = response['last_updated'] rv =", "= json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 1) rv = self.app.get('api/resource/covid19/Visual_Aids', content_type=\"application/json\") self.assert_success(rv) response", "test_resource_basics(self): self.construct_resource() r = db.session.query(Resource).first() self.assertIsNotNone(r) r_id = r.id rv", "r['id'], data=self.jsonify(response), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers(user=u)) self.assert_success(rv) rv = self.app.get('/api/resource/%i' %", "= self.construct_category(name=\"c1\") c2 = self.construct_category(name=\"c2\") c3 = self.construct_category(name=\"c3\") r =", "content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 2) rv = self.app.get('api/resource/covid19/Supports_with_Living',", "= self.construct_category() r = self.construct_resource() cr = ResourceCategory(resource=r, category=c, type='resource')", "r_id, content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response['title'], 'Edwarardos Lemonade and", "'Health_and_Telehealth']) rv = self.app.get('api/resource/covid19/COVID-19_for_Autism', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response),", "response['last_updated'] rv = self.app.put('/api/resource/%i' % r_id, data=self.jsonify(response), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers())", "self.assertIsNotNone(logs[-1].resource_id) self.assertIsNotNone(logs[-1].user_id) self.assertEqual(logs[-1].type, 'edit') rv = self.app.delete('api/resource/%i' % r['id'], content_type=\"application/json\",", "follow_redirects=True, content_type=\"application/json\") self.assert_success(rv) response = 
json.loads(rv.get_data(as_text=True)) self.assertEqual(response[\"id\"], r_id) self.assertEqual(response[\"title\"], 'A+", "% r.id, data=self.jsonify(rc_data), content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(1, len(response))", "resource in your life.\"} rv = self.app.post('api/resource', data=self.jsonify(r), content_type=\"application/json\", follow_redirects=True,", "json.loads(rv.get_data(as_text=True)) self.assertEqual(response[\"id\"], r_id) self.assertEqual(response[\"title\"], 'A+ Resource') self.assertEqual(response[\"description\"], 'A delightful Resource", "response['last_updated']) def test_delete_resource(self): r = self.construct_resource() r_id = r.id rv", "self.app.delete('api/resource/%i' % r['id'], content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) logs = ResourceChangeLog.query.all() self.assertIsNotNone(logs[-1].resource_id)", "response['description'] = 'Better fluids for you and your car.' 
response['website']", "self.app.post('api/resource', data=self.jsonify(r), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers()) self.assert_success(rv) logs = ResourceChangeLog.query.all() self.assertIsNotNone(logs[-1].resource_id)", "r.id, data=self.jsonify(response), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers(user=u)) self.assert_success(rv) rv = self.app.get('/api/user/%i/resource_change_log' %", "rv = self.app.get('api/resource/covid19/Visual_Aids', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 1)", "= self.app.get('/api/resource/%i' % r_id, content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response['title'],", "json.loads(rv.get_data(as_text=True)) response['title'] = 'Super Great Resource' rv = self.app.put('/api/resource/%i' %", "app.model.resource import Resource from app.model.resource_category import ResourceCategory from app.model.resource_change_log import", "rv = self.app.get('/api/resource/%i' % r_id, follow_redirects=True, content_type=\"application/json\") self.assert_success(rv) response =", "self.assertIsNotNone(response['id']) def test_get_resource_by_category(self): c = self.construct_category() r = self.construct_resource() cr", "content_type=\"application/json\") self.assertEqual(404, rv.status_code) def test_delete_resource_with_admin_note_and_no_elastic_record(self): r = self.construct_resource() r_id =", "= self.construct_resource() cr = ResourceCategory(resource=r, category=c, type='resource') db.session.add(cr) db.session.commit() rv", "self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(r.id, response[0][\"resource_id\"]) self.assertEqual(2, len(response[0][\"resource\"][\"resource_categories\"])) self.assertEqual( \"c1\",", "= 
json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 2) rv = self.app.get('api/resource', content_type=\"application/json\") self.assert_success(rv) response", "ResourceCategory from app.model.resource_change_log import ResourceChangeLog from app.model.user import Role class", "self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 2) rv = self.app.get('api/resource', content_type=\"application/json\")", "self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) response['title'] = 'Super Great Resource' rv", "json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 1) rv = self.app.get('api/resource/covid19/Visual_Aids', content_type=\"application/json\") self.assert_success(rv) response =", "def test_get_category_by_resource(self): c = self.construct_category() r = self.construct_resource() cr =", "self.assertIsNotNone(logs[-1].user_id) self.assertEqual(logs[-1].type, 'edit') rv = self.app.delete('api/resource/%i' % r['id'], content_type=\"application/json\", headers=self.logged_in_headers())", "import json from tests.base_test import BaseTest from app import db,", "self.assertEqual(r.description, response[0][\"resource\"][\"description\"]) def test_get_resource_by_category_includes_category_details(self): c = self.construct_category(name=\"c1\") c2 = self.construct_category(name=\"c2\")", "self.app.get('api/resource/%i' % r['id'], content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) response['title'] =", "elastic_index from app.model.resource import Resource from app.model.resource_category import ResourceCategory from", "BaseTest from app import db, elastic_index from app.model.resource import Resource", "unittest.TestCase): def test_resource_basics(self): self.construct_resource() r = db.session.query(Resource).first() self.assertIsNotNone(r) r_id =", "self.construct_user(email=\"<EMAIL>\", role=Role.admin) rv = 
self.app.get('api/resource/%i' % r.id, content_type=\"application/json\") self.assert_success(rv) response", "json.loads(rv.get_data(as_text=True)) self.assertEqual(r.id, response[0][\"resource_id\"]) self.assertEqual(2, len(response[0][\"resource\"][\"resource_categories\"])) self.assertEqual( \"c1\", response[0][\"resource\"][\"resource_categories\"][0][\"category\"] [\"name\"]) def", "rv = self.app.get('api/resource/%i' % r_id, content_type=\"application/json\") self.assertEqual(404, rv.status_code) def test_delete_resource_with_admin_note_and_no_elastic_record(self):", "self.construct_category(name=\"c1\") c2 = self.construct_category(name=\"c2\") c3 = self.construct_category(name=\"c3\") r = self.construct_resource()", "is_uva_education_content=True) self.construct_resource(is_draft=True, title='Autism and the Arts', is_uva_education_content=False) self.construct_resource(is_draft=False, title='Autism One',", "from flask import json from tests.base_test import BaseTest from app", "response[0][\"resource\"][\"resource_categories\"][0][\"category\"] [\"name\"]) def test_category_resource_count(self): c = self.construct_category() r = self.construct_resource()", "= json.loads(rv.get_data(as_text=True)) self.assertEqual(1, response[\"resource_count\"]) def test_get_category_by_resource(self): c = self.construct_category() r", "% c.id, content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(r.id, response[0][\"resource_id\"])", "c = self.construct_category() r = self.construct_resource() rc_data = {\"resource_id\": r.id,", "response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 5) rv = self.app.get('api/resource/covid19/Edu-tainment', content_type=\"application/json\") self.assert_success(rv)", "response['title'] = 'Edwarardos Lemonade and Oil Change' response['description'] = 'Better", "cr = ResourceCategory(resource=r, category=c, 
type='resource') cr2 = ResourceCategory(resource=r, category=c2, type='resource')", "= json.loads(rv.get_data(as_text=True)) response['title'] = 'Super Great Resource' rv = self.app.put('/api/resource/%i'", "Resource' rv = self.app.put('/api/resource/%i' % r['id'], data=self.jsonify(response), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers(user=u))", "= self.app.get('api/resource/covid19/Visual_Aids', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 1) rv", "category=c2, type='resource') db.session.add_all([cr, cr2]) db.session.commit() rv = self.app.get( '/api/category/%i/resource' %", "rv = self.app.get('/api/resource/%i' % r_id, content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True))", "json.loads(rv.get_data(as_text=True)) self.assertEqual(1, len(response)) self.assertEqual(c.id, response[0][\"id\"]) self.assertEqual(c.name, response[0][\"category\"][\"name\"]) def test_add_category_to_resource(self): c", "r['id'], content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) logs = ResourceChangeLog.query.all() self.assertIsNotNone(logs[-1].resource_id) self.assertIsNotNone(logs[-1].user_id) self.assertEqual(logs[-1].type,", "def test_delete_resource(self): r = self.construct_resource() r_id = r.id rv =", "self.app.get('api/resource/%i' % r.id, content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) response['title'] =", "import Role class TestResources(BaseTest, unittest.TestCase): def test_resource_basics(self): self.construct_resource() r =", "type='resource') db.session.add_all([cr, cr2]) db.session.commit() rv = self.app.get( '/api/category/%i/resource' % c.id,", "\"category_id\": c3.id }, ] rv = self.app.post( '/api/resource/%i/category' % r.id,", "self.app.put('/api/resource/%i' % r_id, 
data=self.jsonify(response), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers()) self.assert_success(rv) rv =", "type='resource') db.session.add(cr) db.session.commit() rv = self.app.get( '/api/resource/%i/category' % r.id, content_type=\"application/json\")", "}, { \"category_id\": c2.id }, { \"category_id\": c3.id }, ]", "test_delete_resource_with_admin_note_and_no_elastic_record(self): r = self.construct_resource() r_id = r.id rv = self.app.get('api/resource/%i'", "self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response[\"id\"], r_id) self.assertEqual(response[\"title\"], 'A+ Resource') self.assertEqual(response[\"description\"],", "= self.app.get('/api/resource/%i' % r_id, content_type=\"application/json\") response = json.loads(rv.get_data(as_text=True)) response['title'] =", "self.assertEqual(404, rv.status_code) def test_delete_resource_with_admin_note_and_no_elastic_record(self): r = self.construct_resource() r_id = r.id", "follow_redirects=True, headers=self.logged_in_headers(user=u)) self.assert_success(rv) rv = self.app.get('/api/resource/%i/change_log' % r.id, content_type=\"application/json\", headers=self.logged_in_headers())", "= r.id rv = self.app.get('/api/resource/%i' % r_id, follow_redirects=True, content_type=\"application/json\") self.assert_success(rv)", "u = self.construct_user(email=\"<EMAIL>\", role=Role.admin) r = {'id': 258, 'title': \"A", "c1 = self.construct_category(name=\"c1\") c2 = self.construct_category(name=\"c2\") c3 = self.construct_category(name=\"c3\") r", "category=c, type='resource') db.session.add(cr) db.session.commit() rv = self.app.get( '/api/category/%i/resource' % c.id,", "need this resource in your life.\", 'organization_name': \"Resource Org\"} rv", "= {\"resource_id\": r.id, \"category_id\": c.id} rv = self.app.post( '/api/resource_category', data=self.jsonify(rc_data),", "rc_data = [{\"category_id\": c1.id}] rv = self.app.post( 
'/api/resource/%i/category' % r.id,", "'Edu-tainment', 'Health_and_Telehealth']) rv = self.app.get('api/resource/covid19/COVID-19_for_Autism', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True))", "% r_id, follow_redirects=True, content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response[\"id\"], r_id)", "type='resource') db.session.add(cr) db.session.commit() rv = self.app.get( '/api/category/%i/resource' % c.id, content_type=\"application/json\",", "Great Resource' rv = self.app.put('/api/resource/%i' % r['id'], data=self.jsonify(response), content_type=\"application/json\", follow_redirects=True,", "in your life.\"} rv = self.app.post('api/resource', data=self.jsonify(r), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers())", "db, elastic_index from app.model.resource import Resource from app.model.resource_category import ResourceCategory", "'Edu-tainment', 'Free_educational_resources']) self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Supports_with_Living']) self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Visual_Aids']) self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment',", "= json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 1) rv = self.app.get('api/resource/covid19/Health_and_Telehealth', content_type=\"application/json\") self.assert_success(rv) response", "= ResourceCategory(resource=r, category=c, type='resource') cr2 = ResourceCategory(resource=r, category=c2, type='resource') db.session.add_all([cr,", "test_create_resource(self): resource = {'title': \"Resource of Resources\", 'description': \"You need", "content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response['title'], 'Edwarardos Lemonade 
and Oil", "json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 1) def test_is_uva_education_content(self): self.construct_resource(is_draft=True, title='Autism at UVA', is_uva_education_content=True)", "self.assertEqual(1, len(response)) self.assertEqual(c.id, response[0][\"id\"]) self.assertEqual(c.name, response[0][\"category\"][\"name\"]) def test_add_category_to_resource(self): c =", "r['id'], content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response['title'], 'Super Great Resource')", "'description': \"You need this resource in your life.\", 'organization_name': \"Resource", "'create') rv = self.app.get('api/resource/%i' % r['id'], content_type=\"application/json\") self.assert_success(rv) response =", "response = json.loads(rv.get_data(as_text=True)) self.assertEqual(r.id, response[0][\"resource_id\"]) self.assertEqual(2, len(response[0][\"resource\"][\"resource_categories\"])) self.assertEqual( \"c1\", response[0][\"resource\"][\"resource_categories\"][0][\"category\"]", "Resource' rv = self.app.put('/api/resource/%i' % r.id, data=self.jsonify(response), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers(user=u))", "orig_date = response['last_updated'] rv = self.app.put('/api/resource/%i' % r_id, data=self.jsonify(response), content_type=\"application/json\",", "title='Two', is_uva_education_content=False) rv = self.app.get('api/resource/education', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True))", "content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(1, len(response)) self.assertEqual(c.id, response[0][\"id\"]) self.assertEqual(c.name,", "rv = self.app.get('api/resource/%i' % r['id'], content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True))", 
"self.assertIsNotNone(r) r_id = r.id rv = self.app.get('/api/resource/%i' % r_id, follow_redirects=True,", "content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response['title'], 'Super Great Resource') logs", "r_id) self.assertEqual(response[\"title\"], 'A+ Resource') self.assertEqual(response[\"description\"], 'A delightful Resource destined to", "= json.loads(rv.get_data(as_text=True)) self.assertEqual(1, len(response)) self.assertEqual(r.id, response[0][\"resource_id\"]) self.assertEqual(r.description, response[0][\"resource\"][\"description\"]) def test_get_resource_by_category_includes_category_details(self):", "= json.loads(rv.get_data(as_text=True)) self.assertEqual(response[-1]['resource_id'], r.id) def test_covid19_resource_lists(self): self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Free_educational_resources']) self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment',", "import BaseTest from app import db, elastic_index from app.model.resource import", "self.assertEqual(r.id, response[0][\"resource_id\"]) self.assertEqual(2, len(response[0][\"resource\"][\"resource_categories\"])) self.assertEqual( \"c1\", response[0][\"resource\"][\"resource_categories\"][0][\"category\"] [\"name\"]) def test_category_resource_count(self):", "self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Health_and_Telehealth']) rv = self.app.get('api/resource/covid19/COVID-19_for_Autism', content_type=\"application/json\") self.assert_success(rv) response =", "self.assertEqual(len(response), 4) rv = self.app.get('api/resource/covid19/Free_educational_resources', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True))", "= ResourceChangeLog.query.all() self.assertIsNotNone(logs[-1].resource_id) self.assertIsNotNone(logs[-1].user_id) self.assertEqual(logs[-1].type, 'create') rv = 
self.app.get('api/resource/%i' %", "type='resource') cr2 = ResourceCategory(resource=r, category=c2, type='resource') db.session.add_all([cr, cr2]) db.session.commit() rv", "db.session.add(cr) db.session.commit() rv = self.app.get( '/api/category/%i' % c.id, content_type=\"application/json\") self.assert_success(rv)", "}, ] rv = self.app.post( '/api/resource/%i/category' % r.id, data=self.jsonify(rc_data), content_type=\"application/json\")", "rv = self.app.delete('api/resource/%i' % r['id'], content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) logs =", "and Oil Change' response['description'] = 'Better fluids for you and", "self.app.get('api/resource/%i' % r_id, content_type=\"application/json\") self.assertEqual(404, rv.status_code) def test_create_resource(self): resource =", "resource in your life.') self.assertIsNotNone(response['id']) def test_get_resource_by_category(self): c = self.construct_category()", "test_resource_change_log_types(self): u = self.construct_user(email=\"<EMAIL>\", role=Role.admin) r = {'id': 258, 'title':", "self.assert_success(rv) rv = self.app.get( '/api/resource/%i/category' % 1, content_type=\"application/json\") self.assert_success(rv) response", "u.id, content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response[-1]['resource_id'], r.id) def", "app.model.resource_category import ResourceCategory from app.model.resource_change_log import ResourceChangeLog from app.model.user import", "self.assertIsNotNone(logs[-1].user_id) self.assertEqual(logs[-1].type, 'create') rv = self.app.get('api/resource/%i' % r['id'], content_type=\"application/json\") self.assert_success(rv)", "db.session.query(Resource).first() self.assertIsNotNone(r) r_id = r.id rv = self.app.get('/api/resource/%i' % r_id,", "self.app.post( '/api/resource_category', data=self.jsonify(rc_data), 
content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(c.id, response[\"category_id\"])", "self.construct_admin_note(user=self.construct_user(), resource=r) elastic_index.remove_document(r, 'Resource') rv = self.app.delete('api/resource/%i' % r_id, content_type=\"application/json\",", "c = self.construct_category() r = self.construct_resource() cr = ResourceCategory(resource=r, category=c,", "response[\"resource_count\"]) def test_get_category_by_resource(self): c = self.construct_category() r = self.construct_resource() cr", "258, 'title': \"A Resource that is Super and Great\", 'description':", "r.id, content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response[-1]['user_id'], u.id) def", "response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 4) rv = self.app.get('api/resource/covid19/Free_educational_resources', content_type=\"application/json\") self.assert_success(rv)", "self.assertNotEqual(orig_date, response['last_updated']) def test_delete_resource(self): r = self.construct_resource() r_id = r.id", "content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) logs = ResourceChangeLog.query.all() self.assertIsNotNone(logs[-1].resource_id) self.assertIsNotNone(logs[-1].user_id) self.assertEqual(logs[-1].type, 'delete')", "c2 = self.construct_category(name=\"c2\") r = self.construct_resource() cr = ResourceCategory(resource=r, category=c,", "and the Arts', is_uva_education_content=False) self.construct_resource(is_draft=False, title='Autism One', is_uva_education_content=True) self.construct_resource(is_draft=False, title='Two',", "'description': \"You need this resource in your life.\"} rv =", "'Edwarardos Lemonade and Oil Change') self.assertEqual(response['description'], 'Better fluids for you", "1) rv = 
self.app.get('api/resource/covid19/Health_and_Telehealth', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response),", "self.app.get('api/resource/covid19/Health_and_Telehealth', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 1) def test_is_uva_education_content(self):", "= {'title': \"Resource of Resources\", 'description': \"You need this resource", "}, { \"category_id\": c3.id }, ] rv = self.app.post( '/api/resource/%i/category'", "= response['last_updated'] rv = self.app.put('/api/resource/%i' % r_id, data=self.jsonify(response), content_type=\"application/json\", follow_redirects=True,", "r = self.construct_resource() rc_data = {\"resource_id\": r.id, \"category_id\": c.id} rv", "= json.loads(rv.get_data(as_text=True)) self.assertEqual(response['title'], 'Resource of Resources') self.assertEqual(response['description'], 'You need this", "db.session.commit() rv = self.app.get( '/api/category/%i/resource' % c.id, content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv)", "json.loads(rv.get_data(as_text=True)) self.assertEqual(response[-1]['resource_id'], r.id) def test_covid19_resource_lists(self): self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Free_educational_resources']) self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Free_educational_resources'])", "self.assertEqual(1, len(response)) self.assertEqual(r.id, response[0][\"resource_id\"]) self.assertEqual(r.description, response[0][\"resource\"][\"description\"]) def test_get_resource_by_category_includes_category_details(self): c =", "response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 1) rv = self.app.get('api/resource/covid19/Health_and_Telehealth', content_type=\"application/json\") self.assert_success(rv)", "you 
and your car.' response['website'] = 'http://sartography.com' orig_date = response['last_updated']", "= self.app.get( '/api/category/%i' % c.id, content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True))", "response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 1) rv = self.app.get('api/resource/covid19/Visual_Aids', content_type=\"application/json\") self.assert_success(rv)", "destined to create rejoicing') def test_modify_resource_basics(self): self.construct_resource() r = db.session.query(Resource).first()", "1) def test_is_uva_education_content(self): self.construct_resource(is_draft=True, title='Autism at UVA', is_uva_education_content=True) self.construct_resource(is_draft=False, title='Healthy", "logs = ResourceChangeLog.query.all() self.assertIsNotNone(logs[-1].resource_id) self.assertIsNotNone(logs[-1].user_id) self.assertEqual(logs[-1].type, 'create') rv = self.app.get('api/resource/%i'", "you and your car.') self.assertEqual(response['website'], 'http://sartography.com') self.assertNotEqual(orig_date, response['last_updated']) def test_delete_resource(self):", "content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 4) rv = self.app.get('api/resource/covid19/Free_educational_resources',", "r = self.construct_resource() cr = ResourceCategory(resource=r, category=c, type='resource') cr2 =", "self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 1) rv = self.app.get('api/resource/covid19/Visual_Aids', content_type=\"application/json\")", "len(response)) rc_data = [{\"category_id\": c1.id}] rv = self.app.post( '/api/resource/%i/category' %", "% 1, content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(0, len(response)) def", "= self.app.get( '/api/resource/%i/category' % 1, 
content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True))", "\"category_id\": c1.id }, { \"category_id\": c2.id }, { \"category_id\": c3.id", "category=c, type='resource') db.session.add(cr) db.session.commit() rv = self.app.get( '/api/category/%i' % c.id,", "c1.id }, { \"category_id\": c2.id }, { \"category_id\": c3.id },", "\"A Resource that is Super and Great\", 'description': \"You need", "json.loads(rv.get_data(as_text=True)) self.assertEqual(response['title'], 'Super Great Resource') logs = ResourceChangeLog.query.all() self.assertIsNotNone(logs[-1].resource_id) self.assertIsNotNone(logs[-1].user_id)", "'Super Great Resource') logs = ResourceChangeLog.query.all() self.assertIsNotNone(logs[-1].resource_id) self.assertIsNotNone(logs[-1].user_id) self.assertEqual(logs[-1].type, 'edit')", "ResourceChangeLog.query.all() self.assertIsNotNone(logs[-1].resource_id) self.assertIsNotNone(logs[-1].user_id) self.assertEqual(logs[-1].type, 'edit') rv = self.app.delete('api/resource/%i' % r['id'],", "= json.loads(rv.get_data(as_text=True)) self.assertEqual(1, len(response)) self.assertEqual(c.id, response[0][\"id\"]) self.assertEqual(c.name, response[0][\"category\"][\"name\"]) def test_add_category_to_resource(self):", "'/api/category/%i/resource' % c.id, content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(r.id,", "car.') self.assertEqual(response['website'], 'http://sartography.com') self.assertNotEqual(orig_date, response['last_updated']) def test_delete_resource(self): r = self.construct_resource()", "resource = {'title': \"Resource of Resources\", 'description': \"You need this", "self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 1) rv = self.app.get('api/resource/covid19/Health_and_Telehealth', content_type=\"application/json\")", "% r_id, 
data=self.jsonify(response), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers()) self.assert_success(rv) rv = self.app.get('/api/resource/%i'", "response['title'] = 'Super Great Resource' rv = self.app.put('/api/resource/%i' % r.id,", "headers=self.logged_in_headers()) self.assert_success(rv) rv = self.app.get('api/resource/%i' % r_id, content_type=\"application/json\") self.assertEqual(404, rv.status_code)", "json.loads(rv.get_data(as_text=True)) self.assertEqual(response['title'], 'Resource of Resources') self.assertEqual(response['description'], 'You need this resource", "self.assertEqual(r.id, response[\"resource_id\"]) def test_set_all_categories_on_resource(self): c1 = self.construct_category(name=\"c1\") c2 = self.construct_category(name=\"c2\")", "] rv = self.app.post( '/api/resource/%i/category' % r.id, data=self.jsonify(rc_data), content_type=\"application/json\") self.assert_success(rv)", "= r.id rv = self.app.get('api/resource/%i' % r_id, content_type=\"application/json\") self.assert_success(rv) self.construct_admin_note(user=self.construct_user(),", "'Free_educational_resources']) self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Supports_with_Living']) self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Visual_Aids']) self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Health_and_Telehealth'])", "test_is_uva_education_content(self): self.construct_resource(is_draft=True, title='Autism at UVA', is_uva_education_content=True) self.construct_resource(is_draft=False, title='Healthy Eating', is_uva_education_content=True)", "c.id, content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(r.id, response[0][\"resource_id\"]) self.assertEqual(2,", "= self.app.get('/api/resource/%i' % r_id, follow_redirects=True, 
content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True))", "app import db, elastic_index from app.model.resource import Resource from app.model.resource_category", "ResourceChangeLog.query.all() self.assertIsNotNone(logs[-1].resource_id) self.assertIsNotNone(logs[-1].user_id) self.assertEqual(logs[-1].type, 'create') rv = self.app.get('api/resource/%i' % r['id'],", "% r['id'], content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) response['title'] = 'Super", "= ResourceCategory(resource=r, category=c2, type='resource') db.session.add_all([cr, cr2]) db.session.commit() rv = self.app.get(", "= json.loads(rv.get_data(as_text=True)) self.assertEqual(c.id, response[\"category_id\"]) self.assertEqual(r.id, response[\"resource_id\"]) def test_set_all_categories_on_resource(self): c1 =", "= self.construct_category(name=\"c2\") r = self.construct_resource() cr = ResourceCategory(resource=r, category=c, type='resource')", "self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(1, response[\"resource_count\"]) def test_get_category_by_resource(self): c =", "= ResourceChangeLog.query.all() self.assertIsNotNone(logs[-1].resource_id) self.assertIsNotNone(logs[-1].user_id) self.assertEqual(logs[-1].type, 'edit') rv = self.app.delete('api/resource/%i' %", "content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(3, len(response)) rc_data = [{\"category_id\":", "test_remove_category_from_resource(self): self.test_add_category_to_resource() rv = self.app.delete('/api/resource_category/%i' % 1) self.assert_success(rv) rv =", "= self.construct_user(email=\"<EMAIL>\", role=Role.admin) rv = self.app.get('api/resource/%i' % r.id, content_type=\"application/json\") self.assert_success(rv)", "json.loads(rv.get_data(as_text=True)) self.assertEqual(1, len(response)) 
self.assertEqual(r.id, response[0][\"resource_id\"]) self.assertEqual(r.description, response[0][\"resource\"][\"description\"]) def test_get_resource_by_category_includes_category_details(self): c", "import ResourceCategory from app.model.resource_change_log import ResourceChangeLog from app.model.user import Role", "self.assertEqual(len(response), 1) def test_is_uva_education_content(self): self.construct_resource(is_draft=True, title='Autism at UVA', is_uva_education_content=True) self.construct_resource(is_draft=False,", "def test_resource_basics(self): self.construct_resource() r = db.session.query(Resource).first() self.assertIsNotNone(r) r_id = r.id", "your car.') self.assertEqual(response['website'], 'http://sartography.com') self.assertNotEqual(orig_date, response['last_updated']) def test_delete_resource(self): r =", "headers=self.logged_in_headers()) self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response['title'], 'Resource of Resources') self.assertEqual(response['description'],", "headers=self.logged_in_headers()) self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(1, len(response)) self.assertEqual(r.id, response[0][\"resource_id\"]) self.assertEqual(r.description,", "content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers(user=u)) self.assert_success(rv) rv = self.app.get('/api/user/%i/resource_change_log' % u.id, content_type=\"application/json\",", "for you and your car.') self.assertEqual(response['website'], 'http://sartography.com') self.assertNotEqual(orig_date, response['last_updated']) def", "in your life.\", 'organization_name': \"Resource Org\"} rv = self.app.post('api/resource', data=self.jsonify(resource),", "ResourceCategory(resource=r, category=c, type='resource') db.session.add(cr) db.session.commit() rv = self.app.get( '/api/resource/%i/category' %", "[\"name\"]) def test_category_resource_count(self): c = 
self.construct_category() r = self.construct_resource() cr", "rv = self.app.get('/api/user/%i/resource_change_log' % u.id, content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) response =", "response = json.loads(rv.get_data(as_text=True)) self.assertEqual(1, len(response)) self.assertEqual(c.id, response[0][\"id\"]) self.assertEqual(c.name, response[0][\"category\"][\"name\"]) def", "response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 2) rv = self.app.get('api/resource', content_type=\"application/json\") self.assert_success(rv)", "rv = self.app.get('api/resource/covid19/Health_and_Telehealth', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 1)", "create rejoicing') def test_modify_resource_basics(self): self.construct_resource() r = db.session.query(Resource).first() self.assertIsNotNone(r) r_id", "data=self.jsonify(rc_data), content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(3, len(response)) rc_data =", "life.\"} rv = self.app.post('api/resource', data=self.jsonify(r), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers()) self.assert_success(rv) logs", "'http://sartography.com' orig_date = response['last_updated'] rv = self.app.put('/api/resource/%i' % r_id, data=self.jsonify(response),", "json.loads(rv.get_data(as_text=True)) self.assertEqual(1, len(response)) def test_remove_category_from_resource(self): self.test_add_category_to_resource() rv = self.app.delete('/api/resource_category/%i' %", "self.assertEqual(logs[-1].type, 'edit') rv = self.app.delete('api/resource/%i' % r['id'], content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv)", "'A delightful Resource destined to create rejoicing') def test_modify_resource_basics(self): 
self.construct_resource()", "life.\", 'organization_name': \"Resource Org\"} rv = self.app.post('api/resource', data=self.jsonify(resource), content_type=\"application/json\", follow_redirects=True,", "Eating', is_uva_education_content=True) self.construct_resource(is_draft=True, title='Autism and the Arts', is_uva_education_content=False) self.construct_resource(is_draft=False, title='Autism", "self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 2) rv = self.app.get('api/resource/covid19/Supports_with_Living', content_type=\"application/json\")", "def test_is_uva_education_content(self): self.construct_resource(is_draft=True, title='Autism at UVA', is_uva_education_content=True) self.construct_resource(is_draft=False, title='Healthy Eating',", "'Edu-tainment', 'Supports_with_Living']) self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Visual_Aids']) self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Health_and_Telehealth']) rv =", "self.app.get('/api/resource/%i' % r_id, content_type=\"application/json\") response = json.loads(rv.get_data(as_text=True)) response['title'] = 'Edwarardos", "content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers(user=u)) self.assert_success(rv) rv = self.app.get('/api/resource/%i/change_log' % r.id, content_type=\"application/json\",", "'Super Great Resource' rv = self.app.put('/api/resource/%i' % r['id'], data=self.jsonify(response), content_type=\"application/json\",", "content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers(user=u)) self.assert_success(rv) rv = self.app.get('/api/resource/%i' % r['id'], content_type=\"application/json\")", "TestResources(BaseTest, unittest.TestCase): def test_resource_basics(self): self.construct_resource() r = db.session.query(Resource).first() self.assertIsNotNone(r) r_id", 
"self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Supports_with_Living']) self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Visual_Aids']) self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Health_and_Telehealth']) rv", "rv = self.app.delete('/api/resource_category/%i' % 1) self.assert_success(rv) rv = self.app.get( '/api/resource/%i/category'", "self.app.get('api/resource/covid19/Edu-tainment', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 4) rv =", "response[\"category_id\"]) self.assertEqual(r.id, response[\"resource_id\"]) def test_set_all_categories_on_resource(self): c1 = self.construct_category(name=\"c1\") c2 =", "response = json.loads(rv.get_data(as_text=True)) self.assertEqual(1, len(response)) def test_remove_category_from_resource(self): self.test_add_category_to_resource() rv =", "self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response['title'], 'Super Great Resource') logs =", "= self.app.put('/api/resource/%i' % r['id'], data=self.jsonify(response), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers(user=u)) self.assert_success(rv) rv", "\"Resource of Resources\", 'description': \"You need this resource in your", "r_id, content_type=\"application/json\") self.assert_success(rv) self.construct_admin_note(user=self.construct_user(), resource=r) elastic_index.remove_document(r, 'Resource') rv = self.app.delete('api/resource/%i'", "json.loads(rv.get_data(as_text=True)) response['title'] = 'Edwarardos Lemonade and Oil Change' response['description'] =", "from app.model.resource_category import ResourceCategory from app.model.resource_change_log import ResourceChangeLog from app.model.user", "def test_category_resource_count(self): c = self.construct_category() r = 
self.construct_resource() cr =", "\"category_id\": c.id} rv = self.app.post( '/api/resource_category', data=self.jsonify(rc_data), content_type=\"application/json\") self.assert_success(rv) response", "response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 2) rv = self.app.get('api/resource/covid19/Supports_with_Living', content_type=\"application/json\") self.assert_success(rv)", "self.construct_category() r = self.construct_resource() cr = ResourceCategory(resource=r, category=c, type='resource') db.session.add(cr)", "self.assert_success(rv) rv = self.app.get('/api/user/%i/resource_change_log' % u.id, content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) response", "rv = self.app.get('api/resource', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 5)", "rv = self.app.get('/api/resource/%i' % r_id, content_type=\"application/json\") response = json.loads(rv.get_data(as_text=True)) response['title']", "content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 1) rv = self.app.get('api/resource/covid19/Health_and_Telehealth',", "r_id, data=self.jsonify(response), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers()) self.assert_success(rv) rv = self.app.get('/api/resource/%i' %", "'/api/category/%i' % c.id, content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(1, response[\"resource_count\"])", "self.construct_category(name=\"c1\") c2 = self.construct_category(name=\"c2\") r = self.construct_resource() cr = ResourceCategory(resource=r,", "= json.loads(rv.get_data(as_text=True)) self.assertEqual(response['title'], 'Edwarardos Lemonade and Oil Change') self.assertEqual(response['description'], 'Better", "car.' 
response['website'] = 'http://sartography.com' orig_date = response['last_updated'] rv = self.app.put('/api/resource/%i'", "response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response[\"id\"], r_id) self.assertEqual(response[\"title\"], 'A+ Resource') self.assertEqual(response[\"description\"], 'A", "test_covid19_resource_lists(self): self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Free_educational_resources']) self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Free_educational_resources']) self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Supports_with_Living']) self.construct_resource(covid19_categories=['COVID-19_for_Autism',", "= self.construct_resource() r_id = r.id rv = self.app.get('api/resource/%i' % r_id,", "'Resource') rv = self.app.delete('api/resource/%i' % r_id, content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) rv", "[ { \"category_id\": c1.id }, { \"category_id\": c2.id }, {", "% r['id'], content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) logs = ResourceChangeLog.query.all() self.assertIsNotNone(logs[-1].resource_id) self.assertIsNotNone(logs[-1].user_id)", "self.app.get('api/resource/%i' % r_id, content_type=\"application/json\") self.assertEqual(404, rv.status_code) def test_delete_resource_with_admin_note_and_no_elastic_record(self): r =", "type='resource') db.session.add(cr) db.session.commit() rv = self.app.get( '/api/category/%i' % c.id, content_type=\"application/json\")", "self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(1, len(response)) self.assertEqual(r.id, response[0][\"resource_id\"]) self.assertEqual(r.description, response[0][\"resource\"][\"description\"])", "resource=r) elastic_index.remove_document(r, 'Resource') rv = self.app.delete('api/resource/%i' % r_id, content_type=\"application/json\", 
headers=self.logged_in_headers())", "r_id = r.id rv = self.app.get('/api/resource/%i' % r_id, follow_redirects=True, content_type=\"application/json\")", "= self.app.get('api/resource/covid19/Edu-tainment', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 4) rv", "content_type=\"application/json\") self.assert_success(rv) self.construct_admin_note(user=self.construct_user(), resource=r) elastic_index.remove_document(r, 'Resource') rv = self.app.delete('api/resource/%i' %", "Role class TestResources(BaseTest, unittest.TestCase): def test_resource_basics(self): self.construct_resource() r = db.session.query(Resource).first()", "= json.loads(rv.get_data(as_text=True)) self.assertEqual(r.id, response[0][\"resource_id\"]) self.assertEqual(2, len(response[0][\"resource\"][\"resource_categories\"])) self.assertEqual( \"c1\", response[0][\"resource\"][\"resource_categories\"][0][\"category\"] [\"name\"])", "Super and Great\", 'description': \"You need this resource in your", "Arts', is_uva_education_content=False) self.construct_resource(is_draft=False, title='Autism One', is_uva_education_content=True) self.construct_resource(is_draft=False, title='Two', is_uva_education_content=False) rv", "rv = self.app.get('api/resource/%i' % r_id, content_type=\"application/json\") self.assertEqual(404, rv.status_code) def test_create_resource(self):", "self.test_add_category_to_resource() rv = self.app.delete('/api/resource_category/%i' % 1) self.assert_success(rv) rv = self.app.get(", "self.construct_category(name=\"c2\") r = self.construct_resource() cr = ResourceCategory(resource=r, category=c, type='resource') cr2", "category=c, type='resource') cr2 = ResourceCategory(resource=r, category=c2, type='resource') db.session.add_all([cr, cr2]) db.session.commit()", "= ResourceCategory(resource=r, category=c, type='resource') db.session.add(cr) db.session.commit() rv = self.app.get( 
'/api/category/%i/resource'", "% r.id, data=self.jsonify(response), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers(user=u)) self.assert_success(rv) rv = self.app.get('/api/resource/%i/change_log'", "self.assert_success(rv) rv = self.app.get('/api/resource/%i' % r['id'], content_type=\"application/json\") self.assert_success(rv) response =", "title='Healthy Eating', is_uva_education_content=True) self.construct_resource(is_draft=True, title='Autism and the Arts', is_uva_education_content=False) self.construct_resource(is_draft=False,", "self.assertEqual(len(response), 5) rv = self.app.get('api/resource/covid19/Edu-tainment', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True))", "your life.\"} rv = self.app.post('api/resource', data=self.jsonify(r), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers()) self.assert_success(rv)", "def test_remove_category_from_resource(self): self.test_add_category_to_resource() rv = self.app.delete('/api/resource_category/%i' % 1) self.assert_success(rv) rv", "self.assert_success(rv) rv = self.app.get('api/resource/%i' % r_id, content_type=\"application/json\") self.assertEqual(404, rv.status_code) def", "class TestResources(BaseTest, unittest.TestCase): def test_resource_basics(self): self.construct_resource() r = db.session.query(Resource).first() self.assertIsNotNone(r)", "self.assertEqual(response['description'], 'Better fluids for you and your car.') self.assertEqual(response['website'], 'http://sartography.com')", "'You need this resource in your life.') self.assertIsNotNone(response['id']) def test_get_resource_by_category(self):", "c2 = self.construct_category(name=\"c2\") c3 = self.construct_category(name=\"c3\") r = self.construct_resource() rc_data", "'delete') def test_get_resource_change_log_by_resource(self): r = self.construct_resource() u = self.construct_user(email=\"<EMAIL>\", 
role=Role.admin)", "Change') self.assertEqual(response['description'], 'Better fluids for you and your car.') self.assertEqual(response['website'],", "content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers()) self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response['title'], 'Resource of", "self.assertEqual(c.id, response[\"category_id\"]) self.assertEqual(r.id, response[\"resource_id\"]) def test_set_all_categories_on_resource(self): c1 = self.construct_category(name=\"c1\") c2", "self.assertEqual(0, len(response)) def test_resource_change_log_types(self): u = self.construct_user(email=\"<EMAIL>\", role=Role.admin) r =", "content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 1) rv = self.app.get('api/resource/covid19/Visual_Aids',", "self.app.get('/api/resource/%i' % r_id, content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response['title'], 'Edwarardos", "response[\"resource_id\"]) def test_set_all_categories_on_resource(self): c1 = self.construct_category(name=\"c1\") c2 = self.construct_category(name=\"c2\") c3", "content_type=\"application/json\") response = json.loads(rv.get_data(as_text=True)) response['title'] = 'Edwarardos Lemonade and Oil", "and your car.') self.assertEqual(response['website'], 'http://sartography.com') self.assertNotEqual(orig_date, response['last_updated']) def test_delete_resource(self): r", "self.construct_category() r = self.construct_resource() rc_data = {\"resource_id\": r.id, \"category_id\": c.id}", "headers=self.logged_in_headers()) self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response[-1]['user_id'], u.id) def test_get_resource_change_log_by_user(self): r", "% r_id, content_type=\"application/json\", headers=self.logged_in_headers()) 
self.assert_success(rv) rv = self.app.get('api/resource/%i' % r_id,", "= self.app.get('api/resource/%i' % r_id, content_type=\"application/json\") self.assertEqual(404, rv.status_code) def test_delete_resource_with_admin_note_and_no_elastic_record(self): r", "logs = ResourceChangeLog.query.all() self.assertIsNotNone(logs[-1].resource_id) self.assertIsNotNone(logs[-1].user_id) self.assertEqual(logs[-1].type, 'edit') rv = self.app.delete('api/resource/%i'", "rejoicing') def test_modify_resource_basics(self): self.construct_resource() r = db.session.query(Resource).first() self.assertIsNotNone(r) r_id =", "= [{\"category_id\": c1.id}] rv = self.app.post( '/api/resource/%i/category' % r.id, data=self.jsonify(rc_data),", "def test_get_resource_change_log_by_user(self): r = self.construct_resource() u = self.construct_user(email=\"<EMAIL>\", role=Role.admin) rv", "cr2 = ResourceCategory(resource=r, category=c2, type='resource') db.session.add_all([cr, cr2]) db.session.commit() rv =", "db.session.add_all([cr, cr2]) db.session.commit() rv = self.app.get( '/api/category/%i/resource' % c.id, content_type=\"application/json\",", "self.assert_success(rv) logs = ResourceChangeLog.query.all() self.assertIsNotNone(logs[-1].resource_id) self.assertIsNotNone(logs[-1].user_id) self.assertEqual(logs[-1].type, 'delete') def test_get_resource_change_log_by_resource(self):", "fluids for you and your car.' 
response['website'] = 'http://sartography.com' orig_date", "= self.app.get('/api/user/%i/resource_change_log' % u.id, content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) response = json.loads(rv.get_data(as_text=True))", "len(response)) def test_resource_change_log_types(self): u = self.construct_user(email=\"<EMAIL>\", role=Role.admin) r = {'id':", "r.id rv = self.app.get('/api/resource/%i' % r_id, content_type=\"application/json\") response = json.loads(rv.get_data(as_text=True))", "r.id rv = self.app.get('/api/resource/%i' % r_id, follow_redirects=True, content_type=\"application/json\") self.assert_success(rv) response", "\"You need this resource in your life.\", 'organization_name': \"Resource Org\"}", "rv = self.app.get('api/resource/covid19/COVID-19_for_Autism', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 5)", "= self.app.get('api/resource/covid19/COVID-19_for_Autism', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 5) rv", "% r.id, data=self.jsonify(response), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers(user=u)) self.assert_success(rv) rv = self.app.get('/api/user/%i/resource_change_log'", "self.app.get('/api/resource/%i' % r['id'], content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response['title'], 'Super", "% r.id, data=self.jsonify(rc_data), content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(3, len(response))", "= json.loads(rv.get_data(as_text=True)) self.assertEqual(3, len(response)) rc_data = [{\"category_id\": c1.id}] rv =", "is_uva_education_content=False) rv = self.app.get('api/resource/education', content_type=\"application/json\") 
self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response),", "rv.status_code) def test_create_resource(self): resource = {'title': \"Resource of Resources\", 'description':", "c.id, content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(1, response[\"resource_count\"]) def test_get_category_by_resource(self):", "test_set_all_categories_on_resource(self): c1 = self.construct_category(name=\"c1\") c2 = self.construct_category(name=\"c2\") c3 = self.construct_category(name=\"c3\")", "rv = self.app.put('/api/resource/%i' % r.id, data=self.jsonify(response), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers(user=u)) self.assert_success(rv)", "headers=self.logged_in_headers(user=u)) self.assert_success(rv) rv = self.app.get('/api/user/%i/resource_change_log' % u.id, content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv)", "response = json.loads(rv.get_data(as_text=True)) self.assertEqual(1, response[\"resource_count\"]) def test_get_category_by_resource(self): c = self.construct_category()", "and Oil Change') self.assertEqual(response['description'], 'Better fluids for you and your", "r_id, content_type=\"application/json\") self.assert_success(rv) rv = self.app.delete('api/resource/%i' % r_id, content_type=\"application/json\", headers=self.logged_in_headers())", "self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 1) def test_is_uva_education_content(self): self.construct_resource(is_draft=True, title='Autism", "title='Autism at UVA', is_uva_education_content=True) self.construct_resource(is_draft=False, title='Healthy Eating', is_uva_education_content=True) self.construct_resource(is_draft=True, title='Autism", "= json.loads(rv.get_data(as_text=True)) self.assertEqual(0, len(response)) def test_resource_change_log_types(self): 
u = self.construct_user(email=\"<EMAIL>\", role=Role.admin)", "One', is_uva_education_content=True) self.construct_resource(is_draft=False, title='Two', is_uva_education_content=False) rv = self.app.get('api/resource/education', content_type=\"application/json\") self.assert_success(rv)", "= self.app.put('/api/resource/%i' % r.id, data=self.jsonify(response), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers(user=u)) self.assert_success(rv) rv", "response = json.loads(rv.get_data(as_text=True)) self.assertEqual(1, len(response)) self.assertEqual(r.id, response[0][\"resource_id\"]) self.assertEqual(r.description, response[0][\"resource\"][\"description\"]) def", "def test_get_resource_change_log_by_resource(self): r = self.construct_resource() u = self.construct_user(email=\"<EMAIL>\", role=Role.admin) rv", "self.assertEqual(response['title'], 'Edwarardos Lemonade and Oil Change') self.assertEqual(response['description'], 'Better fluids for", "self.construct_resource() rc_data = {\"resource_id\": r.id, \"category_id\": c.id} rv = self.app.post(", "def test_resource_change_log_types(self): u = self.construct_user(email=\"<EMAIL>\", role=Role.admin) r = {'id': 258,", "content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers()) self.assert_success(rv) logs = ResourceChangeLog.query.all() self.assertIsNotNone(logs[-1].resource_id) self.assertIsNotNone(logs[-1].user_id) self.assertEqual(logs[-1].type,", "{ \"category_id\": c2.id }, { \"category_id\": c3.id }, ] rv", "= {'id': 258, 'title': \"A Resource that is Super and", "self.assert_success(rv) self.construct_admin_note(user=self.construct_user(), resource=r) elastic_index.remove_document(r, 'Resource') rv = self.app.delete('api/resource/%i' % r_id,", "= self.construct_category(name=\"c2\") c3 = self.construct_category(name=\"c3\") r = self.construct_resource() rc_data =", "rv = self.app.get('api/resource/education', 
content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 2)", "= json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 2) rv = self.app.get('api/resource/covid19/Supports_with_Living', content_type=\"application/json\") self.assert_success(rv) response", "self.construct_category(name=\"c2\") c3 = self.construct_category(name=\"c3\") r = self.construct_resource() rc_data = [", "2) rv = self.app.get('api/resource', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response),", "self.app.delete('/api/resource_category/%i' % 1) self.assert_success(rv) rv = self.app.get( '/api/resource/%i/category' % 1,", "c.id} rv = self.app.post( '/api/resource_category', data=self.jsonify(rc_data), content_type=\"application/json\") self.assert_success(rv) response =", "= json.loads(rv.get_data(as_text=True)) self.assertEqual(response[\"id\"], r_id) self.assertEqual(response[\"title\"], 'A+ Resource') self.assertEqual(response[\"description\"], 'A delightful", "def test_modify_resource_basics(self): self.construct_resource() r = db.session.query(Resource).first() self.assertIsNotNone(r) r_id = r.id", "'Visual_Aids']) self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Health_and_Telehealth']) rv = self.app.get('api/resource/covid19/COVID-19_for_Autism', content_type=\"application/json\") self.assert_success(rv) response", "response['website'] = 'http://sartography.com' orig_date = response['last_updated'] rv = self.app.put('/api/resource/%i' %", "self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Free_educational_resources']) self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Supports_with_Living']) self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Visual_Aids']) 
self.construct_resource(covid19_categories=['COVID-19_for_Autism',", "self.app.get('api/resource/education', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 2) rv =", "import ResourceChangeLog from app.model.user import Role class TestResources(BaseTest, unittest.TestCase): def", "= ResourceChangeLog.query.all() self.assertIsNotNone(logs[-1].resource_id) self.assertIsNotNone(logs[-1].user_id) self.assertEqual(logs[-1].type, 'delete') def test_get_resource_change_log_by_resource(self): r =", "Resource') logs = ResourceChangeLog.query.all() self.assertIsNotNone(logs[-1].resource_id) self.assertIsNotNone(logs[-1].user_id) self.assertEqual(logs[-1].type, 'edit') rv =", "self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response['title'], 'Resource of Resources') self.assertEqual(response['description'], 'You", "this resource in your life.') self.assertIsNotNone(response['id']) def test_get_resource_by_category(self): c =", "self.app.get('/api/resource/%i/change_log' % r.id, content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response[-1]['user_id'],", "content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 1) def test_is_uva_education_content(self): self.construct_resource(is_draft=True,", "Org\"} rv = self.app.post('api/resource', data=self.jsonify(resource), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers()) self.assert_success(rv) response", "data=self.jsonify(response), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers(user=u)) self.assert_success(rv) rv = self.app.get('/api/resource/%i' % r['id'],", "self.assert_success(rv) response = 
json.loads(rv.get_data(as_text=True)) self.assertEqual(1, len(response)) def test_remove_category_from_resource(self): self.test_add_category_to_resource() rv", "headers=self.logged_in_headers()) self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response[-1]['resource_id'], r.id) def test_covid19_resource_lists(self): self.construct_resource(covid19_categories=['COVID-19_for_Autism',", "= self.app.get('api/resource/covid19/Free_educational_resources', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 2) rv", "self.app.get('api/resource/covid19/Free_educational_resources', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 2) rv =", "r_id, content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) rv = self.app.get('api/resource/%i' % r_id, content_type=\"application/json\")", "ResourceCategory(resource=r, category=c, type='resource') db.session.add(cr) db.session.commit() rv = self.app.get( '/api/category/%i' %", "self.assertEqual(response[\"title\"], 'A+ Resource') self.assertEqual(response[\"description\"], 'A delightful Resource destined to create", "follow_redirects=True, headers=self.logged_in_headers()) self.assert_success(rv) logs = ResourceChangeLog.query.all() self.assertIsNotNone(logs[-1].resource_id) self.assertIsNotNone(logs[-1].user_id) self.assertEqual(logs[-1].type, 'create')", "def test_get_resource_by_category(self): c = self.construct_category() r = self.construct_resource() cr =", "rv = self.app.put('/api/resource/%i' % r['id'], data=self.jsonify(response), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers(user=u)) self.assert_success(rv)", "flask import json from tests.base_test import BaseTest from app import", "r_id, content_type=\"application/json\") 
response = json.loads(rv.get_data(as_text=True)) response['title'] = 'Edwarardos Lemonade and", "self.construct_resource() r = db.session.query(Resource).first() self.assertIsNotNone(r) r_id = r.id rv =", "= json.loads(rv.get_data(as_text=True)) self.assertEqual(response['title'], 'Super Great Resource') logs = ResourceChangeLog.query.all() self.assertIsNotNone(logs[-1].resource_id)", "Lemonade and Oil Change' response['description'] = 'Better fluids for you", "response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response['title'], 'Super Great Resource') logs = ResourceChangeLog.query.all()", "data=self.jsonify(response), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers(user=u)) self.assert_success(rv) rv = self.app.get('/api/resource/%i/change_log' % r.id,", "rv = self.app.post('api/resource', data=self.jsonify(r), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers()) self.assert_success(rv) logs =", "json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 2) rv = self.app.get('api/resource/covid19/Supports_with_Living', content_type=\"application/json\") self.assert_success(rv) response =", "= self.app.get( '/api/resource/%i/category' % r.id, content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True))", "= self.construct_category(name=\"c3\") r = self.construct_resource() rc_data = [ { \"category_id\":", "= json.loads(rv.get_data(as_text=True)) self.assertEqual(1, len(response)) def test_remove_category_from_resource(self): self.test_add_category_to_resource() rv = self.app.delete('/api/resource_category/%i'", "% r_id, content_type=\"application/json\") self.assertEqual(404, rv.status_code) def test_create_resource(self): resource = {'title':", "role=Role.admin) r = {'id': 258, 'title': \"A Resource that is", "response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response['title'], 'Resource of 
Resources') self.assertEqual(response['description'], 'You need", "content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response[\"id\"], r_id) self.assertEqual(response[\"title\"], 'A+ Resource')", "headers=self.logged_in_headers()) self.assert_success(rv) rv = self.app.get('/api/resource/%i' % r_id, content_type=\"application/json\") self.assert_success(rv) response", "'Super Great Resource' rv = self.app.put('/api/resource/%i' % r.id, data=self.jsonify(response), content_type=\"application/json\",", "self.assert_success(rv) rv = self.app.get('/api/resource/%i/change_log' % r.id, content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) response", "json.loads(rv.get_data(as_text=True)) self.assertEqual(response[-1]['user_id'], u.id) def test_get_resource_change_log_by_user(self): r = self.construct_resource() u =", "self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(c.id, response[\"category_id\"]) self.assertEqual(r.id, response[\"resource_id\"]) def test_set_all_categories_on_resource(self):", "self.construct_resource() rc_data = [ { \"category_id\": c1.id }, { \"category_id\":", "r.id rv = self.app.get('api/resource/%i' % r_id, content_type=\"application/json\") self.assert_success(rv) rv =", "Resource from app.model.resource_category import ResourceCategory from app.model.resource_change_log import ResourceChangeLog from", "self.assertEqual(response['title'], 'Resource of Resources') self.assertEqual(response['description'], 'You need this resource in", "of Resources') self.assertEqual(response['description'], 'You need this resource in your life.')", "r.id, content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(1, len(response)) self.assertEqual(c.id, response[0][\"id\"])", "from app.model.resource_change_log import ResourceChangeLog from 
app.model.user import Role class TestResources(BaseTest,", "db.session.commit() rv = self.app.get( '/api/category/%i' % c.id, content_type=\"application/json\") self.assert_success(rv) response", "data=self.jsonify(resource), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers()) self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response['title'], 'Resource", "headers=self.logged_in_headers(user=u)) self.assert_success(rv) rv = self.app.get('/api/resource/%i' % r['id'], content_type=\"application/json\") self.assert_success(rv) response", "r = db.session.query(Resource).first() self.assertIsNotNone(r) r_id = r.id rv = self.app.get('/api/resource/%i'", "= self.construct_user(email=\"<EMAIL>\", role=Role.admin) r = {'id': 258, 'title': \"A Resource", "% u.id, content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response[-1]['resource_id'], r.id)", "% r.id, content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(1, len(response)) self.assertEqual(c.id,", "content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(1, response[\"resource_count\"]) def test_get_category_by_resource(self): c", "% r_id, content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response['title'], 'Edwarardos Lemonade", "rv = self.app.get('api/resource/%i' % r_id, content_type=\"application/json\") self.assert_success(rv) rv = self.app.delete('api/resource/%i'", "content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers()) self.assert_success(rv) rv = self.app.get('/api/resource/%i' % r_id, content_type=\"application/json\")", "'/api/resource/%i/category' % r.id, 
data=self.jsonify(rc_data), content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(1,", "= 'Super Great Resource' rv = self.app.put('/api/resource/%i' % r.id, data=self.jsonify(response),", "% 1) self.assert_success(rv) rv = self.app.get( '/api/resource/%i/category' % 1, content_type=\"application/json\")", "self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(1, len(response)) self.assertEqual(c.id, response[0][\"id\"]) self.assertEqual(c.name, response[0][\"category\"][\"name\"])", "rc_data = {\"resource_id\": r.id, \"category_id\": c.id} rv = self.app.post( '/api/resource_category',", "response = json.loads(rv.get_data(as_text=True)) self.assertEqual(c.id, response[\"category_id\"]) self.assertEqual(r.id, response[\"resource_id\"]) def test_set_all_categories_on_resource(self): c1", "= json.loads(rv.get_data(as_text=True)) response['title'] = 'Edwarardos Lemonade and Oil Change' response['description']", "content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 5) rv = self.app.get('api/resource/covid19/Edu-tainment',", "'edit') rv = self.app.delete('api/resource/%i' % r['id'], content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) logs", "= self.app.get('api/resource/education', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 2) rv", "content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 2) rv = self.app.get('api/resource',", "self.assertEqual(1, len(response)) def test_remove_category_from_resource(self): self.test_add_category_to_resource() rv = self.app.delete('/api/resource_category/%i' % 1)", "Change' response['description'] = 'Better fluids for you and your 
car.'", "= self.app.put('/api/resource/%i' % r_id, data=self.jsonify(response), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers()) self.assert_success(rv) rv", "c2.id }, { \"category_id\": c3.id }, ] rv = self.app.post(", "5) rv = self.app.get('api/resource/covid19/Edu-tainment', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response),", "data=self.jsonify(r), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers()) self.assert_success(rv) logs = ResourceChangeLog.query.all() self.assertIsNotNone(logs[-1].resource_id) self.assertIsNotNone(logs[-1].user_id)", "= self.app.delete('api/resource/%i' % r['id'], content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) logs = ResourceChangeLog.query.all()", "'Free_educational_resources']) self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Free_educational_resources']) self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Supports_with_Living']) self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Visual_Aids'])", "rv = self.app.get('api/resource/covid19/Free_educational_resources', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 2)", "self.assertEqual(len(response), 2) rv = self.app.get('api/resource', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True))", "rv = self.app.get('api/resource/%i' % r.id, content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True))", "from app.model.user import Role class TestResources(BaseTest, unittest.TestCase): def test_resource_basics(self): self.construct_resource()", "'Edwarardos Lemonade and Oil Change' 
response['description'] = 'Better fluids for", "Resources') self.assertEqual(response['description'], 'You need this resource in your life.') self.assertIsNotNone(response['id'])", "self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(0, len(response)) def test_resource_change_log_types(self): u =", "need this resource in your life.\"} rv = self.app.post('api/resource', data=self.jsonify(r),", "r_id, follow_redirects=True, content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response[\"id\"], r_id) self.assertEqual(response[\"title\"],", "app.model.user import Role class TestResources(BaseTest, unittest.TestCase): def test_resource_basics(self): self.construct_resource() r", "= self.app.get('api/resource/covid19/Supports_with_Living', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 1) rv", "at UVA', is_uva_education_content=True) self.construct_resource(is_draft=False, title='Healthy Eating', is_uva_education_content=True) self.construct_resource(is_draft=True, title='Autism and", "data=self.jsonify(response), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers()) self.assert_success(rv) rv = self.app.get('/api/resource/%i' % r_id,", "len(response)) self.assertEqual(c.id, response[0][\"id\"]) self.assertEqual(c.name, response[0][\"category\"][\"name\"]) def test_add_category_to_resource(self): c = self.construct_category()", "follow_redirects=True, headers=self.logged_in_headers(user=u)) self.assert_success(rv) rv = self.app.get('/api/user/%i/resource_change_log' % u.id, content_type=\"application/json\", headers=self.logged_in_headers())", "= self.app.get('api/resource/covid19/Health_and_Telehealth', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) 
self.assertEqual(len(response), 1) def", "response = json.loads(rv.get_data(as_text=True)) self.assertEqual(0, len(response)) def test_resource_change_log_types(self): u = self.construct_user(email=\"<EMAIL>\",", "'A+ Resource') self.assertEqual(response[\"description\"], 'A delightful Resource destined to create rejoicing')", "test_get_resource_change_log_by_resource(self): r = self.construct_resource() u = self.construct_user(email=\"<EMAIL>\", role=Role.admin) rv =", "self.assert_success(rv) logs = ResourceChangeLog.query.all() self.assertIsNotNone(logs[-1].resource_id) self.assertIsNotNone(logs[-1].user_id) self.assertEqual(logs[-1].type, 'create') rv =", "= self.app.delete('/api/resource_category/%i' % 1) self.assert_success(rv) rv = self.app.get( '/api/resource/%i/category' %", "Great Resource' rv = self.app.put('/api/resource/%i' % r.id, data=self.jsonify(response), content_type=\"application/json\", follow_redirects=True,", "= self.app.post('api/resource', data=self.jsonify(resource), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers()) self.assert_success(rv) response = json.loads(rv.get_data(as_text=True))", "headers=self.logged_in_headers()) self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(r.id, response[0][\"resource_id\"]) self.assertEqual(2, len(response[0][\"resource\"][\"resource_categories\"])) self.assertEqual(", "test_category_resource_count(self): c = self.construct_category() r = self.construct_resource() cr = ResourceCategory(resource=r,", "content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response[-1]['user_id'], u.id) def test_get_resource_change_log_by_user(self):", "is_uva_education_content=False) self.construct_resource(is_draft=False, title='Autism One', is_uva_education_content=True) self.construct_resource(is_draft=False, title='Two', 
is_uva_education_content=False) rv =", "self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 5) rv = self.app.get('api/resource/covid19/Edu-tainment', content_type=\"application/json\")", "= ResourceCategory(resource=r, category=c, type='resource') db.session.add(cr) db.session.commit() rv = self.app.get( '/api/resource/%i/category'", "response['title'] = 'Super Great Resource' rv = self.app.put('/api/resource/%i' % r['id'],", "self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response['title'], 'Edwarardos Lemonade and Oil Change')", "def test_get_resource_by_category_includes_category_details(self): c = self.construct_category(name=\"c1\") c2 = self.construct_category(name=\"c2\") r =", "c1.id}] rv = self.app.post( '/api/resource/%i/category' % r.id, data=self.jsonify(rc_data), content_type=\"application/json\") self.assert_success(rv)", "1) rv = self.app.get('api/resource/covid19/Visual_Aids', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response),", "def test_add_category_to_resource(self): c = self.construct_category() r = self.construct_resource() rc_data =", "headers=self.logged_in_headers()) self.assert_success(rv) logs = ResourceChangeLog.query.all() self.assertIsNotNone(logs[-1].resource_id) self.assertIsNotNone(logs[-1].user_id) self.assertEqual(logs[-1].type, 'create') rv", "self.assertIsNotNone(logs[-1].resource_id) self.assertIsNotNone(logs[-1].user_id) self.assertEqual(logs[-1].type, 'delete') def test_get_resource_change_log_by_resource(self): r = self.construct_resource() u", "your life.\", 'organization_name': \"Resource Org\"} rv = self.app.post('api/resource', data=self.jsonify(resource), content_type=\"application/json\",", "content_type=\"application/json\") self.assertEqual(404, rv.status_code) def test_create_resource(self): resource = {'title': \"Resource of", 
"response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response[-1]['resource_id'], r.id) def test_covid19_resource_lists(self): self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Free_educational_resources']) self.construct_resource(covid19_categories=['COVID-19_for_Autism',", "test_get_resource_by_category_includes_category_details(self): c = self.construct_category(name=\"c1\") c2 = self.construct_category(name=\"c2\") r = self.construct_resource()", "len(response[0][\"resource\"][\"resource_categories\"])) self.assertEqual( \"c1\", response[0][\"resource\"][\"resource_categories\"][0][\"category\"] [\"name\"]) def test_category_resource_count(self): c = self.construct_category()", "self.assertEqual(logs[-1].type, 'create') rv = self.app.get('api/resource/%i' % r['id'], content_type=\"application/json\") self.assert_success(rv) response", "= self.app.get('api/resource/%i' % r.id, content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) response['title']", "content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response[-1]['resource_id'], r.id) def test_covid19_resource_lists(self):", "title='Autism and the Arts', is_uva_education_content=False) self.construct_resource(is_draft=False, title='Autism One', is_uva_education_content=True) self.construct_resource(is_draft=False,", "elastic_index.remove_document(r, 'Resource') rv = self.app.delete('api/resource/%i' % r_id, content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv)", "self.app.get('/api/user/%i/resource_change_log' % u.id, content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(response[-1]['resource_id'],", "rv = self.app.put('/api/resource/%i' % r_id, data=self.jsonify(response), 
content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers()) self.assert_success(rv)", "json.loads(rv.get_data(as_text=True)) self.assertEqual(c.id, response[\"category_id\"]) self.assertEqual(r.id, response[\"resource_id\"]) def test_set_all_categories_on_resource(self): c1 = self.construct_category(name=\"c1\")", "self.app.get( '/api/resource/%i/category' % r.id, content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(1,", "json.loads(rv.get_data(as_text=True)) self.assertEqual(1, response[\"resource_count\"]) def test_get_category_by_resource(self): c = self.construct_category() r =", "\"You need this resource in your life.\"} rv = self.app.post('api/resource',", "'/api/resource/%i/category' % r.id, content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(1, len(response))", "rv = self.app.delete('api/resource/%i' % r_id, content_type=\"application/json\", headers=self.logged_in_headers()) self.assert_success(rv) rv =", "r.id, data=self.jsonify(rc_data), content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(1, len(response)) def", "= json.loads(rv.get_data(as_text=True)) self.assertEqual(response[-1]['user_id'], u.id) def test_get_resource_change_log_by_user(self): r = self.construct_resource() u", "self.assertEqual(len(response), 1) rv = self.app.get('api/resource/covid19/Health_and_Telehealth', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True))", "self.app.get('api/resource/covid19/Visual_Aids', content_type=\"application/json\") self.assert_success(rv) response = json.loads(rv.get_data(as_text=True)) self.assertEqual(len(response), 1) rv =", "'Better fluids for you and your car.' 
response['website'] = 'http://sartography.com'", "response[0][\"resource\"][\"description\"]) def test_get_resource_by_category_includes_category_details(self): c = self.construct_category(name=\"c1\") c2 = self.construct_category(name=\"c2\") r", "r = self.construct_resource() rc_data = [ { \"category_id\": c1.id },", "% r_id, content_type=\"application/json\") response = json.loads(rv.get_data(as_text=True)) response['title'] = 'Edwarardos Lemonade", "role=Role.admin) rv = self.app.get('api/resource/%i' % r.id, content_type=\"application/json\") self.assert_success(rv) response =", "= r.id rv = self.app.get('/api/resource/%i' % r_id, content_type=\"application/json\") response =", "def test_covid19_resource_lists(self): self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Free_educational_resources']) self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Free_educational_resources']) self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Supports_with_Living'])", "= [ { \"category_id\": c1.id }, { \"category_id\": c2.id },", "ResourceCategory(resource=r, category=c, type='resource') cr2 = ResourceCategory(resource=r, category=c2, type='resource') db.session.add_all([cr, cr2])", "rv.status_code) def test_delete_resource_with_admin_note_and_no_elastic_record(self): r = self.construct_resource() r_id = r.id rv", "self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Free_educational_resources']) self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Free_educational_resources']) self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Supports_with_Living']) self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment',", "self.app.put('/api/resource/%i' % r.id, data=self.jsonify(response), content_type=\"application/json\", follow_redirects=True, headers=self.logged_in_headers(user=u)) 
self.assert_success(rv) rv =" ]
[ "with\", ) parser.add_argument( \"--password\", type=str, help=\"password of superuser or facility", "{}\".format( facility.name ) ) # get primary partition scope_params =", "password, dataset_id, network_connection, noninteractive=noninteractive, ) logger.info(\"Syncing has been initiated (this", "the certs we own for the specific facility client_cert =", "has been completed.\") @contextmanager def _lock(self): cancellable = False #", "In that case, we'll assume everything is good. sync_client.finalize(allow_server_timeout=True) def", "increment=math.ceil(progress - tracker.progress), message=stats_msg(transfer_session), extra_data=dict( bytes_sent=transfer_session.bytes_sent, bytes_received=transfer_session.bytes_received, sync_state=sync_state, ), )", "not push data to the server\" ) parser.add_argument( \"--no-pull\", action=\"store_true\",", "= controller.create_network_connection(baseurl) # if instance_ids are equal, this means device", "device does not own a certificate for Facility: {}\".format( facility.name", "a while and the request # could timeout. 
In that", "from ..utils import get_dataset_id from ..utils import get_single_user_sync_filter from ..utils", "get_dataset_id( baseurl, identifier=facility_id, noninteractive=True ) client_cert, server_cert, username = get_client_and_server_certs(", "from the server\" ) parser.add_argument( \"--username\", type=str, help=\"username of superuser", "import Filter from morango.models import InstanceIDModel from morango.models import ScopeDefinition", "kolibri.core.auth.constants.morango_sync import ScopeDefinitions from kolibri.core.auth.constants.morango_sync import State from kolibri.core.auth.management.utils import", "parser.add_argument( \"--username\", type=str, help=\"username of superuser or facility admin on", "CommandError( \"This device does not own a certificate for Facility:", "= MorangoProfileController(PROFILE_FACILITY_DATA) network_connection = controller.create_network_connection(baseurl) # if instance_ids are equal,", "send\", State.LOCAL_QUEUING, noninteractive, ) self._transfer_tracker_adapter( sync_client.signals.transferring, \"Sending data ({})\".format(TRANSFER_MESSAGE), State.PUSHING,", "user directly syncs without migrating database if not ScopeDefinition.objects.filter(): call_command(\"loaddata\",", "data ({})\".format(TRANSFER_MESSAGE), State.PUSHING, noninteractive, ) self._queueing_tracker_adapter( sync_client.signals.dequeuing, \"Remotely integrating data\",", "in if not PORTAL_SYNC: baseurl = get_baseurl(baseurl) # call this", "self._session_tracker_adapter( sync_client.signals.session, \"Creating push transfer session\", \"Completed push transfer session\",", "if self.job: self.job.extra_metadata.update(sync_state=State.CANCELLED) self.job.save_meta() logger.info(\"Syncing has been cancelled.\") return network_connection.close()", "self.job.cancellable): raise UserCancelledError() def _handle_pull( self, sync_session_client, noninteractive, dataset_id, client_cert,", "syncing, the user ID of the account to be synced\",", "username = 
get_client_and_server_certs( username, password, dataset_id, network_connection, noninteractive=noninteractive, ) logger.info(\"Syncing", "we'll assume everything is good. sync_client.finalize(allow_server_timeout=True) def _update_all_progress(self, progress_fraction, progress):", "help=\"Do not pull data from the server\" ) parser.add_argument( \"--username\",", "client_is_single_user ) sync_client.initialize(Filter(filt)) sync_client.run() # we can't cancel remotely integrating", "controller.create_network_connection(baseurl) # if instance_ids are equal, this means device is", "try: # pull from server if not no_pull: self._handle_pull( sync_session_client,", "noninteractive, ) self._transfer_tracker_adapter( sync_client.signals.transferring, \"Sending data ({})\".format(TRANSFER_MESSAGE), State.PUSHING, noninteractive, )", "been initiated (this may take a while)...\") sync_session_client = network_connection.create_sync_session(", "self.job.update_progress(progress_fraction, 1.0) self.job.extra_metadata.update(progress.extra_data) self.job.save_meta() def _session_tracker_adapter(self, signal_group, started_msg, completed_msg): \"\"\"", "self.job: self.job.update_progress(progress_fraction, 1.0) self.job.extra_metadata.update(progress.extra_data) self.job.save_meta() def _session_tracker_adapter(self, signal_group, started_msg, completed_msg):", "portal sync setup facility = get_facility( facility_id=facility_id, noninteractive=noninteractive ) #", "dataset_id: str \"\"\" sync_client = sync_session_client.get_pull_client() sync_client.signals.queuing.connect(self._raise_cancel) sync_client.signals.transferring.connect(self._raise_cancel) self._queueing_tracker_adapter( sync_client.signals.queuing,", "# do portal sync setup facility = get_facility( facility_id=facility_id, noninteractive=noninteractive", "sync_client.signals.dequeuing, \"Locally integrating received data\", State.LOCAL_DEQUEUING, noninteractive, ) self._session_tracker_adapter( 
sync_client.signals.session,", "\"Completed push transfer session\", ) with self._lock(): if not user_id:", "sync with itself, which we don't allow if ( InstanceIDModel.get_or_create_current_instance()[0].id", "dataset_id = get_dataset_id( baseurl, identifier=facility_id, noninteractive=True ) client_cert, server_cert, username", "in case user directly syncs without migrating database if not", "import db_lock from kolibri.utils import conf DATA_PORTAL_SYNCING_BASE_URL = conf.OPTIONS[\"Urls\"][\"DATA_PORTAL_SYNCING_BASE_URL\"] TRANSFER_MESSAGE", "dataset_id, user_id, is_read=not client_is_single_user ) sync_client.initialize(Filter(filt)) sync_client.run() # we can't", "contextlib import contextmanager from django.core.management import call_command from django.core.management.base import", ") if noninteractive or tracker.progressbar is None: signal_group.started.connect(started) signal_group.started.connect(started) signal_group.started.connect(handler)", "CommandError( \"To do a single-user sync, one device must have", "a while)...\") sync_session_client = network_connection.create_sync_session( client_cert, server_cert, chunk_size=chunk_size ) try:", "at end to capture in logging output signal_group.completed.connect(stats) def _queueing_tracker_adapter(", "with itself. 
Please recheck base URL and try again.\" )", "of facility data with Kolibri Data Portal or another Kolibri", "morango.sync.syncsession.SyncSessionClient :type noninteractive: bool :type dataset_id: str \"\"\" sync_client =", ") logger.info(\"Syncing has been initiated (this may take a while)...\")", "sync_client.signals.queuing, \"Locally preparing data to send\", State.LOCAL_QUEUING, noninteractive, ) self._transfer_tracker_adapter(", "if self.job: self.job.extra_metadata.update(sync_state=State.SESSION_CREATION) @run_once def session_destruction(transfer_session): if transfer_session.records_total == 0:", "data\", State.REMOTE_QUEUING, noninteractive, ) self._transfer_tracker_adapter( sync_client.signals.transferring, \"Receiving data ({})\".format(TRANSFER_MESSAGE), State.PULLING,", "\"\"\" tracker = self.start_progress(total=2) def started(transfer_session): dataset_cache.clear() if noninteractive or", "logger.info(\"Syncing has been completed.\") @contextmanager def _lock(self): cancellable = False", "self.job: self.job.extra_metadata.update(sync_state=State.CANCELLED) self.job.save_meta() logger.info(\"Syncing has been cancelled.\") return network_connection.close() if", "signals :type signal_group: morango.sync.syncsession.SyncSignalGroup :type started_msg: str :type completed_msg: str", "yield if self.job: self.job.save_as_cancellable(cancellable=cancellable) def _raise_cancel(self, *args, **kwargs): if self.is_cancelled()", "is passed in if not PORTAL_SYNC: baseurl = get_baseurl(baseurl) #", "# check if the server already has a cert for", "options[\"password\"], options[\"user\"], options[\"no_push\"], options[\"no_pull\"], options[\"noninteractive\"], options[\"no_provision\"], ) PORTAL_SYNC = baseurl", "UserCancelledError from kolibri.core.tasks.management.commands.base import AsyncCommand from kolibri.core.utils.lock import db_lock from", "help=\"Do not push data to the server\" ) parser.add_argument( \"--no-pull\",", "{transfer_total}\" logger = 
logging.getLogger(__name__) class Command(AsyncCommand): help = \"Allow the", "from django.core.management.base import CommandError from morango.models import Filter from morango.models", "tracker.progressbar is None: logger.info(message) def handler(transfer_session): tracker.update_progress( message=message, extra_data=dict(sync_state=sync_state) )", "to the server server_cert = ( server_certs[0] if server_certs else", "!= 2: raise CommandError( \"To do a single-user sync, one", "certificate for Facility: {}\".format( facility.name ) ) # get primary", "get_baseurl from ..utils import get_client_and_server_certs from ..utils import get_dataset_id from", "records_transferred=transfer_session.records_transferred, records_total=transfer_session.records_total, transfer_total=bytes_for_humans(transfer_total), ) def stats(transfer_session): logger.info(stats_msg(transfer_session)) def handler(transfer_session): \"\"\"", "import get_baseurl from ..utils import get_client_and_server_certs from ..utils import get_dataset_id", "noninteractive, dataset_id, client_cert, server_cert, user_id=user_id, ) # and push our", "client_cert, server_cert, user_id, ): \"\"\" :type sync_session_client: morango.sync.syncsession.SyncSessionClient :type noninteractive:", "pushing and pulling \"\"\" logger.info(started_msg) if self.job: self.job.extra_metadata.update(sync_state=State.SESSION_CREATION) @run_once def", "message=stats_msg(transfer_session), extra_data=dict( bytes_sent=transfer_session.bytes_sent, bytes_received=transfer_session.bytes_received, sync_state=sync_state, ), ) if noninteractive or", "+ transfer_session.bytes_received ) return message.format( records_transferred=transfer_session.records_transferred, records_total=transfer_session.records_total, transfer_total=bytes_for_humans(transfer_total), ) def", "..utils import create_superuser_and_provision_device from ..utils import get_baseurl from ..utils import", ") # and push our own data to server if", "type=str, 
help=\"username of superuser or facility admin on server we", "transfer_session.bytes_received ) return message.format( records_transferred=transfer_session.records_transferred, records_total=transfer_session.records_total, transfer_total=bytes_for_humans(transfer_total), ) def stats(transfer_session):", "client_cert, server_cert, chunk_size=chunk_size ) try: # pull from server if", "None: logger.info(message) def handler(transfer_session): tracker.update_progress( message=message, extra_data=dict(sync_state=sync_state) ) if noninteractive", "the request # could timeout. In that case, we'll assume", "facility.name ) ) # get primary partition scope_params = json.loads(client_cert.scope_params)", "if noninteractive or tracker.progressbar is None: signal_group.started.connect(started) signal_group.started.connect(started) signal_group.started.connect(handler) signal_group.completed.connect(handler)", "user_id=user_id, ) if not no_provision: with self._lock(): if user_id: provision_single_user_device(user_id)", "dataset_id = scope_params[\"dataset_id\"] # check if the server already has", "= ( options[\"baseurl\"], options[\"facility\"], options[\"chunk_size\"], options[\"username\"], options[\"password\"], options[\"user\"], options[\"no_push\"], options[\"no_pull\"],", "from ..utils import get_baseurl from ..utils import get_client_and_server_certs from ..utils", "from ..utils import get_client_and_server_certs from ..utils import get_dataset_id from ..utils", "certificate.\" ) elif PORTAL_SYNC: # do portal sync setup facility", "sync_state, noninteractive ): \"\"\" Attaches a signal handler to queuing/dequeuing", "single-user certificate, and the other a full-facility certificate.\" ) elif", "specific facility client_cert = ( facility.dataset.get_owned_certificates() .filter(scope_definition_id=ScopeDefinitions.FULL_FACILITY) .first() ) if", "sync_client.run() # we can't cancel remotely integrating data if self.job:", "sync_client = 
sync_session_client.get_pull_client() sync_client.signals.queuing.connect(self._raise_cancel) sync_client.signals.transferring.connect(self._raise_cancel) self._queueing_tracker_adapter( sync_client.signals.queuing, \"Remotely preparing data\",", "noninteractive or tracker.progressbar is None: signal_group.started.connect(stats) signal_group.in_progress.connect(stats) signal_group.connect(handler) # log", "and pulling \"\"\" logger.info(started_msg) if self.job: self.job.extra_metadata.update(sync_state=State.SESSION_CREATION) @run_once def session_destruction(transfer_session):", "instance_ids are equal, this means device is trying to sync", "kolibri.core.auth.models import dataset_cache from kolibri.core.logger.utils.data import bytes_for_humans from kolibri.core.tasks.exceptions import", "\"\"\" Attaches a signal handler to session creation signals :type", "class Command(AsyncCommand): help = \"Allow the syncing of facility data", "if server_certs else network_connection.push_signed_client_certificate_chain( local_parent_cert=client_cert, scope_definition_id=ScopeDefinitions.FULL_FACILITY, scope_params=scope_params, ) ) else:", "been cancelled.\") return network_connection.close() if self.job: self.job.extra_metadata.update(sync_state=State.COMPLETED) self.job.save_meta() dataset_cache.deactivate() logger.info(\"Syncing", ":type completed_msg: str \"\"\" @run_once def session_creation(transfer_session): \"\"\" A session", ":type noninteractive: bool \"\"\" tracker = self.start_progress(total=100) def stats_msg(transfer_session): transfer_total", "ID of the account to be synced\", ) parser.add_argument( \"--no-provision\",", "remotely integrating data can take a while and the request", ") self._session_tracker_adapter( sync_client.signals.session, \"Creating push transfer session\", \"Completed push transfer", "import State from kolibri.core.auth.management.utils import get_facility from kolibri.core.auth.management.utils import run_once", 
"<filename>kolibri/core/auth/management/commands/sync.py import json import logging import math import re from", "bytes_for_humans from kolibri.core.tasks.exceptions import UserCancelledError from kolibri.core.tasks.management.commands.base import AsyncCommand from", "noninteractive or tracker.progressbar is None: logger.info(message) def handler(transfer_session): tracker.update_progress( message=message,", "(not self.job or self.job.cancellable): raise UserCancelledError() def _handle_pull( self, sync_session_client,", "\"--username\", type=str, help=\"username of superuser or facility admin on server", "we are syncing with\", ) parser.add_argument( \"--password\", type=str, help=\"password of", "server_cert, username = get_client_and_server_certs( username, password, dataset_id, network_connection, user_id=user_id, noninteractive=noninteractive,", "scope_definition_id=ScopeDefinitions.FULL_FACILITY, scope_params=scope_params, ) ) else: # do P2P setup dataset_id", "= \"Allow the syncing of facility data with Kolibri Data", "def _raise_cancel(self, *args, **kwargs): if self.is_cancelled() and (not self.job or", "from the progress tracker we're sent \"\"\" if self.job: self.job.update_progress(progress_fraction,", "tracker.progress), message=stats_msg(transfer_session), extra_data=dict( bytes_sent=transfer_session.bytes_sent, bytes_received=transfer_session.bytes_received, sync_state=sync_state, ), ) if noninteractive", "records_total=transfer_session.records_total, transfer_total=bytes_for_humans(transfer_total), ) def stats(transfer_session): logger.info(stats_msg(transfer_session)) def handler(transfer_session): \"\"\" :type", "from kolibri.core.logger.utils.data import bytes_for_humans from kolibri.core.tasks.exceptions import UserCancelledError from kolibri.core.tasks.management.commands.base", "def session_creation(transfer_session): \"\"\" A session is created individually for pushing", "logger.info(message) def handler(transfer_session): 
tracker.update_progress( message=message, extra_data=dict(sync_state=sync_state) ) if noninteractive or", "Filter from morango.models import InstanceIDModel from morango.models import ScopeDefinition from", "job can't be cancelled while locked if self.job: cancellable =", "\"scopedefinitions\") dataset_cache.clear() dataset_cache.activate() # try to connect to server controller", "client_cert.scope_definition_id == ScopeDefinitions.SINGLE_USER ) filt = get_single_user_sync_filter( dataset_id, user_id, is_read=not", "def handle_async(self, *args, **options): # noqa C901 ( baseurl, facility_id,", "of records to send/retrieve per request\", ) parser.add_argument( \"--no-push\", action=\"store_true\",", "while locked if self.job: cancellable = self.job.cancellable self.job.save_as_cancellable(cancellable=False) with db_lock():", "parser.add_argument( \"--no-provision\", action=\"store_true\", help=\"do not create a facility and temporary", "if self.job: self.job.save_as_cancellable(cancellable=cancellable) def _raise_cancel(self, *args, **kwargs): if self.is_cancelled() and", "({})\".format(TRANSFER_MESSAGE), State.PUSHING, noninteractive, ) self._queueing_tracker_adapter( sync_client.signals.dequeuing, \"Remotely integrating data\", State.REMOTE_DEQUEUING,", "progress tracker we're sent \"\"\" if self.job: self.job.update_progress(progress_fraction, 1.0) self.job.extra_metadata.update(progress.extra_data)", "from morango.models import Filter from morango.models import InstanceIDModel from morango.models", "not sync with itself. 
Please recheck base URL and try", ") filt = get_single_user_sync_filter( dataset_id, user_id, is_read=not client_is_single_user ) sync_client.initialize(Filter(filt))", "a cert for this facility server_certs = network_connection.get_remote_certificates( dataset_id, scope_def_id=ScopeDefinitions.FULL_FACILITY", "default=DATA_PORTAL_SYNCING_BASE_URL, dest=\"baseurl\" ) parser.add_argument(\"--noninteractive\", action=\"store_true\") parser.add_argument( \"--chunk-size\", type=int, default=500, help=\"Chunk", ":type sync_session_client: morango.sync.syncsession.SyncSessionClient :type noninteractive: bool :type dataset_id: str \"\"\"", "= ( server_certs[0] if server_certs else network_connection.push_signed_client_certificate_chain( local_parent_cert=client_cert, scope_definition_id=ScopeDefinitions.FULL_FACILITY, scope_params=scope_params,", "can't be cancelled while locked if self.job: cancellable = self.job.cancellable", "if not no_provision: with self._lock(): if user_id: provision_single_user_device(user_id) else: create_superuser_and_provision_device(", "facility to sync\" ) parser.add_argument( \"--baseurl\", type=str, default=DATA_PORTAL_SYNCING_BASE_URL, dest=\"baseurl\" )", "since remotely integrating data can take a while and the", "dataset_id, user_id, is_read=client_is_single_user ) sync_client.initialize(Filter(filt)) sync_client.run() with self._lock(): sync_client.finalize() def", "def stats_msg(transfer_session): transfer_total = ( transfer_session.bytes_sent + transfer_session.bytes_received ) return", "client_cert, server_cert, username = get_client_and_server_certs( username, password, dataset_id, network_connection, noninteractive=noninteractive,", "data\", State.REMOTE_DEQUEUING, noninteractive, ) self._session_tracker_adapter( sync_client.signals.session, \"Creating push transfer session\",", "user_id, no_push, no_pull, noninteractive, no_provision, ) = ( options[\"baseurl\"], options[\"facility\"],", "username, password, dataset_id, 
network_connection, user_id=user_id, noninteractive=noninteractive, ) scopes = [client_cert.scope_definition_id,", "server_cert, username = get_client_and_server_certs( username, password, dataset_id, network_connection, noninteractive=noninteractive, )", "self.is_cancelled() and (not self.job or self.job.cancellable): raise UserCancelledError() def _handle_pull(", "import logging import math import re from contextlib import contextmanager", "run_once from kolibri.core.auth.models import dataset_cache from kolibri.core.logger.utils.data import bytes_for_humans from", "single-user sync if not facility_id: raise CommandError( \"Facility ID must", "# get primary partition scope_params = json.loads(client_cert.scope_params) dataset_id = scope_params[\"dataset_id\"]", "transfer_session.records_total == 0: logger.info(\"There are no records to transfer\") logger.info(completed_msg)", "transfer session\", \"Completed push transfer session\", ) with self._lock(): if", "no_push, no_pull, noninteractive, no_provision, ) = ( options[\"baseurl\"], options[\"facility\"], options[\"chunk_size\"],", "import MorangoProfileController from ..utils import create_superuser_and_provision_device from ..utils import get_baseurl", "bytes_sent=transfer_session.bytes_sent, bytes_received=transfer_session.bytes_received, sync_state=sync_state, ), ) if noninteractive or tracker.progressbar is", "options[\"no_pull\"], options[\"noninteractive\"], options[\"no_provision\"], ) PORTAL_SYNC = baseurl == DATA_PORTAL_SYNCING_BASE_URL #", "the server server_cert = ( server_certs[0] if server_certs else network_connection.push_signed_client_certificate_chain(", "to sync with itself, which we don't allow if (", "more time at end to capture in logging output signal_group.completed.connect(stats)", "cert for this facility server_certs = network_connection.get_remote_certificates( dataset_id, scope_def_id=ScopeDefinitions.FULL_FACILITY )", "\"--password\", type=str, help=\"password of superuser or 
facility admin on server", "data from the server\" ) parser.add_argument( \"--username\", type=str, help=\"username of", "if not no_push: self._handle_push( sync_session_client, noninteractive, dataset_id, client_cert, server_cert, user_id=user_id,", "identifier=facility_id, noninteractive=noninteractive ) client_cert, server_cert, username = get_client_and_server_certs( username, password,", "kolibri.core.logger.utils.data import bytes_for_humans from kolibri.core.tasks.exceptions import UserCancelledError from kolibri.core.tasks.management.commands.base import", "re.match(\"[a-f0-9]{32}\", user_id): raise CommandError(\"User ID must be a 32-character UUID", "self.job: self.job.extra_metadata.update(sync_state=State.SESSION_CREATION) @run_once def session_destruction(transfer_session): if transfer_session.records_total == 0: logger.info(\"There", "State.REMOTE_DEQUEUING, noninteractive, ) self._session_tracker_adapter( sync_client.signals.session, \"Creating push transfer session\", \"Completed", "= ( transfer_session.bytes_sent + transfer_session.bytes_received ) return message.format( records_transferred=transfer_session.records_transferred, records_total=transfer_session.records_total,", "noninteractive=noninteractive, ) logger.info(\"Syncing has been initiated (this may take a", "\"\"\" progress = ( 100 * transfer_session.records_transferred / float(transfer_session.records_total) )", "_handle_pull( self, sync_session_client, noninteractive, dataset_id, client_cert, server_cert, user_id, ): \"\"\"", "\"Locally integrating received data\", State.LOCAL_DEQUEUING, noninteractive, ) self._session_tracker_adapter( sync_client.signals.session, \"Creating", ") if not no_provision: with self._lock(): if user_id: provision_single_user_device(user_id) else:", "InstanceIDModel from morango.models import ScopeDefinition from morango.sync.controller import MorangoProfileController from", "to transfer\") logger.info(completed_msg) 
signal_group.started.connect(session_creation) signal_group.completed.connect(session_destruction) def _transfer_tracker_adapter( self, signal_group, message,", "import get_single_user_sync_filter from ..utils import provision_single_user_device from kolibri.core.auth.constants.morango_sync import PROFILE_FACILITY_DATA", "), ) if noninteractive or tracker.progressbar is None: signal_group.started.connect(stats) signal_group.in_progress.connect(stats)", "data with Kolibri Data Portal or another Kolibri device.\" def", "own for the specific facility client_cert = ( facility.dataset.get_owned_certificates() .filter(scope_definition_id=ScopeDefinitions.FULL_FACILITY)", "network_connection, noninteractive=noninteractive, ) logger.info(\"Syncing has been initiated (this may take", "if not facility_id: raise CommandError( \"Facility ID must be specified", "with itself, which we don't allow if ( InstanceIDModel.get_or_create_current_instance()[0].id ==", "server_cert = ( server_certs[0] if server_certs else network_connection.push_signed_client_certificate_chain( local_parent_cert=client_cert, scope_definition_id=ScopeDefinitions.FULL_FACILITY,", "= network_connection.get_remote_certificates( dataset_id, scope_def_id=ScopeDefinitions.FULL_FACILITY ) # if necessary, push a", "synced\", ) parser.add_argument( \"--no-provision\", action=\"store_true\", help=\"do not create a facility", "\"\"\" A session is created individually for pushing and pulling", ":type sync_state: str :type noninteractive: bool \"\"\" tracker = self.start_progress(total=100)", "single-user syncing, the user ID of the account to be", "\"Receiving data ({})\".format(TRANSFER_MESSAGE), State.PULLING, noninteractive, ) self._queueing_tracker_adapter( sync_client.signals.dequeuing, \"Locally integrating", "logger.info(\"Syncing has been initiated (this may take a while)...\") sync_session_client", "= self.job.cancellable self.job.save_as_cancellable(cancellable=False) with db_lock(): yield if self.job: 
self.job.save_as_cancellable(cancellable=cancellable) def", "good. sync_client.finalize(allow_server_timeout=True) def _update_all_progress(self, progress_fraction, progress): \"\"\" Override parent progress", "to send\", State.LOCAL_QUEUING, noninteractive, ) self._transfer_tracker_adapter( sync_client.signals.transferring, \"Sending data ({})\".format(TRANSFER_MESSAGE),", "self._queueing_tracker_adapter( sync_client.signals.dequeuing, \"Remotely integrating data\", State.REMOTE_DEQUEUING, noninteractive, ) self._session_tracker_adapter( sync_client.signals.session,", "ScopeDefinitions.SINGLE_USER ) filt = get_single_user_sync_filter( dataset_id, user_id, is_read=not client_is_single_user )", "this facility server_certs = network_connection.get_remote_certificates( dataset_id, scope_def_id=ScopeDefinitions.FULL_FACILITY ) # if", "self._handle_push( sync_session_client, noninteractive, dataset_id, client_cert, server_cert, user_id=user_id, ) if not", "= \"{records_transferred}/{records_total}, {transfer_total}\" logger = logging.getLogger(__name__) class Command(AsyncCommand): help =", "sync_client.finalize(allow_server_timeout=True) def _update_all_progress(self, progress_fraction, progress): \"\"\" Override parent progress update", "Override parent progress update callback to report from the progress", "\"\"\" if self.job: self.job.update_progress(progress_fraction, 1.0) self.job.extra_metadata.update(progress.extra_data) self.job.save_meta() def _session_tracker_adapter(self, signal_group,", ":type noninteractive: bool :type dataset_id: str \"\"\" sync_client = sync_session_client.get_push_client()", "client_cert: raise CommandError( \"This device does not own a certificate", "# allow server timeout since remotely integrating data can take", "if not no_pull: self._handle_pull( sync_session_client, noninteractive, dataset_id, client_cert, server_cert, user_id=user_id,", "preparing data to send\", State.LOCAL_QUEUING, noninteractive, ) 
self._transfer_tracker_adapter( sync_client.signals.transferring, \"Sending", "== network_connection.server_info[\"instance_id\"] ): raise CommandError( \"Device can not sync with", "noninteractive: bool :type dataset_id: str \"\"\" sync_client = sync_session_client.get_pull_client() sync_client.signals.queuing.connect(self._raise_cancel)", "import conf DATA_PORTAL_SYNCING_BASE_URL = conf.OPTIONS[\"Urls\"][\"DATA_PORTAL_SYNCING_BASE_URL\"] TRANSFER_MESSAGE = \"{records_transferred}/{records_total}, {transfer_total}\" logger", "for pushing and pulling \"\"\" logger.info(started_msg) if self.job: self.job.extra_metadata.update(sync_state=State.SESSION_CREATION) @run_once", "str :type sync_state: str :type noninteractive: bool \"\"\" tracker =", ") elif PORTAL_SYNC: # do portal sync setup facility =", "import get_facility from kolibri.core.auth.management.utils import run_once from kolibri.core.auth.models import dataset_cache", "size of records to send/retrieve per request\", ) parser.add_argument( \"--no-push\",", "to connect to server controller = MorangoProfileController(PROFILE_FACILITY_DATA) network_connection = controller.create_network_connection(baseurl)", "user_id=user_id, noninteractive=noninteractive, ) scopes = [client_cert.scope_definition_id, server_cert.scope_definition_id] if len(set(scopes)) !=", "PORTAL_SYNC = baseurl == DATA_PORTAL_SYNCING_BASE_URL # validate url that is", "State.PULLING, noninteractive, ) self._queueing_tracker_adapter( sync_client.signals.dequeuing, \"Locally integrating received data\", State.LOCAL_DEQUEUING,", "self.job.extra_metadata.update(progress.extra_data) self.job.save_meta() def _session_tracker_adapter(self, signal_group, started_msg, completed_msg): \"\"\" Attaches a", "kolibri.core.auth.management.utils import run_once from kolibri.core.auth.models import dataset_cache from kolibri.core.logger.utils.data import", "user ID of the account to be synced\", ) parser.add_argument(", "noqa C901 ( baseurl, facility_id, 
chunk_size, username, password, user_id, no_push,", "dashes)\") dataset_id = get_dataset_id( baseurl, identifier=facility_id, noninteractive=True ) client_cert, server_cert,", "password, dataset_id, network_connection, user_id=user_id, noninteractive=noninteractive, ) scopes = [client_cert.scope_definition_id, server_cert.scope_definition_id]", "DATA_PORTAL_SYNCING_BASE_URL # validate url that is passed in if not", "_lock(self): cancellable = False # job can't be cancelled while", "user_id=user_id, ) # and push our own data to server", "or self.job.cancellable): raise UserCancelledError() def _handle_pull( self, sync_session_client, noninteractive, dataset_id,", "scope_params = json.loads(client_cert.scope_params) dataset_id = scope_params[\"dataset_id\"] # check if the", "CommandError( \"Facility ID must be specified in order to do", "from morango.models import ScopeDefinition from morango.sync.controller import MorangoProfileController from ..utils", "handler to session creation signals :type signal_group: morango.sync.syncsession.SyncSignalGroup :type started_msg:", "( InstanceIDModel.get_or_create_current_instance()[0].id == network_connection.server_info[\"instance_id\"] ): raise CommandError( \"Device can not", "\"\"\" sync_client = sync_session_client.get_pull_client() sync_client.signals.queuing.connect(self._raise_cancel) sync_client.signals.transferring.connect(self._raise_cancel) self._queueing_tracker_adapter( sync_client.signals.queuing, \"Remotely preparing", "parser.add_argument( \"--password\", type=str, help=\"password of superuser or facility admin on", "client_cert, server_cert, user_id=user_id, ) # and push our own data", "is created individually for pushing and pulling \"\"\" logger.info(started_msg) if", "do a single-user sync, one device must have a single-user", "if ( InstanceIDModel.get_or_create_current_instance()[0].id == network_connection.server_info[\"instance_id\"] ): raise CommandError( \"Device can", "signal_group.connect(handler) # 
log one more time at end to capture", "C901 ( baseurl, facility_id, chunk_size, username, password, user_id, no_push, no_pull,", "self._queueing_tracker_adapter( sync_client.signals.queuing, \"Locally preparing data to send\", State.LOCAL_QUEUING, noninteractive, )", "): \"\"\" Attaches a signal handler to pushing/pulling signals :type", "timeout since remotely integrating data can take a while and", "dataset_cache.activate() # try to connect to server controller = MorangoProfileController(PROFILE_FACILITY_DATA)", "facility.dataset.get_owned_certificates() .filter(scope_definition_id=ScopeDefinitions.FULL_FACILITY) .first() ) if not client_cert: raise CommandError( \"This", "except UserCancelledError: if self.job: self.job.extra_metadata.update(sync_state=State.CANCELLED) self.job.save_meta() logger.info(\"Syncing has been cancelled.\")", "self._lock(): if user_id: provision_single_user_device(user_id) else: create_superuser_and_provision_device( username, dataset_id, noninteractive=noninteractive )", "queuing/dequeuing signals :type signal_group: morango.sync.syncsession.SyncSignalGroup :type message: str :type sync_state:", "get_dataset_id from ..utils import get_single_user_sync_filter from ..utils import provision_single_user_device from", "parent progress update callback to report from the progress tracker", "signal_group.started.connect(session_creation) signal_group.completed.connect(session_destruction) def _transfer_tracker_adapter( self, signal_group, message, sync_state, noninteractive ):", "\"--baseurl\", type=str, default=DATA_PORTAL_SYNCING_BASE_URL, dest=\"baseurl\" ) parser.add_argument(\"--noninteractive\", action=\"store_true\") parser.add_argument( \"--chunk-size\", type=int,", "client_cert = ( facility.dataset.get_owned_certificates() .filter(scope_definition_id=ScopeDefinitions.FULL_FACILITY) .first() ) if not client_cert:", "syncing of facility data with Kolibri Data Portal or another", "chunk_size=chunk_size ) try: # pull from server if not 
no_pull:", "str :type noninteractive: bool \"\"\" tracker = self.start_progress(total=2) def started(transfer_session):", "don't allow if ( InstanceIDModel.get_or_create_current_instance()[0].id == network_connection.server_info[\"instance_id\"] ): raise CommandError(", "can't cancel remotely integrating data if self.job: self.job.save_as_cancellable(cancellable=False) # allow", "and the other a full-facility certificate.\" ) elif PORTAL_SYNC: #", "CommandError( \"Device can not sync with itself. Please recheck base", "= ( client_cert.scope_definition_id == ScopeDefinitions.SINGLE_USER ) filt = get_single_user_sync_filter( dataset_id,", "# noqa C901 ( baseurl, facility_id, chunk_size, username, password, user_id,", "\"--facility\", action=\"store\", type=str, help=\"ID of facility to sync\" ) parser.add_argument(", "to the server\" ) parser.add_argument( \"--no-pull\", action=\"store_true\", help=\"Do not pull", "the specific facility client_cert = ( facility.dataset.get_owned_certificates() .filter(scope_definition_id=ScopeDefinitions.FULL_FACILITY) .first() )", "migrating database if not ScopeDefinition.objects.filter(): call_command(\"loaddata\", \"scopedefinitions\") dataset_cache.clear() dataset_cache.activate() #", "# pull from server if not no_pull: self._handle_pull( sync_session_client, noninteractive,", "parser.add_argument( \"--no-push\", action=\"store_true\", help=\"Do not push data to the server\"", "single-user syncing\" ) if not re.match(\"[a-f0-9]{32}\", user_id): raise CommandError(\"User ID", "sync_client.signals.queuing, \"Remotely preparing data\", State.REMOTE_QUEUING, noninteractive, ) self._transfer_tracker_adapter( sync_client.signals.transferring, \"Receiving", "timeout. In that case, we'll assume everything is good. 
sync_client.finalize(allow_server_timeout=True)", "self._queueing_tracker_adapter( sync_client.signals.queuing, \"Remotely preparing data\", State.REMOTE_QUEUING, noninteractive, ) self._transfer_tracker_adapter( sync_client.signals.transferring,", "State.LOCAL_DEQUEUING, noninteractive, ) self._session_tracker_adapter( sync_client.signals.session, \"Creating pull transfer session\", \"Completed", "connect to server controller = MorangoProfileController(PROFILE_FACILITY_DATA) network_connection = controller.create_network_connection(baseurl) #", "_handle_push( self, sync_session_client, noninteractive, dataset_id, client_cert, server_cert, user_id, ): \"\"\"", "noninteractive ): \"\"\" Attaches a signal handler to queuing/dequeuing signals", "kolibri.core.utils.lock import db_lock from kolibri.utils import conf DATA_PORTAL_SYNCING_BASE_URL = conf.OPTIONS[\"Urls\"][\"DATA_PORTAL_SYNCING_BASE_URL\"]", "- tracker.progress), message=stats_msg(transfer_session), extra_data=dict( bytes_sent=transfer_session.bytes_sent, bytes_received=transfer_session.bytes_received, sync_state=sync_state, ), ) if", "or facility admin on server we are syncing with\", )", "if not re.match(\"[a-f0-9]{32}\", user_id): raise CommandError(\"User ID must be a", "we own for the specific facility client_cert = ( facility.dataset.get_owned_certificates()", "100 * transfer_session.records_transferred / float(transfer_session.records_total) ) tracker.update_progress( increment=math.ceil(progress - tracker.progress),", "import contextmanager from django.core.management import call_command from django.core.management.base import CommandError", "Command(AsyncCommand): help = \"Allow the syncing of facility data with", "django.core.management.base import CommandError from morango.models import Filter from morango.models import", "not ScopeDefinition.objects.filter(): call_command(\"loaddata\", \"scopedefinitions\") dataset_cache.clear() dataset_cache.activate() # try to connect", ") # 
parser.add_argument(\"--scope-id\", type=str, default=FULL_FACILITY) def handle_async(self, *args, **options): #", "server\" ) parser.add_argument( \"--no-pull\", action=\"store_true\", help=\"Do not pull data from", "do single-user syncing\" ) if not re.match(\"[a-f0-9]{32}\", user_id): raise CommandError(\"User", "signal_group.completed.connect(session_destruction) def _transfer_tracker_adapter( self, signal_group, message, sync_state, noninteractive ): \"\"\"", "= ( 100 * transfer_session.records_transferred / float(transfer_session.records_total) ) tracker.update_progress( increment=math.ceil(progress", "self.start_progress(total=2) def started(transfer_session): dataset_cache.clear() if noninteractive or tracker.progressbar is None:", "client_cert.scope_definition_id == ScopeDefinitions.SINGLE_USER ) filt = get_single_user_sync_filter( dataset_id, user_id, is_read=client_is_single_user", "tracker.progressbar is None: signal_group.started.connect(stats) signal_group.in_progress.connect(stats) signal_group.connect(handler) # log one more", "with self._lock(): if user_id: provision_single_user_device(user_id) else: create_superuser_and_provision_device( username, dataset_id, noninteractive=noninteractive", "help = \"Allow the syncing of facility data with Kolibri", "self.job: self.job.extra_metadata.update(sync_state=State.COMPLETED) self.job.save_meta() dataset_cache.deactivate() logger.info(\"Syncing has been completed.\") @contextmanager def", ") parser.add_argument( \"--user\", type=str, help=\"for single-user syncing, the user ID", "with self._lock(): sync_client.finalize() def _handle_push( self, sync_session_client, noninteractive, dataset_id, client_cert,", "push a cert up to the server server_cert = (", "the server\" ) parser.add_argument( \"--username\", type=str, help=\"username of superuser or", ":type transfer_session: morango.models.core.TransferSession \"\"\" progress = ( 100 * transfer_session.records_transferred", "action=\"store_true\", 
help=\"Do not push data to the server\" ) parser.add_argument(", "noninteractive, ) self._session_tracker_adapter( sync_client.signals.session, \"Creating push transfer session\", \"Completed push", "bytes_received=transfer_session.bytes_received, sync_state=sync_state, ), ) if noninteractive or tracker.progressbar is None:", "for Facility: {}\".format( facility.name ) ) # get primary partition", "username, password, user_id, no_push, no_pull, noninteractive, no_provision, ) = (", "server if not no_push: self._handle_push( sync_session_client, noninteractive, dataset_id, client_cert, server_cert,", "are syncing with\", ) parser.add_argument( \"--password\", type=str, help=\"password of superuser", "sync_session_client.get_pull_client() sync_client.signals.queuing.connect(self._raise_cancel) sync_client.signals.transferring.connect(self._raise_cancel) self._queueing_tracker_adapter( sync_client.signals.queuing, \"Remotely preparing data\", State.REMOTE_QUEUING, noninteractive,", "again.\" ) if user_id: # it's a single-user sync if", "type=str, default=DATA_PORTAL_SYNCING_BASE_URL, dest=\"baseurl\" ) parser.add_argument(\"--noninteractive\", action=\"store_true\") parser.add_argument( \"--chunk-size\", type=int, default=500,", "another Kolibri device.\" def add_arguments(self, parser): parser.add_argument( \"--facility\", action=\"store\", type=str,", "does not own a certificate for Facility: {}\".format( facility.name )", "( facility.dataset.get_owned_certificates() .filter(scope_definition_id=ScopeDefinitions.FULL_FACILITY) .first() ) if not client_cert: raise CommandError(", "help=\"do not create a facility and temporary superuser\", ) #", "len(set(scopes)) != 2: raise CommandError( \"To do a single-user sync,", "network_connection, user_id=user_id, noninteractive=noninteractive, ) scopes = [client_cert.scope_definition_id, server_cert.scope_definition_id] if len(set(scopes))", "check if the server already has a cert for this", "do P2P setup dataset_id = 
get_dataset_id( baseurl, identifier=facility_id, noninteractive=noninteractive )", "sync_client.run() with self._lock(): sync_client.finalize() def _handle_push( self, sync_session_client, noninteractive, dataset_id,", "facility admin on server we are syncing with\", ) parser.add_argument(", "# call this in case user directly syncs without migrating", "transfer session\", ) if not user_id: # full-facility sync sync_client.initialize(Filter(dataset_id))", ") if not user_id: # full-facility sync sync_client.initialize(Filter(dataset_id)) else: #", "sync_client.signals.transferring.connect(self._raise_cancel) self._queueing_tracker_adapter( sync_client.signals.queuing, \"Remotely preparing data\", State.REMOTE_QUEUING, noninteractive, ) self._transfer_tracker_adapter(", "( server_certs[0] if server_certs else network_connection.push_signed_client_certificate_chain( local_parent_cert=client_cert, scope_definition_id=ScopeDefinitions.FULL_FACILITY, scope_params=scope_params, )", "check for the certs we own for the specific facility", "is trying to sync with itself, which we don't allow", "we are syncing with\", ) parser.add_argument( \"--user\", type=str, help=\"for single-user", "get_client_and_server_certs from ..utils import get_dataset_id from ..utils import get_single_user_sync_filter from", ") if not client_cert: raise CommandError( \"This device does not", "admin on server we are syncing with\", ) parser.add_argument( \"--password\",", "action=\"store_true\") parser.add_argument( \"--chunk-size\", type=int, default=500, help=\"Chunk size of records to", "to sync\" ) parser.add_argument( \"--baseurl\", type=str, default=DATA_PORTAL_SYNCING_BASE_URL, dest=\"baseurl\" ) parser.add_argument(\"--noninteractive\",", "facility and temporary superuser\", ) # parser.add_argument(\"--scope-id\", type=str, default=FULL_FACILITY) def", "locked if self.job: cancellable = self.job.cancellable self.job.save_as_cancellable(cancellable=False) with db_lock(): yield", 
"morango.models import InstanceIDModel from morango.models import ScopeDefinition from morango.sync.controller import", "push data to the server\" ) parser.add_argument( \"--no-pull\", action=\"store_true\", help=\"Do", "self.job.cancellable self.job.save_as_cancellable(cancellable=False) with db_lock(): yield if self.job: self.job.save_as_cancellable(cancellable=cancellable) def _raise_cancel(self,", "if instance_ids are equal, this means device is trying to", "get_client_and_server_certs( username, password, dataset_id, network_connection, noninteractive=noninteractive, ) logger.info(\"Syncing has been", "logger = logging.getLogger(__name__) class Command(AsyncCommand): help = \"Allow the syncing", "be specified in order to do single-user syncing\" ) if", "cancelled.\") return network_connection.close() if self.job: self.job.extra_metadata.update(sync_state=State.COMPLETED) self.job.save_meta() dataset_cache.deactivate() logger.info(\"Syncing has", "if self.job: self.job.extra_metadata.update(sync_state=State.COMPLETED) self.job.save_meta() dataset_cache.deactivate() logger.info(\"Syncing has been completed.\") @contextmanager", "sync_client.signals.transferring.connect(self._raise_cancel) self._queueing_tracker_adapter( sync_client.signals.queuing, \"Locally preparing data to send\", State.LOCAL_QUEUING, noninteractive,", "sync_state: str :type noninteractive: bool \"\"\" tracker = self.start_progress(total=100) def", "or tracker.progressbar is None: signal_group.started.connect(stats) signal_group.in_progress.connect(stats) signal_group.connect(handler) # log one", "superuser\", ) # parser.add_argument(\"--scope-id\", type=str, default=FULL_FACILITY) def handle_async(self, *args, **options):", "baseurl = get_baseurl(baseurl) # call this in case user directly", "self.job.save_meta() logger.info(\"Syncing has been cancelled.\") return network_connection.close() if self.job: self.job.extra_metadata.update(sync_state=State.COMPLETED)", "dataset_id, 
noninteractive=noninteractive ) except UserCancelledError: if self.job: self.job.extra_metadata.update(sync_state=State.CANCELLED) self.job.save_meta() logger.info(\"Syncing", "tracker we're sent \"\"\" if self.job: self.job.update_progress(progress_fraction, 1.0) self.job.extra_metadata.update(progress.extra_data) self.job.save_meta()", "pull data from the server\" ) parser.add_argument( \"--username\", type=str, help=\"username", "pulling \"\"\" logger.info(started_msg) if self.job: self.job.extra_metadata.update(sync_state=State.SESSION_CREATION) @run_once def session_destruction(transfer_session): if", "State from kolibri.core.auth.management.utils import get_facility from kolibri.core.auth.management.utils import run_once from", "sync setup facility = get_facility( facility_id=facility_id, noninteractive=noninteractive ) # check", "data ({})\".format(TRANSFER_MESSAGE), State.PULLING, noninteractive, ) self._queueing_tracker_adapter( sync_client.signals.dequeuing, \"Locally integrating received", "..utils import get_client_and_server_certs from ..utils import get_dataset_id from ..utils import", "sync_session_client = network_connection.create_sync_session( client_cert, server_cert, chunk_size=chunk_size ) try: # pull", "a signal handler to session creation signals :type signal_group: morango.sync.syncsession.SyncSignalGroup", "= self.start_progress(total=100) def stats_msg(transfer_session): transfer_total = ( transfer_session.bytes_sent + transfer_session.bytes_received", "dataset_id: str \"\"\" sync_client = sync_session_client.get_push_client() sync_client.signals.transferring.connect(self._raise_cancel) self._queueing_tracker_adapter( sync_client.signals.queuing, \"Locally", ") self._transfer_tracker_adapter( sync_client.signals.transferring, \"Sending data ({})\".format(TRANSFER_MESSAGE), State.PUSHING, noninteractive, ) self._queueing_tracker_adapter(", "while and the request # could timeout. 
In that case,", "in logging output signal_group.completed.connect(stats) def _queueing_tracker_adapter( self, signal_group, message, sync_state,", "\"--no-push\", action=\"store_true\", help=\"Do not push data to the server\" )", "= get_single_user_sync_filter( dataset_id, user_id, is_read=not client_is_single_user ) sync_client.initialize(Filter(filt)) sync_client.run() #", ") if user_id: # it's a single-user sync if not", "already has a cert for this facility server_certs = network_connection.get_remote_certificates(", "facility_id=facility_id, noninteractive=noninteractive ) # check for the certs we own", "= sync_session_client.get_push_client() sync_client.signals.transferring.connect(self._raise_cancel) self._queueing_tracker_adapter( sync_client.signals.queuing, \"Locally preparing data to send\",", "one more time at end to capture in logging output", "Attaches a signal handler to queuing/dequeuing signals :type signal_group: morango.sync.syncsession.SyncSignalGroup", "\"\"\" Attaches a signal handler to queuing/dequeuing signals :type signal_group:", "to pushing/pulling signals :type signal_group: morango.sync.syncsession.SyncSignalGroup :type message: str :type", "to server if not no_push: self._handle_push( sync_session_client, noninteractive, dataset_id, client_cert,", ") # get primary partition scope_params = json.loads(client_cert.scope_params) dataset_id =", "server_cert, user_id=user_id, ) if not no_provision: with self._lock(): if user_id:", "per request\", ) parser.add_argument( \"--no-push\", action=\"store_true\", help=\"Do not push data", "dataset_id, network_connection, noninteractive=noninteractive, ) logger.info(\"Syncing has been initiated (this may", "default=500, help=\"Chunk size of records to send/retrieve per request\", )", "self._lock(): sync_client.finalize() def _handle_push( self, sync_session_client, noninteractive, dataset_id, client_cert, server_cert,", "syncing\" ) if not re.match(\"[a-f0-9]{32}\", user_id): raise 
CommandError(\"User ID must", "logger.info(\"There are no records to transfer\") logger.info(completed_msg) signal_group.started.connect(session_creation) signal_group.completed.connect(session_destruction) def", "user_id: # full-facility sync sync_client.initialize(Filter(dataset_id)) else: # single-user sync client_is_single_user", "tracker = self.start_progress(total=2) def started(transfer_session): dataset_cache.clear() if noninteractive or tracker.progressbar", "\"To do a single-user sync, one device must have a", "call_command from django.core.management.base import CommandError from morango.models import Filter from", "if self.job: cancellable = self.job.cancellable self.job.save_as_cancellable(cancellable=False) with db_lock(): yield if", "Please recheck base URL and try again.\" ) if user_id:", "username = get_client_and_server_certs( username, password, dataset_id, network_connection, user_id=user_id, noninteractive=noninteractive, )", "kolibri.core.tasks.exceptions import UserCancelledError from kolibri.core.tasks.management.commands.base import AsyncCommand from kolibri.core.utils.lock import", "baseurl == DATA_PORTAL_SYNCING_BASE_URL # validate url that is passed in", "not facility_id: raise CommandError( \"Facility ID must be specified in", "server_certs else network_connection.push_signed_client_certificate_chain( local_parent_cert=client_cert, scope_definition_id=ScopeDefinitions.FULL_FACILITY, scope_params=scope_params, ) ) else: #", "from morango.sync.controller import MorangoProfileController from ..utils import create_superuser_and_provision_device from ..utils", "sync_state, noninteractive ): \"\"\" Attaches a signal handler to pushing/pulling", "str :type noninteractive: bool \"\"\" tracker = self.start_progress(total=100) def stats_msg(transfer_session):", "capture in logging output signal_group.completed.connect(stats) def _queueing_tracker_adapter( self, signal_group, message,", "not create a facility and temporary superuser\", ) # 
parser.add_argument(\"--scope-id\",", "started_msg: str :type completed_msg: str \"\"\" @run_once def session_creation(transfer_session): \"\"\"", "sync_session_client: morango.sync.syncsession.SyncSessionClient :type noninteractive: bool :type dataset_id: str \"\"\" sync_client", "noninteractive=noninteractive ) except UserCancelledError: if self.job: self.job.extra_metadata.update(sync_state=State.CANCELLED) self.job.save_meta() logger.info(\"Syncing has", "that case, we'll assume everything is good. sync_client.finalize(allow_server_timeout=True) def _update_all_progress(self,", "self.job: self.job.save_as_cancellable(cancellable=cancellable) def _raise_cancel(self, *args, **kwargs): if self.is_cancelled() and (not", "options[\"no_provision\"], ) PORTAL_SYNC = baseurl == DATA_PORTAL_SYNCING_BASE_URL # validate url", "Attaches a signal handler to pushing/pulling signals :type signal_group: morango.sync.syncsession.SyncSignalGroup", "of facility to sync\" ) parser.add_argument( \"--baseurl\", type=str, default=DATA_PORTAL_SYNCING_BASE_URL, dest=\"baseurl\"", "math import re from contextlib import contextmanager from django.core.management import", "request\", ) parser.add_argument( \"--no-push\", action=\"store_true\", help=\"Do not push data to", "scopes = [client_cert.scope_definition_id, server_cert.scope_definition_id] if len(set(scopes)) != 2: raise CommandError(", "PORTAL_SYNC: # do portal sync setup facility = get_facility( facility_id=facility_id,", "import CommandError from morango.models import Filter from morango.models import InstanceIDModel", "[client_cert.scope_definition_id, server_cert.scope_definition_id] if len(set(scopes)) != 2: raise CommandError( \"To do", "network_connection.get_remote_certificates( dataset_id, scope_def_id=ScopeDefinitions.FULL_FACILITY ) # if necessary, push a cert", "if self.job: self.job.save_as_cancellable(cancellable=False) # allow server timeout since remotely integrating", "take a while and the request # could 
timeout. In", "syncing with\", ) parser.add_argument( \"--user\", type=str, help=\"for single-user syncing, the", "username, dataset_id, noninteractive=noninteractive ) except UserCancelledError: if self.job: self.job.extra_metadata.update(sync_state=State.CANCELLED) self.job.save_meta()", "None: signal_group.started.connect(stats) signal_group.in_progress.connect(stats) signal_group.connect(handler) # log one more time at", "PORTAL_SYNC: baseurl = get_baseurl(baseurl) # call this in case user", "Kolibri device.\" def add_arguments(self, parser): parser.add_argument( \"--facility\", action=\"store\", type=str, help=\"ID", "tracker.update_progress( message=message, extra_data=dict(sync_state=sync_state) ) if noninteractive or tracker.progressbar is None:", "equal, this means device is trying to sync with itself,", "update callback to report from the progress tracker we're sent", "of superuser or facility admin on server we are syncing", "get_facility( facility_id=facility_id, noninteractive=noninteractive ) # check for the certs we", "server\" ) parser.add_argument( \"--username\", type=str, help=\"username of superuser or facility", "else: # single-user sync client_is_single_user = ( client_cert.scope_definition_id == ScopeDefinitions.SINGLE_USER", "user_id, is_read=not client_is_single_user ) sync_client.initialize(Filter(filt)) sync_client.run() # we can't cancel", "is None: logger.info(message) def handler(transfer_session): tracker.update_progress( message=message, extra_data=dict(sync_state=sync_state) ) if", "callback to report from the progress tracker we're sent \"\"\"", "from contextlib import contextmanager from django.core.management import call_command from django.core.management.base", "\"--chunk-size\", type=int, default=500, help=\"Chunk size of records to send/retrieve per", "a signal handler to queuing/dequeuing signals :type signal_group: morango.sync.syncsession.SyncSignalGroup :type", "of the account to be synced\", ) parser.add_argument( 
\"--no-provision\", action=\"store_true\",", "base URL and try again.\" ) if user_id: # it's", "data to server if not no_push: self._handle_push( sync_session_client, noninteractive, dataset_id,", "= get_dataset_id( baseurl, identifier=facility_id, noninteractive=True ) client_cert, server_cert, username =", "completed.\") @contextmanager def _lock(self): cancellable = False # job can't", "dataset_cache.deactivate() logger.info(\"Syncing has been completed.\") @contextmanager def _lock(self): cancellable =", "UserCancelledError() def _handle_pull( self, sync_session_client, noninteractive, dataset_id, client_cert, server_cert, user_id,", "type=str, default=FULL_FACILITY) def handle_async(self, *args, **options): # noqa C901 (", "..utils import provision_single_user_device from kolibri.core.auth.constants.morango_sync import PROFILE_FACILITY_DATA from kolibri.core.auth.constants.morango_sync import", "from ..utils import provision_single_user_device from kolibri.core.auth.constants.morango_sync import PROFILE_FACILITY_DATA from kolibri.core.auth.constants.morango_sync", "the other a full-facility certificate.\" ) elif PORTAL_SYNC: # do", "**kwargs): if self.is_cancelled() and (not self.job or self.job.cancellable): raise UserCancelledError()", "can not sync with itself. 
Please recheck base URL and", "= get_client_and_server_certs( username, password, dataset_id, network_connection, noninteractive=noninteractive, ) logger.info(\"Syncing has", "import call_command from django.core.management.base import CommandError from morango.models import Filter", "kolibri.core.auth.constants.morango_sync import PROFILE_FACILITY_DATA from kolibri.core.auth.constants.morango_sync import ScopeDefinitions from kolibri.core.auth.constants.morango_sync import", "type=str, help=\"for single-user syncing, the user ID of the account", ") = ( options[\"baseurl\"], options[\"facility\"], options[\"chunk_size\"], options[\"username\"], options[\"password\"], options[\"user\"], options[\"no_push\"],", ") parser.add_argument( \"--no-pull\", action=\"store_true\", help=\"Do not pull data from the", "= baseurl == DATA_PORTAL_SYNCING_BASE_URL # validate url that is passed", "UUID (no dashes)\") dataset_id = get_dataset_id( baseurl, identifier=facility_id, noninteractive=True )", "push transfer session\", \"Completed push transfer session\", ) with self._lock():", "signal handler to pushing/pulling signals :type signal_group: morango.sync.syncsession.SyncSignalGroup :type message:", "handler(transfer_session): tracker.update_progress( message=message, extra_data=dict(sync_state=sync_state) ) if noninteractive or tracker.progressbar is", "no_push: self._handle_push( sync_session_client, noninteractive, dataset_id, client_cert, server_cert, user_id=user_id, ) if", "): \"\"\" :type sync_session_client: morango.sync.syncsession.SyncSessionClient :type noninteractive: bool :type dataset_id:", "import UserCancelledError from kolibri.core.tasks.management.commands.base import AsyncCommand from kolibri.core.utils.lock import db_lock", "def _handle_pull( self, sync_session_client, noninteractive, dataset_id, client_cert, server_cert, user_id, ):", "received data\", State.LOCAL_DEQUEUING, noninteractive, ) self._session_tracker_adapter( sync_client.signals.session, 
\"Creating pull transfer", "not client_cert: raise CommandError( \"This device does not own a", "server_certs = network_connection.get_remote_certificates( dataset_id, scope_def_id=ScopeDefinitions.FULL_FACILITY ) # if necessary, push", "action=\"store\", type=str, help=\"ID of facility to sync\" ) parser.add_argument( \"--baseurl\",", ":type noninteractive: bool \"\"\" tracker = self.start_progress(total=2) def started(transfer_session): dataset_cache.clear()", "passed in if not PORTAL_SYNC: baseurl = get_baseurl(baseurl) # call", "self.job: cancellable = self.job.cancellable self.job.save_as_cancellable(cancellable=False) with db_lock(): yield if self.job:", "integrating data if self.job: self.job.save_as_cancellable(cancellable=False) # allow server timeout since", "if the server already has a cert for this facility", "remotely integrating data if self.job: self.job.save_as_cancellable(cancellable=False) # allow server timeout", "if user_id: provision_single_user_device(user_id) else: create_superuser_and_provision_device( username, dataset_id, noninteractive=noninteractive ) except", "it's a single-user sync if not facility_id: raise CommandError( \"Facility", "setup facility = get_facility( facility_id=facility_id, noninteractive=noninteractive ) # check for", "= network_connection.create_sync_session( client_cert, server_cert, chunk_size=chunk_size ) try: # pull from", "our own data to server if not no_push: self._handle_push( sync_session_client,", "signal_group.in_progress.connect(stats) signal_group.connect(handler) # log one more time at end to", "data\", State.LOCAL_DEQUEUING, noninteractive, ) self._session_tracker_adapter( sync_client.signals.session, \"Creating pull transfer session\",", "user_id: provision_single_user_device(user_id) else: create_superuser_and_provision_device( username, dataset_id, noninteractive=noninteractive ) except UserCancelledError:", ":type started_msg: str :type completed_msg: str \"\"\" @run_once def 
session_creation(transfer_session):", "take a while)...\") sync_session_client = network_connection.create_sync_session( client_cert, server_cert, chunk_size=chunk_size )", "ScopeDefinitions.SINGLE_USER ) filt = get_single_user_sync_filter( dataset_id, user_id, is_read=client_is_single_user ) sync_client.initialize(Filter(filt))", "import create_superuser_and_provision_device from ..utils import get_baseurl from ..utils import get_client_and_server_certs", ".filter(scope_definition_id=ScopeDefinitions.FULL_FACILITY) .first() ) if not client_cert: raise CommandError( \"This device", ") PORTAL_SYNC = baseurl == DATA_PORTAL_SYNCING_BASE_URL # validate url that", "scope_params[\"dataset_id\"] # check if the server already has a cert", "identifier=facility_id, noninteractive=True ) client_cert, server_cert, username = get_client_and_server_certs( username, password,", ") client_cert, server_cert, username = get_client_and_server_certs( username, password, dataset_id, network_connection,", "scope_def_id=ScopeDefinitions.FULL_FACILITY ) # if necessary, push a cert up to", "= get_single_user_sync_filter( dataset_id, user_id, is_read=client_is_single_user ) sync_client.initialize(Filter(filt)) sync_client.run() with self._lock():", "1.0) self.job.extra_metadata.update(progress.extra_data) self.job.save_meta() def _session_tracker_adapter(self, signal_group, started_msg, completed_msg): \"\"\" Attaches", "from kolibri.core.auth.constants.morango_sync import ScopeDefinitions from kolibri.core.auth.constants.morango_sync import State from kolibri.core.auth.management.utils", "transfer_total=bytes_for_humans(transfer_total), ) def stats(transfer_session): logger.info(stats_msg(transfer_session)) def handler(transfer_session): \"\"\" :type transfer_session:", "self._queueing_tracker_adapter( sync_client.signals.dequeuing, \"Locally integrating received data\", State.LOCAL_DEQUEUING, noninteractive, ) self._session_tracker_adapter(", "get_single_user_sync_filter from ..utils 
import provision_single_user_device from kolibri.core.auth.constants.morango_sync import PROFILE_FACILITY_DATA from", ") sync_client.initialize(Filter(filt)) sync_client.run() with self._lock(): sync_client.finalize() def _handle_push( self, sync_session_client,", "return network_connection.close() if self.job: self.job.extra_metadata.update(sync_state=State.COMPLETED) self.job.save_meta() dataset_cache.deactivate() logger.info(\"Syncing has been", "raise CommandError(\"User ID must be a 32-character UUID (no dashes)\")", "must be a 32-character UUID (no dashes)\") dataset_id = get_dataset_id(", "str \"\"\" sync_client = sync_session_client.get_pull_client() sync_client.signals.queuing.connect(self._raise_cancel) sync_client.signals.transferring.connect(self._raise_cancel) self._queueing_tracker_adapter( sync_client.signals.queuing, \"Remotely", "sync_client.signals.transferring, \"Sending data ({})\".format(TRANSFER_MESSAGE), State.PUSHING, noninteractive, ) self._queueing_tracker_adapter( sync_client.signals.dequeuing, \"Remotely", "self.job.save_meta() def _session_tracker_adapter(self, signal_group, started_msg, completed_msg): \"\"\" Attaches a signal", "# and push our own data to server if not", "created individually for pushing and pulling \"\"\" logger.info(started_msg) if self.job:", "transfer_session.bytes_sent + transfer_session.bytes_received ) return message.format( records_transferred=transfer_session.records_transferred, records_total=transfer_session.records_total, transfer_total=bytes_for_humans(transfer_total), )", "bool :type dataset_id: str \"\"\" sync_client = sync_session_client.get_push_client() sync_client.signals.transferring.connect(self._raise_cancel) self._queueing_tracker_adapter(", "the server\" ) parser.add_argument( \"--no-pull\", action=\"store_true\", help=\"Do not pull data", "filt = get_single_user_sync_filter( dataset_id, user_id, is_read=not client_is_single_user ) sync_client.initialize(Filter(filt)) sync_client.run()", 
"_transfer_tracker_adapter( self, signal_group, message, sync_state, noninteractive ): \"\"\" Attaches a", "dataset_cache.clear() if noninteractive or tracker.progressbar is None: logger.info(message) def handler(transfer_session):", "must have a single-user certificate, and the other a full-facility", "time at end to capture in logging output signal_group.completed.connect(stats) def", "up to the server server_cert = ( server_certs[0] if server_certs", "without migrating database if not ScopeDefinition.objects.filter(): call_command(\"loaddata\", \"scopedefinitions\") dataset_cache.clear() dataset_cache.activate()", "report from the progress tracker we're sent \"\"\" if self.job:", "State.LOCAL_QUEUING, noninteractive, ) self._transfer_tracker_adapter( sync_client.signals.transferring, \"Sending data ({})\".format(TRANSFER_MESSAGE), State.PUSHING, noninteractive,", "sync_client.signals.session, \"Creating push transfer session\", \"Completed push transfer session\", )", ") parser.add_argument( \"--baseurl\", type=str, default=DATA_PORTAL_SYNCING_BASE_URL, dest=\"baseurl\" ) parser.add_argument(\"--noninteractive\", action=\"store_true\") parser.add_argument(", "a single-user sync if not facility_id: raise CommandError( \"Facility ID", "no records to transfer\") logger.info(completed_msg) signal_group.started.connect(session_creation) signal_group.completed.connect(session_destruction) def _transfer_tracker_adapter( self,", "records to send/retrieve per request\", ) parser.add_argument( \"--no-push\", action=\"store_true\", help=\"Do", "other a full-facility certificate.\" ) elif PORTAL_SYNC: # do portal", "): \"\"\" Attaches a signal handler to queuing/dequeuing signals :type", "for the specific facility client_cert = ( facility.dataset.get_owned_certificates() .filter(scope_definition_id=ScopeDefinitions.FULL_FACILITY) .first()", "add_arguments(self, parser): parser.add_argument( \"--facility\", action=\"store\", type=str, help=\"ID of facility to", ") if not 
re.match(\"[a-f0-9]{32}\", user_id): raise CommandError(\"User ID must be", "recheck base URL and try again.\" ) if user_id: #", "extra_data=dict(sync_state=sync_state) ) if noninteractive or tracker.progressbar is None: signal_group.started.connect(started) signal_group.started.connect(started)", "def handler(transfer_session): tracker.update_progress( message=message, extra_data=dict(sync_state=sync_state) ) if noninteractive or tracker.progressbar", "noninteractive=noninteractive, ) scopes = [client_cert.scope_definition_id, server_cert.scope_definition_id] if len(set(scopes)) != 2:", "float(transfer_session.records_total) ) tracker.update_progress( increment=math.ceil(progress - tracker.progress), message=stats_msg(transfer_session), extra_data=dict( bytes_sent=transfer_session.bytes_sent, bytes_received=transfer_session.bytes_received,", "Facility: {}\".format( facility.name ) ) # get primary partition scope_params", "logger.info(stats_msg(transfer_session)) def handler(transfer_session): \"\"\" :type transfer_session: morango.models.core.TransferSession \"\"\" progress =", "handle_async(self, *args, **options): # noqa C901 ( baseurl, facility_id, chunk_size,", "if not user_id: # full-facility sync sync_client.initialize(Filter(dataset_id)) else: # single-user", "to send/retrieve per request\", ) parser.add_argument( \"--no-push\", action=\"store_true\", help=\"Do not", "that is passed in if not PORTAL_SYNC: baseurl = get_baseurl(baseurl)", "2: raise CommandError( \"To do a single-user sync, one device", "handler to pushing/pulling signals :type signal_group: morango.sync.syncsession.SyncSignalGroup :type message: str", "one device must have a single-user certificate, and the other", "options[\"user\"], options[\"no_push\"], options[\"no_pull\"], options[\"noninteractive\"], options[\"no_provision\"], ) PORTAL_SYNC = baseurl ==", "started_msg, completed_msg): \"\"\" Attaches a signal handler to session creation", "request # could timeout. 
In that case, we'll assume everything", "def add_arguments(self, parser): parser.add_argument( \"--facility\", action=\"store\", type=str, help=\"ID of facility", "import bytes_for_humans from kolibri.core.tasks.exceptions import UserCancelledError from kolibri.core.tasks.management.commands.base import AsyncCommand", "= ( facility.dataset.get_owned_certificates() .filter(scope_definition_id=ScopeDefinitions.FULL_FACILITY) .first() ) if not client_cert: raise", "cert up to the server server_cert = ( server_certs[0] if", "return message.format( records_transferred=transfer_session.records_transferred, records_total=transfer_session.records_total, transfer_total=bytes_for_humans(transfer_total), ) def stats(transfer_session): logger.info(stats_msg(transfer_session)) def", "def _queueing_tracker_adapter( self, signal_group, message, sync_state, noninteractive ): \"\"\" Attaches", "allow if ( InstanceIDModel.get_or_create_current_instance()[0].id == network_connection.server_info[\"instance_id\"] ): raise CommandError( \"Device", ") self._transfer_tracker_adapter( sync_client.signals.transferring, \"Receiving data ({})\".format(TRANSFER_MESSAGE), State.PULLING, noninteractive, ) self._queueing_tracker_adapter(", "data can take a while and the request # could", ":type message: str :type sync_state: str :type noninteractive: bool \"\"\"", "filt = get_single_user_sync_filter( dataset_id, user_id, is_read=client_is_single_user ) sync_client.initialize(Filter(filt)) sync_client.run() with", "transfer session\", \"Completed pull transfer session\", ) if not user_id:", "bool \"\"\" tracker = self.start_progress(total=100) def stats_msg(transfer_session): transfer_total = (", ") try: # pull from server if not no_pull: self._handle_pull(", "everything is good. 
sync_client.finalize(allow_server_timeout=True) def _update_all_progress(self, progress_fraction, progress): \"\"\" Override", "to do single-user syncing\" ) if not re.match(\"[a-f0-9]{32}\", user_id): raise", "( transfer_session.bytes_sent + transfer_session.bytes_received ) return message.format( records_transferred=transfer_session.records_transferred, records_total=transfer_session.records_total, transfer_total=bytes_for_humans(transfer_total),", "* transfer_session.records_transferred / float(transfer_session.records_total) ) tracker.update_progress( increment=math.ceil(progress - tracker.progress), message=stats_msg(transfer_session),", "@contextmanager def _lock(self): cancellable = False # job can't be", "a single-user sync, one device must have a single-user certificate,", "or another Kolibri device.\" def add_arguments(self, parser): parser.add_argument( \"--facility\", action=\"store\",", "and try again.\" ) if user_id: # it's a single-user", "server if not no_pull: self._handle_pull( sync_session_client, noninteractive, dataset_id, client_cert, server_cert,", "bool :type dataset_id: str \"\"\" sync_client = sync_session_client.get_pull_client() sync_client.signals.queuing.connect(self._raise_cancel) sync_client.signals.transferring.connect(self._raise_cancel)", "started(transfer_session): dataset_cache.clear() if noninteractive or tracker.progressbar is None: logger.info(message) def", "directly syncs without migrating database if not ScopeDefinition.objects.filter(): call_command(\"loaddata\", \"scopedefinitions\")", ") tracker.update_progress( increment=math.ceil(progress - tracker.progress), message=stats_msg(transfer_session), extra_data=dict( bytes_sent=transfer_session.bytes_sent, bytes_received=transfer_session.bytes_received, sync_state=sync_state,", "else: create_superuser_and_provision_device( username, dataset_id, noninteractive=noninteractive ) except UserCancelledError: if self.job:", "noninteractive, ) self._transfer_tracker_adapter( 
sync_client.signals.transferring, \"Receiving data ({})\".format(TRANSFER_MESSAGE), State.PULLING, noninteractive, )", "superuser or facility admin on server we are syncing with\",", "ID must be specified in order to do single-user syncing\"", "are syncing with\", ) parser.add_argument( \"--user\", type=str, help=\"for single-user syncing,", "and temporary superuser\", ) # parser.add_argument(\"--scope-id\", type=str, default=FULL_FACILITY) def handle_async(self,", "DATA_PORTAL_SYNCING_BASE_URL = conf.OPTIONS[\"Urls\"][\"DATA_PORTAL_SYNCING_BASE_URL\"] TRANSFER_MESSAGE = \"{records_transferred}/{records_total}, {transfer_total}\" logger = logging.getLogger(__name__)", "get primary partition scope_params = json.loads(client_cert.scope_params) dataset_id = scope_params[\"dataset_id\"] #", "integrating received data\", State.LOCAL_DEQUEUING, noninteractive, ) self._session_tracker_adapter( sync_client.signals.session, \"Creating pull", "db_lock from kolibri.utils import conf DATA_PORTAL_SYNCING_BASE_URL = conf.OPTIONS[\"Urls\"][\"DATA_PORTAL_SYNCING_BASE_URL\"] TRANSFER_MESSAGE =", "signals :type signal_group: morango.sync.syncsession.SyncSignalGroup :type message: str :type sync_state: str", "tracker = self.start_progress(total=100) def stats_msg(transfer_session): transfer_total = ( transfer_session.bytes_sent +", "def handler(transfer_session): \"\"\" :type transfer_session: morango.models.core.TransferSession \"\"\" progress = (", ") sync_client.initialize(Filter(filt)) sync_client.run() # we can't cancel remotely integrating data", "sync_session_client, noninteractive, dataset_id, client_cert, server_cert, user_id=user_id, ) if not no_provision:", "options[\"chunk_size\"], options[\"username\"], options[\"password\"], options[\"user\"], options[\"no_push\"], options[\"no_pull\"], options[\"noninteractive\"], options[\"no_provision\"], ) PORTAL_SYNC", ") self._queueing_tracker_adapter( sync_client.signals.dequeuing, \"Locally integrating received data\", 
State.LOCAL_DEQUEUING, noninteractive, )", "are no records to transfer\") logger.info(completed_msg) signal_group.started.connect(session_creation) signal_group.completed.connect(session_destruction) def _transfer_tracker_adapter(", "from django.core.management import call_command from django.core.management.base import CommandError from morango.models", "get_baseurl(baseurl) # call this in case user directly syncs without", "try again.\" ) if user_id: # it's a single-user sync", "local_parent_cert=client_cert, scope_definition_id=ScopeDefinitions.FULL_FACILITY, scope_params=scope_params, ) ) else: # do P2P setup", "network_connection.create_sync_session( client_cert, server_cert, chunk_size=chunk_size ) try: # pull from server", "with db_lock(): yield if self.job: self.job.save_as_cancellable(cancellable=cancellable) def _raise_cancel(self, *args, **kwargs):", "server_cert, user_id=user_id, ) # and push our own data to", "to report from the progress tracker we're sent \"\"\" if", "completed_msg): \"\"\" Attaches a signal handler to session creation signals", "MorangoProfileController from ..utils import create_superuser_and_provision_device from ..utils import get_baseurl from", "are equal, this means device is trying to sync with", "progress_fraction, progress): \"\"\" Override parent progress update callback to report", "ScopeDefinition.objects.filter(): call_command(\"loaddata\", \"scopedefinitions\") dataset_cache.clear() dataset_cache.activate() # try to connect to", "# single-user sync client_is_single_user = ( client_cert.scope_definition_id == ScopeDefinitions.SINGLE_USER )", "sync client_is_single_user = ( client_cert.scope_definition_id == ScopeDefinitions.SINGLE_USER ) filt =", "import json import logging import math import re from contextlib", "session_destruction(transfer_session): if transfer_session.records_total == 0: logger.info(\"There are no records to", "morango.sync.controller import MorangoProfileController from ..utils import 
create_superuser_and_provision_device from ..utils import", "dataset_id, network_connection, user_id=user_id, noninteractive=noninteractive, ) scopes = [client_cert.scope_definition_id, server_cert.scope_definition_id] if", "morango.models import Filter from morango.models import InstanceIDModel from morango.models import", "with Kolibri Data Portal or another Kolibri device.\" def add_arguments(self,", "allow server timeout since remotely integrating data can take a", "\"\"\" @run_once def session_creation(transfer_session): \"\"\" A session is created individually", "means device is trying to sync with itself, which we", "signal_group.completed.connect(stats) def _queueing_tracker_adapter( self, signal_group, message, sync_state, noninteractive ): \"\"\"", "noninteractive: bool :type dataset_id: str \"\"\" sync_client = sync_session_client.get_push_client() sync_client.signals.transferring.connect(self._raise_cancel)", "signal_group: morango.sync.syncsession.SyncSignalGroup :type message: str :type sync_state: str :type noninteractive:", "\"Remotely integrating data\", State.REMOTE_DEQUEUING, noninteractive, ) self._session_tracker_adapter( sync_client.signals.session, \"Creating push", "not user_id: # full-facility sync sync_client.initialize(Filter(dataset_id)) else: # single-user sync", "transfer session\", ) with self._lock(): if not user_id: # full-facility", "( options[\"baseurl\"], options[\"facility\"], options[\"chunk_size\"], options[\"username\"], options[\"password\"], options[\"user\"], options[\"no_push\"], options[\"no_pull\"], options[\"noninteractive\"],", "\"\"\" Override parent progress update callback to report from the", "pushing/pulling signals :type signal_group: morango.sync.syncsession.SyncSignalGroup :type message: str :type sync_state:", "client_cert, server_cert, user_id=user_id, ) if not no_provision: with self._lock(): if", "0: logger.info(\"There are no records to transfer\") logger.info(completed_msg) 
signal_group.started.connect(session_creation) signal_group.completed.connect(session_destruction)", "\"Device can not sync with itself. Please recheck base URL", "self.job.extra_metadata.update(sync_state=State.CANCELLED) self.job.save_meta() logger.info(\"Syncing has been cancelled.\") return network_connection.close() if self.job:", "to server controller = MorangoProfileController(PROFILE_FACILITY_DATA) network_connection = controller.create_network_connection(baseurl) # if", "cancellable = self.job.cancellable self.job.save_as_cancellable(cancellable=False) with db_lock(): yield if self.job: self.job.save_as_cancellable(cancellable=cancellable)", "== 0: logger.info(\"There are no records to transfer\") logger.info(completed_msg) signal_group.started.connect(session_creation)", ") ) # get primary partition scope_params = json.loads(client_cert.scope_params) dataset_id", "the user ID of the account to be synced\", )", "progress update callback to report from the progress tracker we're", "user_id, is_read=client_is_single_user ) sync_client.initialize(Filter(filt)) sync_client.run() with self._lock(): sync_client.finalize() def _handle_push(", "def stats(transfer_session): logger.info(stats_msg(transfer_session)) def handler(transfer_session): \"\"\" :type transfer_session: morango.models.core.TransferSession \"\"\"", "signal_group.started.connect(stats) signal_group.in_progress.connect(stats) signal_group.connect(handler) # log one more time at end", "own data to server if not no_push: self._handle_push( sync_session_client, noninteractive,", "be synced\", ) parser.add_argument( \"--no-provision\", action=\"store_true\", help=\"do not create a", "the progress tracker we're sent \"\"\" if self.job: self.job.update_progress(progress_fraction, 1.0)", "and the request # could timeout. 
In that case, we'll", "extra_data=dict( bytes_sent=transfer_session.bytes_sent, bytes_received=transfer_session.bytes_received, sync_state=sync_state, ), ) if noninteractive or tracker.progressbar", "= scope_params[\"dataset_id\"] # check if the server already has a", "pull transfer session\", ) if not user_id: # full-facility sync", "session\", ) if not user_id: # full-facility sync sync_client.initialize(Filter(dataset_id)) else:", "sync_client.finalize() def _handle_push( self, sync_session_client, noninteractive, dataset_id, client_cert, server_cert, user_id,", "action=\"store_true\", help=\"Do not pull data from the server\" ) parser.add_argument(", "import get_client_and_server_certs from ..utils import get_dataset_id from ..utils import get_single_user_sync_filter", "= get_baseurl(baseurl) # call this in case user directly syncs", "ID must be a 32-character UUID (no dashes)\") dataset_id =", "provision_single_user_device from kolibri.core.auth.constants.morango_sync import PROFILE_FACILITY_DATA from kolibri.core.auth.constants.morango_sync import ScopeDefinitions from", "message, sync_state, noninteractive ): \"\"\" Attaches a signal handler to", "get_dataset_id( baseurl, identifier=facility_id, noninteractive=noninteractive ) client_cert, server_cert, username = get_client_and_server_certs(", "_raise_cancel(self, *args, **kwargs): if self.is_cancelled() and (not self.job or self.job.cancellable):", "facility data with Kolibri Data Portal or another Kolibri device.\"", "stats_msg(transfer_session): transfer_total = ( transfer_session.bytes_sent + transfer_session.bytes_received ) return message.format(", "dataset_id, client_cert, server_cert, user_id=user_id, ) # and push our own", "server timeout since remotely integrating data can take a while", "sync with itself. 
Please recheck base URL and try again.\"", "handler to queuing/dequeuing signals :type signal_group: morango.sync.syncsession.SyncSignalGroup :type message: str", "json.loads(client_cert.scope_params) dataset_id = scope_params[\"dataset_id\"] # check if the server already", "username, password, dataset_id, network_connection, noninteractive=noninteractive, ) logger.info(\"Syncing has been initiated", "type=str, help=\"ID of facility to sync\" ) parser.add_argument( \"--baseurl\", type=str,", "# check for the certs we own for the specific", "import ScopeDefinition from morango.sync.controller import MorangoProfileController from ..utils import create_superuser_and_provision_device", "from kolibri.core.tasks.exceptions import UserCancelledError from kolibri.core.tasks.management.commands.base import AsyncCommand from kolibri.core.utils.lock", "primary partition scope_params = json.loads(client_cert.scope_params) dataset_id = scope_params[\"dataset_id\"] # check", "self.job.extra_metadata.update(sync_state=State.COMPLETED) self.job.save_meta() dataset_cache.deactivate() logger.info(\"Syncing has been completed.\") @contextmanager def _lock(self):", "get_single_user_sync_filter( dataset_id, user_id, is_read=client_is_single_user ) sync_client.initialize(Filter(filt)) sync_client.run() with self._lock(): sync_client.finalize()", "case, we'll assume everything is good. 
sync_client.finalize(allow_server_timeout=True) def _update_all_progress(self, progress_fraction,", "kolibri.core.auth.constants.morango_sync import State from kolibri.core.auth.management.utils import get_facility from kolibri.core.auth.management.utils import", ") else: # do P2P setup dataset_id = get_dataset_id( baseurl,", ") return message.format( records_transferred=transfer_session.records_transferred, records_total=transfer_session.records_total, transfer_total=bytes_for_humans(transfer_total), ) def stats(transfer_session): logger.info(stats_msg(transfer_session))", "server controller = MorangoProfileController(PROFILE_FACILITY_DATA) network_connection = controller.create_network_connection(baseurl) # if instance_ids", "noninteractive: bool \"\"\" tracker = self.start_progress(total=2) def started(transfer_session): dataset_cache.clear() if", "self.job.save_as_cancellable(cancellable=False) # allow server timeout since remotely integrating data can", ") filt = get_single_user_sync_filter( dataset_id, user_id, is_read=client_is_single_user ) sync_client.initialize(Filter(filt)) sync_client.run()", "progress): \"\"\" Override parent progress update callback to report from", "baseurl, identifier=facility_id, noninteractive=True ) client_cert, server_cert, username = get_client_and_server_certs( username,", ":type sync_state: str :type noninteractive: bool \"\"\" tracker = self.start_progress(total=2)", "this in case user directly syncs without migrating database if", ") parser.add_argument(\"--noninteractive\", action=\"store_true\") parser.add_argument( \"--chunk-size\", type=int, default=500, help=\"Chunk size of", "provision_single_user_device(user_id) else: create_superuser_and_provision_device( username, dataset_id, noninteractive=noninteractive ) except UserCancelledError: if", "def _update_all_progress(self, progress_fraction, progress): \"\"\" Override parent progress update callback", "morango.models import ScopeDefinition from morango.sync.controller 
import MorangoProfileController from ..utils import", "ScopeDefinitions from kolibri.core.auth.constants.morango_sync import State from kolibri.core.auth.management.utils import get_facility from", "logger.info(completed_msg) signal_group.started.connect(session_creation) signal_group.completed.connect(session_destruction) def _transfer_tracker_adapter( self, signal_group, message, sync_state, noninteractive", "trying to sync with itself, which we don't allow if", "transfer_total = ( transfer_session.bytes_sent + transfer_session.bytes_received ) return message.format( records_transferred=transfer_session.records_transferred,", "own a certificate for Facility: {}\".format( facility.name ) ) #", "parser.add_argument( \"--user\", type=str, help=\"for single-user syncing, the user ID of", "State.PUSHING, noninteractive, ) self._queueing_tracker_adapter( sync_client.signals.dequeuing, \"Remotely integrating data\", State.REMOTE_DEQUEUING, noninteractive,", "for this facility server_certs = network_connection.get_remote_certificates( dataset_id, scope_def_id=ScopeDefinitions.FULL_FACILITY ) #", "facility client_cert = ( facility.dataset.get_owned_certificates() .filter(scope_definition_id=ScopeDefinitions.FULL_FACILITY) .first() ) if not", "database if not ScopeDefinition.objects.filter(): call_command(\"loaddata\", \"scopedefinitions\") dataset_cache.clear() dataset_cache.activate() # try", "certificate, and the other a full-facility certificate.\" ) elif PORTAL_SYNC:", ":type noninteractive: bool :type dataset_id: str \"\"\" sync_client = sync_session_client.get_pull_client()", "no_pull: self._handle_pull( sync_session_client, noninteractive, dataset_id, client_cert, server_cert, user_id=user_id, ) #", "a 32-character UUID (no dashes)\") dataset_id = get_dataset_id( baseurl, identifier=facility_id,", "a single-user certificate, and the other a full-facility certificate.\" )", "self.job.save_as_cancellable(cancellable=cancellable) def _raise_cancel(self, *args, 
**kwargs): if self.is_cancelled() and (not self.job", "server_cert.scope_definition_id] if len(set(scopes)) != 2: raise CommandError( \"To do a", "itself, which we don't allow if ( InstanceIDModel.get_or_create_current_instance()[0].id == network_connection.server_info[\"instance_id\"]", "signal handler to queuing/dequeuing signals :type signal_group: morango.sync.syncsession.SyncSignalGroup :type message:", "network_connection.close() if self.job: self.job.extra_metadata.update(sync_state=State.COMPLETED) self.job.save_meta() dataset_cache.deactivate() logger.info(\"Syncing has been completed.\")", "( client_cert.scope_definition_id == ScopeDefinitions.SINGLE_USER ) filt = get_single_user_sync_filter( dataset_id, user_id,", "full-facility sync sync_client.initialize(Filter(dataset_id)) else: # single-user sync client_is_single_user = (", "Attaches a signal handler to session creation signals :type signal_group:", "push our own data to server if not no_push: self._handle_push(", "\"\"\" tracker = self.start_progress(total=100) def stats_msg(transfer_session): transfer_total = ( transfer_session.bytes_sent", "dataset_cache.clear() dataset_cache.activate() # try to connect to server controller =", "State.REMOTE_QUEUING, noninteractive, ) self._transfer_tracker_adapter( sync_client.signals.transferring, \"Receiving data ({})\".format(TRANSFER_MESSAGE), State.PULLING, noninteractive,", "from kolibri.core.auth.models import dataset_cache from kolibri.core.logger.utils.data import bytes_for_humans from kolibri.core.tasks.exceptions", "for the certs we own for the specific facility client_cert", "parser.add_argument(\"--scope-id\", type=str, default=FULL_FACILITY) def handle_async(self, *args, **options): # noqa C901", "from kolibri.core.auth.constants.morango_sync import State from kolibri.core.auth.management.utils import get_facility from kolibri.core.auth.management.utils", "signal_group, started_msg, completed_msg): \"\"\" Attaches a signal handler to session", "if 
transfer_session.records_total == 0: logger.info(\"There are no records to transfer\")", "server_cert, user_id, ): \"\"\" :type sync_session_client: morango.sync.syncsession.SyncSessionClient :type noninteractive: bool", "device must have a single-user certificate, and the other a", "get_single_user_sync_filter( dataset_id, user_id, is_read=not client_is_single_user ) sync_client.initialize(Filter(filt)) sync_client.run() # we", "@run_once def session_destruction(transfer_session): if transfer_session.records_total == 0: logger.info(\"There are no", "if necessary, push a cert up to the server server_cert", "we can't cancel remotely integrating data if self.job: self.job.save_as_cancellable(cancellable=False) #", "noninteractive: bool \"\"\" tracker = self.start_progress(total=100) def stats_msg(transfer_session): transfer_total =", "if noninteractive or tracker.progressbar is None: signal_group.started.connect(stats) signal_group.in_progress.connect(stats) signal_group.connect(handler) #", "server we are syncing with\", ) parser.add_argument( \"--user\", type=str, help=\"for", "can take a while and the request # could timeout.", "# if necessary, push a cert up to the server", "db_lock(): yield if self.job: self.job.save_as_cancellable(cancellable=cancellable) def _raise_cancel(self, *args, **kwargs): if", "user_id: # it's a single-user sync if not facility_id: raise", "single-user sync, one device must have a single-user certificate, and", "sync if not facility_id: raise CommandError( \"Facility ID must be", "re from contextlib import contextmanager from django.core.management import call_command from", "CommandError from morango.models import Filter from morango.models import InstanceIDModel from", "= self.start_progress(total=2) def started(transfer_session): dataset_cache.clear() if noninteractive or tracker.progressbar is", "a cert up to the server server_cert = ( server_certs[0]", "in order to do single-user syncing\" ) if not re.match(\"[a-f0-9]{32}\",", 
"help=\"Chunk size of records to send/retrieve per request\", ) parser.add_argument(", "data to the server\" ) parser.add_argument( \"--no-pull\", action=\"store_true\", help=\"Do not", "@run_once def session_creation(transfer_session): \"\"\" A session is created individually for", "data to send\", State.LOCAL_QUEUING, noninteractive, ) self._transfer_tracker_adapter( sync_client.signals.transferring, \"Sending data", "sync_client = sync_session_client.get_push_client() sync_client.signals.transferring.connect(self._raise_cancel) self._queueing_tracker_adapter( sync_client.signals.queuing, \"Locally preparing data to", ":type signal_group: morango.sync.syncsession.SyncSignalGroup :type message: str :type sync_state: str :type", "noninteractive, ) self._queueing_tracker_adapter( sync_client.signals.dequeuing, \"Locally integrating received data\", State.LOCAL_DEQUEUING, noninteractive,", "logging.getLogger(__name__) class Command(AsyncCommand): help = \"Allow the syncing of facility", "ScopeDefinition from morango.sync.controller import MorangoProfileController from ..utils import create_superuser_and_provision_device from", "not pull data from the server\" ) parser.add_argument( \"--username\", type=str,", "raise CommandError( \"This device does not own a certificate for", "be a 32-character UUID (no dashes)\") dataset_id = get_dataset_id( baseurl,", "facility server_certs = network_connection.get_remote_certificates( dataset_id, scope_def_id=ScopeDefinitions.FULL_FACILITY ) # if necessary,", "def session_destruction(transfer_session): if transfer_session.records_total == 0: logger.info(\"There are no records", "not no_push: self._handle_push( sync_session_client, noninteractive, dataset_id, client_cert, server_cert, user_id=user_id, )", "not own a certificate for Facility: {}\".format( facility.name ) )", "is_read=not client_is_single_user ) sync_client.initialize(Filter(filt)) sync_client.run() # we can't cancel remotely", "sync\" ) parser.add_argument( 
\"--baseurl\", type=str, default=DATA_PORTAL_SYNCING_BASE_URL, dest=\"baseurl\" ) parser.add_argument(\"--noninteractive\", action=\"store_true\")", "progress = ( 100 * transfer_session.records_transferred / float(transfer_session.records_total) ) tracker.update_progress(", "_queueing_tracker_adapter( self, signal_group, message, sync_state, noninteractive ): \"\"\" Attaches a", "output signal_group.completed.connect(stats) def _queueing_tracker_adapter( self, signal_group, message, sync_state, noninteractive ):", "sync_client.initialize(Filter(dataset_id)) else: # single-user sync client_is_single_user = ( client_cert.scope_definition_id ==", "sync_state=sync_state, ), ) if noninteractive or tracker.progressbar is None: signal_group.started.connect(stats)", "= [client_cert.scope_definition_id, server_cert.scope_definition_id] if len(set(scopes)) != 2: raise CommandError( \"To", "initiated (this may take a while)...\") sync_session_client = network_connection.create_sync_session( client_cert,", "= get_dataset_id( baseurl, identifier=facility_id, noninteractive=noninteractive ) client_cert, server_cert, username =", "\"Remotely preparing data\", State.REMOTE_QUEUING, noninteractive, ) self._transfer_tracker_adapter( sync_client.signals.transferring, \"Receiving data", "creation signals :type signal_group: morango.sync.syncsession.SyncSignalGroup :type started_msg: str :type completed_msg:", "self._session_tracker_adapter( sync_client.signals.session, \"Creating pull transfer session\", \"Completed pull transfer session\",", "json import logging import math import re from contextlib import", "get_client_and_server_certs( username, password, dataset_id, network_connection, user_id=user_id, noninteractive=noninteractive, ) scopes =", "from kolibri.core.utils.lock import db_lock from kolibri.utils import conf DATA_PORTAL_SYNCING_BASE_URL =", "sync_state: str :type noninteractive: bool \"\"\" tracker = self.start_progress(total=2) def", "or tracker.progressbar is None: 
logger.info(message) def handler(transfer_session): tracker.update_progress( message=message, extra_data=dict(sync_state=sync_state)", "\"Creating push transfer session\", \"Completed push transfer session\", ) with", "**options): # noqa C901 ( baseurl, facility_id, chunk_size, username, password,", "on server we are syncing with\", ) parser.add_argument( \"--password\", type=str,", "# it's a single-user sync if not facility_id: raise CommandError(", "if self.is_cancelled() and (not self.job or self.job.cancellable): raise UserCancelledError() def", "certs we own for the specific facility client_cert = (", "been completed.\") @contextmanager def _lock(self): cancellable = False # job", "\"Completed pull transfer session\", ) if not user_id: # full-facility", "from kolibri.core.auth.management.utils import get_facility from kolibri.core.auth.management.utils import run_once from kolibri.core.auth.models", "user_id): raise CommandError(\"User ID must be a 32-character UUID (no", "client_cert, server_cert, username = get_client_and_server_certs( username, password, dataset_id, network_connection, user_id=user_id,", "session is created individually for pushing and pulling \"\"\" logger.info(started_msg)", "from server if not no_pull: self._handle_pull( sync_session_client, noninteractive, dataset_id, client_cert,", "False # job can't be cancelled while locked if self.job:", "with\", ) parser.add_argument( \"--user\", type=str, help=\"for single-user syncing, the user", "options[\"no_push\"], options[\"no_pull\"], options[\"noninteractive\"], options[\"no_provision\"], ) PORTAL_SYNC = baseurl == DATA_PORTAL_SYNCING_BASE_URL", "from kolibri.utils import conf DATA_PORTAL_SYNCING_BASE_URL = conf.OPTIONS[\"Urls\"][\"DATA_PORTAL_SYNCING_BASE_URL\"] TRANSFER_MESSAGE = \"{records_transferred}/{records_total},", ") parser.add_argument( \"--password\", type=str, help=\"password of superuser or facility admin", ") scopes = [client_cert.scope_definition_id, 
server_cert.scope_definition_id] if len(set(scopes)) != 2: raise", "import run_once from kolibri.core.auth.models import dataset_cache from kolibri.core.logger.utils.data import bytes_for_humans", "server already has a cert for this facility server_certs =", ") with self._lock(): if not user_id: # full-facility sync sync_client.initialize(Filter(dataset_id))", "_session_tracker_adapter(self, signal_group, started_msg, completed_msg): \"\"\" Attaches a signal handler to", "may take a while)...\") sync_session_client = network_connection.create_sync_session( client_cert, server_cert, chunk_size=chunk_size", "signal_group, message, sync_state, noninteractive ): \"\"\" Attaches a signal handler", "logging output signal_group.completed.connect(stats) def _queueing_tracker_adapter( self, signal_group, message, sync_state, noninteractive", "import get_dataset_id from ..utils import get_single_user_sync_filter from ..utils import provision_single_user_device", "if not client_cert: raise CommandError( \"This device does not own", "not re.match(\"[a-f0-9]{32}\", user_id): raise CommandError(\"User ID must be a 32-character", "self.job.extra_metadata.update(sync_state=State.SESSION_CREATION) @run_once def session_destruction(transfer_session): if transfer_session.records_total == 0: logger.info(\"There are", "sync_client.signals.session, \"Creating pull transfer session\", \"Completed pull transfer session\", )", "\"\"\" logger.info(started_msg) if self.job: self.job.extra_metadata.update(sync_state=State.SESSION_CREATION) @run_once def session_destruction(transfer_session): if transfer_session.records_total", "no_provision, ) = ( options[\"baseurl\"], options[\"facility\"], options[\"chunk_size\"], options[\"username\"], options[\"password\"], options[\"user\"],", "self.job.save_meta() dataset_cache.deactivate() logger.info(\"Syncing has been completed.\") @contextmanager def _lock(self): cancellable", "# full-facility sync sync_client.initialize(Filter(dataset_id)) else: # 
single-user sync client_is_single_user =", "sync sync_client.initialize(Filter(dataset_id)) else: # single-user sync client_is_single_user = ( client_cert.scope_definition_id", "has been initiated (this may take a while)...\") sync_session_client =", "and push our own data to server if not no_push:", "while)...\") sync_session_client = network_connection.create_sync_session( client_cert, server_cert, chunk_size=chunk_size ) try: #", "self._handle_pull( sync_session_client, noninteractive, dataset_id, client_cert, server_cert, user_id=user_id, ) # and", "cancel remotely integrating data if self.job: self.job.save_as_cancellable(cancellable=False) # allow server", "dataset_cache from kolibri.core.logger.utils.data import bytes_for_humans from kolibri.core.tasks.exceptions import UserCancelledError from", "InstanceIDModel.get_or_create_current_instance()[0].id == network_connection.server_info[\"instance_id\"] ): raise CommandError( \"Device can not sync", "help=\"for single-user syncing, the user ID of the account to", "noninteractive, dataset_id, client_cert, server_cert, user_id, ): \"\"\" :type sync_session_client: morango.sync.syncsession.SyncSessionClient", "call this in case user directly syncs without migrating database", "noninteractive=noninteractive ) client_cert, server_cert, username = get_client_and_server_certs( username, password, dataset_id,", "= get_client_and_server_certs( username, password, dataset_id, network_connection, user_id=user_id, noninteractive=noninteractive, ) scopes", "\"This device does not own a certificate for Facility: {}\".format(", "Kolibri Data Portal or another Kolibri device.\" def add_arguments(self, parser):", "sent \"\"\" if self.job: self.job.update_progress(progress_fraction, 1.0) self.job.extra_metadata.update(progress.extra_data) self.job.save_meta() def _session_tracker_adapter(self,", "str \"\"\" @run_once def session_creation(transfer_session): \"\"\" A session is created", "order to do single-user syncing\" ) if not 
re.match(\"[a-f0-9]{32}\", user_id):", "( 100 * transfer_session.records_transferred / float(transfer_session.records_total) ) tracker.update_progress( increment=math.ceil(progress -", "the syncing of facility data with Kolibri Data Portal or", ") parser.add_argument( \"--no-push\", action=\"store_true\", help=\"Do not push data to the", "Data Portal or another Kolibri device.\" def add_arguments(self, parser): parser.add_argument(", "== ScopeDefinitions.SINGLE_USER ) filt = get_single_user_sync_filter( dataset_id, user_id, is_read=client_is_single_user )", "\"\"\" :type sync_session_client: morango.sync.syncsession.SyncSessionClient :type noninteractive: bool :type dataset_id: str", "data if self.job: self.job.save_as_cancellable(cancellable=False) # allow server timeout since remotely", "str :type completed_msg: str \"\"\" @run_once def session_creation(transfer_session): \"\"\" A", "\"--user\", type=str, help=\"for single-user syncing, the user ID of the", "full-facility certificate.\" ) elif PORTAL_SYNC: # do portal sync setup", "A session is created individually for pushing and pulling \"\"\"", "get_facility from kolibri.core.auth.management.utils import run_once from kolibri.core.auth.models import dataset_cache from", "Portal or another Kolibri device.\" def add_arguments(self, parser): parser.add_argument( \"--facility\",", "type=int, default=500, help=\"Chunk size of records to send/retrieve per request\",", "= sync_session_client.get_pull_client() sync_client.signals.queuing.connect(self._raise_cancel) sync_client.signals.transferring.connect(self._raise_cancel) self._queueing_tracker_adapter( sync_client.signals.queuing, \"Remotely preparing data\", State.REMOTE_QUEUING,", "morango.sync.syncsession.SyncSignalGroup :type message: str :type sync_state: str :type noninteractive: bool", "scope_params=scope_params, ) ) else: # do P2P setup dataset_id =", "if noninteractive or tracker.progressbar is None: logger.info(message) def handler(transfer_session): 
tracker.update_progress(", "contextmanager from django.core.management import call_command from django.core.management.base import CommandError from", "# try to connect to server controller = MorangoProfileController(PROFILE_FACILITY_DATA) network_connection", "= False # job can't be cancelled while locked if", "# job can't be cancelled while locked if self.job: cancellable", "options[\"username\"], options[\"password\"], options[\"user\"], options[\"no_push\"], options[\"no_pull\"], options[\"noninteractive\"], options[\"no_provision\"], ) PORTAL_SYNC =", "individually for pushing and pulling \"\"\" logger.info(started_msg) if self.job: self.job.extra_metadata.update(sync_state=State.SESSION_CREATION)", "has been cancelled.\") return network_connection.close() if self.job: self.job.extra_metadata.update(sync_state=State.COMPLETED) self.job.save_meta() dataset_cache.deactivate()", "records to transfer\") logger.info(completed_msg) signal_group.started.connect(session_creation) signal_group.completed.connect(session_destruction) def _transfer_tracker_adapter( self, signal_group,", "== DATA_PORTAL_SYNCING_BASE_URL # validate url that is passed in if", "# do P2P setup dataset_id = get_dataset_id( baseurl, identifier=facility_id, noninteractive=noninteractive", "self.job.save_as_cancellable(cancellable=False) with db_lock(): yield if self.job: self.job.save_as_cancellable(cancellable=cancellable) def _raise_cancel(self, *args,", "morango.models.core.TransferSession \"\"\" progress = ( 100 * transfer_session.records_transferred / float(transfer_session.records_total)", "message.format( records_transferred=transfer_session.records_transferred, records_total=transfer_session.records_total, transfer_total=bytes_for_humans(transfer_total), ) def stats(transfer_session): logger.info(stats_msg(transfer_session)) def handler(transfer_session):", "\"--no-pull\", action=\"store_true\", help=\"Do not pull data from the server\" )", "facility_id, chunk_size, username, password, 
user_id, no_push, no_pull, noninteractive, no_provision, )", "not PORTAL_SYNC: baseurl = get_baseurl(baseurl) # call this in case", "account to be synced\", ) parser.add_argument( \"--no-provision\", action=\"store_true\", help=\"do not", "help=\"password of superuser or facility admin on server we are", "..utils import get_baseurl from ..utils import get_client_and_server_certs from ..utils import", "not no_provision: with self._lock(): if user_id: provision_single_user_device(user_id) else: create_superuser_and_provision_device( username,", "\"\"\" sync_client = sync_session_client.get_push_client() sync_client.signals.transferring.connect(self._raise_cancel) self._queueing_tracker_adapter( sync_client.signals.queuing, \"Locally preparing data", "noninteractive, no_provision, ) = ( options[\"baseurl\"], options[\"facility\"], options[\"chunk_size\"], options[\"username\"], options[\"password\"],", "def started(transfer_session): dataset_cache.clear() if noninteractive or tracker.progressbar is None: logger.info(message)", "transfer_session.records_transferred / float(transfer_session.records_total) ) tracker.update_progress( increment=math.ceil(progress - tracker.progress), message=stats_msg(transfer_session), extra_data=dict(", "PROFILE_FACILITY_DATA from kolibri.core.auth.constants.morango_sync import ScopeDefinitions from kolibri.core.auth.constants.morango_sync import State from", "self.start_progress(total=100) def stats_msg(transfer_session): transfer_total = ( transfer_session.bytes_sent + transfer_session.bytes_received )", "transfer\") logger.info(completed_msg) signal_group.started.connect(session_creation) signal_group.completed.connect(session_destruction) def _transfer_tracker_adapter( self, signal_group, message, sync_state,", "\"--no-provision\", action=\"store_true\", help=\"do not create a facility and temporary superuser\",", "P2P setup dataset_id = get_dataset_id( baseurl, identifier=facility_id, noninteractive=noninteractive ) client_cert,", 
"logger.info(\"Syncing has been cancelled.\") return network_connection.close() if self.job: self.job.extra_metadata.update(sync_state=State.COMPLETED) self.job.save_meta()", "no_provision: with self._lock(): if user_id: provision_single_user_device(user_id) else: create_superuser_and_provision_device( username, dataset_id,", "from ..utils import get_single_user_sync_filter from ..utils import provision_single_user_device from kolibri.core.auth.constants.morango_sync", "syncing with\", ) parser.add_argument( \"--password\", type=str, help=\"password of superuser or", "network_connection = controller.create_network_connection(baseurl) # if instance_ids are equal, this means", ".first() ) if not client_cert: raise CommandError( \"This device does", "..utils import get_single_user_sync_filter from ..utils import provision_single_user_device from kolibri.core.auth.constants.morango_sync import", "server_certs[0] if server_certs else network_connection.push_signed_client_certificate_chain( local_parent_cert=client_cert, scope_definition_id=ScopeDefinitions.FULL_FACILITY, scope_params=scope_params, ) )", "dataset_id, client_cert, server_cert, user_id, ): \"\"\" :type sync_session_client: morango.sync.syncsession.SyncSessionClient :type", "\"{records_transferred}/{records_total}, {transfer_total}\" logger = logging.getLogger(__name__) class Command(AsyncCommand): help = \"Allow", "integrating data can take a while and the request #", "noninteractive, dataset_id, client_cert, server_cert, user_id=user_id, ) if not no_provision: with", "dest=\"baseurl\" ) parser.add_argument(\"--noninteractive\", action=\"store_true\") parser.add_argument( \"--chunk-size\", type=int, default=500, help=\"Chunk size", "on server we are syncing with\", ) parser.add_argument( \"--user\", type=str,", "signal_group: morango.sync.syncsession.SyncSignalGroup :type started_msg: str :type completed_msg: str \"\"\" @run_once", "*args, **kwargs): if self.is_cancelled() and (not self.job or 
self.job.cancellable): raise", ") if noninteractive or tracker.progressbar is None: signal_group.started.connect(stats) signal_group.in_progress.connect(stats) signal_group.connect(handler)", "if not PORTAL_SYNC: baseurl = get_baseurl(baseurl) # call this in", "MorangoProfileController(PROFILE_FACILITY_DATA) network_connection = controller.create_network_connection(baseurl) # if instance_ids are equal, this", "bool \"\"\" tracker = self.start_progress(total=2) def started(transfer_session): dataset_cache.clear() if noninteractive", "itself. Please recheck base URL and try again.\" ) if", "else: # do P2P setup dataset_id = get_dataset_id( baseurl, identifier=facility_id,", "= json.loads(client_cert.scope_params) dataset_id = scope_params[\"dataset_id\"] # check if the server", "URL and try again.\" ) if user_id: # it's a", "morango.sync.syncsession.SyncSignalGroup :type started_msg: str :type completed_msg: str \"\"\" @run_once def", "import ScopeDefinitions from kolibri.core.auth.constants.morango_sync import State from kolibri.core.auth.management.utils import get_facility", "(no dashes)\") dataset_id = get_dataset_id( baseurl, identifier=facility_id, noninteractive=True ) client_cert,", "parser.add_argument(\"--noninteractive\", action=\"store_true\") parser.add_argument( \"--chunk-size\", type=int, default=500, help=\"Chunk size of records", "from morango.models import InstanceIDModel from morango.models import ScopeDefinition from morango.sync.controller", "# if instance_ids are equal, this means device is trying", "log one more time at end to capture in logging", "kolibri.core.tasks.management.commands.base import AsyncCommand from kolibri.core.utils.lock import db_lock from kolibri.utils import", "raise CommandError( \"Device can not sync with itself. 
Please recheck", "try to connect to server controller = MorangoProfileController(PROFILE_FACILITY_DATA) network_connection =", "({})\".format(TRANSFER_MESSAGE), State.PULLING, noninteractive, ) self._queueing_tracker_adapter( sync_client.signals.dequeuing, \"Locally integrating received data\",", "with self._lock(): if not user_id: # full-facility sync sync_client.initialize(Filter(dataset_id)) else:", ") parser.add_argument( \"--username\", type=str, help=\"username of superuser or facility admin", "import PROFILE_FACILITY_DATA from kolibri.core.auth.constants.morango_sync import ScopeDefinitions from kolibri.core.auth.constants.morango_sync import State", "cancelled while locked if self.job: cancellable = self.job.cancellable self.job.save_as_cancellable(cancellable=False) with", "raise UserCancelledError() def _handle_pull( self, sync_session_client, noninteractive, dataset_id, client_cert, server_cert,", "from kolibri.core.auth.management.utils import run_once from kolibri.core.auth.models import dataset_cache from kolibri.core.logger.utils.data", "from kolibri.core.tasks.management.commands.base import AsyncCommand from kolibri.core.utils.lock import db_lock from kolibri.utils", "necessary, push a cert up to the server server_cert =", "kolibri.utils import conf DATA_PORTAL_SYNCING_BASE_URL = conf.OPTIONS[\"Urls\"][\"DATA_PORTAL_SYNCING_BASE_URL\"] TRANSFER_MESSAGE = \"{records_transferred}/{records_total}, {transfer_total}\"", "str \"\"\" sync_client = sync_session_client.get_push_client() sync_client.signals.transferring.connect(self._raise_cancel) self._queueing_tracker_adapter( sync_client.signals.queuing, \"Locally preparing", "conf DATA_PORTAL_SYNCING_BASE_URL = conf.OPTIONS[\"Urls\"][\"DATA_PORTAL_SYNCING_BASE_URL\"] TRANSFER_MESSAGE = \"{records_transferred}/{records_total}, {transfer_total}\" logger =", "raise CommandError( \"To do a single-user sync, one device must", "# parser.add_argument(\"--scope-id\", type=str, default=FULL_FACILITY) def 
handle_async(self, *args, **options): # noqa", "pull transfer session\", \"Completed pull transfer session\", ) if not", "session\", \"Completed push transfer session\", ) with self._lock(): if not", "import AsyncCommand from kolibri.core.utils.lock import db_lock from kolibri.utils import conf", "sync_session_client.get_push_client() sync_client.signals.transferring.connect(self._raise_cancel) self._queueing_tracker_adapter( sync_client.signals.queuing, \"Locally preparing data to send\", State.LOCAL_QUEUING,", "def _session_tracker_adapter(self, signal_group, started_msg, completed_msg): \"\"\" Attaches a signal handler", "\"Allow the syncing of facility data with Kolibri Data Portal", "to be synced\", ) parser.add_argument( \"--no-provision\", action=\"store_true\", help=\"do not create", "== ScopeDefinitions.SINGLE_USER ) filt = get_single_user_sync_filter( dataset_id, user_id, is_read=not client_is_single_user", "options[\"baseurl\"], options[\"facility\"], options[\"chunk_size\"], options[\"username\"], options[\"password\"], options[\"user\"], options[\"no_push\"], options[\"no_pull\"], options[\"noninteractive\"], options[\"no_provision\"],", "if self.job: self.job.update_progress(progress_fraction, 1.0) self.job.extra_metadata.update(progress.extra_data) self.job.save_meta() def _session_tracker_adapter(self, signal_group, started_msg,", "self, sync_session_client, noninteractive, dataset_id, client_cert, server_cert, user_id, ): \"\"\" :type", "\"Creating pull transfer session\", \"Completed pull transfer session\", ) if", ") self._queueing_tracker_adapter( sync_client.signals.dequeuing, \"Remotely integrating data\", State.REMOTE_DEQUEUING, noninteractive, ) self._session_tracker_adapter(", ") parser.add_argument( \"--no-provision\", action=\"store_true\", help=\"do not create a facility and", "we don't allow if ( InstanceIDModel.get_or_create_current_instance()[0].id == network_connection.server_info[\"instance_id\"] ): raise", "self.job: 
self.job.save_as_cancellable(cancellable=False) # allow server timeout since remotely integrating data", "parser.add_argument( \"--facility\", action=\"store\", type=str, help=\"ID of facility to sync\" )", "= conf.OPTIONS[\"Urls\"][\"DATA_PORTAL_SYNCING_BASE_URL\"] TRANSFER_MESSAGE = \"{records_transferred}/{records_total}, {transfer_total}\" logger = logging.getLogger(__name__) class", "UserCancelledError: if self.job: self.job.extra_metadata.update(sync_state=State.CANCELLED) self.job.save_meta() logger.info(\"Syncing has been cancelled.\") return", "assume everything is good. sync_client.finalize(allow_server_timeout=True) def _update_all_progress(self, progress_fraction, progress): \"\"\"", "network_connection.server_info[\"instance_id\"] ): raise CommandError( \"Device can not sync with itself.", "# could timeout. In that case, we'll assume everything is", "server we are syncing with\", ) parser.add_argument( \"--password\", type=str, help=\"password", "create a facility and temporary superuser\", ) # parser.add_argument(\"--scope-id\", type=str,", "# log one more time at end to capture in", "noninteractive, ) self._session_tracker_adapter( sync_client.signals.session, \"Creating pull transfer session\", \"Completed pull", "( baseurl, facility_id, chunk_size, username, password, user_id, no_push, no_pull, noninteractive,", "\"Facility ID must be specified in order to do single-user", "device.\" def add_arguments(self, parser): parser.add_argument( \"--facility\", action=\"store\", type=str, help=\"ID of", "partition scope_params = json.loads(client_cert.scope_params) dataset_id = scope_params[\"dataset_id\"] # check if", "is_read=client_is_single_user ) sync_client.initialize(Filter(filt)) sync_client.run() with self._lock(): sync_client.finalize() def _handle_push( self,", "create_superuser_and_provision_device from ..utils import get_baseurl from ..utils import get_client_and_server_certs from", "controller = MorangoProfileController(PROFILE_FACILITY_DATA) 
network_connection = controller.create_network_connection(baseurl) # if instance_ids are", "raise CommandError( \"Facility ID must be specified in order to", "elif PORTAL_SYNC: # do portal sync setup facility = get_facility(", "self, signal_group, message, sync_state, noninteractive ): \"\"\" Attaches a signal", "syncs without migrating database if not ScopeDefinition.objects.filter(): call_command(\"loaddata\", \"scopedefinitions\") dataset_cache.clear()", "url that is passed in if not PORTAL_SYNC: baseurl =", "message=message, extra_data=dict(sync_state=sync_state) ) if noninteractive or tracker.progressbar is None: signal_group.started.connect(started)", ") except UserCancelledError: if self.job: self.job.extra_metadata.update(sync_state=State.CANCELLED) self.job.save_meta() logger.info(\"Syncing has been", "session_creation(transfer_session): \"\"\" A session is created individually for pushing and", "have a single-user certificate, and the other a full-facility certificate.\"", "facility = get_facility( facility_id=facility_id, noninteractive=noninteractive ) # check for the", "if not ScopeDefinition.objects.filter(): call_command(\"loaddata\", \"scopedefinitions\") dataset_cache.clear() dataset_cache.activate() # try to", "help=\"username of superuser or facility admin on server we are", "specified in order to do single-user syncing\" ) if not", ":type dataset_id: str \"\"\" sync_client = sync_session_client.get_push_client() sync_client.signals.transferring.connect(self._raise_cancel) self._queueing_tracker_adapter( sync_client.signals.queuing,", ") def stats(transfer_session): logger.info(stats_msg(transfer_session)) def handler(transfer_session): \"\"\" :type transfer_session: morango.models.core.TransferSession", "\"\"\" :type transfer_session: morango.models.core.TransferSession \"\"\" progress = ( 100 *", "baseurl, identifier=facility_id, noninteractive=noninteractive ) client_cert, server_cert, username = get_client_and_server_certs( username,", 
"import provision_single_user_device from kolibri.core.auth.constants.morango_sync import PROFILE_FACILITY_DATA from kolibri.core.auth.constants.morango_sync import ScopeDefinitions", "completed_msg: str \"\"\" @run_once def session_creation(transfer_session): \"\"\" A session is", "the server already has a cert for this facility server_certs", "a facility and temporary superuser\", ) # parser.add_argument(\"--scope-id\", type=str, default=FULL_FACILITY)", "dataset_id, client_cert, server_cert, user_id=user_id, ) if not no_provision: with self._lock():", "session creation signals :type signal_group: morango.sync.syncsession.SyncSignalGroup :type started_msg: str :type", "import re from contextlib import contextmanager from django.core.management import call_command", "sync, one device must have a single-user certificate, and the", "(this may take a while)...\") sync_session_client = network_connection.create_sync_session( client_cert, server_cert,", "sync_session_client, noninteractive, dataset_id, client_cert, server_cert, user_id=user_id, ) # and push", "): raise CommandError( \"Device can not sync with itself. 
Please", "self.job or self.job.cancellable): raise UserCancelledError() def _handle_pull( self, sync_session_client, noninteractive,", "a full-facility certificate.\" ) elif PORTAL_SYNC: # do portal sync", "pull from server if not no_pull: self._handle_pull( sync_session_client, noninteractive, dataset_id,", "server server_cert = ( server_certs[0] if server_certs else network_connection.push_signed_client_certificate_chain( local_parent_cert=client_cert,", "from kolibri.core.auth.constants.morango_sync import PROFILE_FACILITY_DATA from kolibri.core.auth.constants.morango_sync import ScopeDefinitions from kolibri.core.auth.constants.morango_sync", "not no_pull: self._handle_pull( sync_session_client, noninteractive, dataset_id, client_cert, server_cert, user_id=user_id, )", "sync_client.signals.dequeuing, \"Remotely integrating data\", State.REMOTE_DEQUEUING, noninteractive, ) self._session_tracker_adapter( sync_client.signals.session, \"Creating", "the account to be synced\", ) parser.add_argument( \"--no-provision\", action=\"store_true\", help=\"do", "django.core.management import call_command from django.core.management.base import CommandError from morango.models import", "AsyncCommand from kolibri.core.utils.lock import db_lock from kolibri.utils import conf DATA_PORTAL_SYNCING_BASE_URL", "parser): parser.add_argument( \"--facility\", action=\"store\", type=str, help=\"ID of facility to sync\"", "dataset_id = get_dataset_id( baseurl, identifier=facility_id, noninteractive=noninteractive ) client_cert, server_cert, username", "CommandError(\"User ID must be a 32-character UUID (no dashes)\") dataset_id", "logging import math import re from contextlib import contextmanager from", "/ float(transfer_session.records_total) ) tracker.update_progress( increment=math.ceil(progress - tracker.progress), message=stats_msg(transfer_session), extra_data=dict( bytes_sent=transfer_session.bytes_sent,", "logger.info(started_msg) if self.job: 
self.job.extra_metadata.update(sync_state=State.SESSION_CREATION) @run_once def session_destruction(transfer_session): if transfer_session.records_total ==", "client_is_single_user = ( client_cert.scope_definition_id == ScopeDefinitions.SINGLE_USER ) filt = get_single_user_sync_filter(", "help=\"ID of facility to sync\" ) parser.add_argument( \"--baseurl\", type=str, default=DATA_PORTAL_SYNCING_BASE_URL,", "action=\"store_true\", help=\"do not create a facility and temporary superuser\", )", "default=FULL_FACILITY) def handle_async(self, *args, **options): # noqa C901 ( baseurl,", "# we can't cancel remotely integrating data if self.job: self.job.save_as_cancellable(cancellable=False)", "could timeout. In that case, we'll assume everything is good.", "noninteractive=True ) client_cert, server_cert, username = get_client_and_server_certs( username, password, dataset_id,", ") # check for the certs we own for the", "type=str, help=\"password of superuser or facility admin on server we", "validate url that is passed in if not PORTAL_SYNC: baseurl", "facility_id: raise CommandError( \"Facility ID must be specified in order", "= get_facility( facility_id=facility_id, noninteractive=noninteractive ) # check for the certs", "integrating data\", State.REMOTE_DEQUEUING, noninteractive, ) self._session_tracker_adapter( sync_client.signals.session, \"Creating push transfer", "sync_client.initialize(Filter(filt)) sync_client.run() with self._lock(): sync_client.finalize() def _handle_push( self, sync_session_client, noninteractive,", "from ..utils import create_superuser_and_provision_device from ..utils import get_baseurl from ..utils", "noninteractive ): \"\"\" Attaches a signal handler to pushing/pulling signals", "create_superuser_and_provision_device( username, dataset_id, noninteractive=noninteractive ) except UserCancelledError: if self.job: self.job.extra_metadata.update(sync_state=State.CANCELLED)", "single-user sync client_is_single_user = ( 
client_cert.scope_definition_id == ScopeDefinitions.SINGLE_USER ) filt", "\"Sending data ({})\".format(TRANSFER_MESSAGE), State.PUSHING, noninteractive, ) self._queueing_tracker_adapter( sync_client.signals.dequeuing, \"Remotely integrating", "device is trying to sync with itself, which we don't", "push transfer session\", ) with self._lock(): if not user_id: #", "to capture in logging output signal_group.completed.connect(stats) def _queueing_tracker_adapter( self, signal_group,", "TRANSFER_MESSAGE = \"{records_transferred}/{records_total}, {transfer_total}\" logger = logging.getLogger(__name__) class Command(AsyncCommand): help", "session\", ) with self._lock(): if not user_id: # full-facility sync", "self._transfer_tracker_adapter( sync_client.signals.transferring, \"Receiving data ({})\".format(TRANSFER_MESSAGE), State.PULLING, noninteractive, ) self._queueing_tracker_adapter( sync_client.signals.dequeuing,", "if user_id: # it's a single-user sync if not facility_id:", "send/retrieve per request\", ) parser.add_argument( \"--no-push\", action=\"store_true\", help=\"Do not push", "has a cert for this facility server_certs = network_connection.get_remote_certificates( dataset_id,", "a signal handler to pushing/pulling signals :type signal_group: morango.sync.syncsession.SyncSignalGroup :type", "transfer_session: morango.models.core.TransferSession \"\"\" progress = ( 100 * transfer_session.records_transferred /", "import dataset_cache from kolibri.core.logger.utils.data import bytes_for_humans from kolibri.core.tasks.exceptions import UserCancelledError", "import math import re from contextlib import contextmanager from django.core.management", "admin on server we are syncing with\", ) parser.add_argument( \"--user\",", "preparing data\", State.REMOTE_QUEUING, noninteractive, ) self._transfer_tracker_adapter( sync_client.signals.transferring, \"Receiving data ({})\".format(TRANSFER_MESSAGE),", "..utils import get_dataset_id from ..utils import 
get_single_user_sync_filter from ..utils import", "noninteractive=noninteractive ) # check for the certs we own for", ") ) else: # do P2P setup dataset_id = get_dataset_id(", "*args, **options): # noqa C901 ( baseurl, facility_id, chunk_size, username,", "import InstanceIDModel from morango.models import ScopeDefinition from morango.sync.controller import MorangoProfileController", "32-character UUID (no dashes)\") dataset_id = get_dataset_id( baseurl, identifier=facility_id, noninteractive=True", "stats(transfer_session): logger.info(stats_msg(transfer_session)) def handler(transfer_session): \"\"\" :type transfer_session: morango.models.core.TransferSession \"\"\" progress", "def _handle_push( self, sync_session_client, noninteractive, dataset_id, client_cert, server_cert, user_id, ):", "chunk_size, username, password, user_id, no_push, no_pull, noninteractive, no_provision, ) =", "parser.add_argument( \"--chunk-size\", type=int, default=500, help=\"Chunk size of records to send/retrieve", "baseurl, facility_id, chunk_size, username, password, user_id, no_push, no_pull, noninteractive, no_provision,", "sync_session_client, noninteractive, dataset_id, client_cert, server_cert, user_id, ): \"\"\" :type sync_session_client:", "be cancelled while locked if self.job: cancellable = self.job.cancellable self.job.save_as_cancellable(cancellable=False)", ":type signal_group: morango.sync.syncsession.SyncSignalGroup :type started_msg: str :type completed_msg: str \"\"\"", "server_cert, chunk_size=chunk_size ) try: # pull from server if not", "# validate url that is passed in if not PORTAL_SYNC:", "case user directly syncs without migrating database if not ScopeDefinition.objects.filter():", "parser.add_argument( \"--baseurl\", type=str, default=DATA_PORTAL_SYNCING_BASE_URL, dest=\"baseurl\" ) parser.add_argument(\"--noninteractive\", action=\"store_true\") parser.add_argument( \"--chunk-size\",", "message: str :type sync_state: str :type noninteractive: bool \"\"\" 
tracker", "temporary superuser\", ) # parser.add_argument(\"--scope-id\", type=str, default=FULL_FACILITY) def handle_async(self, *args,", "sync_client.signals.transferring, \"Receiving data ({})\".format(TRANSFER_MESSAGE), State.PULLING, noninteractive, ) self._queueing_tracker_adapter( sync_client.signals.dequeuing, \"Locally", "no_pull, noninteractive, no_provision, ) = ( options[\"baseurl\"], options[\"facility\"], options[\"chunk_size\"], options[\"username\"],", ") # if necessary, push a cert up to the", "def _transfer_tracker_adapter( self, signal_group, message, sync_state, noninteractive ): \"\"\" Attaches", ") self._session_tracker_adapter( sync_client.signals.session, \"Creating pull transfer session\", \"Completed pull transfer", "is None: signal_group.started.connect(stats) signal_group.in_progress.connect(stats) signal_group.connect(handler) # log one more time", "sync_client.initialize(Filter(filt)) sync_client.run() # we can't cancel remotely integrating data if", "user_id, ): \"\"\" :type sync_session_client: morango.sync.syncsession.SyncSessionClient :type noninteractive: bool :type", "if len(set(scopes)) != 2: raise CommandError( \"To do a single-user", "options[\"noninteractive\"], options[\"no_provision\"], ) PORTAL_SYNC = baseurl == DATA_PORTAL_SYNCING_BASE_URL # validate", "network_connection.push_signed_client_certificate_chain( local_parent_cert=client_cert, scope_definition_id=ScopeDefinitions.FULL_FACILITY, scope_params=scope_params, ) ) else: # do P2P", "end to capture in logging output signal_group.completed.connect(stats) def _queueing_tracker_adapter( self,", "is good. 
sync_client.finalize(allow_server_timeout=True) def _update_all_progress(self, progress_fraction, progress): \"\"\" Override parent", "to session creation signals :type signal_group: morango.sync.syncsession.SyncSignalGroup :type started_msg: str", "we're sent \"\"\" if self.job: self.job.update_progress(progress_fraction, 1.0) self.job.extra_metadata.update(progress.extra_data) self.job.save_meta() def", "conf.OPTIONS[\"Urls\"][\"DATA_PORTAL_SYNCING_BASE_URL\"] TRANSFER_MESSAGE = \"{records_transferred}/{records_total}, {transfer_total}\" logger = logging.getLogger(__name__) class Command(AsyncCommand):", "setup dataset_id = get_dataset_id( baseurl, identifier=facility_id, noninteractive=noninteractive ) client_cert, server_cert,", "def _lock(self): cancellable = False # job can't be cancelled", "session\", \"Completed pull transfer session\", ) if not user_id: #", "a certificate for Facility: {}\".format( facility.name ) ) # get", "else network_connection.push_signed_client_certificate_chain( local_parent_cert=client_cert, scope_definition_id=ScopeDefinitions.FULL_FACILITY, scope_params=scope_params, ) ) else: # do", "sync_client.signals.queuing.connect(self._raise_cancel) sync_client.signals.transferring.connect(self._raise_cancel) self._queueing_tracker_adapter( sync_client.signals.queuing, \"Remotely preparing data\", State.REMOTE_QUEUING, noninteractive, )", "self._transfer_tracker_adapter( sync_client.signals.transferring, \"Sending data ({})\".format(TRANSFER_MESSAGE), State.PUSHING, noninteractive, ) self._queueing_tracker_adapter( sync_client.signals.dequeuing,", "noninteractive, ) self._queueing_tracker_adapter( sync_client.signals.dequeuing, \"Remotely integrating data\", State.REMOTE_DEQUEUING, noninteractive, )", "\"\"\" Attaches a signal handler to pushing/pulling signals :type signal_group:", "to queuing/dequeuing signals :type signal_group: morango.sync.syncsession.SyncSignalGroup :type message: str :type", "and (not self.job or 
self.job.cancellable): raise UserCancelledError() def _handle_pull( self,", "kolibri.core.auth.management.utils import get_facility from kolibri.core.auth.management.utils import run_once from kolibri.core.auth.models import", "cancellable = False # job can't be cancelled while locked", "\"Locally preparing data to send\", State.LOCAL_QUEUING, noninteractive, ) self._transfer_tracker_adapter( sync_client.signals.transferring,", ":type dataset_id: str \"\"\" sync_client = sync_session_client.get_pull_client() sync_client.signals.queuing.connect(self._raise_cancel) sync_client.signals.transferring.connect(self._raise_cancel) self._queueing_tracker_adapter(", "self._lock(): if not user_id: # full-facility sync sync_client.initialize(Filter(dataset_id)) else: #", "which we don't allow if ( InstanceIDModel.get_or_create_current_instance()[0].id == network_connection.server_info[\"instance_id\"] ):", "password, user_id, no_push, no_pull, noninteractive, no_provision, ) = ( options[\"baseurl\"],", "options[\"facility\"], options[\"chunk_size\"], options[\"username\"], options[\"password\"], options[\"user\"], options[\"no_push\"], options[\"no_pull\"], options[\"noninteractive\"], options[\"no_provision\"], )", "dataset_id, scope_def_id=ScopeDefinitions.FULL_FACILITY ) # if necessary, push a cert up", "do portal sync setup facility = get_facility( facility_id=facility_id, noninteractive=noninteractive )", "signal handler to session creation signals :type signal_group: morango.sync.syncsession.SyncSignalGroup :type", "parser.add_argument( \"--no-pull\", action=\"store_true\", help=\"Do not pull data from the server\"", "tracker.update_progress( increment=math.ceil(progress - tracker.progress), message=stats_msg(transfer_session), extra_data=dict( bytes_sent=transfer_session.bytes_sent, bytes_received=transfer_session.bytes_received, sync_state=sync_state, ),", "= logging.getLogger(__name__) class Command(AsyncCommand): help = \"Allow the syncing of", 
"call_command(\"loaddata\", \"scopedefinitions\") dataset_cache.clear() dataset_cache.activate() # try to connect to server", "must be specified in order to do single-user syncing\" )", "handler(transfer_session): \"\"\" :type transfer_session: morango.models.core.TransferSession \"\"\" progress = ( 100", "_update_all_progress(self, progress_fraction, progress): \"\"\" Override parent progress update callback to", "this means device is trying to sync with itself, which" ]
[ "= (w // 2 + 4, h // 2 +", "= 275 if __name__ == \"__main__\": cap = cv.VideoCapture('samples/delta.mp4') if", "X = ((x - o1) * R) / r Y", "len(approx) == 4 and 75 < area < 200: smooth_contours.append(contours[i])", "hierarchy = cv.findContours(img, cv.RETR_LIST, cv.CHAIN_APPROX_NONE) contours = list(filter(lambda x: 50", "9) kernel = cv.getStructuringElement(cv.MORPH_ELLIPSE, (3, 3)) img = cv.morphologyEx(img, cv.MORPH_OPEN,", "not cap.isOpened(): raise IOError(\"Video was not opened!\") mse = 0", "cv.cvtColor(img, cv.COLOR_BGR2GRAY) img = cv.adaptiveThreshold(img, 255, cv.ADAPTIVE_THRESH_MEAN_C, cv.THRESH_BINARY_INV, 15, 9)", "[x, y + h], [x + w, y + h]]", "= cv.cvtColor(img, cv.COLOR_BGR2GRAY) rows, cols = img.shape img = cv.adaptiveThreshold(img,", "0, 255), 2) # res, mask = cv.threshold(frame_copy, 0, 255,", "(3, 3)) img = cv.morphologyEx(img, cv.MORPH_OPEN, kernel) img = cv.medianBlur(img,", "// 10, True) # corners = np.vstack(corners) dst = np.float32([[0,", "reader.get_meta_data()['fps'] writer = imageio.get_writer('samples/result.mp4', fps=fps) while True: res, frame =", "cv.cvtColor(img, cv.COLOR_BGR2GRAY) rows, cols = img.shape img = cv.adaptiveThreshold(img, 255,", "transformer def fix_rotation(img): img_copy = img.copy() img = cv.cvtColor(img, cv.COLOR_BGR2GRAY)", "kernel) img = cv.medianBlur(img, 3) contours, hierarchy = cv.findContours(img, cv.RETR_LIST,", "= img.copy() img = cv.cvtColor(img, cv.COLOR_BGR2GRAY) img = cv.adaptiveThreshold(img, 255,", "1 cv.circle(img_copy, origin, 4, (0, 0, 255), -1) # cv.line(img_copy,", "raise IOError(\"Video was not opened!\") mse = 0 count =", "[407, 293]]) dst = np.float32([[0, 0], [w, 0], [30, h],", "= cv.getPerspectiveTransform(src, dst) img = cv.warpPerspective(img, matrix, (w, h)) cv.imshow('',", "0), 2) # cv.drawContours(frame_copy, roi, -1, (0, 0, 255), 2)", "[] for i in range(len(contours)): epsilon = factor * cv.arcLength(contours[i],", "frame_copy) # cv.rectangle(frame_copy, (x, y), (x + w, y +", "= 
min(e1, e2, e3) if error < 10: mean_error +=", "+= 1 cv.imshow(\"Final\", img_copy) writer.append_data(img_copy) # cv.imshow(\"Chg\", img) if cv.waitKey(30)", "y + h], [x + w, y + h]] src", "# x, y, w, h = cv.boundingRect(roi) x, y, w,", "X, Y = round(X, 2), round(Y, 2) cv.circle(img_copy, center, radius,", "cv.MORPH_OPEN, kernel) img = cv.medianBlur(img, 3) origin = (w //", "R) / r X, Y = round(X, 2), round(Y, 2)", "4, (0, 0, 255), -1) # cv.line(img_copy, origin, (origin[0], origin[1]),", "= cv.approxPolyDP(contours[i], epsilon, True) x, y, width, height = cv.boundingRect(approx)", "cv.medianBlur(img, 3) origin = (w // 2 + 4, h", "= 0.1 smooth_contours = [] for i in range(len(contours)): epsilon", "cv.imshow('', rotated_img) D1 = 105 D2 = 175 D3 =", "[x + w, y], [x, y + h], [x +", "img.copy() img = cv.cvtColor(img, cv.COLOR_BGR2GRAY) img = cv.adaptiveThreshold(img, 255, cv.ADAPTIVE_THRESH_MEAN_C,", "mse += mean_error count += 1 cv.imshow(\"Final\", img_copy) writer.append_data(img_copy) #", "src = np.float32([[0, 0], [w, 0], [38, 293], [407, 293]])", "res: break mean_error = 0 holes_count = 0 img =", "- o2) * R) / r X, Y = round(X,", "2 + 1 ORIGIN = (0, 0) R = 300", "= 0 img = frame.copy() cv.imshow('dfa', img) frame = cv.cvtColor(frame,", "center, radius = cv.minEnclosingCircle(approx) radius = int(radius) center = tuple(map(int,", "= np.reshape(src, (len(src), 1, 2)) # perimeter = cv.arcLength(src, True)", "255), -1) src = np.float32([[0, 0], [w, 0], [38, 293],", "kernel = cv.getStructuringElement(cv.MORPH_ELLIPSE, (3, 3)) img = cv.morphologyEx(img, cv.MORPH_OPEN, kernel)", "cv.minEnclosingCircle(approx) radius = int(radius) center = tuple(map(int, center)) x, y", "as cv import numpy as np import transformer def fix_rotation(img):", "y, width, height = cv.boundingRect(approx) area = width*height if len(approx)", "0, 255, 255), 1, cv.LINE_AA) e1, e2, e3 = map(lambda", "+ h), (0, 255, 0), 2) # cv.drawContours(frame_copy, roi, -1,", "img = img[y: y+h, x: x+w] img = 
transformer.rotate_along_axis(img, theta=40)", "= cv.getStructuringElement(cv.MORPH_ELLIPSE, (3, 3)) img = cv.morphologyEx(img, cv.MORPH_OPEN, kernel) img", "x: 50 < cv.contourArea(x) < 175, contours)) factor = 0.1", "== 27: break print(\"E:\", mse / count, \"N:\", count) writer.close()", "# src = np.reshape(src, (len(src), 1, 2)) # perimeter =", "mm contours, hierarchy = cv.findContours(img, cv.RETR_LIST, cv.CHAIN_APPROX_NONE) contours = list(filter(lambda", "center, cv.FONT_HERSHEY_SIMPLEX, 0.3, (255, 0, 255, 255), 1, cv.LINE_AA) e1,", "x, y = center X = ((x - o1) *", "origin[1]), (255, 0, 255), 2) mean_error /= holes_count mse +=", "= cv.medianBlur(img, 3) contours, hierarchy = cv.findContours(img, cv.RETR_LIST, cv.CHAIN_APPROX_NONE) roi", "5, (0, 0, 255), -1) src = np.float32([[0, 0], [w,", "matrix, (w, h)) cv.imshow('', img) img_copy = img.copy() img =", "cv.imshow('', img) img_copy = img.copy() img = cv.cvtColor(img, cv.COLOR_BGR2GRAY) img", "= cv.adaptiveThreshold(img, 255, cv.ADAPTIVE_THRESH_MEAN_C, cv.THRESH_BINARY_INV, 15, 9) kernel = cv.getStructuringElement(cv.MORPH_ELLIPSE,", "corners = cv.approxPolyDP(src, perimeter // 10, True) # corners =", "= frame.copy() cv.imshow('dfa', img) frame = cv.cvtColor(frame, cv.COLOR_BGR2GRAY) frame_copy =", "contours, hierarchy = cv.findContours(frame, cv.RETR_LIST, cv.CHAIN_APPROX_NONE) # roi = max(contours,", "factor = 0.1 smooth_contours = [] for i in range(len(contours)):", "3) origin = (w // 2 + 4, h //", "matrix, (cols, rows)) cv.imshow('', rotated_img) D1 = 105 D2 =", "(0, 0, 255), -1) src = np.float32([[0, 0], [w, 0],", "img_copy = img.copy() img = cv.cvtColor(img, cv.COLOR_BGR2GRAY) rows, cols =", "= 115, 0, 445, 360 img = img[y: y+h, x:", "if error < 10: mean_error += error ** 2 holes_count", "range(len(contours)): epsilon = factor * cv.arcLength(contours[i], True) approx = cv.approxPolyDP(contours[i],", "0], [w, 0], [38, 293], [407, 293]]) dst = np.float32([[0,", "img = cv.medianBlur(img, 3) contours, 
hierarchy = cv.findContours(img, cv.RETR_LIST, cv.CHAIN_APPROX_NONE)", "h], [x + w, y + h]] src = np.float32(corners)", "- 30, h]]) matrix = cv.getPerspectiveTransform(src, dst) img = cv.warpPerspective(img,", "// 2 + 4, h // 2 + 2) o1,", "for i in range(len(contours)): epsilon = factor * cv.arcLength(contours[i], True)", "(x + w, y + h), (0, 255, 0), 2)", "= cv.approxPolyDP(src, perimeter // 10, True) # corners = np.vstack(corners)", "cv.cvtColor(frame, cv.COLOR_BGR2GRAY) frame_copy = frame.copy() # frame = cv.adaptiveThreshold(frame, 255,", "= (0, 0) R = 300 # mm contours, hierarchy", "epsilon, True) x, y, width, height = cv.boundingRect(approx) area =", "105 D2 = 175 D3 = 275 if __name__ ==", "cap.read() if not res: break mean_error = 0 holes_count =", "# perimeter = cv.arcLength(src, True) # corners = cv.approxPolyDP(src, perimeter", "y], [x + w, y], [x, y + h], [x", "= cv.bitwise_and(frame_copy, frame_copy, mask=mask) # corners = cv.goodFeaturesToTrack(frame_copy, 1000, 0.0001,", "frame_copy = frame_copy[y: y+h, x: x+w] frame_copy = transformer.rotate_along_axis(frame_copy, theta=40)", "\"__main__\": cap = cv.VideoCapture('samples/delta.mp4') if not cap.isOpened(): raise IOError(\"Video was", "cv.adaptiveThreshold(frame, 255, cv.ADAPTIVE_THRESH_MEAN_C, cv.THRESH_BINARY_INV, 15, 9) # kernel = cv.getStructuringElement(cv.MORPH_ELLIPSE,", "2) cv.circle(img_copy, center, radius, (0, 255, 0), 2) cv.putText(img_copy, str((X,", "holes_count += 1 cv.circle(img_copy, origin, 4, (0, 0, 255), -1)", "= width*height if len(approx) == 4 and 75 < area", "cv.THRESH_BINARY_INV, 15, 9) kernel = cv.getStructuringElement(cv.MORPH_ELLIPSE, (3, 3)) img =", "not opened!\") mse = 0 count = 0 reader =", "2) # res, mask = cv.threshold(frame_copy, 0, 255, cv.THRESH_BINARY) #", "Y = round(X, 2), round(Y, 2) cv.circle(img_copy, center, radius, (0,", "= 0 count = 0 reader = imageio.get_reader('samples/delta.mp4') fps =", "cv.morphologyEx(frame, cv.MORPH_OPEN, kernel) # frame = 
cv.medianBlur(frame, 3) # contours,", "cv.CHAIN_APPROX_NONE) contours = list(filter(lambda x: 50 < cv.contourArea(x) < 175,", "True) approx = cv.approxPolyDP(contours[i], epsilon, True) x, y, width, height", "# cv.rectangle(frame_copy, (x, y), (x + w, y + h),", "[38, 293], [407, 293]]) dst = np.float32([[0, 0], [w, 0],", "rows)) cv.imshow('', rotated_img) D1 = 105 D2 = 175 D3", "= max(contours, key=cv.contourArea) # x, y, w, h = cv.boundingRect(roi)", "src = np.reshape(src, (len(src), 1, 2)) # perimeter = cv.arcLength(src,", "cols = img.shape img = cv.adaptiveThreshold(img, 255, cv.ADAPTIVE_THRESH_MEAN_C, cv.THRESH_BINARY_INV, 15,", "perimeter // 10, True) # corners = np.vstack(corners) dst =", "area < 200: smooth_contours.append(contours[i]) center, radius = cv.minEnclosingCircle(approx) radius =", "D3]) error = min(e1, e2, e3) if error < 10:", "for item in corners: # # x, y = map(int,", "+ w, y + h]] src = np.float32(corners) # src", "[cols, 0], [0, rows], [cols, rows]]) matrix = cv.getPerspectiveTransform(src, dst)", "while True: res, frame = cap.read() if not res: break", "((y - o2) * R) / r X, Y =", "30, h]]) matrix = cv.getPerspectiveTransform(src, dst) img = cv.warpPerspective(img, matrix,", "115, 0, 445, 360 img = img[y: y+h, x: x+w]", "= imageio.get_reader('samples/delta.mp4') fps = reader.get_meta_data()['fps'] writer = imageio.get_writer('samples/result.mp4', fps=fps) while", "= cv.arcLength(src, True) # corners = cv.approxPolyDP(src, perimeter // 10,", "255, cv.ADAPTIVE_THRESH_MEAN_C, cv.THRESH_BINARY_INV, 15, 9) # kernel = cv.getStructuringElement(cv.MORPH_ELLIPSE, (3,", "cv.waitKey(30) == 27: break print(\"E:\", mse / count, \"N:\", count)", "frame = cv.adaptiveThreshold(frame, 255, cv.ADAPTIVE_THRESH_MEAN_C, cv.THRESH_BINARY_INV, 15, 9) # kernel", "True) # corners = cv.approxPolyDP(src, perimeter // 10, True) #", "[cols, rows]]) matrix = cv.getPerspectiveTransform(src, dst) rotated_img = cv.warpPerspective(img_copy, matrix,", "mse = 0 count = 0 
reader = imageio.get_reader('samples/delta.mp4') fps", "np import transformer def fix_rotation(img): img_copy = img.copy() img =", "cv.approxPolyDP(contours[i], epsilon, True) x, y, width, height = cv.boundingRect(approx) area", "cv.boundingRect(approx) area = width*height if len(approx) == 4 and 75", "((x - o1) * R) / r Y = ((y", "True) x, y, width, height = cv.boundingRect(approx) area = width*height", "w, y + h]] src = np.float32(corners) # src =", "x[0][1])) # print(corners[-1], corners[-2]) # print() # corners = np.array([[38,", "4 and 75 < area < 200: smooth_contours.append(contours[i]) center, radius", "= map(lambda d: abs(math.hypot(X, Y) - d), [D1, D2, D3])", "frame_copy = transformer.rotate_along_axis(frame_copy, theta=40) # cv.imshow('', frame_copy) # cv.rectangle(frame_copy, (x,", "3) contours, hierarchy = cv.findContours(img, cv.RETR_LIST, cv.CHAIN_APPROX_NONE) roi = max(contours,", "corners = list(sorted(corners, key=lambda x: x[0][1])) # print(corners[-1], corners[-2]) #", "hierarchy = cv.findContours(img, cv.RETR_LIST, cv.CHAIN_APPROX_NONE) roi = max(contours, key=cv.contourArea) x,", "# frame_copy = cv.bitwise_and(frame_copy, frame_copy, mask=mask) # corners = cv.goodFeaturesToTrack(frame_copy,", "roi, -1, (0, 0, 255), 2) # res, mask =", "(w, h)) cv.imshow('', img) img_copy = img.copy() img = cv.cvtColor(img,", "corners = np.array([[38, 293], [407, 293]]) # for item in", "y = map(int, item.ravel()) # x, y = item #", "cv.boundingRect(roi) corners = [[x, y], [x + w, y], [x,", "= cv.warpPerspective(img, matrix, (w, h)) cv.imshow('', img) img_copy = img.copy()", "corners: # # x, y = map(int, item.ravel()) # x,", "cv.THRESH_BINARY) # frame_copy = cv.bitwise_and(frame_copy, frame_copy, mask=mask) # corners =", "frame.copy() # frame = cv.adaptiveThreshold(frame, 255, cv.ADAPTIVE_THRESH_MEAN_C, cv.THRESH_BINARY_INV, 15, 9)", "= 0 reader = imageio.get_reader('samples/delta.mp4') fps = reader.get_meta_data()['fps'] writer =", "0 count = 0 reader = 
imageio.get_reader('samples/delta.mp4') fps = reader.get_meta_data()['fps']", "contours = list(filter(lambda x: 50 < cv.contourArea(x) < 175, contours))", "Y = ((y - o2) * R) / r X,", "theta=40) frame_copy = frame_copy[y: y+h, x: x+w] frame_copy = transformer.rotate_along_axis(frame_copy,", "kernel) # frame = cv.medianBlur(frame, 3) # contours, hierarchy =", "corners = np.vstack(corners) dst = np.float32([[0, 0], [cols, 0], [0,", "0], [w, 0], [30, h], [w - 30, h]]) matrix", "h], [w - 30, h]]) matrix = cv.getPerspectiveTransform(src, dst) img", "imageio.get_writer('samples/result.mp4', fps=fps) while True: res, frame = cap.read() if not", "10: mean_error += error ** 2 holes_count += 1 cv.circle(img_copy,", "error ** 2 holes_count += 1 cv.circle(img_copy, origin, 4, (0,", "img_copy) writer.append_data(img_copy) # cv.imshow(\"Chg\", img) if cv.waitKey(30) == 27: break", "y + h]] src = np.float32(corners) # src = np.reshape(src,", "key=cv.contourArea) x, y, w, h = cv.boundingRect(roi) corners = [[x,", "= cv.morphologyEx(frame, cv.MORPH_OPEN, kernel) # frame = cv.medianBlur(frame, 3) #", "origin = (w // 2 + 4, h // 2", "= 175 D3 = 275 if __name__ == \"__main__\": cap", "y+h, x: x+w] frame_copy = transformer.rotate_along_axis(frame_copy, theta=40) # cv.imshow('', frame_copy)", "x: x+w] frame_copy = transformer.rotate_along_axis(frame_copy, theta=40) # cv.imshow('', frame_copy) #", "map(lambda d: abs(math.hypot(X, Y) - d), [D1, D2, D3]) error", "src = np.float32(corners) # src = np.reshape(src, (len(src), 1, 2))", "factor * cv.arcLength(contours[i], True) approx = cv.approxPolyDP(contours[i], epsilon, True) x,", "import cv2 as cv import numpy as np import transformer", "width*height if len(approx) == 4 and 75 < area <", "cv.findContours(img, cv.RETR_LIST, cv.CHAIN_APPROX_NONE) roi = max(contours, key=cv.contourArea) x, y, w,", "# cv.circle(img, (x, y), 5, (0, 0, 255), -1) src", "cv.circle(img_copy, origin, 4, (0, 0, 255), -1) # cv.line(img_copy, origin,", "frame = 
cap.read() if not res: break mean_error = 0", "cv.findContours(img, cv.RETR_LIST, cv.CHAIN_APPROX_NONE) contours = list(filter(lambda x: 50 < cv.contourArea(x)", "cv.approxPolyDP(src, perimeter // 10, True) # corners = np.vstack(corners) dst", "= np.float32([[0, 0], [w, 0], [30, h], [w - 30,", "frame = cv.morphologyEx(frame, cv.MORPH_OPEN, kernel) # frame = cv.medianBlur(frame, 3)", "x, y, width, height = cv.boundingRect(approx) area = width*height if", "y, w, h = cv.boundingRect(roi) corners = [[x, y], [x", "-1, (0, 0, 255), 2) # res, mask = cv.threshold(frame_copy,", "ORIGIN = (0, 0) R = 300 # mm contours,", "np.float32([[0, 0], [cols, 0], [0, rows], [cols, rows]]) matrix =", "cv.goodFeaturesToTrack(frame_copy, 1000, 0.0001, 1) # corners = list(sorted(corners, key=lambda x:", "count += 1 cv.imshow(\"Final\", img_copy) writer.append_data(img_copy) # cv.imshow(\"Chg\", img) if", "w, h = cv.boundingRect(roi) corners = [[x, y], [x +", "cv.morphologyEx(img, cv.MORPH_OPEN, kernel) img = cv.medianBlur(img, 3) origin = (w", "img) frame = cv.cvtColor(frame, cv.COLOR_BGR2GRAY) frame_copy = frame.copy() # frame", "// 2 + 1 ORIGIN = (0, 0) R =", "r X, Y = round(X, 2), round(Y, 2) cv.circle(img_copy, center,", "key=cv.contourArea) # x, y, w, h = cv.boundingRect(roi) x, y,", "R = 300 # mm contours, hierarchy = cv.findContours(img, cv.RETR_LIST,", "// 2 + 2) o1, o2 = origin r =", "transformer.rotate_along_axis(frame_copy, theta=40) # cv.imshow('', frame_copy) # cv.rectangle(frame_copy, (x, y), (x", "* R) / r Y = ((y - o2) *", "cv.MORPH_OPEN, kernel) img = cv.medianBlur(img, 3) contours, hierarchy = cv.findContours(img,", "cv.morphologyEx(img, cv.MORPH_OPEN, kernel) img = cv.medianBlur(img, 3) contours, hierarchy =", "hierarchy = cv.findContours(frame, cv.RETR_LIST, cv.CHAIN_APPROX_NONE) # roi = max(contours, key=cv.contourArea)", "theta=40) # cv.imshow('', frame_copy) # cv.rectangle(frame_copy, (x, y), (x +", "origin, (origin[0], origin[1]), (255, 0, 255), 2) mean_error /= 
holes_count", "+ 1 ORIGIN = (0, 0) R = 300 #", "= transformer.rotate_along_axis(frame_copy, theta=40) # cv.imshow('', frame_copy) # cv.rectangle(frame_copy, (x, y),", "cv.CHAIN_APPROX_NONE) roi = max(contours, key=cv.contourArea) x, y, w, h =", "frame_copy[y: y+h, x: x+w] frame_copy = transformer.rotate_along_axis(frame_copy, theta=40) # cv.imshow('',", "= item # cv.circle(img, (x, y), 5, (0, 0, 255),", "0, 445, 360 img = img[y: y+h, x: x+w] img", "cv.medianBlur(frame, 3) # contours, hierarchy = cv.findContours(frame, cv.RETR_LIST, cv.CHAIN_APPROX_NONE) #", "293], [407, 293]]) dst = np.float32([[0, 0], [w, 0], [30,", "break print(\"E:\", mse / count, \"N:\", count) writer.close() cap.release() cv.destroyAllWindows()", "= img.shape img = cv.adaptiveThreshold(img, 255, cv.ADAPTIVE_THRESH_MEAN_C, cv.THRESH_BINARY_INV, 15, 9)", "0 holes_count = 0 img = frame.copy() cv.imshow('dfa', img) frame", "15, 9) kernel = cv.getStructuringElement(cv.MORPH_ELLIPSE, (3, 3)) img = cv.morphologyEx(img,", "y), 5, (0, 0, 255), -1) src = np.float32([[0, 0],", "cv.FONT_HERSHEY_SIMPLEX, 0.3, (255, 0, 255, 255), 1, cv.LINE_AA) e1, e2,", "abs(math.hypot(X, Y) - d), [D1, D2, D3]) error = min(e1,", "rows], [cols, rows]]) matrix = cv.getPerspectiveTransform(src, dst) rotated_img = cv.warpPerspective(img_copy,", "3) # contours, hierarchy = cv.findContours(frame, cv.RETR_LIST, cv.CHAIN_APPROX_NONE) # roi", "numpy as np import transformer def fix_rotation(img): img_copy = img.copy()", "D1 = 105 D2 = 175 D3 = 275 if", "cv.getPerspectiveTransform(src, dst) rotated_img = cv.warpPerspective(img_copy, matrix, (cols, rows)) cv.imshow('', rotated_img)", "frame_copy = frame.copy() # frame = cv.adaptiveThreshold(frame, 255, cv.ADAPTIVE_THRESH_MEAN_C, cv.THRESH_BINARY_INV,", "= round(X, 2), round(Y, 2) cv.circle(img_copy, center, radius, (0, 255,", "# corners = np.vstack(corners) dst = np.float32([[0, 0], [cols, 0],", "= cv.cvtColor(img, cv.COLOR_BGR2GRAY) img = cv.adaptiveThreshold(img, 255, 
cv.ADAPTIVE_THRESH_MEAN_C, cv.THRESH_BINARY_INV, 15,", "R) / r Y = ((y - o2) * R)", "+ w, y + h), (0, 255, 0), 2) #", "cv.RETR_LIST, cv.CHAIN_APPROX_NONE) # roi = max(contours, key=cv.contourArea) # x, y,", "= ((y - o2) * R) / r X, Y", "= tuple(map(int, center)) x, y = center X = ((x", "D2 = 175 D3 = 275 if __name__ == \"__main__\":", "item # cv.circle(img, (x, y), 5, (0, 0, 255), -1)", "int(radius) center = tuple(map(int, center)) x, y = center X", "cap = cv.VideoCapture('samples/delta.mp4') if not cap.isOpened(): raise IOError(\"Video was not", "cv.COLOR_BGR2GRAY) rows, cols = img.shape img = cv.adaptiveThreshold(img, 255, cv.ADAPTIVE_THRESH_MEAN_C,", "h // 2 + 2) o1, o2 = origin r", "True) # corners = np.vstack(corners) dst = np.float32([[0, 0], [cols,", "fps = reader.get_meta_data()['fps'] writer = imageio.get_writer('samples/result.mp4', fps=fps) while True: res,", "0.0001, 1) # corners = list(sorted(corners, key=lambda x: x[0][1])) #", "h]] src = np.float32(corners) # src = np.reshape(src, (len(src), 1,", "< 10: mean_error += error ** 2 holes_count += 1", "75 < area < 200: smooth_contours.append(contours[i]) center, radius = cv.minEnclosingCircle(approx)", "img) if cv.waitKey(30) == 27: break print(\"E:\", mse / count,", "frame = cv.cvtColor(frame, cv.COLOR_BGR2GRAY) frame_copy = frame.copy() # frame =", "frame.copy() cv.imshow('dfa', img) frame = cv.cvtColor(frame, cv.COLOR_BGR2GRAY) frame_copy = frame.copy()", "0.1 smooth_contours = [] for i in range(len(contours)): epsilon =", "kernel = cv.getStructuringElement(cv.MORPH_ELLIPSE, (3, 3)) # frame = cv.morphologyEx(frame, cv.MORPH_OPEN,", "fix_rotation(img): img_copy = img.copy() img = cv.cvtColor(img, cv.COLOR_BGR2GRAY) rows, cols", "x, y, w, h = 115, 0, 445, 360 img", "< 200: smooth_contours.append(contours[i]) center, radius = cv.minEnclosingCircle(approx) radius = int(radius)", "2 holes_count += 1 cv.circle(img_copy, origin, 4, (0, 0, 255),", "as np import transformer def fix_rotation(img): img_copy = 
img.copy() img", "mean_error count += 1 cv.imshow(\"Final\", img_copy) writer.append_data(img_copy) # cv.imshow(\"Chg\", img)", "(w // 2 + 4, h // 2 + 2)", "1000, 0.0001, 1) # corners = list(sorted(corners, key=lambda x: x[0][1]))", "epsilon = factor * cv.arcLength(contours[i], True) approx = cv.approxPolyDP(contours[i], epsilon,", "= cv.boundingRect(roi) corners = [[x, y], [x + w, y],", "smooth_contours = [] for i in range(len(contours)): epsilon = factor", "# cv.imshow(\"Chg\", img) if cv.waitKey(30) == 27: break print(\"E:\", mse", "= cv.boundingRect(approx) area = width*height if len(approx) == 4 and", "y = center X = ((x - o1) * R)", "writer.append_data(img_copy) # cv.imshow(\"Chg\", img) if cv.waitKey(30) == 27: break print(\"E:\",", "= frame_copy[y: y+h, x: x+w] frame_copy = transformer.rotate_along_axis(frame_copy, theta=40) #", "holes_count = 0 img = frame.copy() cv.imshow('dfa', img) frame =", "h = 115, 0, 445, 360 img = img[y: y+h,", "== \"__main__\": cap = cv.VideoCapture('samples/delta.mp4') if not cap.isOpened(): raise IOError(\"Video", "= int(radius) center = tuple(map(int, center)) x, y = center", "w, y], [x, y + h], [x + w, y", "[0, rows], [cols, rows]]) matrix = cv.getPerspectiveTransform(src, dst) rotated_img =", "= list(sorted(corners, key=lambda x: x[0][1])) # print(corners[-1], corners[-2]) # print()", "+ h], [x + w, y + h]] src =", "[w - 30, h]]) matrix = cv.getPerspectiveTransform(src, dst) img =", "radius = int(radius) center = tuple(map(int, center)) x, y =", "x, y, w, h = cv.boundingRect(roi) corners = [[x, y],", "cv.adaptiveThreshold(img, 255, cv.ADAPTIVE_THRESH_MEAN_C, cv.THRESH_BINARY_INV, 15, 9) kernel = cv.getStructuringElement(cv.MORPH_ELLIPSE, (3,", "dst) img = cv.warpPerspective(img, matrix, (w, h)) cv.imshow('', img) img_copy", "255), -1) # cv.line(img_copy, origin, (origin[0], origin[1]), (255, 0, 255),", "if not res: break mean_error = 0 holes_count = 0", "2 + 2) o1, o2 = origin r = w", "Y) - d), [D1, D2, D3]) error = min(e1, 
e2,", "map(int, item.ravel()) # x, y = item # cv.circle(img, (x,", "x, y, w, h = cv.boundingRect(roi) x, y, w, h", "== 4 and 75 < area < 200: smooth_contours.append(contours[i]) center,", "175 D3 = 275 if __name__ == \"__main__\": cap =", "corners = cv.goodFeaturesToTrack(frame_copy, 1000, 0.0001, 1) # corners = list(sorted(corners,", "15, 9) # kernel = cv.getStructuringElement(cv.MORPH_ELLIPSE, (3, 3)) # frame", "[D1, D2, D3]) error = min(e1, e2, e3) if error", "in corners: # # x, y = map(int, item.ravel()) #", "cv.imshow('', frame_copy) # cv.rectangle(frame_copy, (x, y), (x + w, y", "y+h, x: x+w] img = transformer.rotate_along_axis(img, theta=40) frame_copy = frame_copy[y:", "= [] for i in range(len(contours)): epsilon = factor *", "255, 255), 1, cv.LINE_AA) e1, e2, e3 = map(lambda d:", "True: res, frame = cap.read() if not res: break mean_error", "255), 1, cv.LINE_AA) e1, e2, e3 = map(lambda d: abs(math.hypot(X,", "img = frame.copy() cv.imshow('dfa', img) frame = cv.cvtColor(frame, cv.COLOR_BGR2GRAY) frame_copy", "255), 2) mean_error /= holes_count mse += mean_error count +=", "# res, mask = cv.threshold(frame_copy, 0, 255, cv.THRESH_BINARY) # frame_copy", "area = width*height if len(approx) == 4 and 75 <", "= cv.adaptiveThreshold(frame, 255, cv.ADAPTIVE_THRESH_MEAN_C, cv.THRESH_BINARY_INV, 15, 9) # kernel =", "(0, 255, 0), 2) # cv.drawContours(frame_copy, roi, -1, (0, 0,", "d: abs(math.hypot(X, Y) - d), [D1, D2, D3]) error =", "/ r Y = ((y - o2) * R) /", "# contours, hierarchy = cv.findContours(frame, cv.RETR_LIST, cv.CHAIN_APPROX_NONE) # roi =", "cv.RETR_LIST, cv.CHAIN_APPROX_NONE) contours = list(filter(lambda x: 50 < cv.contourArea(x) <", "# print() # corners = np.array([[38, 293], [407, 293]]) #", "mean_error += error ** 2 holes_count += 1 cv.circle(img_copy, origin,", "np.vstack(corners) dst = np.float32([[0, 0], [cols, 0], [0, rows], [cols,", "0) R = 300 # mm contours, hierarchy = cv.findContours(img,", "0, 255, cv.THRESH_BINARY) # frame_copy = 
def fix_rotation(img):
    """Deskew a BGR frame by warping its dominant region to fill the frame.

    Thresholds and cleans a grayscale copy, finds the largest contour,
    and warps the original color image so that contour's bounding box
    maps onto the full frame.

    :img: BGR image (numpy array) as read by OpenCV.
    :return: the perspective-corrected BGR image (same size as input).

    Bug fix: the corrected image was previously computed and shown via
    cv.imshow but never returned, so callers got None.
    """
    color_copy = img.copy()
    gray = cv.cvtColor(img, cv.COLOR_BGR2GRAY)
    rows, cols = gray.shape
    # Inverted adaptive threshold so dark features become foreground.
    gray = cv.adaptiveThreshold(gray, 255, cv.ADAPTIVE_THRESH_MEAN_C,
                                cv.THRESH_BINARY_INV, 15, 9)
    kernel = cv.getStructuringElement(cv.MORPH_ELLIPSE, (3, 3))
    gray = cv.morphologyEx(gray, cv.MORPH_OPEN, kernel)  # remove speckle noise
    gray = cv.medianBlur(gray, 3)
    contours, hierarchy = cv.findContours(gray, cv.RETR_LIST,
                                          cv.CHAIN_APPROX_NONE)
    # Largest contour is assumed to be the region of interest.
    roi = max(contours, key=cv.contourArea)
    x, y, w, h = cv.boundingRect(roi)
    corners = [[x, y], [x + w, y], [x, y + h], [x + w, y + h]]
    src = np.float32(corners)
    dst = np.float32([[0, 0], [cols, 0], [0, rows], [cols, rows]])
    matrix = cv.getPerspectiveTransform(src, dst)
    rotated_img = cv.warpPerspective(color_copy, matrix, (cols, rows))
    cv.imshow('', rotated_img)
    return rotated_img
# Candidate hole distances from the plate center, in mm (used to pick the
# nearest expected radius when scoring each detected hole).
D1 = 105
D2 = 175
D3 = 275

if __name__ == "__main__":
    cap = cv.VideoCapture('samples/delta.mp4')
    if not cap.isOpened():
        raise IOError("Video was not opened!")
    mse = 0      # running sum of per-frame mean squared errors
    count = 0    # number of frames processed
    # imageio is used only for writing the annotated result video.
    reader = imageio.get_reader('samples/delta.mp4')
    fps = reader.get_meta_data()['fps']
    writer = imageio.get_writer('samples/result.mp4', fps=fps)
    while True:
        res, frame = cap.read()
        if not res:
            break
        mean_error = 0
        holes_count = 0
        img = frame.copy()
        cv.imshow('dfa', img)
        frame = cv.cvtColor(frame, cv.COLOR_BGR2GRAY)
        frame_copy = frame.copy()
        # Hard-coded crop of the region of interest — presumably the plate
        # area in this particular video; TODO confirm for other inputs.
        x, y, w, h = 115, 0, 445, 360
        img = img[y: y+h, x: x+w]
        img = transformer.rotate_along_axis(img, theta=40)
        frame_copy = frame_copy[y: y+h, x: x+w]
        frame_copy = transformer.rotate_along_axis(frame_copy, theta=40)
        # Perspective correction with hand-tuned correspondence points.
        src = np.float32([[0, 0], [w, 0], [38, 293], [407, 293]])
        dst = np.float32([[0, 0], [w, 0], [30, h], [w - 30, h]])
        matrix = cv.getPerspectiveTransform(src, dst)
        img = cv.warpPerspective(img, matrix, (w, h))
        cv.imshow('', img)
        img_copy = img.copy()
        img = cv.cvtColor(img, cv.COLOR_BGR2GRAY)
        img = cv.adaptiveThreshold(img, 255, cv.ADAPTIVE_THRESH_MEAN_C,
                                   cv.THRESH_BINARY_INV, 15, 9)
        kernel = cv.getStructuringElement(cv.MORPH_ELLIPSE, (3, 3))
        img = cv.morphologyEx(img, cv.MORPH_OPEN, kernel)
        img = cv.medianBlur(img, 3)
        # Image center (with small manual offsets) and pixel radius used to
        # convert pixel coordinates to mm.
        origin = (w // 2 + 4, h // 2 + 2)
        o1, o2 = origin
        r = w // 2 + 1
        R = 300  # mm — physical radius corresponding to r pixels
        contours, hierarchy = cv.findContours(img, cv.RETR_LIST,
                                              cv.CHAIN_APPROX_NONE)
        # Keep only contours in the expected hole-area range.
        contours = list(filter(lambda c: 50 < cv.contourArea(c) < 175,
                               contours))
        factor = 0.1
        smooth_contours = []  # kept for debugging; not used downstream
        for i in range(len(contours)):
            epsilon = factor * cv.arcLength(contours[i], True)
            approx = cv.approxPolyDP(contours[i], epsilon, True)
            x, y, width, height = cv.boundingRect(approx)
            area = width*height
            # Accept roughly quadrilateral blobs of plausible size.
            if len(approx) == 4 and 75 < area < 200:
                smooth_contours.append(contours[i])
                center, radius = cv.minEnclosingCircle(approx)
                radius = int(radius)
                center = tuple(map(int, center))
                x, y = center
                # Pixel -> mm conversion relative to the plate center.
                X = ((x - o1) * R) / r
                Y = ((y - o2) * R) / r
                X, Y = round(X, 2), round(Y, 2)
                cv.circle(img_copy, center, radius, (0, 255, 0), 2)
                cv.putText(img_copy, str((X, Y)), center,
                           cv.FONT_HERSHEY_SIMPLEX, 0.3,
                           (255, 0, 255, 255), 1, cv.LINE_AA)
                # Distance error against the nearest expected radius.
                e1, e2, e3 = map(lambda d: abs(math.hypot(X, Y) - d),
                                 [D1, D2, D3])
                error = min(e1, e2, e3)
                if error < 10:
                    mean_error += error ** 2
                    holes_count += 1
        cv.circle(img_copy, origin, 4, (0, 0, 255), -1)
        # Bug fix: guard against ZeroDivisionError on frames where no
        # hole passed the error < 10 filter (previously crashed).
        if holes_count:
            mean_error /= holes_count
            mse += mean_error
        count += 1
        cv.imshow("Final", img_copy)
        # NOTE(review): img_copy is BGR; imageio expects RGB, so colors in
        # result.mp4 are likely swapped — consider cv.COLOR_BGR2RGB here.
        writer.append_data(img_copy)
        if cv.waitKey(30) == 27:  # ESC aborts playback
            break
    # Bug fix: avoid division by zero when the video produced no frames.
    if count:
        print("E:", mse / count, "N:", count)
    else:
        print("E: n/a N: 0")
    writer.close()
    cap.release()
class BOSSRaw:
    """A class to parse raw data from BOSS.

    The purpose of collecting this raw data is to future-proof things
    that need these outputs in case things like autoschedulers change,
    which many libraries depend on. This will hopefully help SDSS-V
    logging.
    """

    def __init__(self, fil):
        """Read one raw sdR file and pull the header items we report.

        :fil: Path (or str) to an sdR*.fit.gz raw BOSS exposure.
        """
        self.fil = fil
        header = fitsio.read_header(fil)
        self.dither = header['MGDPOS']
        if not self.dither:
            # This key started working instead during SDSS-V
            self.dither = header['POINTING'][0]
        self.exp_time = int(header['EXPTIME'])
        self.isot = Time(header['DATE-OBS'])  # UTC
        self.plate_id = header['PLATEID']
        self.cart_id = header['CARTID']
        # Exposure id is encoded in the filename: sdR-??-NNNNNNNN.fit.gz
        self.exp_id = int(str(fil).split('-')[-1].split('.')[0])
        self.lead = header['PLATETYP']
        if 'Closed' in header['HARTMANN']:
            self.hartmann = 'Closed'
            self.flavor = header['FLAVOR'].capitalize()
            # Bug fix: hart_resids was only set in the 'Out' branch, so
            # consumers hit AttributeError for other hartmann states.
            self.hart_resids = []
        elif 'Out' in header['HARTMANN']:
            self.hartmann = 'Open'
            self.flavor = header['FLAVOR'].capitalize()
            self.hart_resids = []
        else:
            # Left/Right hartmann exposures: report the raw header value.
            self.hartmann = header['HARTMANN']
            self.flavor = 'Hart'
            self.hart_resids = []
        # self.seeing = header['SEEING']
        # self.img_type = header['IMAGETYP']
\"\"\" import argparse from pathlib import Path", "used in bin/sloan_log.py, but it could be used directly as", "UTC self.plate_id = header['PLATEID'] self.cart_id = header['CARTID'] self.exp_id = int(str(fil).split('-')[-1].split('.')[0])", "= 'Closed' self.flavor = header['FLAVOR'].capitalize() elif 'Out' in header['HARTMANN']: self.hartmann", "self.hartmann = 'Open' self.flavor = header['FLAVOR'].capitalize() self.hart_resids = [] else:", "header['PLATETYP'] if 'Closed' in header['HARTMANN']: self.hartmann = 'Closed' self.flavor =", "self.exp_time = int(header['EXPTIME']) self.isot = Time(header['DATE-OBS']) # UTC self.plate_id =", "if not self.dither: # This key started working instead during", "self.hart_resids = [] else: self.hartmann = header['HARTMANN'] self.flavor = 'Hart'", "'Out' in header['HARTMANN']: self.hartmann = 'Open' self.flavor = header['FLAVOR'].capitalize() self.hart_resids", "else: self.hartmann = header['HARTMANN'] self.flavor = 'Hart' # self.seeing =", "this raw data is to future-proof things that need these", "action='store_true') args = parser.parse_args() parser.add_argument('-m', '--mjd', help='If not today (-t),", "be used directly as well. \"\"\" import argparse from pathlib", "self.cart_id = header['CARTID'] self.exp_id = int(str(fil).split('-')[-1].split('.')[0]) self.lead = header['PLATETYP'] if", "import Path from astropy.time import Time import fitsio class BOSSRaw:", "will hopefully help SDSS-V logging\"\"\" def __init__(self, fil): self.fil =", "self.exp_id = int(str(fil).split('-')[-1].split('.')[0]) self.lead = header['PLATETYP'] if 'Closed' in header['HARTMANN']:", "could be used directly as well. 
\"\"\" import argparse from", "= 'Hart' # self.seeing = header['SEEING'] # self.img_type = header['IMAGETYP']", "self.seeing = header['SEEING'] # self.img_type = header['IMAGETYP'] def main(): parser", "= [] else: self.hartmann = header['HARTMANN'] self.flavor = 'Hart' #", "import fitsio class BOSSRaw: \"\"\"A class to parse raw data", "'--verbose', action='count', default=1, help='Show details, can be stacked') if args.today:", "fil header = fitsio.read_header(fil) self.dither = header['MGDPOS'] if not self.dither:", "header['SEEING'] # self.img_type = header['IMAGETYP'] def main(): parser = argparse.ArgumentParser()", "self.hartmann = header['HARTMANN'] self.flavor = 'Hart' # self.seeing = header['SEEING']", "__init__(self, fil): self.fil = fil header = fitsio.read_header(fil) self.dither =", "fil): self.fil = fil header = fitsio.read_header(fil) self.dither = header['MGDPOS']", "header['CARTID'] self.exp_id = int(str(fil).split('-')[-1].split('.')[0]) self.lead = header['PLATETYP'] if 'Closed' in", "these ouptuts in case things like autoschedulers change, which many", "purpose of collecting this raw data is to future-proof things", "self.lead = header['PLATETYP'] if 'Closed' in header['HARTMANN']: self.hartmann = 'Closed'", "started working instead during SDSS-V self.dither = header['POINTING'][0] self.exp_time =", "= header['IMAGETYP'] def main(): parser = argparse.ArgumentParser() parser.add_argument('-t', '--today', action='store_true')", "can be stacked') if args.today: mjd_today = int(Time.now().sjd) data_dir =", "'Closed' in header['HARTMANN']: self.hartmann = 'Closed' self.flavor = header['FLAVOR'].capitalize() elif", "A tool to grab a single BOSS image and pull", "stacked') if args.today: mjd_today = int(Time.now().sjd) data_dir = '/data/spectro/{}/'.format(mjd_today) elif", "SDSS-V self.dither = header['POINTING'][0] self.exp_time = int(header['EXPTIME']) self.isot = Time(header['DATE-OBS'])", "from its header. 
It is used in bin/sloan_log.py, but it", "parser = argparse.ArgumentParser() parser.add_argument('-t', '--today', action='store_true') args = parser.parse_args() parser.add_argument('-m',", "'/data/spectro/{}/'.format(args.mjd) else: raise Exception('No date specified') for path in Path(data_dir).rglob('sdR*.fit.gz'):", "future-proof things that need these ouptuts in case things like", "main(): parser = argparse.ArgumentParser() parser.add_argument('-t', '--today', action='store_true') args = parser.parse_args()", "used directly as well. \"\"\" import argparse from pathlib import", "else: raise Exception('No date specified') for path in Path(data_dir).rglob('sdR*.fit.gz'): print(path)", "working instead during SDSS-V self.dither = header['POINTING'][0] self.exp_time = int(header['EXPTIME'])", "today (-t), the mjd to search') parser.add_argument('-v', '--verbose', action='count', default=1,", "= parser.parse_args() parser.add_argument('-m', '--mjd', help='If not today (-t), the mjd", "This will hopefully help SDSS-V logging\"\"\" def __init__(self, fil): self.fil", "is used in bin/sloan_log.py, but it could be used directly", "to parse raw data from APOGEE. The purpose of collecting", "few items from its header. It is used in bin/sloan_log.py,", "'Open' self.flavor = header['FLAVOR'].capitalize() self.hart_resids = [] else: self.hartmann =", "which many libraries depend on. This will hopefully help SDSS-V", "# UTC self.plate_id = header['PLATEID'] self.cart_id = header['CARTID'] self.exp_id =", "elif 'Out' in header['HARTMANN']: self.hartmann = 'Open' self.flavor = header['FLAVOR'].capitalize()", "Exception('No date specified') for path in Path(data_dir).rglob('sdR*.fit.gz'): print(path) if __name__", "class BOSSRaw: \"\"\"A class to parse raw data from APOGEE.", "self.fil = fil header = fitsio.read_header(fil) self.dither = header['MGDPOS'] if", "pull a few items from its header. 
It is used", "help SDSS-V logging\"\"\" def __init__(self, fil): self.fil = fil header", "header['MGDPOS'] if not self.dither: # This key started working instead", "\"\"\" A tool to grab a single BOSS image and", "like autoschedulers change, which many libraries depend on. This will", "if args.today: mjd_today = int(Time.now().sjd) data_dir = '/data/spectro/{}/'.format(mjd_today) elif args.mjd:", "parser.parse_args() parser.add_argument('-m', '--mjd', help='If not today (-t), the mjd to", "self.flavor = header['FLAVOR'].capitalize() elif 'Out' in header['HARTMANN']: self.hartmann = 'Open'", "= header['CARTID'] self.exp_id = int(str(fil).split('-')[-1].split('.')[0]) self.lead = header['PLATETYP'] if 'Closed'", "help='If not today (-t), the mjd to search') parser.add_argument('-v', '--verbose',", "= 'Open' self.flavor = header['FLAVOR'].capitalize() self.hart_resids = [] else: self.hartmann", "python3 \"\"\" A tool to grab a single BOSS image", "header['PLATEID'] self.cart_id = header['CARTID'] self.exp_id = int(str(fil).split('-')[-1].split('.')[0]) self.lead = header['PLATETYP']", "header['FLAVOR'].capitalize() self.hart_resids = [] else: self.hartmann = header['HARTMANN'] self.flavor =", "Time import fitsio class BOSSRaw: \"\"\"A class to parse raw", "= header['HARTMANN'] self.flavor = 'Hart' # self.seeing = header['SEEING'] #", "'--mjd', help='If not today (-t), the mjd to search') parser.add_argument('-v',", "tool to grab a single BOSS image and pull a", "header = fitsio.read_header(fil) self.dither = header['MGDPOS'] if not self.dither: #", "= header['POINTING'][0] self.exp_time = int(header['EXPTIME']) self.isot = Time(header['DATE-OBS']) # UTC", "self.dither = header['POINTING'][0] self.exp_time = int(header['EXPTIME']) self.isot = Time(header['DATE-OBS']) #", "specified') for path in Path(data_dir).rglob('sdR*.fit.gz'): print(path) if __name__ == '__main__':", "header['HARTMANN']: self.hartmann = 'Closed' self.flavor = header['FLAVOR'].capitalize() elif 
'Out' in", "int(Time.now().sjd) data_dir = '/data/spectro/{}/'.format(mjd_today) elif args.mjd: data_dir = '/data/spectro/{}/'.format(args.mjd) else:", "and pull a few items from its header. It is", "image and pull a few items from its header. It", "self.dither = header['MGDPOS'] if not self.dither: # This key started", "action='count', default=1, help='Show details, can be stacked') if args.today: mjd_today", "= header['FLAVOR'].capitalize() elif 'Out' in header['HARTMANN']: self.hartmann = 'Open' self.flavor", "well. \"\"\" import argparse from pathlib import Path from astropy.time", "= '/data/spectro/{}/'.format(args.mjd) else: raise Exception('No date specified') for path in", "change, which many libraries depend on. This will hopefully help", "from pathlib import Path from astropy.time import Time import fitsio", "to future-proof things that need these ouptuts in case things", "be stacked') if args.today: mjd_today = int(Time.now().sjd) data_dir = '/data/spectro/{}/'.format(mjd_today)", "'Closed' self.flavor = header['FLAVOR'].capitalize() elif 'Out' in header['HARTMANN']: self.hartmann =", "not today (-t), the mjd to search') parser.add_argument('-v', '--verbose', action='count',", "argparse from pathlib import Path from astropy.time import Time import", "APOGEE. The purpose of collecting this raw data is to", "of collecting this raw data is to future-proof things that", "= header['PLATEID'] self.cart_id = header['CARTID'] self.exp_id = int(str(fil).split('-')[-1].split('.')[0]) self.lead =", "It is used in bin/sloan_log.py, but it could be used", "date specified') for path in Path(data_dir).rglob('sdR*.fit.gz'): print(path) if __name__ ==", "self.plate_id = header['PLATEID'] self.cart_id = header['CARTID'] self.exp_id = int(str(fil).split('-')[-1].split('.')[0]) self.lead", "many libraries depend on. 
This will hopefully help SDSS-V logging\"\"\"", "import argparse from pathlib import Path from astropy.time import Time", "import Time import fitsio class BOSSRaw: \"\"\"A class to parse", "that need these ouptuts in case things like autoschedulers change,", "is to future-proof things that need these ouptuts in case", "search') parser.add_argument('-v', '--verbose', action='count', default=1, help='Show details, can be stacked')", "fitsio.read_header(fil) self.dither = header['MGDPOS'] if not self.dither: # This key", "self.dither: # This key started working instead during SDSS-V self.dither", "Time(header['DATE-OBS']) # UTC self.plate_id = header['PLATEID'] self.cart_id = header['CARTID'] self.exp_id", "collecting this raw data is to future-proof things that need", "= int(str(fil).split('-')[-1].split('.')[0]) self.lead = header['PLATETYP'] if 'Closed' in header['HARTMANN']: self.hartmann", "int(str(fil).split('-')[-1].split('.')[0]) self.lead = header['PLATETYP'] if 'Closed' in header['HARTMANN']: self.hartmann =", "= int(header['EXPTIME']) self.isot = Time(header['DATE-OBS']) # UTC self.plate_id = header['PLATEID']", "to search') parser.add_argument('-v', '--verbose', action='count', default=1, help='Show details, can be", "bin/sloan_log.py, but it could be used directly as well. \"\"\"", "during SDSS-V self.dither = header['POINTING'][0] self.exp_time = int(header['EXPTIME']) self.isot =", "self.img_type = header['IMAGETYP'] def main(): parser = argparse.ArgumentParser() parser.add_argument('-t', '--today',", "items from its header. 
It is used in bin/sloan_log.py, but", "not self.dither: # This key started working instead during SDSS-V", "details, can be stacked') if args.today: mjd_today = int(Time.now().sjd) data_dir", "self.isot = Time(header['DATE-OBS']) # UTC self.plate_id = header['PLATEID'] self.cart_id =", "astropy.time import Time import fitsio class BOSSRaw: \"\"\"A class to", "The purpose of collecting this raw data is to future-proof", "single BOSS image and pull a few items from its" ]
[ "counter += 1 if chosenNumber == numberRandom: print(f'Parabéns! Você acertou", "acertar o número # O programa termina se o usuário", "n: # print('Você acertou!') # else: # print('Você errou.') import", "acertou!') # else: # print('Você errou.') import random numberRandom =", "o usuário tenha três chances de acertar o número #", "numberRandom: print(f'Parabéns! Você acertou na {counter}ª de 3 tentativas!') break", "# O programa termina se o usuário acertar ou errar", "10: ')) counter += 1 if chosenNumber == numberRandom: print(f'Parabéns!", "# print('Você errou.') import random numberRandom = random.randint(1, 10) counter", "random numberRandom = random.randint(1, 10) counter = 0 while True:", "# Altere o Programa 8.20 de forma que o usuário", "- counter} tentativa(s).') else: print('Suas tentativas acabaram! Mais sorte na", "# if x == n: # print('Você acertou!') # else:", "acertou na {counter}ª de 3 tentativas!') break else: print(f'Você errou!')", "# n = random.randint(1, 10) # x = int(input('Escolha um", "# # n = random.randint(1, 10) # x = int(input('Escolha", "True: chosenNumber = int(input('\\nEscolha um número entre 1 e 10:", "acabaram! Mais sorte na próxima vez.') print(f'O número sorteado foi", "tentativas acabaram! Mais sorte na próxima vez.') print(f'O número sorteado", "= random.randint(1, 10) # x = int(input('Escolha um número entre", "while True: chosenNumber = int(input('\\nEscolha um número entre 1 e", "Mais sorte na próxima vez.') print(f'O número sorteado foi {numberRandom}.')", "Adivinhando o número # # import random # # n", "número entre 1 e 10: ')) # if x ==", "== numberRandom: print(f'Parabéns! Você acertou na {counter}ª de 3 tentativas!')", "de 3 tentativas!') break else: print(f'Você errou!') if counter <", "10) counter = 0 while True: chosenNumber = int(input('\\nEscolha um", "{3 - counter} tentativa(s).') else: print('Suas tentativas acabaram! 
Mais sorte", "= random.randint(1, 10) counter = 0 while True: chosenNumber =", "# else: # print('Você errou.') import random numberRandom = random.randint(1,", "o Programa 8.20 de forma que o usuário tenha três", "numberRandom = random.randint(1, 10) counter = 0 while True: chosenNumber", "termina se o usuário acertar ou errar três vezes #", "print(f'Parabéns! Você acertou na {counter}ª de 3 tentativas!') break else:", "3 tentativas!') break else: print(f'Você errou!') if counter < 3:", "= int(input('\\nEscolha um número entre 1 e 10: ')) counter", "8.20 do livro, página 184 # Programa 8.20 - Adivinhando", "livro, página 184 # Programa 8.20 - Adivinhando o número", "')) counter += 1 if chosenNumber == numberRandom: print(f'Parabéns! Você", "sorte na próxima vez.') print(f'O número sorteado foi {numberRandom}.') break", "# Programa 8.20 - Adivinhando o número # # import", "Você acertou na {counter}ª de 3 tentativas!') break else: print(f'Você", "tenha três chances de acertar o número # O programa", "counter} tentativa(s).') else: print('Suas tentativas acabaram! Mais sorte na próxima", "três vezes # Programa 8.20 do livro, página 184 #", "else: print('Suas tentativas acabaram! 
Mais sorte na próxima vez.') print(f'O", "forma que o usuário tenha três chances de acertar o", "# x = int(input('Escolha um número entre 1 e 10:", "1 e 10: ')) counter += 1 if chosenNumber ==", "{counter}ª de 3 tentativas!') break else: print(f'Você errou!') if counter", "e 10: ')) # if x == n: # print('Você", "import random # # n = random.randint(1, 10) # x", "um número entre 1 e 10: ')) counter += 1", "random # # n = random.randint(1, 10) # x =", "10) # x = int(input('Escolha um número entre 1 e", "três chances de acertar o número # O programa termina", "random.randint(1, 10) counter = 0 while True: chosenNumber = int(input('\\nEscolha", "- Adivinhando o número # # import random # #", "vezes # Programa 8.20 do livro, página 184 # Programa", "counter = 0 while True: chosenNumber = int(input('\\nEscolha um número", "Altere o Programa 8.20 de forma que o usuário tenha", "if x == n: # print('Você acertou!') # else: #", "chosenNumber = int(input('\\nEscolha um número entre 1 e 10: '))", "acertar ou errar três vezes # Programa 8.20 do livro,", "número entre 1 e 10: ')) counter += 1 if", "== n: # print('Você acertou!') # else: # print('Você errou.')", "n = random.randint(1, 10) # x = int(input('Escolha um número", "print(f'Resta(m) {3 - counter} tentativa(s).') else: print('Suas tentativas acabaram! Mais", "e 10: ')) counter += 1 if chosenNumber == numberRandom:", "na {counter}ª de 3 tentativas!') break else: print(f'Você errou!') if", "usuário acertar ou errar três vezes # Programa 8.20 do", "o número # # import random # # n =", "print('Suas tentativas acabaram! 
Mais sorte na próxima vez.') print(f'O número", "< 3: print(f'Resta(m) {3 - counter} tentativa(s).') else: print('Suas tentativas", "')) # if x == n: # print('Você acertou!') #", "chances de acertar o número # O programa termina se", "8.20 - Adivinhando o número # # import random #", "se o usuário acertar ou errar três vezes # Programa", "random.randint(1, 10) # x = int(input('Escolha um número entre 1", "Programa 8.20 - Adivinhando o número # # import random", "entre 1 e 10: ')) counter += 1 if chosenNumber", "# Programa 8.20 do livro, página 184 # Programa 8.20", "if chosenNumber == numberRandom: print(f'Parabéns! Você acertou na {counter}ª de", "import random numberRandom = random.randint(1, 10) counter = 0 while", "if counter < 3: print(f'Resta(m) {3 - counter} tentativa(s).') else:", "um número entre 1 e 10: ')) # if x", "counter < 3: print(f'Resta(m) {3 - counter} tentativa(s).') else: print('Suas", "x = int(input('Escolha um número entre 1 e 10: '))", "8.20 de forma que o usuário tenha três chances de", "de forma que o usuário tenha três chances de acertar", "errou.') import random numberRandom = random.randint(1, 10) counter = 0", "programa termina se o usuário acertar ou errar três vezes", "0 while True: chosenNumber = int(input('\\nEscolha um número entre 1", "tentativas!') break else: print(f'Você errou!') if counter < 3: print(f'Resta(m)", "de acertar o número # O programa termina se o", "o número # O programa termina se o usuário acertar", "3: print(f'Resta(m) {3 - counter} tentativa(s).') else: print('Suas tentativas acabaram!", "else: print(f'Você errou!') if counter < 3: print(f'Resta(m) {3 -", "# import random # # n = random.randint(1, 10) #", "int(input('Escolha um número entre 1 e 10: ')) # if", "# # import random # # n = random.randint(1, 10)", "O programa termina se o usuário acertar ou errar três", "número # # import random # # n = random.randint(1,", "print('Você errou.') import random numberRandom = random.randint(1, 10) counter =", "= 0 
while True: chosenNumber = int(input('\\nEscolha um número entre", "página 184 # Programa 8.20 - Adivinhando o número #", "else: # print('Você errou.') import random numberRandom = random.randint(1, 10)", "int(input('\\nEscolha um número entre 1 e 10: ')) counter +=", "número # O programa termina se o usuário acertar ou", "1 e 10: ')) # if x == n: #", "o usuário acertar ou errar três vezes # Programa 8.20", "do livro, página 184 # Programa 8.20 - Adivinhando o", "que o usuário tenha três chances de acertar o número", "tentativa(s).') else: print('Suas tentativas acabaram! Mais sorte na próxima vez.')", "ou errar três vezes # Programa 8.20 do livro, página", "1 if chosenNumber == numberRandom: print(f'Parabéns! Você acertou na {counter}ª", "errou!') if counter < 3: print(f'Resta(m) {3 - counter} tentativa(s).')", "184 # Programa 8.20 - Adivinhando o número # #", "+= 1 if chosenNumber == numberRandom: print(f'Parabéns! Você acertou na", "errar três vezes # Programa 8.20 do livro, página 184", "10: ')) # if x == n: # print('Você acertou!')", "chosenNumber == numberRandom: print(f'Parabéns! Você acertou na {counter}ª de 3", "print(f'Você errou!') if counter < 3: print(f'Resta(m) {3 - counter}", "break else: print(f'Você errou!') if counter < 3: print(f'Resta(m) {3", "# print('Você acertou!') # else: # print('Você errou.') import random", "entre 1 e 10: ')) # if x == n:", "usuário tenha três chances de acertar o número # O", "= int(input('Escolha um número entre 1 e 10: ')) #", "Programa 8.20 de forma que o usuário tenha três chances", "x == n: # print('Você acertou!') # else: # print('Você", "Programa 8.20 do livro, página 184 # Programa 8.20 -", "print('Você acertou!') # else: # print('Você errou.') import random numberRandom" ]
[ "the non-interactive argument was set, True, if it was not", "utf-8 -*- \"\"\"This sub module provides a global variable to", "-*- \"\"\"This sub module provides a global variable to check", "interactive -- False, if the main the non-interactive argument was", "set, True, if it was not set \"\"\" global interactive", "True, if it was not set \"\"\" global interactive interactive", "if the main the non-interactive argument was set, True, if", "provides a global variable to check for checking if the", "non-interactive argument was set Exported variable: interactive -- False, if", "if the non-interactive argument was set Exported variable: interactive --", "module provides a global variable to check for checking if", "to check for checking if the non-interactive argument was set", "it was not set \"\"\" global interactive interactive = True;", "sub module provides a global variable to check for checking", "<reponame>mariusfrinken/slogviz # -*- coding: utf-8 -*- \"\"\"This sub module provides", "variable: interactive -- False, if the main the non-interactive argument", "checking if the non-interactive argument was set Exported variable: interactive", "Exported variable: interactive -- False, if the main the non-interactive", "argument was set, True, if it was not set \"\"\"", "-*- coding: utf-8 -*- \"\"\"This sub module provides a global", "was set, True, if it was not set \"\"\" global", "argument was set Exported variable: interactive -- False, if the", "global variable to check for checking if the non-interactive argument", "coding: utf-8 -*- \"\"\"This sub module provides a global variable", "variable to check for checking if the non-interactive argument was", "False, if the main the non-interactive argument was set, True,", "non-interactive argument was set, True, if it was not set", "check for checking if the non-interactive argument was set Exported", "main the non-interactive argument was set, True, if it was", "if it was not set \"\"\" global interactive 
interactive =", "a global variable to check for checking if the non-interactive", "set Exported variable: interactive -- False, if the main the", "\"\"\"This sub module provides a global variable to check for", "the non-interactive argument was set Exported variable: interactive -- False,", "for checking if the non-interactive argument was set Exported variable:", "# -*- coding: utf-8 -*- \"\"\"This sub module provides a", "was set Exported variable: interactive -- False, if the main", "-- False, if the main the non-interactive argument was set,", "the main the non-interactive argument was set, True, if it" ]
[ "with open(path.join(here, 'README.md'), encoding='utf-8') as f: long_description = f.read() setuptools.setup(", "encoding='utf-8') as f: long_description = f.read() setuptools.setup( name=\"atm76\", version=\"0.1.0\", author=\"<NAME>\",", "f: long_description = f.read() setuptools.setup( name=\"atm76\", version=\"0.1.0\", author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"Differentiable", "as f: long_description = f.read() setuptools.setup( name=\"atm76\", version=\"0.1.0\", author=\"<NAME>\", author_email=\"<EMAIL>\",", "long_description_content_type=\"text/markdown\", url=\"https://github.com/shb84/ATM76.git\", packages=setuptools.find_packages(), package_data={}, install_requires=[\"numpy>=1.16\", \"genn\"], include_package_data=True, classifiers=[ \"Programming Language", "setuptools # To use a consistent encoding from codecs import", "3\", \"License :: OSI Approved :: MIT License\", \"Operating System", ":: 3\", \"License :: OSI Approved :: MIT License\", \"Operating", "OSI Approved :: MIT License\", \"Operating System :: OS Independent\",", "path.abspath(path.dirname(__file__)) with open(path.join(here, 'README.md'), encoding='utf-8') as f: long_description = f.read()", "f.read() setuptools.setup( name=\"atm76\", version=\"0.1.0\", author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"Differentiable 1976 Atmosphere\", long_description=long_description,", "1976 Atmosphere\", long_description=long_description, long_description_content_type=\"text/markdown\", url=\"https://github.com/shb84/ATM76.git\", packages=setuptools.find_packages(), package_data={}, install_requires=[\"numpy>=1.16\", \"genn\"], include_package_data=True,", "codecs import open from os import path here = path.abspath(path.dirname(__file__))", ":: MIT License\", \"Operating System :: OS Independent\", ], python_requires='>=3.7',", "setuptools.setup( name=\"atm76\", version=\"0.1.0\", author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"Differentiable 1976 Atmosphere\", 
long_description=long_description, long_description_content_type=\"text/markdown\",", "url=\"https://github.com/shb84/ATM76.git\", packages=setuptools.find_packages(), package_data={}, install_requires=[\"numpy>=1.16\", \"genn\"], include_package_data=True, classifiers=[ \"Programming Language ::", "install_requires=[\"numpy>=1.16\", \"genn\"], include_package_data=True, classifiers=[ \"Programming Language :: Python :: 3\",", "To use a consistent encoding from codecs import open from", "# To use a consistent encoding from codecs import open", "include_package_data=True, classifiers=[ \"Programming Language :: Python :: 3\", \"License ::", "import path here = path.abspath(path.dirname(__file__)) with open(path.join(here, 'README.md'), encoding='utf-8') as", "packages=setuptools.find_packages(), package_data={}, install_requires=[\"numpy>=1.16\", \"genn\"], include_package_data=True, classifiers=[ \"Programming Language :: Python", "description=\"Differentiable 1976 Atmosphere\", long_description=long_description, long_description_content_type=\"text/markdown\", url=\"https://github.com/shb84/ATM76.git\", packages=setuptools.find_packages(), package_data={}, install_requires=[\"numpy>=1.16\", \"genn\"],", "\"Programming Language :: Python :: 3\", \"License :: OSI Approved", "from codecs import open from os import path here =", "author_email=\"<EMAIL>\", description=\"Differentiable 1976 Atmosphere\", long_description=long_description, long_description_content_type=\"text/markdown\", url=\"https://github.com/shb84/ATM76.git\", packages=setuptools.find_packages(), package_data={}, install_requires=[\"numpy>=1.16\",", "version=\"0.1.0\", author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"Differentiable 1976 Atmosphere\", long_description=long_description, long_description_content_type=\"text/markdown\", url=\"https://github.com/shb84/ATM76.git\", packages=setuptools.find_packages(),", "package_data={}, install_requires=[\"numpy>=1.16\", \"genn\"], 
include_package_data=True, classifiers=[ \"Programming Language :: Python ::", "Language :: Python :: 3\", \"License :: OSI Approved ::", "'README.md'), encoding='utf-8') as f: long_description = f.read() setuptools.setup( name=\"atm76\", version=\"0.1.0\",", "import setuptools # To use a consistent encoding from codecs", "classifiers=[ \"Programming Language :: Python :: 3\", \"License :: OSI", ":: OSI Approved :: MIT License\", \"Operating System :: OS", "path here = path.abspath(path.dirname(__file__)) with open(path.join(here, 'README.md'), encoding='utf-8') as f:", "open(path.join(here, 'README.md'), encoding='utf-8') as f: long_description = f.read() setuptools.setup( name=\"atm76\",", "\"License :: OSI Approved :: MIT License\", \"Operating System ::", "use a consistent encoding from codecs import open from os", "Python :: 3\", \"License :: OSI Approved :: MIT License\",", "encoding from codecs import open from os import path here", "a consistent encoding from codecs import open from os import", "os import path here = path.abspath(path.dirname(__file__)) with open(path.join(here, 'README.md'), encoding='utf-8')", "MIT License\", \"Operating System :: OS Independent\", ], python_requires='>=3.7', )", "= f.read() setuptools.setup( name=\"atm76\", version=\"0.1.0\", author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"Differentiable 1976 Atmosphere\",", "from os import path here = path.abspath(path.dirname(__file__)) with open(path.join(here, 'README.md'),", "name=\"atm76\", version=\"0.1.0\", author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"Differentiable 1976 Atmosphere\", long_description=long_description, long_description_content_type=\"text/markdown\", url=\"https://github.com/shb84/ATM76.git\",", "\"genn\"], include_package_data=True, classifiers=[ \"Programming Language :: Python :: 3\", \"License", "Approved :: MIT License\", \"Operating System :: OS Independent\", ],", "here = path.abspath(path.dirname(__file__)) with 
open(path.join(here, 'README.md'), encoding='utf-8') as f: long_description", "long_description = f.read() setuptools.setup( name=\"atm76\", version=\"0.1.0\", author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"Differentiable 1976", "author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"Differentiable 1976 Atmosphere\", long_description=long_description, long_description_content_type=\"text/markdown\", url=\"https://github.com/shb84/ATM76.git\", packages=setuptools.find_packages(), package_data={},", "Atmosphere\", long_description=long_description, long_description_content_type=\"text/markdown\", url=\"https://github.com/shb84/ATM76.git\", packages=setuptools.find_packages(), package_data={}, install_requires=[\"numpy>=1.16\", \"genn\"], include_package_data=True, classifiers=[", "long_description=long_description, long_description_content_type=\"text/markdown\", url=\"https://github.com/shb84/ATM76.git\", packages=setuptools.find_packages(), package_data={}, install_requires=[\"numpy>=1.16\", \"genn\"], include_package_data=True, classifiers=[ \"Programming", "= path.abspath(path.dirname(__file__)) with open(path.join(here, 'README.md'), encoding='utf-8') as f: long_description =", "consistent encoding from codecs import open from os import path", "open from os import path here = path.abspath(path.dirname(__file__)) with open(path.join(here,", ":: Python :: 3\", \"License :: OSI Approved :: MIT", "import open from os import path here = path.abspath(path.dirname(__file__)) with" ]
[ "\"\"\" if len(array_speed) == 0: return 0 sum = 0", "speed in array_speed: if speed < speed_before: break else: speed_before", "running...') start = time.clock() r = requests.get('http://{}'.format(dnode), stream=True) total_length =", "speed done!\") #TODO Bỏ time, để kiểm tra xem db", "download_speed, mean_deviationS, accelerationS] Returns: json format for influxdb \"\"\" return", "total_length // (time.clock() - start) accelerationS = self.acceleration(array_speed) mean_deviationS =", "url file download. start (float): It's time which started download.", "for each 1024 Byte Returns: acceleration (kB/s) : the deviation", "chunk in r.iter_content(1024): # 1kB1024 1MB 1048576 end_chunk = time.clock()", "array_speed): \"\"\"Caculate acceleration. By get the highest speed in the", "my_array (list): [self._snode, url, str(datetime.now()), download_speed, mean_deviationS, accelerationS] Returns: json", "- start_chunk start_chunk = end_chunk if delta <= 0: break", "1kB1024 1MB 1048576 end_chunk = time.clock() delta = end_chunk -", "start = time.clock() r = requests.get('http://{}'.format(dnode), stream=True) total_length = int(r.headers.get('content-length'))", "for speed in array_speed: sum += abs(speed - download_speed) return", "download speed. Returns: mean_deviation (kB/s) \"\"\" if len(array_speed) == 0:", "__call__(self, client, dnode): logger.info('Test download speed : running...') start =", "start (float): It's time which started download. end (float): It's", "with item 0 : json format for influxdb \"\"\" download_speed", "accelerationS] Returns: json format for influxdb \"\"\" return { \"measurement\":", "if total_length is None: logger.error(\"Empty file!\") else: array_speed = []", "accelerationS])] def acceleration(self, array_speed): \"\"\"Caculate acceleration. 
By get the highest", "list download times for each 1024 Byte Returns: acceleration (kB/s)", "download (Byte) array_speed (list): list download speeds for each 1024", "return { \"measurement\": \"download_speed\", \"tags\": { \"snode\": \"{}\".format(my_array[0]), \"dnode\": \"{}\".format(my_array[1])", "dung thu vien asyncio ntn ca nen em dung thu", "= [] start_chunk = time.clock() for chunk in r.iter_content(1024): #", "inserting into influxdb. Args: my_array (list): [self._snode, url, str(datetime.now()), download_speed,", "def acceleration(self, array_speed): \"\"\"Caculate acceleration. By get the highest speed", "# kB / s end = time.clock() yield from self._queue.put(self.get_result(dnode,", "time.clock() delta = end_chunk - start_chunk start_chunk = end_chunk if", "cycle. Args: array_speed (list): list download times for each 1024", "- start) accelerationS = self.acceleration(array_speed) mean_deviationS = self.mean_deviation(array_speed, download_speed) logger.info(\"Test", "hay chưa return [self.output([self._snode, url, datetime.now(), download_speed, mean_deviationS, accelerationS])] def", "[] start_chunk = time.clock() for chunk in r.iter_content(1024): # 1kB1024", "mean_deviationS, accelerationS] Returns: json format for influxdb \"\"\" return {", "(kB/s) : the deviation between highest speed and first byte", "requests.get('http://{}'.format(dnode), stream=True) total_length = int(r.headers.get('content-length')) if total_length is None: logger.error(\"Empty", "time which finished download. total_length (int): size of file download", "into influxdb. Args: my_array (list): [self._snode, url, str(datetime.now()), download_speed, mean_deviationS,", "start, end, total_length, array_speed): \"\"\"Download and processing data. Args: url", "(kB/s) Returns: list with item 0 : json format for", "for inserting into influxdb. 
Args: my_array (list): [self._snode, url, str(datetime.now()),", "return 0 sum = 0 for speed in array_speed: sum", "ca nen em dung thu vien request # python import", "\"{}\".format(my_array[0]), \"dnode\": \"{}\".format(my_array[1]) }, # \"time\": \"{}\".format(my_array[2]), \"fields\": { \"speed\":", "stream=True) total_length = int(r.headers.get('content-length')) if total_length is None: logger.error(\"Empty file!\")", "\"\"\"Reformat my_array for inserting into influxdb. Args: my_array (list): [self._snode,", "end, total_length, array_speed): \"\"\"Download and processing data. Args: url (str):", "= array_speed[0] for speed in array_speed: if speed < speed_before:", "# \"time\": \"{}\".format(my_array[2]), \"fields\": { \"speed\": my_array[3], \"mean_deviation\": my_array[4], \"acceleration\":", "mean_deviationS = self.mean_deviation(array_speed, download_speed) logger.info(\"Test download speed done!\") #TODO Bỏ", "len(array_speed) == 0: return 0 speed_before = array_speed[0] for speed", "Returns: mean_deviation (kB/s) \"\"\" if len(array_speed) == 0: return 0", "return speed_before - array_speed[0] def mean_deviation(self, array_speed, download_speed): \"\"\"The mean", "speed_before = speed return speed_before - array_speed[0] def mean_deviation(self, array_speed,", "the first cycle. Args: array_speed (list): list download times for", "highest speed and first byte speed \"\"\" if len(array_speed) ==", "0 sum = 0 for speed in array_speed: sum +=", "== 0: return 0 speed_before = array_speed[0] for speed in", "download. end (float): It's time which finished download. total_length (int):", "with download_speed. 
Args: array_speed (list): list download speeds for each", "from agent.check_plugins import AbstractCheckPlugin # Do khong biet dung thu", "def __call__(self, client, dnode): logger.info('Test download speed : running...') start", "download times for each 1024 Byte Returns: acceleration (kB/s) :", "(list): list download speeds for each 1024 Byte (kB/s) Returns:", "agent.check_plugins import AbstractCheckPlugin # Do khong biet dung thu vien", "== 0: return 0 sum = 0 for speed in", "dung thu vien request # python import requests import sys", "end, total_length, array_speed)) @asyncio.coroutine def get_result(self, url, start, end, total_length,", "break else: array_speed.append(1//delta) # kB / s end = time.clock()", "datetime.now(), download_speed, mean_deviationS, accelerationS])] def acceleration(self, array_speed): \"\"\"Caculate acceleration. By", "speed in array_speed: sum += abs(speed - download_speed) return sum//len(array_speed)", "acceleration. By get the highest speed in the first cycle.", "time.clock() yield from self._queue.put(self.get_result(dnode, start, end, total_length, array_speed)) @asyncio.coroutine def", "= logging.getLogger(__name__) class Download(AbstractCheckPlugin): @asyncio.coroutine def __call__(self, client, dnode): logger.info('Test", "deviation each downloads with download_speed. Args: array_speed (list): list download", "Args: array_speed (list): list download times for each 1024 Byte", "is None: logger.error(\"Empty file!\") else: array_speed = [] start_chunk =", "< speed_before: break else: speed_before = speed return speed_before -", "in r.iter_content(1024): # 1kB1024 1MB 1048576 end_chunk = time.clock() delta", "format for influxdb \"\"\" download_speed = total_length // (time.clock() -", "download_speed (kB/s): mean download speed. 
Returns: mean_deviation (kB/s) \"\"\" if", "int(r.headers.get('content-length')) if total_length is None: logger.error(\"Empty file!\") else: array_speed =", "{ \"snode\": \"{}\".format(my_array[0]), \"dnode\": \"{}\".format(my_array[1]) }, # \"time\": \"{}\".format(my_array[2]), \"fields\":", "return sum//len(array_speed) def output(self, my_array): \"\"\"Reformat my_array for inserting into", "\"tags\": { \"snode\": \"{}\".format(my_array[0]), \"dnode\": \"{}\".format(my_array[1]) }, # \"time\": \"{}\".format(my_array[2]),", "item 0 : json format for influxdb \"\"\" download_speed =", "self.acceleration(array_speed) mean_deviationS = self.mean_deviation(array_speed, download_speed) logger.info(\"Test download speed done!\") #TODO", "}, # \"time\": \"{}\".format(my_array[2]), \"fields\": { \"speed\": my_array[3], \"mean_deviation\": my_array[4],", "python import requests import sys import time from datetime import", "time.clock() r = requests.get('http://{}'.format(dnode), stream=True) total_length = int(r.headers.get('content-length')) if total_length", "mean deviation each downloads with download_speed. 
Args: array_speed (list): list", "\"dnode\": \"{}\".format(my_array[1]) }, # \"time\": \"{}\".format(my_array[2]), \"fields\": { \"speed\": my_array[3],", "logger.info(\"Test download speed done!\") #TODO Bỏ time, để kiểm tra", "khong biet dung thu vien asyncio ntn ca nen em", "time from datetime import datetime logger = logging.getLogger(__name__) class Download(AbstractCheckPlugin):", "nen em dung thu vien request # python import requests", "url, datetime.now(), download_speed, mean_deviationS, accelerationS])] def acceleration(self, array_speed): \"\"\"Caculate acceleration.", "= int(r.headers.get('content-length')) if total_length is None: logger.error(\"Empty file!\") else: array_speed", "end = time.clock() yield from self._queue.put(self.get_result(dnode, start, end, total_length, array_speed))", "= 0 for speed in array_speed: sum += abs(speed -", "json format for influxdb \"\"\" return { \"measurement\": \"download_speed\", \"tags\":", "speeds for each kB. download_speed (kB/s): mean download speed. Returns:", "1048576 end_chunk = time.clock() delta = end_chunk - start_chunk start_chunk", "array_speed[0] def mean_deviation(self, array_speed, download_speed): \"\"\"The mean deviation each downloads", "sum += abs(speed - download_speed) return sum//len(array_speed) def output(self, my_array):", "client, dnode): logger.info('Test download speed : running...') start = time.clock()", "json format for influxdb \"\"\" download_speed = total_length // (time.clock()", "sum//len(array_speed) def output(self, my_array): \"\"\"Reformat my_array for inserting into influxdb.", "time which started download. end (float): It's time which finished", "thu vien request # python import requests import sys import", "\"download_speed\", \"tags\": { \"snode\": \"{}\".format(my_array[0]), \"dnode\": \"{}\".format(my_array[1]) }, # \"time\":", "delta <= 0: break else: array_speed.append(1//delta) # kB / s", "each downloads with download_speed. 
Args: array_speed (list): list download speeds", "acceleration(self, array_speed): \"\"\"Caculate acceleration. By get the highest speed in", "= total_length // (time.clock() - start) accelerationS = self.acceleration(array_speed) mean_deviationS", "download. total_length (int): size of file download (Byte) array_speed (list):", "kiểm tra xem db có ghi đc dữ liệu hay", "array_speed)) @asyncio.coroutine def get_result(self, url, start, end, total_length, array_speed): \"\"\"Download", "time.clock() for chunk in r.iter_content(1024): # 1kB1024 1MB 1048576 end_chunk", "\"\"\"Download and processing data. Args: url (str): url file download.", "time, để kiểm tra xem db có ghi đc dữ", "started download. end (float): It's time which finished download. total_length", "sys import time from datetime import datetime logger = logging.getLogger(__name__)", "return 0 speed_before = array_speed[0] for speed in array_speed: if", "= end_chunk - start_chunk start_chunk = end_chunk if delta <=", "array_speed = [] start_chunk = time.clock() for chunk in r.iter_content(1024):", "\"snode\": \"{}\".format(my_array[0]), \"dnode\": \"{}\".format(my_array[1]) }, # \"time\": \"{}\".format(my_array[2]), \"fields\": {", "each 1024 Byte Returns: acceleration (kB/s) : the deviation between", "(kB/s) \"\"\" if len(array_speed) == 0: return 0 sum =", ": json format for influxdb \"\"\" download_speed = total_length //", "total_length, array_speed)) @asyncio.coroutine def get_result(self, url, start, end, total_length, array_speed):", "file download (Byte) array_speed (list): list download speeds for each", "array_speed (list): list download times for each 1024 Byte Returns:", "speed_before: break else: speed_before = speed return speed_before - array_speed[0]", "download_speed, mean_deviationS, accelerationS])] def acceleration(self, array_speed): \"\"\"Caculate acceleration. 
By get", "influxdb \"\"\" return { \"measurement\": \"download_speed\", \"tags\": { \"snode\": \"{}\".format(my_array[0]),", "(time.clock() - start) accelerationS = self.acceleration(array_speed) mean_deviationS = self.mean_deviation(array_speed, download_speed)", "for each kB. download_speed (kB/s): mean download speed. Returns: mean_deviation", "file download. start (float): It's time which started download. end", "start_chunk = time.clock() for chunk in r.iter_content(1024): # 1kB1024 1MB", "download speed done!\") #TODO Bỏ time, để kiểm tra xem", "{ \"measurement\": \"download_speed\", \"tags\": { \"snode\": \"{}\".format(my_array[0]), \"dnode\": \"{}\".format(my_array[1]) },", "- download_speed) return sum//len(array_speed) def output(self, my_array): \"\"\"Reformat my_array for", "s end = time.clock() yield from self._queue.put(self.get_result(dnode, start, end, total_length,", "end_chunk = time.clock() delta = end_chunk - start_chunk start_chunk =", "yield from self._queue.put(self.get_result(dnode, start, end, total_length, array_speed)) @asyncio.coroutine def get_result(self,", "total_length (int): size of file download (Byte) array_speed (list): list", "each kB. download_speed (kB/s): mean download speed. Returns: mean_deviation (kB/s)", "speed_before = array_speed[0] for speed in array_speed: if speed <", "\"\"\" download_speed = total_length // (time.clock() - start) accelerationS =", "datetime import datetime logger = logging.getLogger(__name__) class Download(AbstractCheckPlugin): @asyncio.coroutine def", "get the highest speed in the first cycle. 
Args: array_speed", "get_result(self, url, start, end, total_length, array_speed): \"\"\"Download and processing data.", "db có ghi đc dữ liệu hay chưa return [self.output([self._snode,", "(list): [self._snode, url, str(datetime.now()), download_speed, mean_deviationS, accelerationS] Returns: json format", "@asyncio.coroutine def get_result(self, url, start, end, total_length, array_speed): \"\"\"Download and", "speed < speed_before: break else: speed_before = speed return speed_before", "for chunk in r.iter_content(1024): # 1kB1024 1MB 1048576 end_chunk =", "download speeds for each kB. download_speed (kB/s): mean download speed.", "liệu hay chưa return [self.output([self._snode, url, datetime.now(), download_speed, mean_deviationS, accelerationS])]", "my_array for inserting into influxdb. Args: my_array (list): [self._snode, url,", "else: array_speed = [] start_chunk = time.clock() for chunk in", "the deviation between highest speed and first byte speed \"\"\"", "highest speed in the first cycle. Args: array_speed (list): list", "import datetime logger = logging.getLogger(__name__) class Download(AbstractCheckPlugin): @asyncio.coroutine def __call__(self,", "(str): url file download. start (float): It's time which started", "download. start (float): It's time which started download. end (float):", "r = requests.get('http://{}'.format(dnode), stream=True) total_length = int(r.headers.get('content-length')) if total_length is", "sum = 0 for speed in array_speed: sum += abs(speed", "download_speed) logger.info(\"Test download speed done!\") #TODO Bỏ time, để kiểm", "break else: speed_before = speed return speed_before - array_speed[0] def", "if len(array_speed) == 0: return 0 sum = 0 for", "download_speed): \"\"\"The mean deviation each downloads with download_speed. Args: array_speed", "finished download. 
total_length (int): size of file download (Byte) array_speed", "ntn ca nen em dung thu vien request # python", "#TODO Bỏ time, để kiểm tra xem db có ghi", "= time.clock() delta = end_chunk - start_chunk start_chunk = end_chunk", "self._queue.put(self.get_result(dnode, start, end, total_length, array_speed)) @asyncio.coroutine def get_result(self, url, start,", "speeds for each 1024 Byte (kB/s) Returns: list with item", "dữ liệu hay chưa return [self.output([self._snode, url, datetime.now(), download_speed, mean_deviationS,", "total_length, array_speed): \"\"\"Download and processing data. Args: url (str): url", "chưa return [self.output([self._snode, url, datetime.now(), download_speed, mean_deviationS, accelerationS])] def acceleration(self,", "end (float): It's time which finished download. total_length (int): size", "downloads with download_speed. Args: array_speed (list): list download speeds for", "in the first cycle. Args: array_speed (list): list download times", "em dung thu vien request # python import requests import", "done!\") #TODO Bỏ time, để kiểm tra xem db có", "0: break else: array_speed.append(1//delta) # kB / s end =", "array_speed: sum += abs(speed - download_speed) return sum//len(array_speed) def output(self,", "kB / s end = time.clock() yield from self._queue.put(self.get_result(dnode, start,", "- array_speed[0] def mean_deviation(self, array_speed, download_speed): \"\"\"The mean deviation each", "influxdb \"\"\" download_speed = total_length // (time.clock() - start) accelerationS", "speed and first byte speed \"\"\" if len(array_speed) == 0:", "url, str(datetime.now()), download_speed, mean_deviationS, accelerationS] Returns: json format for influxdb", "array_speed: if speed < speed_before: break else: speed_before = speed", "influxdb. 
Args: my_array (list): [self._snode, url, str(datetime.now()), download_speed, mean_deviationS, accelerationS]", "from datetime import datetime logger = logging.getLogger(__name__) class Download(AbstractCheckPlugin): @asyncio.coroutine", "mean_deviation(self, array_speed, download_speed): \"\"\"The mean deviation each downloads with download_speed.", "def output(self, my_array): \"\"\"Reformat my_array for inserting into influxdb. Args:", "0 for speed in array_speed: sum += abs(speed - download_speed)", "file!\") else: array_speed = [] start_chunk = time.clock() for chunk", "list download speeds for each 1024 Byte (kB/s) Returns: list", "xem db có ghi đc dữ liệu hay chưa return", "start) accelerationS = self.acceleration(array_speed) mean_deviationS = self.mean_deviation(array_speed, download_speed) logger.info(\"Test download", "Returns: acceleration (kB/s) : the deviation between highest speed and", "download_speed) return sum//len(array_speed) def output(self, my_array): \"\"\"Reformat my_array for inserting", "(float): It's time which finished download. 
total_length (int): size of", "start_chunk = end_chunk if delta <= 0: break else: array_speed.append(1//delta)", "logging.getLogger(__name__) class Download(AbstractCheckPlugin): @asyncio.coroutine def __call__(self, client, dnode): logger.info('Test download", "and first byte speed \"\"\" if len(array_speed) == 0: return", "import sys import time from datetime import datetime logger =", "biet dung thu vien asyncio ntn ca nen em dung", "for influxdb \"\"\" download_speed = total_length // (time.clock() - start)", "0: return 0 sum = 0 for speed in array_speed:", "for influxdb \"\"\" return { \"measurement\": \"download_speed\", \"tags\": { \"snode\":", "if len(array_speed) == 0: return 0 speed_before = array_speed[0] for", "\"{}\".format(my_array[2]), \"fields\": { \"speed\": my_array[3], \"mean_deviation\": my_array[4], \"acceleration\": my_array[5] }", "acceleration (kB/s) : the deviation between highest speed and first", "tra xem db có ghi đc dữ liệu hay chưa", ": the deviation between highest speed and first byte speed", "để kiểm tra xem db có ghi đc dữ liệu", "array_speed (list): list download speeds for each 1024 Byte (kB/s)", "for each 1024 Byte (kB/s) Returns: list with item 0", "else: array_speed.append(1//delta) # kB / s end = time.clock() yield", "1024 Byte (kB/s) Returns: list with item 0 : json", "1024 Byte Returns: acceleration (kB/s) : the deviation between highest", "output(self, my_array): \"\"\"Reformat my_array for inserting into influxdb. Args: my_array", "from self._queue.put(self.get_result(dnode, start, end, total_length, array_speed)) @asyncio.coroutine def get_result(self, url,", "(kB/s): mean download speed. 
Returns: mean_deviation (kB/s) \"\"\" if len(array_speed)", "= time.clock() r = requests.get('http://{}'.format(dnode), stream=True) total_length = int(r.headers.get('content-length')) if", "import requests import sys import time from datetime import datetime", "def mean_deviation(self, array_speed, download_speed): \"\"\"The mean deviation each downloads with", "\"\"\" return { \"measurement\": \"download_speed\", \"tags\": { \"snode\": \"{}\".format(my_array[0]), \"dnode\":", "Returns: json format for influxdb \"\"\" return { \"measurement\": \"download_speed\",", "vien asyncio ntn ca nen em dung thu vien request", "AbstractCheckPlugin # Do khong biet dung thu vien asyncio ntn", "if delta <= 0: break else: array_speed.append(1//delta) # kB /", "times for each 1024 Byte Returns: acceleration (kB/s) : the", "có ghi đc dữ liệu hay chưa return [self.output([self._snode, url,", "ghi đc dữ liệu hay chưa return [self.output([self._snode, url, datetime.now(),", "end_chunk - start_chunk start_chunk = end_chunk if delta <= 0:", "url (str): url file download. start (float): It's time which", "list with item 0 : json format for influxdb \"\"\"", "0: return 0 speed_before = array_speed[0] for speed in array_speed:", "\"measurement\": \"download_speed\", \"tags\": { \"snode\": \"{}\".format(my_array[0]), \"dnode\": \"{}\".format(my_array[1]) }, #", "import AbstractCheckPlugin # Do khong biet dung thu vien asyncio", "array_speed (list): list download speeds for each kB. download_speed (kB/s):", "if speed < speed_before: break else: speed_before = speed return", "total_length = int(r.headers.get('content-length')) if total_length is None: logger.error(\"Empty file!\") else:", "Do khong biet dung thu vien asyncio ntn ca nen", "= time.clock() for chunk in r.iter_content(1024): # 1kB1024 1MB 1048576", "which finished download. 
total_length (int): size of file download (Byte)", "/ s end = time.clock() yield from self._queue.put(self.get_result(dnode, start, end,", "logging import asyncio from agent.check_plugins import AbstractCheckPlugin # Do khong", "download speed : running...') start = time.clock() r = requests.get('http://{}'.format(dnode),", "array_speed[0] for speed in array_speed: if speed < speed_before: break", "mean download speed. Returns: mean_deviation (kB/s) \"\"\" if len(array_speed) ==", "start, end, total_length, array_speed)) @asyncio.coroutine def get_result(self, url, start, end,", "It's time which finished download. total_length (int): size of file", "đc dữ liệu hay chưa return [self.output([self._snode, url, datetime.now(), download_speed,", "import time from datetime import datetime logger = logging.getLogger(__name__) class", "download speeds for each 1024 Byte (kB/s) Returns: list with", "else: speed_before = speed return speed_before - array_speed[0] def mean_deviation(self,", "request # python import requests import sys import time from", "and processing data. Args: url (str): url file download. start", "<= 0: break else: array_speed.append(1//delta) # kB / s end", "list download speeds for each kB. download_speed (kB/s): mean download", "len(array_speed) == 0: return 0 sum = 0 for speed", "It's time which started download. end (float): It's time which", "in array_speed: if speed < speed_before: break else: speed_before =", "None: logger.error(\"Empty file!\") else: array_speed = [] start_chunk = time.clock()", "# Do khong biet dung thu vien asyncio ntn ca", "asyncio ntn ca nen em dung thu vien request #", "(list): list download speeds for each kB. download_speed (kB/s): mean", "size of file download (Byte) array_speed (list): list download speeds", "first cycle. Args: array_speed (list): list download times for each", "kB. download_speed (kB/s): mean download speed. 
Returns: mean_deviation (kB/s) \"\"\"", "[self.output([self._snode, url, datetime.now(), download_speed, mean_deviationS, accelerationS])] def acceleration(self, array_speed): \"\"\"Caculate", "format for influxdb \"\"\" return { \"measurement\": \"download_speed\", \"tags\": {", "return [self.output([self._snode, url, datetime.now(), download_speed, mean_deviationS, accelerationS])] def acceleration(self, array_speed):", "import asyncio from agent.check_plugins import AbstractCheckPlugin # Do khong biet", "# python import requests import sys import time from datetime", "1MB 1048576 end_chunk = time.clock() delta = end_chunk - start_chunk", "= time.clock() yield from self._queue.put(self.get_result(dnode, start, end, total_length, array_speed)) @asyncio.coroutine", "speed_before - array_speed[0] def mean_deviation(self, array_speed, download_speed): \"\"\"The mean deviation", "\"\"\" if len(array_speed) == 0: return 0 speed_before = array_speed[0]", "speed : running...') start = time.clock() r = requests.get('http://{}'.format(dnode), stream=True)", "mean_deviation (kB/s) \"\"\" if len(array_speed) == 0: return 0 sum", "array_speed, download_speed): \"\"\"The mean deviation each downloads with download_speed. Args:", "[self._snode, url, str(datetime.now()), download_speed, mean_deviationS, accelerationS] Returns: json format for", "0 speed_before = array_speed[0] for speed in array_speed: if speed", "array_speed.append(1//delta) # kB / s end = time.clock() yield from", "= end_chunk if delta <= 0: break else: array_speed.append(1//delta) #", "download_speed = total_length // (time.clock() - start) accelerationS = self.acceleration(array_speed)", "byte speed \"\"\" if len(array_speed) == 0: return 0 speed_before", "// (time.clock() - start) accelerationS = self.acceleration(array_speed) mean_deviationS = self.mean_deviation(array_speed,", "\"\"\"The mean deviation each downloads with download_speed. 
Args: array_speed (list):", "speed \"\"\" if len(array_speed) == 0: return 0 speed_before =", "str(datetime.now()), download_speed, mean_deviationS, accelerationS] Returns: json format for influxdb \"\"\"", "asyncio from agent.check_plugins import AbstractCheckPlugin # Do khong biet dung", "Byte Returns: acceleration (kB/s) : the deviation between highest speed", "requests import sys import time from datetime import datetime logger", "\"\"\"Caculate acceleration. By get the highest speed in the first", "each 1024 Byte (kB/s) Returns: list with item 0 :", "dnode): logger.info('Test download speed : running...') start = time.clock() r", "mean_deviationS, accelerationS])] def acceleration(self, array_speed): \"\"\"Caculate acceleration. By get the", "total_length is None: logger.error(\"Empty file!\") else: array_speed = [] start_chunk", "my_array): \"\"\"Reformat my_array for inserting into influxdb. Args: my_array (list):", "speed. Returns: mean_deviation (kB/s) \"\"\" if len(array_speed) == 0: return", "(list): list download times for each 1024 Byte Returns: acceleration", "Args: my_array (list): [self._snode, url, str(datetime.now()), download_speed, mean_deviationS, accelerationS] Returns:", "Args: url (str): url file download. 
start (float): It's time", "logger.error(\"Empty file!\") else: array_speed = [] start_chunk = time.clock() for", "accelerationS = self.acceleration(array_speed) mean_deviationS = self.mean_deviation(array_speed, download_speed) logger.info(\"Test download speed", "thu vien asyncio ntn ca nen em dung thu vien", "speed return speed_before - array_speed[0] def mean_deviation(self, array_speed, download_speed): \"\"\"The", "datetime logger = logging.getLogger(__name__) class Download(AbstractCheckPlugin): @asyncio.coroutine def __call__(self, client,", "import logging import asyncio from agent.check_plugins import AbstractCheckPlugin # Do", "between highest speed and first byte speed \"\"\" if len(array_speed)", "By get the highest speed in the first cycle. Args:", "Returns: list with item 0 : json format for influxdb", "url, start, end, total_length, array_speed): \"\"\"Download and processing data. Args:", "logger.info('Test download speed : running...') start = time.clock() r =", "download_speed. Args: array_speed (list): list download speeds for each kB.", "array_speed): \"\"\"Download and processing data. Args: url (str): url file", "abs(speed - download_speed) return sum//len(array_speed) def output(self, my_array): \"\"\"Reformat my_array", "r.iter_content(1024): # 1kB1024 1MB 1048576 end_chunk = time.clock() delta =", "speed in the first cycle. 
Args: array_speed (list): list download", "= self.acceleration(array_speed) mean_deviationS = self.mean_deviation(array_speed, download_speed) logger.info(\"Test download speed done!\")", "Bỏ time, để kiểm tra xem db có ghi đc", "(Byte) array_speed (list): list download speeds for each 1024 Byte", "\"fields\": { \"speed\": my_array[3], \"mean_deviation\": my_array[4], \"acceleration\": my_array[5] } }", ": running...') start = time.clock() r = requests.get('http://{}'.format(dnode), stream=True) total_length", "(int): size of file download (Byte) array_speed (list): list download", "= requests.get('http://{}'.format(dnode), stream=True) total_length = int(r.headers.get('content-length')) if total_length is None:", "delta = end_chunk - start_chunk start_chunk = end_chunk if delta", "\"time\": \"{}\".format(my_array[2]), \"fields\": { \"speed\": my_array[3], \"mean_deviation\": my_array[4], \"acceleration\": my_array[5]", "class Download(AbstractCheckPlugin): @asyncio.coroutine def __call__(self, client, dnode): logger.info('Test download speed", "Args: array_speed (list): list download speeds for each kB. 
download_speed", "Byte (kB/s) Returns: list with item 0 : json format", "in array_speed: sum += abs(speed - download_speed) return sum//len(array_speed) def", "0 : json format for influxdb \"\"\" download_speed = total_length", "self.mean_deviation(array_speed, download_speed) logger.info(\"Test download speed done!\") #TODO Bỏ time, để", "first byte speed \"\"\" if len(array_speed) == 0: return 0", "for speed in array_speed: if speed < speed_before: break else:", "def get_result(self, url, start, end, total_length, array_speed): \"\"\"Download and processing", "logger = logging.getLogger(__name__) class Download(AbstractCheckPlugin): @asyncio.coroutine def __call__(self, client, dnode):", "# 1kB1024 1MB 1048576 end_chunk = time.clock() delta = end_chunk", "\"{}\".format(my_array[1]) }, # \"time\": \"{}\".format(my_array[2]), \"fields\": { \"speed\": my_array[3], \"mean_deviation\":", "(float): It's time which started download. end (float): It's time", "the highest speed in the first cycle. Args: array_speed (list):", "end_chunk if delta <= 0: break else: array_speed.append(1//delta) # kB", "= speed return speed_before - array_speed[0] def mean_deviation(self, array_speed, download_speed):", "data. Args: url (str): url file download. start (float): It's", "of file download (Byte) array_speed (list): list download speeds for", "= self.mean_deviation(array_speed, download_speed) logger.info(\"Test download speed done!\") #TODO Bỏ time,", "@asyncio.coroutine def __call__(self, client, dnode): logger.info('Test download speed : running...')", "which started download. end (float): It's time which finished download.", "processing data. Args: url (str): url file download. 
start (float):", "Download(AbstractCheckPlugin): @asyncio.coroutine def __call__(self, client, dnode): logger.info('Test download speed :", "start_chunk start_chunk = end_chunk if delta <= 0: break else:", "deviation between highest speed and first byte speed \"\"\" if", "+= abs(speed - download_speed) return sum//len(array_speed) def output(self, my_array): \"\"\"Reformat", "vien request # python import requests import sys import time" ]
[ "import views app_name = \"main\" urlpatterns = [ path(\"\",views.homepage,name=\"homepage\") ]", ". import views app_name = \"main\" urlpatterns = [ path(\"\",views.homepage,name=\"homepage\")", "<reponame>AyemunHossain/Django from django.urls import path from . import views app_name", "import path from . import views app_name = \"main\" urlpatterns", "from django.urls import path from . import views app_name =", "from . import views app_name = \"main\" urlpatterns = [", "path from . import views app_name = \"main\" urlpatterns =", "django.urls import path from . import views app_name = \"main\"" ]
[ "data_lwr.add_exclusive_filter(\"Alloy\", '=', 14) x_test = np.array(data_lwr.get_x_data()) y_test = np.array(data_lwr.get_y_data()) x_train", "14) data_lwr.add_exclusive_filter(\"Alloy\", '=', 29) data_lwr.add_exclusive_filter(\"Alloy\", '=', 14) x_test = np.array(data_lwr.get_x_data())", "the parameters of the network dataset (str): Dataset to use", "(str): Dataset to use for training/evaluating \"\"\" if dataset ==", "\"\"\"Retrieve the MNIST dataset and process the data.\"\"\" # Set", "x_train = x_train.astype('float32') x_test = x_test.astype('float32') x_train /= 255 x_test", "points\") nb_classes = -1 batch_size = np.shape(y_train)[0] input_shape = (13,)", "compile_model(network, nb_classes, input_shape): \"\"\"Compile a sequential model. Args: network (dict):", "network['learning_rate'] model = Sequential() # Add each layer. for i", "input shape for first layer. if i == 0: print(nb_neurons)", "loss. Args: network (dict): the parameters of the network dataset", "Sequential from keras.layers import Dense, Dropout from keras.utils.np_utils import to_categorical", "if dataset == 'dbtt': model.fit(x_train, y_train, epochs=10, batch_size=1406, verbose=0) y_predict", "import to_categorical from keras.callbacks import EarlyStopping import data_parser import numpy", "rmsprop from sklearn.metrics import mean_squared_error # Helper: Early stopping. early_stopper", "first layer. if i == 0: print(nb_neurons) model.add(Dense(units=nb_neurons, activation=activation, input_shape=input_shape))", "# Set defaults. nb_classes = 10 batch_size = 128 input_shape", "model.predict(x_test) * 758.92 # todo way to not hardcode this?", "in range(nb_layers): # Need input shape for first layer. if", "on: https://github.com/fchollet/keras/blob/master/examples/mnist_mlp.py \"\"\" from keras.datasets import mnist, cifar10 from keras.models", "nb_classes = -1 batch_size = np.shape(y_train)[0] input_shape = (13,) #", "\"\"\"Train the model, return test loss. 
Args: network (dict): the", "= get_cifar10() elif dataset == 'mnist': nb_classes, batch_size, input_shape, x_train,", "vectors to binary class matrices y_train = to_categorical(y_train, nb_classes) y_test", "x_train, x_test, y_train, y_test) def compile_model(network, nb_classes, input_shape): \"\"\"Compile a", "input_shape, x_train, x_test, y_train, y_test) def compile_model(network, nb_classes, input_shape): \"\"\"Compile", "y_test) = mnist.load_data() x_train = x_train.reshape(60000, 784) x_test = x_test.reshape(10000,", "callbacks=[early_stopper]) score = model.evaluate(x_test, y_test, verbose=0) return score[1] # 1", "parameters. nb_layers = network['nb_layers'] nb_neurons = network['nb_neurons'] activation = network['activation']", "== -1): model.add(Dense(1, activation='linear')) ADAM = Adam(lr=learning_rate) model.compile(loss='mean_squared_error', metrics=['accuracy'], optimizer=ADAM)", "= network['activation'] optimizer = network['optimizer'] learning_rate = network['learning_rate'] model =", "784) x_train = x_train.astype('float32') x_test = x_test.astype('float32') x_train /= 255", "= EarlyStopping(patience=5) def get_cifar10(): \"\"\"Retrieve the CIFAR dataset and process", "input_shape, x_train, x_test, y_train, y_test) def get_mnist(): \"\"\"Retrieve the MNIST", "= data_parser.parse(\"DBTT_Data22.csv\") data_lwr = data_parser.parse(\"CD_LWR_clean8.csv\") X = [\"N_log(eff fl p", "'=', 14) data_lwr.add_exclusive_filter(\"Alloy\", '=', 29) data_lwr.add_exclusive_filter(\"Alloy\", '=', 14) x_test =", "ADAM = Adam(lr=learning_rate) model.compile(loss='mean_squared_error', metrics=['accuracy'], optimizer=ADAM) else: model.add(Dense(nb_classes, activation='softmax')) model.compile(loss='categorical_crossentropy',", "Dense, Dropout from keras.utils.np_utils import to_categorical from keras.callbacks import EarlyStopping", "print(rms) return rms else: model.fit(x_train, y_train, batch_size=batch_size, epochs=10000, # using", "nb_classes, batch_size, 
input_shape, x_train, \\ x_test, y_train, y_test = get_mnist()", "x_test, y_train, y_test = get_cifar10() elif dataset == 'mnist': nb_classes,", "= Adam(lr=learning_rate) model.compile(loss='mean_squared_error', metrics=['accuracy'], optimizer=ADAM) else: model.add(Dense(nb_classes, activation='softmax')) model.compile(loss='categorical_crossentropy', optimizer=optimizer,", "= x_train.reshape(50000, 3072) x_test = x_test.reshape(10000, 3072) x_train = x_train.astype('float32')", "y_test = np.array(data_lwr.get_y_data()) x_train = np.array(data.get_x_data()) y_train = np.array(data.get_y_data()) #print(\"Training", "x_train = x_train.reshape(60000, 784) x_test = x_test.reshape(10000, 784) x_train =", "X = [\"N_log(eff fl p =.05)\", \"N_log(eff fl p =.4)\",", "= (13,) # normalize y columns y_train = y_train/758.92 return", "network['nb_neurons'] activation = network['activation'] optimizer = network['optimizer'] learning_rate = network['learning_rate']", "p =.4)\", \"N_log(eff fl p =.5)\", \"N(Cu)\", \"N(Ni)\", \"N(Mn)\", \"N(P)\",", "x_test.reshape(10000, 3072) x_train = x_train.astype('float32') x_test = x_test.astype('float32') x_train /=", "x_test.astype('float32') x_train /= 255 x_test /= 255 # convert class", "np.array(data_lwr.get_y_data()) x_train = np.array(data.get_x_data()) y_train = np.array(data.get_y_data()) #print(\"Training with\", np.shape(y_train)[0],", "Args: network (dict): the parameters of the network dataset (str):", "data = data_parser.parse(\"DBTT_Data22.csv\") data_lwr = data_parser.parse(\"CD_LWR_clean8.csv\") X = [\"N_log(eff fl", "data.add_exclusive_filter(\"Alloy\", '=', 1) data.add_exclusive_filter(\"Alloy\", '=', 2) data.add_exclusive_filter(\"Alloy\", '=', 14) data_lwr.add_exclusive_filter(\"Alloy\",", "print(nb_neurons) model.add(Dense(units=nb_neurons, activation=activation, input_shape=input_shape)) else: print(nb_neurons) model.add(Dense(nb_neurons, activation=activation)) model.add(Dropout(0.2)) #", "as np from keras.optimizers 
import Adadelta, Adam, rmsprop from sklearn.metrics", "np from keras.optimizers import Adadelta, Adam, rmsprop from sklearn.metrics import", "(nb_classes, batch_size, input_shape, x_train, x_test, y_train, y_test) def compile_model(network, nb_classes,", "train_and_score(network, dataset): \"\"\"Train the model, return test loss. Args: network", "p =.5)\", \"N(Cu)\", \"N(Ni)\", \"N(Mn)\", \"N(P)\", \"N(Si)\", \"N( C )\",", "== 'dbtt': model.fit(x_train, y_train, epochs=10, batch_size=1406, verbose=0) y_predict = model.predict(x_test)", "return model def train_and_score(network, dataset): \"\"\"Train the model, return test", "\"N_log(eff fl p =.4)\", \"N_log(eff fl p =.5)\", \"N(Cu)\", \"N(Ni)\",", "'dbtt': model.fit(x_train, y_train, epochs=10, batch_size=1406, verbose=0) y_predict = model.predict(x_test) *", "y_train, epochs=10, batch_size=1406, verbose=0) y_predict = model.predict(x_test) * 758.92 #", "data.add_exclusive_filter(\"Alloy\", '=', 29) data.add_exclusive_filter(\"Alloy\", '=', 8) data.add_exclusive_filter(\"Alloy\", '=', 1) data.add_exclusive_filter(\"Alloy\",", "keras.callbacks import EarlyStopping import data_parser import numpy as np from", "model.fit(x_train, y_train, batch_size=batch_size, epochs=10000, # using early stopping, so no", "epochs=10, batch_size=1406, verbose=0) y_predict = model.predict(x_test) * 758.92 # todo", "data_lwr.set_y_feature(Y) data_lwr.set_x_features(X) data.add_exclusive_filter(\"Alloy\", '=', 29) data.add_exclusive_filter(\"Alloy\", '=', 8) data.add_exclusive_filter(\"Alloy\", '=',", "x_test, y_train, y_test = get_mnist() elif dataset == 'dbtt': nb_classes,", "network (dict): the parameters of the network dataset (str): Dataset", "the CIFAR dataset and process the data.\"\"\" # Set defaults.", "# Get the data. 
(x_train, y_train), (x_test, y_test) = mnist.load_data()", "x_train.astype('float32') x_test = x_test.astype('float32') x_train /= 255 x_test /= 255", "def train_and_score(network, dataset): \"\"\"Train the model, return test loss. Args:", "(nb_classes, batch_size, input_shape, x_train, x_test, y_train, y_test) def get_dbtt(): data", "= 64 input_shape = (3072,) # Get the data. (x_train,", "def get_cifar10(): \"\"\"Retrieve the CIFAR dataset and process the data.\"\"\"", "optimizer=optimizer, metrics=['accuracy']) return model def train_and_score(network, dataset): \"\"\"Train the model,", "x_train /= 255 x_test /= 255 # convert class vectors", "fl p =.4)\", \"N_log(eff fl p =.5)\", \"N(Cu)\", \"N(Ni)\", \"N(Mn)\",", "10 batch_size = 64 input_shape = (3072,) # Get the", "=.4)\", \"N_log(eff fl p =.5)\", \"N(Cu)\", \"N(Ni)\", \"N(Mn)\", \"N(P)\", \"N(Si)\",", "[\"N_log(eff fl p =.05)\", \"N_log(eff fl p =.4)\", \"N_log(eff fl", "from keras.layers import Dense, Dropout from keras.utils.np_utils import to_categorical from", "(13,) # normalize y columns y_train = y_train/758.92 return (nb_classes,", "= x_test.reshape(10000, 3072) x_train = x_train.astype('float32') x_test = x_test.astype('float32') x_train", "activation=activation, input_shape=input_shape)) else: print(nb_neurons) model.add(Dense(nb_neurons, activation=activation)) model.add(Dropout(0.2)) # hard-coded dropout", "of the network Returns: a compiled network. \"\"\" # Get", "dataset == 'cifar10': nb_classes, batch_size, input_shape, x_train, \\ x_test, y_train,", "# Add each layer. 
for i in range(nb_layers): # Need", "data.add_exclusive_filter(\"Alloy\", '=', 14) data_lwr.add_exclusive_filter(\"Alloy\", '=', 29) data_lwr.add_exclusive_filter(\"Alloy\", '=', 14) x_test", "https://github.com/fchollet/keras/blob/master/examples/mnist_mlp.py \"\"\" from keras.datasets import mnist, cifar10 from keras.models import", "input_shape=input_shape)) else: print(nb_neurons) model.add(Dense(nb_neurons, activation=activation)) model.add(Dropout(0.2)) # hard-coded dropout #", "(3072,) # Get the data. (x_train, y_train), (x_test, y_test) =", "y_train = np.array(data.get_y_data()) #print(\"Training with\", np.shape(y_train)[0], \"data points\") nb_classes =", "way to not hardcode this? rms = np.sqrt(mean_squared_error(y_test, y_predict)) print(rms)", "== 'cifar10': nb_classes, batch_size, input_shape, x_train, \\ x_test, y_train, y_test", "the MNIST dataset and process the data.\"\"\" # Set defaults.", "x_train, \\ x_test, y_train, y_test = get_dbtt() model = compile_model(network,", "= np.array(data.get_y_data()) #print(\"Training with\", np.shape(y_train)[0], \"data points\") nb_classes = -1", "= 10 batch_size = 64 input_shape = (3072,) # Get", "Get the data. (x_train, y_train), (x_test, y_test) = cifar10.load_data() x_train", "\"N_log(eff fl p =.3)\", \"N(Temp)\"] Y = \"CD delta sigma\"", "from keras.utils.np_utils import to_categorical from keras.callbacks import EarlyStopping import data_parser", "keras.utils.np_utils import to_categorical from keras.callbacks import EarlyStopping import data_parser import", "network parameters. 
nb_layers = network['nb_layers'] nb_neurons = network['nb_neurons'] activation =", "x_train.reshape(50000, 3072) x_test = x_test.reshape(10000, 3072) x_train = x_train.astype('float32') x_test", "\"\"\" Utility used by the Network class to actually train.", "= network['nb_layers'] nb_neurons = network['nb_neurons'] activation = network['activation'] optimizer =", "fl p =.3)\", \"N(Temp)\"] Y = \"CD delta sigma\" data.set_x_features(X)", "model.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=['accuracy']) return model def train_and_score(network, dataset): \"\"\"Train the", "Dataset to use for training/evaluating \"\"\" if dataset == 'cifar10':", "= get_dbtt() model = compile_model(network, nb_classes, input_shape) if dataset ==", "=.5)\", \"N(Cu)\", \"N(Ni)\", \"N(Mn)\", \"N(P)\", \"N(Si)\", \"N( C )\", \"N_log(eff", "range(nb_layers): # Need input shape for first layer. if i", "from keras.datasets import mnist, cifar10 from keras.models import Sequential from", "Adam(lr=learning_rate) model.compile(loss='mean_squared_error', metrics=['accuracy'], optimizer=ADAM) else: model.add(Dense(nb_classes, activation='softmax')) model.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=['accuracy'])", "= x_test.reshape(10000, 784) x_train = x_train.astype('float32') x_test = x_test.astype('float32') x_train", "== 0: print(nb_neurons) model.add(Dense(units=nb_neurons, activation=activation, input_shape=input_shape)) else: print(nb_neurons) model.add(Dense(nb_neurons, activation=activation))", "== 'mnist': nb_classes, batch_size, input_shape, x_train, \\ x_test, y_train, y_test", "3072) x_test = x_test.reshape(10000, 3072) x_train = x_train.astype('float32') x_test =", "Helper: Early stopping. early_stopper = EarlyStopping(patience=5) def get_cifar10(): \"\"\"Retrieve the", "network (dict): the parameters of the network Returns: a compiled", "MNIST dataset and process the data.\"\"\" # Set defaults. 
nb_classes", "= model.predict(x_test) * 758.92 # todo way to not hardcode", "Add each layer. for i in range(nb_layers): # Need input", "for first layer. if i == 0: print(nb_neurons) model.add(Dense(units=nb_neurons, activation=activation,", "import EarlyStopping import data_parser import numpy as np from keras.optimizers", "29) data_lwr.add_exclusive_filter(\"Alloy\", '=', 14) x_test = np.array(data_lwr.get_x_data()) y_test = np.array(data_lwr.get_y_data())", "verbose=0) y_predict = model.predict(x_test) * 758.92 # todo way to", "used by the Network class to actually train. Based on:", "Need input shape for first layer. if i == 0:", "np.shape(y_train)[0] input_shape = (13,) # normalize y columns y_train =", "nb_classes) return (nb_classes, batch_size, input_shape, x_train, x_test, y_train, y_test) def", "y_test = get_mnist() elif dataset == 'dbtt': nb_classes, batch_size, input_shape,", "model = compile_model(network, nb_classes, input_shape) if dataset == 'dbtt': model.fit(x_train,", "np.shape(y_train)[0], \"data points\") nb_classes = -1 batch_size = np.shape(y_train)[0] input_shape", "= -1 batch_size = np.shape(y_train)[0] input_shape = (13,) # normalize", "255 # convert class vectors to binary class matrices y_train", "y_train, y_test) def get_dbtt(): data = data_parser.parse(\"DBTT_Data22.csv\") data_lwr = data_parser.parse(\"CD_LWR_clean8.csv\")", "3072) x_train = x_train.astype('float32') x_test = x_test.astype('float32') x_train /= 255", "= to_categorical(y_test, nb_classes) return (nb_classes, batch_size, input_shape, x_train, x_test, y_train,", "x_test = x_test.astype('float32') x_train /= 255 x_test /= 255 #", "data. (x_train, y_train), (x_test, y_test) = mnist.load_data() x_train = x_train.reshape(60000,", "\"N_log(eff fl p =.5)\", \"N(Cu)\", \"N(Ni)\", \"N(Mn)\", \"N(P)\", \"N(Si)\", \"N(", "class to actually train. 
Based on: https://github.com/fchollet/keras/blob/master/examples/mnist_mlp.py \"\"\" from keras.datasets", "p =.2)\", \"N_log(eff fl p =.3)\", \"N(Temp)\"] Y = \"CD", "#print(\"Training with\", np.shape(y_train)[0], \"data points\") nb_classes = -1 batch_size =", "= np.array(data.get_x_data()) y_train = np.array(data.get_y_data()) #print(\"Training with\", np.shape(y_train)[0], \"data points\")", "Utility used by the Network class to actually train. Based", "x_test = x_test.reshape(10000, 3072) x_train = x_train.astype('float32') x_test = x_test.astype('float32')", "optimizer=ADAM) else: model.add(Dense(nb_classes, activation='softmax')) model.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=['accuracy']) return model def", "of the network dataset (str): Dataset to use for training/evaluating", "compiled network. \"\"\" # Get our network parameters. nb_layers =", "data.add_exclusive_filter(\"Alloy\", '=', 2) data.add_exclusive_filter(\"Alloy\", '=', 14) data_lwr.add_exclusive_filter(\"Alloy\", '=', 29) data_lwr.add_exclusive_filter(\"Alloy\",", "y_test = to_categorical(y_test, nb_classes) return (nb_classes, batch_size, input_shape, x_train, x_test,", "y_train = y_train/758.92 return (nb_classes, batch_size, input_shape, x_train, x_test, y_train,", "input_shape, x_train, \\ x_test, y_train, y_test = get_mnist() elif dataset", "y_predict)) print(rms) return rms else: model.fit(x_train, y_train, batch_size=batch_size, epochs=10000, #", "shape for first layer. if i == 0: print(nb_neurons) model.add(Dense(units=nb_neurons,", "=.1)\", \"N_log(eff fl p =.2)\", \"N_log(eff fl p =.3)\", \"N(Temp)\"]", "input_shape = (3072,) # Get the data. 
(x_train, y_train), (x_test,", "using early stopping, so no real limit verbose=0, validation_data=(x_test, y_test),", "= np.shape(y_train)[0] input_shape = (13,) # normalize y columns y_train", "C )\", \"N_log(eff fl p =.1)\", \"N_log(eff fl p =.2)\",", "29) data.add_exclusive_filter(\"Alloy\", '=', 8) data.add_exclusive_filter(\"Alloy\", '=', 1) data.add_exclusive_filter(\"Alloy\", '=', 2)", "model, return test loss. Args: network (dict): the parameters of", "EarlyStopping(patience=5) def get_cifar10(): \"\"\"Retrieve the CIFAR dataset and process the", "else: model.fit(x_train, y_train, batch_size=batch_size, epochs=10000, # using early stopping, so", "data_lwr.add_exclusive_filter(\"Alloy\", '=', 29) data_lwr.add_exclusive_filter(\"Alloy\", '=', 14) x_test = np.array(data_lwr.get_x_data()) y_test", "input_shape = (784,) # Get the data. (x_train, y_train), (x_test,", "y_test), callbacks=[early_stopper]) score = model.evaluate(x_test, y_test, verbose=0) return score[1] #", "i == 0: print(nb_neurons) model.add(Dense(units=nb_neurons, activation=activation, input_shape=input_shape)) else: print(nb_neurons) model.add(Dense(nb_neurons,", "= (3072,) # Get the data. (x_train, y_train), (x_test, y_test)", "p =.05)\", \"N_log(eff fl p =.4)\", \"N_log(eff fl p =.5)\",", "= np.array(data_lwr.get_x_data()) y_test = np.array(data_lwr.get_y_data()) x_train = np.array(data.get_x_data()) y_train =", "network dataset (str): Dataset to use for training/evaluating \"\"\" if", "our network parameters. nb_layers = network['nb_layers'] nb_neurons = network['nb_neurons'] activation", "if dataset == 'cifar10': nb_classes, batch_size, input_shape, x_train, \\ x_test,", "get_dbtt(): data = data_parser.parse(\"DBTT_Data22.csv\") data_lwr = data_parser.parse(\"CD_LWR_clean8.csv\") X = [\"N_log(eff", "\"N(Si)\", \"N( C )\", \"N_log(eff fl p =.1)\", \"N_log(eff fl", "64 input_shape = (3072,) # Get the data. 
(x_train, y_train),", "x_test = np.array(data_lwr.get_x_data()) y_test = np.array(data_lwr.get_y_data()) x_train = np.array(data.get_x_data()) y_train", "input_shape = (13,) # normalize y columns y_train = y_train/758.92", "train. Based on: https://github.com/fchollet/keras/blob/master/examples/mnist_mlp.py \"\"\" from keras.datasets import mnist, cifar10", "x_train, \\ x_test, y_train, y_test = get_mnist() elif dataset ==", "keras.layers import Dense, Dropout from keras.utils.np_utils import to_categorical from keras.callbacks", "= x_train.astype('float32') x_test = x_test.astype('float32') x_train /= 255 x_test /=", "x_test /= 255 # convert class vectors to binary class", "y_train, y_test) def compile_model(network, nb_classes, input_shape): \"\"\"Compile a sequential model.", "layer. for i in range(nb_layers): # Need input shape for", "and process the data.\"\"\" # Set defaults. nb_classes = 10", "# Get the data. (x_train, y_train), (x_test, y_test) = cifar10.load_data()", "else: model.add(Dense(nb_classes, activation='softmax')) model.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=['accuracy']) return model def train_and_score(network,", ")\", \"N_log(eff fl p =.1)\", \"N_log(eff fl p =.2)\", \"N_log(eff", "\"N(Mn)\", \"N(P)\", \"N(Si)\", \"N( C )\", \"N_log(eff fl p =.1)\",", "y_test) def get_mnist(): \"\"\"Retrieve the MNIST dataset and process the", "input_shape, x_train, \\ x_test, y_train, y_test = get_dbtt() model =", "batch_size=1406, verbose=0) y_predict = model.predict(x_test) * 758.92 # todo way", "network Returns: a compiled network. 
\"\"\" # Get our network", "= np.array(data_lwr.get_y_data()) x_train = np.array(data.get_x_data()) y_train = np.array(data.get_y_data()) #print(\"Training with\",", "network['nb_layers'] nb_neurons = network['nb_neurons'] activation = network['activation'] optimizer = network['optimizer']", "# normalize y columns y_train = y_train/758.92 return (nb_classes, batch_size,", "numpy as np from keras.optimizers import Adadelta, Adam, rmsprop from", "layer. if(nb_classes == -1): model.add(Dense(1, activation='linear')) ADAM = Adam(lr=learning_rate) model.compile(loss='mean_squared_error',", "print(nb_neurons) model.add(Dense(nb_neurons, activation=activation)) model.add(Dropout(0.2)) # hard-coded dropout # Output layer.", "nb_classes) y_test = to_categorical(y_test, nb_classes) return (nb_classes, batch_size, input_shape, x_train,", "\"N_log(eff fl p =.1)\", \"N_log(eff fl p =.2)\", \"N_log(eff fl", "batch_size = 128 input_shape = (784,) # Get the data.", "if(nb_classes == -1): model.add(Dense(1, activation='linear')) ADAM = Adam(lr=learning_rate) model.compile(loss='mean_squared_error', metrics=['accuracy'],", "# Get our network parameters. 
nb_layers = network['nb_layers'] nb_neurons =", "from keras.models import Sequential from keras.layers import Dense, Dropout from", "\"\"\"Retrieve the CIFAR dataset and process the data.\"\"\" # Set", "else: print(nb_neurons) model.add(Dense(nb_neurons, activation=activation)) model.add(Dropout(0.2)) # hard-coded dropout # Output", "batch_size=batch_size, epochs=10000, # using early stopping, so no real limit", "so no real limit verbose=0, validation_data=(x_test, y_test), callbacks=[early_stopper]) score =", "import data_parser import numpy as np from keras.optimizers import Adadelta,", "use for training/evaluating \"\"\" if dataset == 'cifar10': nb_classes, batch_size,", "(x_train, y_train), (x_test, y_test) = cifar10.load_data() x_train = x_train.reshape(50000, 3072)", "\"\"\" from keras.datasets import mnist, cifar10 from keras.models import Sequential", "import numpy as np from keras.optimizers import Adadelta, Adam, rmsprop", "data_parser.parse(\"CD_LWR_clean8.csv\") X = [\"N_log(eff fl p =.05)\", \"N_log(eff fl p", "nb_classes = 10 batch_size = 128 input_shape = (784,) #", "model.add(Dense(1, activation='linear')) ADAM = Adam(lr=learning_rate) model.compile(loss='mean_squared_error', metrics=['accuracy'], optimizer=ADAM) else: model.add(Dense(nb_classes,", "x_train, \\ x_test, y_train, y_test = get_cifar10() elif dataset ==", "= network['nb_neurons'] activation = network['activation'] optimizer = network['optimizer'] learning_rate =", "y_train, y_test = get_cifar10() elif dataset == 'mnist': nb_classes, batch_size,", "dataset == 'mnist': nb_classes, batch_size, input_shape, x_train, \\ x_test, y_train,", "return (nb_classes, batch_size, input_shape, x_train, x_test, y_train, y_test) def compile_model(network,", "compile_model(network, nb_classes, input_shape) if dataset == 'dbtt': model.fit(x_train, y_train, epochs=10,", "cifar10.load_data() x_train = x_train.reshape(50000, 3072) x_test = x_test.reshape(10000, 3072) x_train", "to use for training/evaluating 
\"\"\" if dataset == 'cifar10': nb_classes,", "the data.\"\"\" # Set defaults. nb_classes = 10 batch_size =", "= y_train/758.92 return (nb_classes, batch_size, input_shape, x_train, x_test, y_train, y_test)", "model = Sequential() # Add each layer. for i in", "batch_size, input_shape, x_train, x_test, y_train, y_test) def compile_model(network, nb_classes, input_shape):", "dataset (str): Dataset to use for training/evaluating \"\"\" if dataset", "batch_size, input_shape, x_train, x_test, y_train, y_test) def get_mnist(): \"\"\"Retrieve the", "model def train_and_score(network, dataset): \"\"\"Train the model, return test loss.", "Adam, rmsprop from sklearn.metrics import mean_squared_error # Helper: Early stopping.", "\"CD delta sigma\" data.set_x_features(X) data.set_y_feature(Y) data_lwr.set_y_feature(Y) data_lwr.set_x_features(X) data.add_exclusive_filter(\"Alloy\", '=', 29)", "p =.1)\", \"N_log(eff fl p =.2)\", \"N_log(eff fl p =.3)\",", "# using early stopping, so no real limit verbose=0, validation_data=(x_test,", "import Sequential from keras.layers import Dense, Dropout from keras.utils.np_utils import", "y_train, batch_size=batch_size, epochs=10000, # using early stopping, so no real", "= \"CD delta sigma\" data.set_x_features(X) data.set_y_feature(Y) data_lwr.set_y_feature(Y) data_lwr.set_x_features(X) data.add_exclusive_filter(\"Alloy\", '=',", "\"\"\" # Get our network parameters. nb_layers = network['nb_layers'] nb_neurons", "fl p =.1)\", \"N_log(eff fl p =.2)\", \"N_log(eff fl p", "model.add(Dense(nb_neurons, activation=activation)) model.add(Dropout(0.2)) # hard-coded dropout # Output layer. if(nb_classes", "todo way to not hardcode this? rms = np.sqrt(mean_squared_error(y_test, y_predict))", "data.\"\"\" # Set defaults. nb_classes = 10 batch_size = 64", "stopping. 
early_stopper = EarlyStopping(patience=5) def get_cifar10(): \"\"\"Retrieve the CIFAR dataset", "batch_size = np.shape(y_train)[0] input_shape = (13,) # normalize y columns", "convert class vectors to binary class matrices y_train = to_categorical(y_train,", "14) x_test = np.array(data_lwr.get_x_data()) y_test = np.array(data_lwr.get_y_data()) x_train = np.array(data.get_x_data())", "'=', 8) data.add_exclusive_filter(\"Alloy\", '=', 1) data.add_exclusive_filter(\"Alloy\", '=', 2) data.add_exclusive_filter(\"Alloy\", '=',", "dataset == 'dbtt': nb_classes, batch_size, input_shape, x_train, \\ x_test, y_train,", "y_test = get_dbtt() model = compile_model(network, nb_classes, input_shape) if dataset", "get_mnist() elif dataset == 'dbtt': nb_classes, batch_size, input_shape, x_train, \\", "data.set_x_features(X) data.set_y_feature(Y) data_lwr.set_y_feature(Y) data_lwr.set_x_features(X) data.add_exclusive_filter(\"Alloy\", '=', 29) data.add_exclusive_filter(\"Alloy\", '=', 8)", "# convert class vectors to binary class matrices y_train =", "early stopping, so no real limit verbose=0, validation_data=(x_test, y_test), callbacks=[early_stopper])", "= mnist.load_data() x_train = x_train.reshape(60000, 784) x_test = x_test.reshape(10000, 784)", "= network['learning_rate'] model = Sequential() # Add each layer. for", "= np.sqrt(mean_squared_error(y_test, y_predict)) print(rms) return rms else: model.fit(x_train, y_train, batch_size=batch_size,", "score = model.evaluate(x_test, y_test, verbose=0) return score[1] # 1 is", "data.add_exclusive_filter(\"Alloy\", '=', 8) data.add_exclusive_filter(\"Alloy\", '=', 1) data.add_exclusive_filter(\"Alloy\", '=', 2) data.add_exclusive_filter(\"Alloy\",", "defaults. 
nb_classes = 10 batch_size = 64 input_shape = (3072,)", "y_test = get_cifar10() elif dataset == 'mnist': nb_classes, batch_size, input_shape,", "mnist, cifar10 from keras.models import Sequential from keras.layers import Dense,", "= network['optimizer'] learning_rate = network['learning_rate'] model = Sequential() # Add", "dropout # Output layer. if(nb_classes == -1): model.add(Dense(1, activation='linear')) ADAM", "layer. if i == 0: print(nb_neurons) model.add(Dense(units=nb_neurons, activation=activation, input_shape=input_shape)) else:", "batch_size, input_shape, x_train, x_test, y_train, y_test) def get_dbtt(): data =", "cifar10 from keras.models import Sequential from keras.layers import Dense, Dropout", "each layer. for i in range(nb_layers): # Need input shape", "sequential model. Args: network (dict): the parameters of the network", "'mnist': nb_classes, batch_size, input_shape, x_train, \\ x_test, y_train, y_test =", "8) data.add_exclusive_filter(\"Alloy\", '=', 1) data.add_exclusive_filter(\"Alloy\", '=', 2) data.add_exclusive_filter(\"Alloy\", '=', 14)", "model.fit(x_train, y_train, epochs=10, batch_size=1406, verbose=0) y_predict = model.predict(x_test) * 758.92", "\"N(Temp)\"] Y = \"CD delta sigma\" data.set_x_features(X) data.set_y_feature(Y) data_lwr.set_y_feature(Y) data_lwr.set_x_features(X)", "mnist.load_data() x_train = x_train.reshape(60000, 784) x_test = x_test.reshape(10000, 784) x_train", "rms else: model.fit(x_train, y_train, batch_size=batch_size, epochs=10000, # using early stopping,", "\"N( C )\", \"N_log(eff fl p =.1)\", \"N_log(eff fl p", "y columns y_train = y_train/758.92 return (nb_classes, batch_size, input_shape, x_train,", "x_train, x_test, y_train, y_test) def get_mnist(): \"\"\"Retrieve the MNIST dataset", "= x_test.astype('float32') x_train /= 255 x_test /= 255 # convert", "(x_test, y_test) = cifar10.load_data() x_train = x_train.reshape(50000, 3072) x_test =", "data_parser import numpy as np from keras.optimizers import 
Adadelta, Adam,", "by the Network class to actually train. Based on: https://github.com/fchollet/keras/blob/master/examples/mnist_mlp.py", "dataset and process the data.\"\"\" # Set defaults. nb_classes =", "input_shape, x_train, \\ x_test, y_train, y_test = get_cifar10() elif dataset", "network['optimizer'] learning_rate = network['learning_rate'] model = Sequential() # Add each", "the network dataset (str): Dataset to use for training/evaluating \"\"\"", "y_train, y_test = get_dbtt() model = compile_model(network, nb_classes, input_shape) if", "the model, return test loss. Args: network (dict): the parameters", "# hard-coded dropout # Output layer. if(nb_classes == -1): model.add(Dense(1,", "y_train), (x_test, y_test) = mnist.load_data() x_train = x_train.reshape(60000, 784) x_test", "keras.datasets import mnist, cifar10 from keras.models import Sequential from keras.layers", "for training/evaluating \"\"\" if dataset == 'cifar10': nb_classes, batch_size, input_shape,", "data_lwr.set_x_features(X) data.add_exclusive_filter(\"Alloy\", '=', 29) data.add_exclusive_filter(\"Alloy\", '=', 8) data.add_exclusive_filter(\"Alloy\", '=', 1)", "y_predict = model.predict(x_test) * 758.92 # todo way to not", "y_train, y_test) def get_mnist(): \"\"\"Retrieve the MNIST dataset and process", "batch_size, input_shape, x_train, \\ x_test, y_train, y_test = get_mnist() elif", "no real limit verbose=0, validation_data=(x_test, y_test), callbacks=[early_stopper]) score = model.evaluate(x_test,", "optimizer = network['optimizer'] learning_rate = network['learning_rate'] model = Sequential() #", "the data. 
(x_train, y_train), (x_test, y_test) = cifar10.load_data() x_train =", "matrices y_train = to_categorical(y_train, nb_classes) y_test = to_categorical(y_test, nb_classes) return", "x_test, y_train, y_test) def get_dbtt(): data = data_parser.parse(\"DBTT_Data22.csv\") data_lwr =", "=.05)\", \"N_log(eff fl p =.4)\", \"N_log(eff fl p =.5)\", \"N(Cu)\",", "batch_size, input_shape, x_train, \\ x_test, y_train, y_test = get_cifar10() elif", "real limit verbose=0, validation_data=(x_test, y_test), callbacks=[early_stopper]) score = model.evaluate(x_test, y_test,", "the data. (x_train, y_train), (x_test, y_test) = mnist.load_data() x_train =", "keras.optimizers import Adadelta, Adam, rmsprop from sklearn.metrics import mean_squared_error #", "to_categorical(y_test, nb_classes) return (nb_classes, batch_size, input_shape, x_train, x_test, y_train, y_test)", "x_test, y_train, y_test) def get_mnist(): \"\"\"Retrieve the MNIST dataset and", "=.3)\", \"N(Temp)\"] Y = \"CD delta sigma\" data.set_x_features(X) data.set_y_feature(Y) data_lwr.set_y_feature(Y)", "limit verbose=0, validation_data=(x_test, y_test), callbacks=[early_stopper]) score = model.evaluate(x_test, y_test, verbose=0)", "'=', 14) x_test = np.array(data_lwr.get_x_data()) y_test = np.array(data_lwr.get_y_data()) x_train =", "nb_classes = 10 batch_size = 64 input_shape = (3072,) #", "= (784,) # Get the data. 
(x_train, y_train), (x_test, y_test)", "10 batch_size = 128 input_shape = (784,) # Get the", "x_test.reshape(10000, 784) x_train = x_train.astype('float32') x_test = x_test.astype('float32') x_train /=", "fl p =.05)\", \"N_log(eff fl p =.4)\", \"N_log(eff fl p", "y_train), (x_test, y_test) = cifar10.load_data() x_train = x_train.reshape(50000, 3072) x_test", "x_train = np.array(data.get_x_data()) y_train = np.array(data.get_y_data()) #print(\"Training with\", np.shape(y_train)[0], \"data", "binary class matrices y_train = to_categorical(y_train, nb_classes) y_test = to_categorical(y_test,", "metrics=['accuracy']) return model def train_and_score(network, dataset): \"\"\"Train the model, return", "= compile_model(network, nb_classes, input_shape) if dataset == 'dbtt': model.fit(x_train, y_train,", "-1): model.add(Dense(1, activation='linear')) ADAM = Adam(lr=learning_rate) model.compile(loss='mean_squared_error', metrics=['accuracy'], optimizer=ADAM) else:", "\\ x_test, y_train, y_test = get_mnist() elif dataset == 'dbtt':", "not hardcode this? rms = np.sqrt(mean_squared_error(y_test, y_predict)) print(rms) return rms", "the network Returns: a compiled network. \"\"\" # Get our", "sklearn.metrics import mean_squared_error # Helper: Early stopping. early_stopper = EarlyStopping(patience=5)", "'=', 1) data.add_exclusive_filter(\"Alloy\", '=', 2) data.add_exclusive_filter(\"Alloy\", '=', 14) data_lwr.add_exclusive_filter(\"Alloy\", '=',", "* 758.92 # todo way to not hardcode this? 
rms", "nb_layers = network['nb_layers'] nb_neurons = network['nb_neurons'] activation = network['activation'] optimizer", "dataset == 'dbtt': model.fit(x_train, y_train, epochs=10, batch_size=1406, verbose=0) y_predict =", "import Adadelta, Adam, rmsprop from sklearn.metrics import mean_squared_error # Helper:", "class vectors to binary class matrices y_train = to_categorical(y_train, nb_classes)", "validation_data=(x_test, y_test), callbacks=[early_stopper]) score = model.evaluate(x_test, y_test, verbose=0) return score[1]", "nb_classes, batch_size, input_shape, x_train, \\ x_test, y_train, y_test = get_cifar10()", "1) data.add_exclusive_filter(\"Alloy\", '=', 2) data.add_exclusive_filter(\"Alloy\", '=', 14) data_lwr.add_exclusive_filter(\"Alloy\", '=', 29)", "\"\"\"Compile a sequential model. Args: network (dict): the parameters of", "y_train = to_categorical(y_train, nb_classes) y_test = to_categorical(y_test, nb_classes) return (nb_classes,", "return (nb_classes, batch_size, input_shape, x_train, x_test, y_train, y_test) def get_mnist():", "sigma\" data.set_x_features(X) data.set_y_feature(Y) data_lwr.set_y_feature(Y) data_lwr.set_x_features(X) data.add_exclusive_filter(\"Alloy\", '=', 29) data.add_exclusive_filter(\"Alloy\", '=',", "= [\"N_log(eff fl p =.05)\", \"N_log(eff fl p =.4)\", \"N_log(eff", "x_test, y_train, y_test) def compile_model(network, nb_classes, input_shape): \"\"\"Compile a sequential", "# Helper: Early stopping. early_stopper = EarlyStopping(patience=5) def get_cifar10(): \"\"\"Retrieve", "\\ x_test, y_train, y_test = get_dbtt() model = compile_model(network, nb_classes,", "to not hardcode this? 
rms = np.sqrt(mean_squared_error(y_test, y_predict)) print(rms) return", "(x_train, y_train), (x_test, y_test) = mnist.load_data() x_train = x_train.reshape(60000, 784)", "model.add(Dense(units=nb_neurons, activation=activation, input_shape=input_shape)) else: print(nb_neurons) model.add(Dense(nb_neurons, activation=activation)) model.add(Dropout(0.2)) # hard-coded", "784) x_test = x_test.reshape(10000, 784) x_train = x_train.astype('float32') x_test =", "Sequential() # Add each layer. for i in range(nb_layers): #", "y_test) = cifar10.load_data() x_train = x_train.reshape(50000, 3072) x_test = x_test.reshape(10000,", "x_train.reshape(60000, 784) x_test = x_test.reshape(10000, 784) x_train = x_train.astype('float32') x_test", "p =.3)\", \"N(Temp)\"] Y = \"CD delta sigma\" data.set_x_features(X) data.set_y_feature(Y)", "0: print(nb_neurons) model.add(Dense(units=nb_neurons, activation=activation, input_shape=input_shape)) else: print(nb_neurons) model.add(Dense(nb_neurons, activation=activation)) model.add(Dropout(0.2))", "columns y_train = y_train/758.92 return (nb_classes, batch_size, input_shape, x_train, x_test,", "hard-coded dropout # Output layer. if(nb_classes == -1): model.add(Dense(1, activation='linear'))", "with\", np.shape(y_train)[0], \"data points\") nb_classes = -1 batch_size = np.shape(y_train)[0]", "= x_train.reshape(60000, 784) x_test = x_test.reshape(10000, 784) x_train = x_train.astype('float32')", "= 128 input_shape = (784,) # Get the data. (x_train,", "batch_size, input_shape, x_train, \\ x_test, y_train, y_test = get_dbtt() model", "\"N_log(eff fl p =.2)\", \"N_log(eff fl p =.3)\", \"N(Temp)\"] Y", "get_cifar10(): \"\"\"Retrieve the CIFAR dataset and process the data.\"\"\" #", "/= 255 # convert class vectors to binary class matrices", "\"N(P)\", \"N(Si)\", \"N( C )\", \"N_log(eff fl p =.1)\", \"N_log(eff", "758.92 # todo way to not hardcode this? 
rms =", "class matrices y_train = to_categorical(y_train, nb_classes) y_test = to_categorical(y_test, nb_classes)", "'dbtt': nb_classes, batch_size, input_shape, x_train, \\ x_test, y_train, y_test =", "Set defaults. nb_classes = 10 batch_size = 128 input_shape =", "data. (x_train, y_train), (x_test, y_test) = cifar10.load_data() x_train = x_train.reshape(50000,", "Early stopping. early_stopper = EarlyStopping(patience=5) def get_cifar10(): \"\"\"Retrieve the CIFAR", "255 x_test /= 255 # convert class vectors to binary", "Get our network parameters. nb_layers = network['nb_layers'] nb_neurons = network['nb_neurons']", "for i in range(nb_layers): # Need input shape for first", "'=', 29) data.add_exclusive_filter(\"Alloy\", '=', 8) data.add_exclusive_filter(\"Alloy\", '=', 1) data.add_exclusive_filter(\"Alloy\", '=',", "to_categorical from keras.callbacks import EarlyStopping import data_parser import numpy as", "activation='linear')) ADAM = Adam(lr=learning_rate) model.compile(loss='mean_squared_error', metrics=['accuracy'], optimizer=ADAM) else: model.add(Dense(nb_classes, activation='softmax'))", "\\ x_test, y_train, y_test = get_cifar10() elif dataset == 'mnist':", "network['activation'] optimizer = network['optimizer'] learning_rate = network['learning_rate'] model = Sequential()", "= to_categorical(y_train, nb_classes) y_test = to_categorical(y_test, nb_classes) return (nb_classes, batch_size,", "# Need input shape for first layer. if i ==", "(nb_classes, batch_size, input_shape, x_train, x_test, y_train, y_test) def get_mnist(): \"\"\"Retrieve", "(784,) # Get the data. (x_train, y_train), (x_test, y_test) =", "nb_classes, input_shape) if dataset == 'dbtt': model.fit(x_train, y_train, epochs=10, batch_size=1406,", "epochs=10000, # using early stopping, so no real limit verbose=0,", "keras.models import Sequential from keras.layers import Dense, Dropout from keras.utils.np_utils", "from sklearn.metrics import mean_squared_error # Helper: Early stopping. 
early_stopper =", "(dict): the parameters of the network Returns: a compiled network.", "y_test) def compile_model(network, nb_classes, input_shape): \"\"\"Compile a sequential model. Args:", "Based on: https://github.com/fchollet/keras/blob/master/examples/mnist_mlp.py \"\"\" from keras.datasets import mnist, cifar10 from", "def compile_model(network, nb_classes, input_shape): \"\"\"Compile a sequential model. Args: network", "input_shape) if dataset == 'dbtt': model.fit(x_train, y_train, epochs=10, batch_size=1406, verbose=0)", "y_test) def get_dbtt(): data = data_parser.parse(\"DBTT_Data22.csv\") data_lwr = data_parser.parse(\"CD_LWR_clean8.csv\") X", "defaults. nb_classes = 10 batch_size = 128 input_shape = (784,)", "metrics=['accuracy'], optimizer=ADAM) else: model.add(Dense(nb_classes, activation='softmax')) model.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=['accuracy']) return model", "model.add(Dense(nb_classes, activation='softmax')) model.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=['accuracy']) return model def train_and_score(network, dataset):", "elif dataset == 'dbtt': nb_classes, batch_size, input_shape, x_train, \\ x_test,", "= Sequential() # Add each layer. for i in range(nb_layers):", "to binary class matrices y_train = to_categorical(y_train, nb_classes) y_test =", "activation = network['activation'] optimizer = network['optimizer'] learning_rate = network['learning_rate'] model", "activation='softmax')) model.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=['accuracy']) return model def train_and_score(network, dataset): \"\"\"Train", "data_lwr = data_parser.parse(\"CD_LWR_clean8.csv\") X = [\"N_log(eff fl p =.05)\", \"N_log(eff", "== 'dbtt': nb_classes, batch_size, input_shape, x_train, \\ x_test, y_train, y_test", "process the data.\"\"\" # Set defaults. 
nb_classes = 10 batch_size", "\"\"\" if dataset == 'cifar10': nb_classes, batch_size, input_shape, x_train, \\", "training/evaluating \"\"\" if dataset == 'cifar10': nb_classes, batch_size, input_shape, x_train,", "'=', 2) data.add_exclusive_filter(\"Alloy\", '=', 14) data_lwr.add_exclusive_filter(\"Alloy\", '=', 29) data_lwr.add_exclusive_filter(\"Alloy\", '=',", "=.2)\", \"N_log(eff fl p =.3)\", \"N(Temp)\"] Y = \"CD delta", "dataset): \"\"\"Train the model, return test loss. Args: network (dict):", "this? rms = np.sqrt(mean_squared_error(y_test, y_predict)) print(rms) return rms else: model.fit(x_train,", "rms = np.sqrt(mean_squared_error(y_test, y_predict)) print(rms) return rms else: model.fit(x_train, y_train,", "'=', 29) data_lwr.add_exclusive_filter(\"Alloy\", '=', 14) x_test = np.array(data_lwr.get_x_data()) y_test =", "get_cifar10() elif dataset == 'mnist': nb_classes, batch_size, input_shape, x_train, \\", "'cifar10': nb_classes, batch_size, input_shape, x_train, \\ x_test, y_train, y_test =", "activation=activation)) model.add(Dropout(0.2)) # hard-coded dropout # Output layer. if(nb_classes ==", "if i == 0: print(nb_neurons) model.add(Dense(units=nb_neurons, activation=activation, input_shape=input_shape)) else: print(nb_neurons)", "data.set_y_feature(Y) data_lwr.set_y_feature(Y) data_lwr.set_x_features(X) data.add_exclusive_filter(\"Alloy\", '=', 29) data.add_exclusive_filter(\"Alloy\", '=', 8) data.add_exclusive_filter(\"Alloy\",", "parameters of the network Returns: a compiled network. \"\"\" #", "x_test = x_test.reshape(10000, 784) x_train = x_train.astype('float32') x_test = x_test.astype('float32')", "\"N(Cu)\", \"N(Ni)\", \"N(Mn)\", \"N(P)\", \"N(Si)\", \"N( C )\", \"N_log(eff fl", "Dropout from keras.utils.np_utils import to_categorical from keras.callbacks import EarlyStopping import", "CIFAR dataset and process the data.\"\"\" # Set defaults. 
nb_classes", "return rms else: model.fit(x_train, y_train, batch_size=batch_size, epochs=10000, # using early", "EarlyStopping import data_parser import numpy as np from keras.optimizers import", "fl p =.2)\", \"N_log(eff fl p =.3)\", \"N(Temp)\"] Y =", "(dict): the parameters of the network dataset (str): Dataset to", "hardcode this? rms = np.sqrt(mean_squared_error(y_test, y_predict)) print(rms) return rms else:", "x_test, y_train, y_test = get_dbtt() model = compile_model(network, nb_classes, input_shape)", "= get_mnist() elif dataset == 'dbtt': nb_classes, batch_size, input_shape, x_train,", "data_parser.parse(\"DBTT_Data22.csv\") data_lwr = data_parser.parse(\"CD_LWR_clean8.csv\") X = [\"N_log(eff fl p =.05)\",", "a compiled network. \"\"\" # Get our network parameters. nb_layers", "= 10 batch_size = 128 input_shape = (784,) # Get", "the parameters of the network Returns: a compiled network. \"\"\"", "model.add(Dropout(0.2)) # hard-coded dropout # Output layer. if(nb_classes == -1):", "# todo way to not hardcode this? rms = np.sqrt(mean_squared_error(y_test,", "return (nb_classes, batch_size, input_shape, x_train, x_test, y_train, y_test) def get_dbtt():", "batch_size = 64 input_shape = (3072,) # Get the data.", "parameters of the network dataset (str): Dataset to use for", "delta sigma\" data.set_x_features(X) data.set_y_feature(Y) data_lwr.set_y_feature(Y) data_lwr.set_x_features(X) data.add_exclusive_filter(\"Alloy\", '=', 29) data.add_exclusive_filter(\"Alloy\",", "a sequential model. Args: network (dict): the parameters of the", "128 input_shape = (784,) # Get the data. 
(x_train, y_train),", "x_train, x_test, y_train, y_test) def get_dbtt(): data = data_parser.parse(\"DBTT_Data22.csv\") data_lwr", "2) data.add_exclusive_filter(\"Alloy\", '=', 14) data_lwr.add_exclusive_filter(\"Alloy\", '=', 29) data_lwr.add_exclusive_filter(\"Alloy\", '=', 14)", "verbose=0, validation_data=(x_test, y_test), callbacks=[early_stopper]) score = model.evaluate(x_test, y_test, verbose=0) return", "nb_classes, input_shape): \"\"\"Compile a sequential model. Args: network (dict): the", "def get_mnist(): \"\"\"Retrieve the MNIST dataset and process the data.\"\"\"", "mean_squared_error # Helper: Early stopping. early_stopper = EarlyStopping(patience=5) def get_cifar10():", "= data_parser.parse(\"CD_LWR_clean8.csv\") X = [\"N_log(eff fl p =.05)\", \"N_log(eff fl", "actually train. Based on: https://github.com/fchollet/keras/blob/master/examples/mnist_mlp.py \"\"\" from keras.datasets import mnist,", "from keras.callbacks import EarlyStopping import data_parser import numpy as np", "Returns: a compiled network. \"\"\" # Get our network parameters.", "Network class to actually train. 
Based on: https://github.com/fchollet/keras/blob/master/examples/mnist_mlp.py \"\"\" from", "Args: network (dict): the parameters of the network Returns: a", "model.compile(loss='mean_squared_error', metrics=['accuracy'], optimizer=ADAM) else: model.add(Dense(nb_classes, activation='softmax')) model.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=['accuracy']) return", "y_train, y_test = get_mnist() elif dataset == 'dbtt': nb_classes, batch_size,", "Adadelta, Adam, rmsprop from sklearn.metrics import mean_squared_error # Helper: Early", "to_categorical(y_train, nb_classes) y_test = to_categorical(y_test, nb_classes) return (nb_classes, batch_size, input_shape,", "get_dbtt() model = compile_model(network, nb_classes, input_shape) if dataset == 'dbtt':", "stopping, so no real limit verbose=0, validation_data=(x_test, y_test), callbacks=[early_stopper]) score", "np.array(data.get_y_data()) #print(\"Training with\", np.shape(y_train)[0], \"data points\") nb_classes = -1 batch_size", "model.evaluate(x_test, y_test, verbose=0) return score[1] # 1 is accuracy. 0", "normalize y columns y_train = y_train/758.92 return (nb_classes, batch_size, input_shape,", "input_shape): \"\"\"Compile a sequential model. Args: network (dict): the parameters", "<gh_stars>1-10 \"\"\" Utility used by the Network class to actually", "to actually train. Based on: https://github.com/fchollet/keras/blob/master/examples/mnist_mlp.py \"\"\" from keras.datasets import", "np.array(data.get_x_data()) y_train = np.array(data.get_y_data()) #print(\"Training with\", np.shape(y_train)[0], \"data points\") nb_classes", "import mnist, cifar10 from keras.models import Sequential from keras.layers import", "early_stopper = EarlyStopping(patience=5) def get_cifar10(): \"\"\"Retrieve the CIFAR dataset and", "i in range(nb_layers): # Need input shape for first layer.", "model. Args: network (dict): the parameters of the network Returns:", "network. \"\"\" # Get our network parameters. 
nb_layers = network['nb_layers']", "y_train/758.92 return (nb_classes, batch_size, input_shape, x_train, x_test, y_train, y_test) def", "test loss. Args: network (dict): the parameters of the network", "elif dataset == 'mnist': nb_classes, batch_size, input_shape, x_train, \\ x_test,", "x_train = x_train.reshape(50000, 3072) x_test = x_test.reshape(10000, 3072) x_train =", "-1 batch_size = np.shape(y_train)[0] input_shape = (13,) # normalize y", "= cifar10.load_data() x_train = x_train.reshape(50000, 3072) x_test = x_test.reshape(10000, 3072)", "\"N(Ni)\", \"N(Mn)\", \"N(P)\", \"N(Si)\", \"N( C )\", \"N_log(eff fl p", "\"data points\") nb_classes = -1 batch_size = np.shape(y_train)[0] input_shape =", "from keras.optimizers import Adadelta, Adam, rmsprop from sklearn.metrics import mean_squared_error", "Get the data. (x_train, y_train), (x_test, y_test) = mnist.load_data() x_train", "return test loss. Args: network (dict): the parameters of the", "import Dense, Dropout from keras.utils.np_utils import to_categorical from keras.callbacks import", "input_shape, x_train, x_test, y_train, y_test) def get_dbtt(): data = data_parser.parse(\"DBTT_Data22.csv\")", "verbose=0) return score[1] # 1 is accuracy. 0 is loss.", "(x_test, y_test) = mnist.load_data() x_train = x_train.reshape(60000, 784) x_test =", "the Network class to actually train. Based on: https://github.com/fchollet/keras/blob/master/examples/mnist_mlp.py \"\"\"", "nb_classes, batch_size, input_shape, x_train, \\ x_test, y_train, y_test = get_dbtt()", "import mean_squared_error # Helper: Early stopping. early_stopper = EarlyStopping(patience=5) def", "np.sqrt(mean_squared_error(y_test, y_predict)) print(rms) return rms else: model.fit(x_train, y_train, batch_size=batch_size, epochs=10000,", "Y = \"CD delta sigma\" data.set_x_features(X) data.set_y_feature(Y) data_lwr.set_y_feature(Y) data_lwr.set_x_features(X) data.add_exclusive_filter(\"Alloy\",", "Output layer. 
if(nb_classes == -1): model.add(Dense(1, activation='linear')) ADAM = Adam(lr=learning_rate)", "learning_rate = network['learning_rate'] model = Sequential() # Add each layer.", "Set defaults. nb_classes = 10 batch_size = 64 input_shape =", "fl p =.5)\", \"N(Cu)\", \"N(Ni)\", \"N(Mn)\", \"N(P)\", \"N(Si)\", \"N( C", "get_mnist(): \"\"\"Retrieve the MNIST dataset and process the data.\"\"\" #", "data.\"\"\" # Set defaults. nb_classes = 10 batch_size = 128", "= model.evaluate(x_test, y_test, verbose=0) return score[1] # 1 is accuracy.", "def get_dbtt(): data = data_parser.parse(\"DBTT_Data22.csv\") data_lwr = data_parser.parse(\"CD_LWR_clean8.csv\") X =", "nb_neurons = network['nb_neurons'] activation = network['activation'] optimizer = network['optimizer'] learning_rate", "# Set defaults. nb_classes = 10 batch_size = 64 input_shape", "y_test, verbose=0) return score[1] # 1 is accuracy. 0 is", "/= 255 x_test /= 255 # convert class vectors to", "np.array(data_lwr.get_x_data()) y_test = np.array(data_lwr.get_y_data()) x_train = np.array(data.get_x_data()) y_train = np.array(data.get_y_data())", "# Output layer. if(nb_classes == -1): model.add(Dense(1, activation='linear')) ADAM =" ]
[ "import Item from openslides.core.models import CustomSlide from openslides.utils.test import TestCase", "Test that get_root_and_children needs only one db query. \"\"\" for", "Item from openslides.core.models import CustomSlide from openslides.utils.test import TestCase class", "import TestCase class TestItemManager(TestCase): def test_get_root_and_children_db_queries(self): \"\"\" Test that get_root_and_children", "db query. \"\"\" for i in range(10): CustomSlide.objects.create(title='item{}'.format(i)) with self.assertNumQueries(1):", "query. \"\"\" for i in range(10): CustomSlide.objects.create(title='item{}'.format(i)) with self.assertNumQueries(1): Item.objects.get_root_and_children()", "openslides.agenda.models import Item from openslides.core.models import CustomSlide from openslides.utils.test import", "one db query. \"\"\" for i in range(10): CustomSlide.objects.create(title='item{}'.format(i)) with", "<reponame>rolandgeider/OpenSlides from openslides.agenda.models import Item from openslides.core.models import CustomSlide from", "TestItemManager(TestCase): def test_get_root_and_children_db_queries(self): \"\"\" Test that get_root_and_children needs only one", "needs only one db query. \"\"\" for i in range(10):", "openslides.core.models import CustomSlide from openslides.utils.test import TestCase class TestItemManager(TestCase): def", "from openslides.utils.test import TestCase class TestItemManager(TestCase): def test_get_root_and_children_db_queries(self): \"\"\" Test", "from openslides.core.models import CustomSlide from openslides.utils.test import TestCase class TestItemManager(TestCase):", "CustomSlide from openslides.utils.test import TestCase class TestItemManager(TestCase): def test_get_root_and_children_db_queries(self): \"\"\"", "that get_root_and_children needs only one db query. \"\"\" for i", "\"\"\" Test that get_root_and_children needs only one db query. 
\"\"\"", "TestCase class TestItemManager(TestCase): def test_get_root_and_children_db_queries(self): \"\"\" Test that get_root_and_children needs", "openslides.utils.test import TestCase class TestItemManager(TestCase): def test_get_root_and_children_db_queries(self): \"\"\" Test that", "get_root_and_children needs only one db query. \"\"\" for i in", "import CustomSlide from openslides.utils.test import TestCase class TestItemManager(TestCase): def test_get_root_and_children_db_queries(self):", "from openslides.agenda.models import Item from openslides.core.models import CustomSlide from openslides.utils.test", "test_get_root_and_children_db_queries(self): \"\"\" Test that get_root_and_children needs only one db query.", "def test_get_root_and_children_db_queries(self): \"\"\" Test that get_root_and_children needs only one db", "class TestItemManager(TestCase): def test_get_root_and_children_db_queries(self): \"\"\" Test that get_root_and_children needs only", "only one db query. \"\"\" for i in range(10): CustomSlide.objects.create(title='item{}'.format(i))" ]
[ "as certs_file: certs = \"\" for der in ssl_context.get_ca_certs(True): certs", "cleanup(self): # Non throw function \"\"\" Delete the cert file", "@staticmethod def _cleanup(name, session, warn_message): try: os.remove(name) except: logging.warning(f\"Couldn't delete", "\"\"\" This create a CA bundle file extracted from the", "that this file is being opened later on in the", "num=1): file_name_candidate = f\"{file_name}({num}).pem\" if os.path.exists(file_name_candidate): return cls._generate_cert_file_path(file_name, num +", "except: logging.warning(\"Couldn't close session\") logging.warning(warn_message) @classmethod def _create_cert_file(cls, ssl_context: SSLContext):", "path @classmethod def _generate_cert_file_path(cls, file_name: str, num=1): file_name_candidate = f\"{file_name}({num}).pem\"", "requests.Session() self.session.trust_env = False self.session.verify = self.cert_file_path self.session.mount('https://', SslAdapter(ssl_context)) self._finalizer", "self.session, warn_message=\"Implicitly cleaning up {!r}\".format(self)) def __enter__(self): return self def", "logging.warning(warn_message) @classmethod def _create_cert_file(cls, ssl_context: SSLContext): \"\"\" This create a", "self.cleanup() def cleanup(self): # Non throw function \"\"\" Delete the", "flow. This means we have to close the file before", "why not using tempfile self._ssl_context = ssl_context self.session = requests.Session()", "with the ssl_context 3. Except ssl_context to control the TLS", "\"\"\" This class create a wrapper for the requests.Session object", "Load certificates provided with the ssl_context 3. 
Except ssl_context to", "+= f\"{ssl.DER_cert_to_PEM_cert(der)}\\n\" certs_file.write(certs) return path @classmethod def _generate_cert_file_path(cls, file_name: str,", "file_name_candidate = f\"{file_name}({num}).pem\" if os.path.exists(file_name_candidate): return cls._generate_cert_file_path(file_name, num + 1)", "False self.session.verify = self.cert_file_path self.session.mount('https://', SslAdapter(ssl_context)) self._finalizer = weakref.finalize( self,", "from ssl_context_builder.builder.builder import SslContextBuilder from ssl_context_builder.http_impl.requests_wrapper.ssl_adapter import SslAdapter class RequestsSecureSession:", "for the requests.Session object It does the following: 1. Disable", "self def __exit__(self, exc, value, tb): self.cleanup() def cleanup(self): #", "__init__(self, ssl_context: SSLContext): \"\"\" This class create a wrapper for", "session\") logging.warning(warn_message) @classmethod def _create_cert_file(cls, ssl_context: SSLContext): \"\"\" This create", "tempfile self._ssl_context = ssl_context self.session = requests.Session() self.session.trust_env = False", "def _cleanup(name, session, warn_message): try: os.remove(name) except: logging.warning(f\"Couldn't delete certs", "f\"{file_name}({num}).pem\" if os.path.exists(file_name_candidate): return cls._generate_cert_file_path(file_name, num + 1) return file_name_candidate", "self.session.verify = self.cert_file_path self.session.mount('https://', SslAdapter(ssl_context)) self._finalizer = weakref.finalize( self, self._cleanup,", "def __exit__(self, exc, value, tb): self.cleanup() def cleanup(self): # Non", "def __init__(self, ssl_context: SSLContext): \"\"\" This class create a wrapper", "session, warn_message): try: os.remove(name) except: logging.warning(f\"Couldn't delete certs file {name}\")", "CA bundle file extracted from the ssl_context The reason we", "tb): self.cleanup() def cleanup(self): # Non throw function \"\"\" Delete", "created ca_bundle file \"\"\" path = 
\"certs.pem\" if os.path.exists(path): path", "# see note inside the function why not using tempfile", "certificates provided with the ssl_context 3. Except ssl_context to control", "def __enter__(self): return self def __exit__(self, exc, value, tb): self.cleanup()", "from the ssl_context The reason we are creating a real", "logging.warning(f\"Couldn't delete certs file {self.cert_file_path}\") try: self.session.close() except: logging.warning(\"Couldn't close", "real file and deleting it is that this file is", "in ssl_context.get_ca_certs(True): certs += f\"{ssl.DER_cert_to_PEM_cert(der)}\\n\" certs_file.write(certs) return path @classmethod def", "@classmethod def _generate_cert_file_path(cls, file_name: str, num=1): file_name_candidate = f\"{file_name}({num}).pem\" if", "self.cert_file_path = self._create_cert_file(ssl_context) # see note inside the function why", "provided with the ssl_context 3. Except ssl_context to control the", "2. Load certificates provided with the ssl_context 3. 
Except ssl_context", "{!r}\".format(self)) def __enter__(self): return self def __exit__(self, exc, value, tb):", "ssl_context.get_ca_certs(True): certs += f\"{ssl.DER_cert_to_PEM_cert(der)}\\n\" certs_file.write(certs) return path @classmethod def _generate_cert_file_path(cls,", "weakref.finalize( self, self._cleanup, self.cert_file_path, self.session, warn_message=\"Implicitly cleaning up {!r}\".format(self)) def", "ssl from ssl import SSLContext import logging from ssl_context_builder.builder.builder import", "communication @param ssl_context: SSLContext \"\"\" self.cert_file_path = self._create_cert_file(ssl_context) # see", "from ssl import SSLContext import logging from ssl_context_builder.builder.builder import SslContextBuilder", "self.session.close() except: logging.warning(\"Couldn't close session\") @staticmethod def _cleanup(name, session, warn_message):", "os.path.exists(path): path = cls._generate_cert_file_path(\"certs\") with open(path, mode=\"a+\") as certs_file: certs", "logging from ssl_context_builder.builder.builder import SslContextBuilder from ssl_context_builder.http_impl.requests_wrapper.ssl_adapter import SslAdapter class", "cls._generate_cert_file_path(\"certs\") with open(path, mode=\"a+\") as certs_file: certs = \"\" for", "when closed. 
@param ssl_context: ssl_context @return: path to the created", "= f\"{file_name}({num}).pem\" if os.path.exists(file_name_candidate): return cls._generate_cert_file_path(file_name, num + 1) return", "and close the session @return: \"\"\" if self._finalizer.detach(): try: os.remove(self.cert_file_path)", "file {name}\") try: session.close() except: logging.warning(\"Couldn't close session\") logging.warning(warn_message) @classmethod", "self._finalizer = weakref.finalize( self, self._cleanup, self.cert_file_path, self.session, warn_message=\"Implicitly cleaning up", "SslAdapter class RequestsSecureSession: def __init__(self, ssl_context: SSLContext): \"\"\" This class", "Delete the cert file and close the session @return: \"\"\"", "using tempfile self._ssl_context = ssl_context self.session = requests.Session() self.session.trust_env =", "Non throw function \"\"\" Delete the cert file and close", "certs_file.write(certs) return path @classmethod def _generate_cert_file_path(cls, file_name: str, num=1): file_name_candidate", "close session\") logging.warning(warn_message) @classmethod def _create_cert_file(cls, ssl_context: SSLContext): \"\"\" This", "file \"\"\" path = \"certs.pem\" if os.path.exists(path): path = cls._generate_cert_file_path(\"certs\")", "throw function \"\"\" Delete the cert file and close the", "with open(path, mode=\"a+\") as certs_file: certs = \"\" for der", "class create a wrapper for the requests.Session object It does", "1. Disable session env_vars consuming 2. 
Load certificates provided with", "requests import ssl from ssl import SSLContext import logging from", "if self._finalizer.detach(): try: os.remove(self.cert_file_path) except: logging.warning(f\"Couldn't delete certs file {self.cert_file_path}\")", "warn_message): try: os.remove(name) except: logging.warning(f\"Couldn't delete certs file {name}\") try:", "a CA bundle file extracted from the ssl_context The reason", "ssl_context to control the TLS communication @param ssl_context: SSLContext \"\"\"", "self.cert_file_path self.session.mount('https://', SslAdapter(ssl_context)) self._finalizer = weakref.finalize( self, self._cleanup, self.cert_file_path, self.session,", "we are creating a real file and deleting it is", "cleaning up {!r}\".format(self)) def __enter__(self): return self def __exit__(self, exc,", "self.cert_file_path, self.session, warn_message=\"Implicitly cleaning up {!r}\".format(self)) def __enter__(self): return self", "the function why not using tempfile self._ssl_context = ssl_context self.session", "SslContextBuilder from ssl_context_builder.http_impl.requests_wrapper.ssl_adapter import SslAdapter class RequestsSecureSession: def __init__(self, ssl_context:", "to control the TLS communication @param ssl_context: SSLContext \"\"\" self.cert_file_path", "certs file {self.cert_file_path}\") try: self.session.close() except: logging.warning(\"Couldn't close session\") @staticmethod", "import weakref import os import requests import ssl from ssl", "mode=\"a+\") as certs_file: certs = \"\" for der in ssl_context.get_ca_certs(True):", "cert file and close the session @return: \"\"\" if self._finalizer.detach():", "@return: \"\"\" if self._finalizer.detach(): try: os.remove(self.cert_file_path) except: logging.warning(f\"Couldn't delete certs", "wrapper for the requests.Session object It does the following: 1.", "opened later on in the requests flow. This means we", "being used tempfile is being destroyed when closed. 
@param ssl_context:", "\"\"\" path = \"certs.pem\" if os.path.exists(path): path = cls._generate_cert_file_path(\"certs\") with", "close the session @return: \"\"\" if self._finalizer.detach(): try: os.remove(self.cert_file_path) except:", "we have to close the file before it is being", "logging.warning(f\"Couldn't delete certs file {name}\") try: session.close() except: logging.warning(\"Couldn't close", "tempfile is being destroyed when closed. @param ssl_context: ssl_context @return:", "os.remove(self.cert_file_path) except: logging.warning(f\"Couldn't delete certs file {self.cert_file_path}\") try: self.session.close() except:", "note inside the function why not using tempfile self._ssl_context =", "value, tb): self.cleanup() def cleanup(self): # Non throw function \"\"\"", "is being opened later on in the requests flow. This", "\"\"\" self.cert_file_path = self._create_cert_file(ssl_context) # see note inside the function", "This class create a wrapper for the requests.Session object It", "{name}\") try: session.close() except: logging.warning(\"Couldn't close session\") logging.warning(warn_message) @classmethod def", "import logging from ssl_context_builder.builder.builder import SslContextBuilder from ssl_context_builder.http_impl.requests_wrapper.ssl_adapter import SslAdapter", "except: logging.warning(f\"Couldn't delete certs file {name}\") try: session.close() except: logging.warning(\"Couldn't", "delete certs file {name}\") try: session.close() except: logging.warning(\"Couldn't close session\")", "__exit__(self, exc, value, tb): self.cleanup() def cleanup(self): # Non throw", "TLS communication @param ssl_context: SSLContext \"\"\" self.cert_file_path = self._create_cert_file(ssl_context) #", "file is being opened later on in the requests flow.", "consuming 2. Load certificates provided with the ssl_context 3. 
Except", "file before it is being used tempfile is being destroyed", "see note inside the function why not using tempfile self._ssl_context", "\"\"\" Delete the cert file and close the session @return:", "bundle file extracted from the ssl_context The reason we are", "a real file and deleting it is that this file", "destroyed when closed. @param ssl_context: ssl_context @return: path to the", "This create a CA bundle file extracted from the ssl_context", "@return: path to the created ca_bundle file \"\"\" path =", "certs += f\"{ssl.DER_cert_to_PEM_cert(der)}\\n\" certs_file.write(certs) return path @classmethod def _generate_cert_file_path(cls, file_name:", "try: self.session.close() except: logging.warning(\"Couldn't close session\") @staticmethod def _cleanup(name, session,", "SSLContext \"\"\" self.cert_file_path = self._create_cert_file(ssl_context) # see note inside the", "import ssl from ssl import SSLContext import logging from ssl_context_builder.builder.builder", "object It does the following: 1. 
Disable session env_vars consuming", "SSLContext): \"\"\" This class create a wrapper for the requests.Session", "str, num=1): file_name_candidate = f\"{file_name}({num}).pem\" if os.path.exists(file_name_candidate): return cls._generate_cert_file_path(file_name, num", "except: logging.warning(\"Couldn't close session\") @staticmethod def _cleanup(name, session, warn_message): try:", "and deleting it is that this file is being opened", "= \"\" for der in ssl_context.get_ca_certs(True): certs += f\"{ssl.DER_cert_to_PEM_cert(der)}\\n\" certs_file.write(certs)", "a wrapper for the requests.Session object It does the following:", "self._create_cert_file(ssl_context) # see note inside the function why not using", "@param ssl_context: ssl_context @return: path to the created ca_bundle file", "Except ssl_context to control the TLS communication @param ssl_context: SSLContext", "create a wrapper for the requests.Session object It does the", "if os.path.exists(path): path = cls._generate_cert_file_path(\"certs\") with open(path, mode=\"a+\") as certs_file:", "the cert file and close the session @return: \"\"\" if", "self._finalizer.detach(): try: os.remove(self.cert_file_path) except: logging.warning(f\"Couldn't delete certs file {self.cert_file_path}\") try:", "file {self.cert_file_path}\") try: self.session.close() except: logging.warning(\"Couldn't close session\") @staticmethod def", "certs_file: certs = \"\" for der in ssl_context.get_ca_certs(True): certs +=", "return self def __exit__(self, exc, value, tb): self.cleanup() def cleanup(self):", "later on in the requests flow. This means we have", "import SSLContext import logging from ssl_context_builder.builder.builder import SslContextBuilder from ssl_context_builder.http_impl.requests_wrapper.ssl_adapter", "{self.cert_file_path}\") try: self.session.close() except: logging.warning(\"Couldn't close session\") @staticmethod def _cleanup(name,", "being destroyed when closed. 
@param ssl_context: ssl_context @return: path to", "it is being used tempfile is being destroyed when closed.", "warn_message=\"Implicitly cleaning up {!r}\".format(self)) def __enter__(self): return self def __exit__(self,", "ssl_context @return: path to the created ca_bundle file \"\"\" path", "requests.Session object It does the following: 1. Disable session env_vars", "import SslAdapter class RequestsSecureSession: def __init__(self, ssl_context: SSLContext): \"\"\" This", "self._ssl_context = ssl_context self.session = requests.Session() self.session.trust_env = False self.session.verify", "ssl_context_builder.http_impl.requests_wrapper.ssl_adapter import SslAdapter class RequestsSecureSession: def __init__(self, ssl_context: SSLContext): \"\"\"", "from ssl_context_builder.http_impl.requests_wrapper.ssl_adapter import SslAdapter class RequestsSecureSession: def __init__(self, ssl_context: SSLContext):", "deleting it is that this file is being opened later", "self.session.mount('https://', SslAdapter(ssl_context)) self._finalizer = weakref.finalize( self, self._cleanup, self.cert_file_path, self.session, warn_message=\"Implicitly", "inside the function why not using tempfile self._ssl_context = ssl_context", "the file before it is being used tempfile is being", "to close the file before it is being used tempfile", "means we have to close the file before it is", "exc, value, tb): self.cleanup() def cleanup(self): # Non throw function", "self._cleanup, self.cert_file_path, self.session, warn_message=\"Implicitly cleaning up {!r}\".format(self)) def __enter__(self): return", "Disable session env_vars consuming 2. Load certificates provided with the", "ssl_context: ssl_context @return: path to the created ca_bundle file \"\"\"", "used tempfile is being destroyed when closed. 
@param ssl_context: ssl_context", "the TLS communication @param ssl_context: SSLContext \"\"\" self.cert_file_path = self._create_cert_file(ssl_context)", "@classmethod def _create_cert_file(cls, ssl_context: SSLContext): \"\"\" This create a CA", "ssl_context: SSLContext): \"\"\" This class create a wrapper for the", "os import requests import ssl from ssl import SSLContext import", "this file is being opened later on in the requests", "@param ssl_context: SSLContext \"\"\" self.cert_file_path = self._create_cert_file(ssl_context) # see note", "create a CA bundle file extracted from the ssl_context The", "certs = \"\" for der in ssl_context.get_ca_certs(True): certs += f\"{ssl.DER_cert_to_PEM_cert(der)}\\n\"", "\"\"\" if self._finalizer.detach(): try: os.remove(self.cert_file_path) except: logging.warning(f\"Couldn't delete certs file", "weakref import os import requests import ssl from ssl import", "def cleanup(self): # Non throw function \"\"\" Delete the cert", "the created ca_bundle file \"\"\" path = \"certs.pem\" if os.path.exists(path):", "being opened later on in the requests flow. This means", "session\") @staticmethod def _cleanup(name, session, warn_message): try: os.remove(name) except: logging.warning(f\"Couldn't", "ca_bundle file \"\"\" path = \"certs.pem\" if os.path.exists(path): path =", "\"certs.pem\" if os.path.exists(path): path = cls._generate_cert_file_path(\"certs\") with open(path, mode=\"a+\") as", "is being used tempfile is being destroyed when closed. @param", "it is that this file is being opened later on", "path = \"certs.pem\" if os.path.exists(path): path = cls._generate_cert_file_path(\"certs\") with open(path,", "closed. 
@param ssl_context: ssl_context @return: path to the created ca_bundle", "= self.cert_file_path self.session.mount('https://', SslAdapter(ssl_context)) self._finalizer = weakref.finalize( self, self._cleanup, self.cert_file_path,", "function \"\"\" Delete the cert file and close the session", "# Non throw function \"\"\" Delete the cert file and", "function why not using tempfile self._ssl_context = ssl_context self.session =", "return path @classmethod def _generate_cert_file_path(cls, file_name: str, num=1): file_name_candidate =", "SslAdapter(ssl_context)) self._finalizer = weakref.finalize( self, self._cleanup, self.cert_file_path, self.session, warn_message=\"Implicitly cleaning", "file extracted from the ssl_context The reason we are creating", "ssl_context_builder.builder.builder import SslContextBuilder from ssl_context_builder.http_impl.requests_wrapper.ssl_adapter import SslAdapter class RequestsSecureSession: def", "try: os.remove(name) except: logging.warning(f\"Couldn't delete certs file {name}\") try: session.close()", "the ssl_context 3. 
Except ssl_context to control the TLS communication", "ssl import SSLContext import logging from ssl_context_builder.builder.builder import SslContextBuilder from", "extracted from the ssl_context The reason we are creating a", "= ssl_context self.session = requests.Session() self.session.trust_env = False self.session.verify =", "logging.warning(\"Couldn't close session\") @staticmethod def _cleanup(name, session, warn_message): try: os.remove(name)", "ssl_context: SSLContext): \"\"\" This create a CA bundle file extracted", "The reason we are creating a real file and deleting", "file and deleting it is that this file is being", "certs file {name}\") try: session.close() except: logging.warning(\"Couldn't close session\") logging.warning(warn_message)", "ssl_context The reason we are creating a real file and", "except: logging.warning(f\"Couldn't delete certs file {self.cert_file_path}\") try: self.session.close() except: logging.warning(\"Couldn't", "It does the following: 1. Disable session env_vars consuming 2.", "ssl_context 3. Except ssl_context to control the TLS communication @param", "does the following: 1. Disable session env_vars consuming 2. 
Load", "self, self._cleanup, self.cert_file_path, self.session, warn_message=\"Implicitly cleaning up {!r}\".format(self)) def __enter__(self):", "creating a real file and deleting it is that this", "for der in ssl_context.get_ca_certs(True): certs += f\"{ssl.DER_cert_to_PEM_cert(der)}\\n\" certs_file.write(certs) return path", "SSLContext): \"\"\" This create a CA bundle file extracted from", "try: os.remove(self.cert_file_path) except: logging.warning(f\"Couldn't delete certs file {self.cert_file_path}\") try: self.session.close()", "self.session.trust_env = False self.session.verify = self.cert_file_path self.session.mount('https://', SslAdapter(ssl_context)) self._finalizer =", "is that this file is being opened later on in", "import requests import ssl from ssl import SSLContext import logging", "\"\" for der in ssl_context.get_ca_certs(True): certs += f\"{ssl.DER_cert_to_PEM_cert(der)}\\n\" certs_file.write(certs) return", "= self._create_cert_file(ssl_context) # see note inside the function why not", "ssl_context: SSLContext \"\"\" self.cert_file_path = self._create_cert_file(ssl_context) # see note inside", "_cleanup(name, session, warn_message): try: os.remove(name) except: logging.warning(f\"Couldn't delete certs file", "before it is being used tempfile is being destroyed when", "to the created ca_bundle file \"\"\" path = \"certs.pem\" if", "reason we are creating a real file and deleting it", "try: session.close() except: logging.warning(\"Couldn't close session\") logging.warning(warn_message) @classmethod def _create_cert_file(cls,", "def _create_cert_file(cls, ssl_context: SSLContext): \"\"\" This create a CA bundle", "= cls._generate_cert_file_path(\"certs\") with open(path, mode=\"a+\") as certs_file: certs = \"\"", "close session\") @staticmethod def _cleanup(name, session, warn_message): try: os.remove(name) except:", "env_vars consuming 2. 
Load certificates provided with the ssl_context 3.", "class RequestsSecureSession: def __init__(self, ssl_context: SSLContext): \"\"\" This class create", "3. Except ssl_context to control the TLS communication @param ssl_context:", "logging.warning(\"Couldn't close session\") logging.warning(warn_message) @classmethod def _create_cert_file(cls, ssl_context: SSLContext): \"\"\"", "the requests flow. This means we have to close the", "the requests.Session object It does the following: 1. Disable session", "session env_vars consuming 2. Load certificates provided with the ssl_context", "up {!r}\".format(self)) def __enter__(self): return self def __exit__(self, exc, value,", "close the file before it is being used tempfile is", "file and close the session @return: \"\"\" if self._finalizer.detach(): try:", "= \"certs.pem\" if os.path.exists(path): path = cls._generate_cert_file_path(\"certs\") with open(path, mode=\"a+\")", "der in ssl_context.get_ca_certs(True): certs += f\"{ssl.DER_cert_to_PEM_cert(der)}\\n\" certs_file.write(certs) return path @classmethod", "_generate_cert_file_path(cls, file_name: str, num=1): file_name_candidate = f\"{file_name}({num}).pem\" if os.path.exists(file_name_candidate): return", "following: 1. Disable session env_vars consuming 2. Load certificates provided", "__enter__(self): return self def __exit__(self, exc, value, tb): self.cleanup() def", "open(path, mode=\"a+\") as certs_file: certs = \"\" for der in", "requests flow. This means we have to close the file", "path to the created ca_bundle file \"\"\" path = \"certs.pem\"", "are creating a real file and deleting it is that", "SSLContext import logging from ssl_context_builder.builder.builder import SslContextBuilder from ssl_context_builder.http_impl.requests_wrapper.ssl_adapter import", "the following: 1. Disable session env_vars consuming 2. 
Load certificates", "delete certs file {self.cert_file_path}\") try: self.session.close() except: logging.warning(\"Couldn't close session\")", "= requests.Session() self.session.trust_env = False self.session.verify = self.cert_file_path self.session.mount('https://', SslAdapter(ssl_context))", "the ssl_context The reason we are creating a real file", "import os import requests import ssl from ssl import SSLContext", "This means we have to close the file before it", "is being destroyed when closed. @param ssl_context: ssl_context @return: path", "path = cls._generate_cert_file_path(\"certs\") with open(path, mode=\"a+\") as certs_file: certs =", "f\"{ssl.DER_cert_to_PEM_cert(der)}\\n\" certs_file.write(certs) return path @classmethod def _generate_cert_file_path(cls, file_name: str, num=1):", "_create_cert_file(cls, ssl_context: SSLContext): \"\"\" This create a CA bundle file", "control the TLS communication @param ssl_context: SSLContext \"\"\" self.cert_file_path =", "self.session = requests.Session() self.session.trust_env = False self.session.verify = self.cert_file_path self.session.mount('https://',", "session.close() except: logging.warning(\"Couldn't close session\") logging.warning(warn_message) @classmethod def _create_cert_file(cls, ssl_context:", "def _generate_cert_file_path(cls, file_name: str, num=1): file_name_candidate = f\"{file_name}({num}).pem\" if os.path.exists(file_name_candidate):", "import SslContextBuilder from ssl_context_builder.http_impl.requests_wrapper.ssl_adapter import SslAdapter class RequestsSecureSession: def __init__(self,", "ssl_context self.session = requests.Session() self.session.trust_env = False self.session.verify = self.cert_file_path", "file_name: str, num=1): file_name_candidate = f\"{file_name}({num}).pem\" if os.path.exists(file_name_candidate): return cls._generate_cert_file_path(file_name,", "os.remove(name) except: logging.warning(f\"Couldn't delete certs file {name}\") try: session.close() except:", "= False 
self.session.verify = self.cert_file_path self.session.mount('https://', SslAdapter(ssl_context)) self._finalizer = weakref.finalize(", "the session @return: \"\"\" if self._finalizer.detach(): try: os.remove(self.cert_file_path) except: logging.warning(f\"Couldn't", "not using tempfile self._ssl_context = ssl_context self.session = requests.Session() self.session.trust_env", "= weakref.finalize( self, self._cleanup, self.cert_file_path, self.session, warn_message=\"Implicitly cleaning up {!r}\".format(self))", "have to close the file before it is being used", "session @return: \"\"\" if self._finalizer.detach(): try: os.remove(self.cert_file_path) except: logging.warning(f\"Couldn't delete", "RequestsSecureSession: def __init__(self, ssl_context: SSLContext): \"\"\" This class create a", "on in the requests flow. This means we have to", "in the requests flow. This means we have to close" ]
[ "len(labels) data_index = [i for i in range(length)] class_index =", "class_index = labels csv_dict = {'class_index': class_index, 'data_index': data_index} df", "= dict[b'labels'] data = dict[b'data'] filenames = dict[b'filenames'] length =", "in range(length)] class_index = labels csv_dict = {'class_index': class_index, 'data_index':", "f: dict = pickle.load(f, encoding='bytes') dict.keys() batch_label = dict[b'batch_label'] labels", "''' import pickle import pandas as pd file = 'E:\\pycharm\\LEARN\\data\\cifar-10\\cifar-10-batches-py\\\\test_batch'", "cifar-10. The .csv file format: class_index,data_index 3,0 8,1 8,2 ...", "file format: class_index,data_index 3,0 8,1 8,2 ... ''' import pickle", "8,2 ... ''' import pickle import pandas as pd file", "as pd file = 'E:\\pycharm\\LEARN\\data\\cifar-10\\cifar-10-batches-py\\\\test_batch' with open(file, 'rb') as f:", "Selected cifar-10. The .csv file format: class_index,data_index 3,0 8,1 8,2", "open(file, 'rb') as f: dict = pickle.load(f, encoding='bytes') dict.keys() batch_label", "labels = dict[b'labels'] data = dict[b'data'] filenames = dict[b'filenames'] length", "= dict[b'data'] filenames = dict[b'filenames'] length = len(labels) data_index =", "for i in range(length)] class_index = labels csv_dict = {'class_index':", "... ''' import pickle import pandas as pd file =", "as f: dict = pickle.load(f, encoding='bytes') dict.keys() batch_label = dict[b'batch_label']", "= {'class_index': class_index, 'data_index': data_index} df = pd.DataFrame(csv_dict) df.to_csv('selected_cifar10.csv', index=False)", "dict.keys() batch_label = dict[b'batch_label'] labels = dict[b'labels'] data = dict[b'data']", ".csv file format: class_index,data_index 3,0 8,1 8,2 ... 
''' import", "csv_dict = {'class_index': class_index, 'data_index': data_index} df = pd.DataFrame(csv_dict) df.to_csv('selected_cifar10.csv',", "'E:\\pycharm\\LEARN\\data\\cifar-10\\cifar-10-batches-py\\\\test_batch' with open(file, 'rb') as f: dict = pickle.load(f, encoding='bytes')", "8,1 8,2 ... ''' import pickle import pandas as pd", "= labels csv_dict = {'class_index': class_index, 'data_index': data_index} df =", "data_index = [i for i in range(length)] class_index = labels", "= [i for i in range(length)] class_index = labels csv_dict", "dict[b'batch_label'] labels = dict[b'labels'] data = dict[b'data'] filenames = dict[b'filenames']", "i in range(length)] class_index = labels csv_dict = {'class_index': class_index,", "= dict[b'filenames'] length = len(labels) data_index = [i for i", "with open(file, 'rb') as f: dict = pickle.load(f, encoding='bytes') dict.keys()", "= 'E:\\pycharm\\LEARN\\data\\cifar-10\\cifar-10-batches-py\\\\test_batch' with open(file, 'rb') as f: dict = pickle.load(f,", "<reponame>jiaqiangwjq/python_workhouse ''' Selected cifar-10. The .csv file format: class_index,data_index 3,0", "= pickle.load(f, encoding='bytes') dict.keys() batch_label = dict[b'batch_label'] labels = dict[b'labels']", "dict[b'labels'] data = dict[b'data'] filenames = dict[b'filenames'] length = len(labels)", "length = len(labels) data_index = [i for i in range(length)]", "dict[b'data'] filenames = dict[b'filenames'] length = len(labels) data_index = [i", "= len(labels) data_index = [i for i in range(length)] class_index", "format: class_index,data_index 3,0 8,1 8,2 ... 
''' import pickle import", "pandas as pd file = 'E:\\pycharm\\LEARN\\data\\cifar-10\\cifar-10-batches-py\\\\test_batch' with open(file, 'rb') as", "batch_label = dict[b'batch_label'] labels = dict[b'labels'] data = dict[b'data'] filenames", "= dict[b'batch_label'] labels = dict[b'labels'] data = dict[b'data'] filenames =", "range(length)] class_index = labels csv_dict = {'class_index': class_index, 'data_index': data_index}", "data = dict[b'data'] filenames = dict[b'filenames'] length = len(labels) data_index", "[i for i in range(length)] class_index = labels csv_dict =", "3,0 8,1 8,2 ... ''' import pickle import pandas as", "dict = pickle.load(f, encoding='bytes') dict.keys() batch_label = dict[b'batch_label'] labels =", "pickle import pandas as pd file = 'E:\\pycharm\\LEARN\\data\\cifar-10\\cifar-10-batches-py\\\\test_batch' with open(file,", "class_index,data_index 3,0 8,1 8,2 ... ''' import pickle import pandas", "import pandas as pd file = 'E:\\pycharm\\LEARN\\data\\cifar-10\\cifar-10-batches-py\\\\test_batch' with open(file, 'rb')", "''' Selected cifar-10. The .csv file format: class_index,data_index 3,0 8,1", "encoding='bytes') dict.keys() batch_label = dict[b'batch_label'] labels = dict[b'labels'] data =", "filenames = dict[b'filenames'] length = len(labels) data_index = [i for", "dict[b'filenames'] length = len(labels) data_index = [i for i in", "The .csv file format: class_index,data_index 3,0 8,1 8,2 ... 
'''", "pd file = 'E:\\pycharm\\LEARN\\data\\cifar-10\\cifar-10-batches-py\\\\test_batch' with open(file, 'rb') as f: dict", "labels csv_dict = {'class_index': class_index, 'data_index': data_index} df = pd.DataFrame(csv_dict)", "file = 'E:\\pycharm\\LEARN\\data\\cifar-10\\cifar-10-batches-py\\\\test_batch' with open(file, 'rb') as f: dict =", "import pickle import pandas as pd file = 'E:\\pycharm\\LEARN\\data\\cifar-10\\cifar-10-batches-py\\\\test_batch' with", "'rb') as f: dict = pickle.load(f, encoding='bytes') dict.keys() batch_label =", "pickle.load(f, encoding='bytes') dict.keys() batch_label = dict[b'batch_label'] labels = dict[b'labels'] data" ]
[ "Organization, OrganizationMember from flask import g client = app.test_client() _ctx", "codebox import app from codebox.apps.auth.models import User from codebox.apps.snippets.models import", "from codebox.apps.auth.models import User from codebox.apps.snippets.models import Snippet from codebox.apps.organizations.models", "import Organization, OrganizationMember from flask import g client = app.test_client()", "Create sample snippets # plaintext Snippet.objects.create(org='disqus', user=1, lang='text', text =", "User from codebox.apps.snippets.models import Snippet from codebox.apps.organizations.models import Organization, OrganizationMember", "Snippet.objects.create(org='disqus', user=1, lang='html', text = '<h1>Look its HTML!</h1>') # javascript", "= '<h1>Look its HTML!</h1>') # javascript Snippet.objects.create(org='disqus', user=1, lang='javascript', text", "text = '<h1>Look its HTML!</h1>') # javascript Snippet.objects.create(org='disqus', user=1, lang='javascript',", "# plaintext Snippet.objects.create(org='disqus', user=1, lang='text', text = \"Hello World!\") #", "plaintext Snippet.objects.create(org='disqus', user=1, lang='text', text = \"Hello World!\") # python", "OrganizationMember from flask import g client = app.test_client() _ctx =", "'<h1>Look its HTML!</h1>') # javascript Snippet.objects.create(org='disqus', user=1, lang='javascript', text =", "sample snippets # plaintext Snippet.objects.create(org='disqus', user=1, lang='text', text = \"Hello", "Fixtures from codebox import app from codebox.apps.auth.models import User from", "name='zeeg') Organization.objects.create(pk='disqus', name='DISQUS') OrganizationMember.objects.create(org='disqus', user=1) # Create sample snippets #", "Organization.objects.create(pk='disqus', name='DISQUS') OrganizationMember.objects.create(org='disqus', user=1) # Create sample snippets # plaintext", "user=1) # Create sample snippets # plaintext Snippet.objects.create(org='disqus', user=1, lang='text',", "Snippet 
from codebox.apps.organizations.models import Organization, OrganizationMember from flask import g", "= app.test_client() _ctx = app.test_request_context() _ctx.push() app.preprocess_request() g.redis.flushdb() User.objects.create(pk=1, name='zeeg')", "World!\") # python Snippet.objects.create(org='disqus', user=1, lang='python', text = \"print 'Disqus", "= \"print 'Disqus was here'\") # html Snippet.objects.create(org='disqus', user=1, lang='html',", "text = \"Hello World!\") # python Snippet.objects.create(org='disqus', user=1, lang='python', text", "lang='text', text = \"Hello World!\") # python Snippet.objects.create(org='disqus', user=1, lang='python',", "# Ghetto Fixtures from codebox import app from codebox.apps.auth.models import", "_ctx = app.test_request_context() _ctx.push() app.preprocess_request() g.redis.flushdb() User.objects.create(pk=1, name='zeeg') Organization.objects.create(pk='disqus', name='DISQUS')", "was here'\") # html Snippet.objects.create(org='disqus', user=1, lang='html', text = '<h1>Look", "app.test_request_context() _ctx.push() app.preprocess_request() g.redis.flushdb() User.objects.create(pk=1, name='zeeg') Organization.objects.create(pk='disqus', name='DISQUS') OrganizationMember.objects.create(org='disqus', user=1)", "g client = app.test_client() _ctx = app.test_request_context() _ctx.push() app.preprocess_request() g.redis.flushdb()", "Snippet.objects.create(org='disqus', user=1, lang='text', text = \"Hello World!\") # python Snippet.objects.create(org='disqus',", "import User from codebox.apps.snippets.models import Snippet from codebox.apps.organizations.models import Organization,", "# html Snippet.objects.create(org='disqus', user=1, lang='html', text = '<h1>Look its HTML!</h1>')", "Ghetto Fixtures from codebox import app from codebox.apps.auth.models import User", "app from codebox.apps.auth.models import User from codebox.apps.snippets.models import Snippet from", "lang='python', text = \"print 'Disqus was here'\") # html 
Snippet.objects.create(org='disqus',", "OrganizationMember.objects.create(org='disqus', user=1) # Create sample snippets # plaintext Snippet.objects.create(org='disqus', user=1,", "python Snippet.objects.create(org='disqus', user=1, lang='python', text = \"print 'Disqus was here'\")", "app.test_client() _ctx = app.test_request_context() _ctx.push() app.preprocess_request() g.redis.flushdb() User.objects.create(pk=1, name='zeeg') Organization.objects.create(pk='disqus',", "# Create sample snippets # plaintext Snippet.objects.create(org='disqus', user=1, lang='text', text", "user=1, lang='text', text = \"Hello World!\") # python Snippet.objects.create(org='disqus', user=1,", "user=1, lang='html', text = '<h1>Look its HTML!</h1>') # javascript Snippet.objects.create(org='disqus',", "import Snippet from codebox.apps.organizations.models import Organization, OrganizationMember from flask import", "from codebox.apps.snippets.models import Snippet from codebox.apps.organizations.models import Organization, OrganizationMember from", "codebox.apps.snippets.models import Snippet from codebox.apps.organizations.models import Organization, OrganizationMember from flask", "codebox.apps.auth.models import User from codebox.apps.snippets.models import Snippet from codebox.apps.organizations.models import", "User.objects.create(pk=1, name='zeeg') Organization.objects.create(pk='disqus', name='DISQUS') OrganizationMember.objects.create(org='disqus', user=1) # Create sample snippets", "= \"Hello World!\") # python Snippet.objects.create(org='disqus', user=1, lang='python', text =", "flask import g client = app.test_client() _ctx = app.test_request_context() _ctx.push()", "\"print 'Disqus was here'\") # html Snippet.objects.create(org='disqus', user=1, lang='html', text", "lang='html', text = '<h1>Look its HTML!</h1>') # javascript Snippet.objects.create(org='disqus', user=1,", "its HTML!</h1>') # javascript Snippet.objects.create(org='disqus', user=1, lang='javascript', text = 
\"document.write('Di-squs')\")", "from codebox import app from codebox.apps.auth.models import User from codebox.apps.snippets.models", "name='DISQUS') OrganizationMember.objects.create(org='disqus', user=1) # Create sample snippets # plaintext Snippet.objects.create(org='disqus',", "html Snippet.objects.create(org='disqus', user=1, lang='html', text = '<h1>Look its HTML!</h1>') #", "\"Hello World!\") # python Snippet.objects.create(org='disqus', user=1, lang='python', text = \"print", "import g client = app.test_client() _ctx = app.test_request_context() _ctx.push() app.preprocess_request()", "_ctx.push() app.preprocess_request() g.redis.flushdb() User.objects.create(pk=1, name='zeeg') Organization.objects.create(pk='disqus', name='DISQUS') OrganizationMember.objects.create(org='disqus', user=1) #", "app.preprocess_request() g.redis.flushdb() User.objects.create(pk=1, name='zeeg') Organization.objects.create(pk='disqus', name='DISQUS') OrganizationMember.objects.create(org='disqus', user=1) # Create", "here'\") # html Snippet.objects.create(org='disqus', user=1, lang='html', text = '<h1>Look its", "g.redis.flushdb() User.objects.create(pk=1, name='zeeg') Organization.objects.create(pk='disqus', name='DISQUS') OrganizationMember.objects.create(org='disqus', user=1) # Create sample", "Snippet.objects.create(org='disqus', user=1, lang='python', text = \"print 'Disqus was here'\") #", "from flask import g client = app.test_client() _ctx = app.test_request_context()", "import app from codebox.apps.auth.models import User from codebox.apps.snippets.models import Snippet", "text = \"print 'Disqus was here'\") # html Snippet.objects.create(org='disqus', user=1,", "'Disqus was here'\") # html Snippet.objects.create(org='disqus', user=1, lang='html', text =", "client = app.test_client() _ctx = app.test_request_context() _ctx.push() app.preprocess_request() g.redis.flushdb() User.objects.create(pk=1,", "user=1, lang='python', text = \"print 'Disqus was here'\") # html", "= 
app.test_request_context() _ctx.push() app.preprocess_request() g.redis.flushdb() User.objects.create(pk=1, name='zeeg') Organization.objects.create(pk='disqus', name='DISQUS') OrganizationMember.objects.create(org='disqus',", "# python Snippet.objects.create(org='disqus', user=1, lang='python', text = \"print 'Disqus was", "snippets # plaintext Snippet.objects.create(org='disqus', user=1, lang='text', text = \"Hello World!\")", "codebox.apps.organizations.models import Organization, OrganizationMember from flask import g client =", "from codebox.apps.organizations.models import Organization, OrganizationMember from flask import g client" ]
[ "Mock, patch from django.test import SimpleTestCase from corehq.apps.domain.exceptions import DomainDoesNotExist", "django.test import SimpleTestCase from corehq.apps.domain.exceptions import DomainDoesNotExist from corehq.apps.linked_domain.exceptions import", "'downstream' def test_exception_raised_if_domain_does_not_exist(self): def mock_handler(domain): return domain != self.downstream_domain with", "self.downstream_domain) def test_exception_raised_if_domain_link_error_raised(self): def mock_handler(downstream, upstream): raise DomainLinkError with patch('corehq.apps.linked_domain.views.domain_exists',", "test_exception_raised_if_domain_link_error_raised(self): def mock_handler(downstream, upstream): raise DomainLinkError with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link',", "patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\\ patch('corehq.apps.linked_domain.views.DomainLink.link_domains') as mock_linkdomains,\\ self.assertRaises(DomainLinkError): mock_linkdomains.side_effect =", "cls).setUpClass() cls.upstream_domain = 'upstream' cls.downstream_domain = 'downstream' def test_exception_raised_if_domain_does_not_exist(self): def", "patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\\ patch('corehq.apps.linked_domain.views.user_has_admin_access_in_all_domains', return_value=False),\\ self.assertRaises(DomainLinkNotAllowed): link_domains(Mock(), self.upstream_domain, self.downstream_domain)", "LinkDomainsTests(SimpleTestCase): @classmethod def setUpClass(cls): super(LinkDomainsTests, cls).setUpClass() cls.upstream_domain = 'upstream' cls.downstream_domain", "patch('corehq.apps.linked_domain.views.domain_exists') as mock_domainexists,\\ 
self.assertRaises(DomainDoesNotExist): mock_domainexists.side_effect = mock_handler link_domains(Mock(), self.upstream_domain, self.downstream_domain)", "self.upstream_domain, self.downstream_domain) def test_exception_raised_if_domain_link_already_exists(self): with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=Mock()),\\ self.assertRaises(DomainLinkAlreadyExists):", "domain != self.downstream_domain with patch('corehq.apps.linked_domain.views.domain_exists') as mock_domainexists,\\ self.assertRaises(DomainDoesNotExist): mock_domainexists.side_effect =", "class LinkDomainsTests(SimpleTestCase): @classmethod def setUpClass(cls): super(LinkDomainsTests, cls).setUpClass() cls.upstream_domain = 'upstream'", "corehq.apps.linked_domain.views import link_domains class LinkDomainsTests(SimpleTestCase): @classmethod def setUpClass(cls): super(LinkDomainsTests, cls).setUpClass()", "from corehq.apps.linked_domain.views import link_domains class LinkDomainsTests(SimpleTestCase): @classmethod def setUpClass(cls): super(LinkDomainsTests,", "mock_handler link_domains(Mock(), self.upstream_domain, self.downstream_domain) def test_exception_raised_if_user_is_not_admin_in_both_domains(self): with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link',", "self.downstream_domain) def test_successful(self): with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\\ patch('corehq.apps.linked_domain.views.DomainLink.link_domains', return_value=True),\\", "with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=Mock()),\\ self.assertRaises(DomainLinkAlreadyExists): link_domains(Mock(), 
self.upstream_domain, self.downstream_domain) def", "mock_domainexists.side_effect = mock_handler link_domains(Mock(), self.upstream_domain, self.downstream_domain) def test_exception_raised_if_domain_link_already_exists(self): with patch('corehq.apps.linked_domain.views.domain_exists',", "patch('corehq.apps.linked_domain.views.user_has_admin_access_in_all_domains', return_value=False),\\ self.assertRaises(DomainLinkNotAllowed): link_domains(Mock(), self.upstream_domain, self.downstream_domain) def test_successful(self): with patch('corehq.apps.linked_domain.views.domain_exists',", "super(LinkDomainsTests, cls).setUpClass() cls.upstream_domain = 'upstream' cls.downstream_domain = 'downstream' def test_exception_raised_if_domain_does_not_exist(self):", "mock_handler(downstream, upstream): raise DomainLinkError with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\\ patch('corehq.apps.linked_domain.views.DomainLink.link_domains')", "return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\\ patch('corehq.apps.linked_domain.views.DomainLink.link_domains') as mock_linkdomains,\\ self.assertRaises(DomainLinkError): mock_linkdomains.side_effect = mock_handler", "patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\\ patch('corehq.apps.linked_domain.views.user_has_admin_access_in_all_domains', return_value=False),\\ self.assertRaises(DomainLinkNotAllowed): link_domains(Mock(), self.upstream_domain, self.downstream_domain) def test_successful(self):", "raise DomainLinkError with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\\ patch('corehq.apps.linked_domain.views.DomainLink.link_domains') as mock_linkdomains,\\", "def test_successful(self): with 
patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\\ patch('corehq.apps.linked_domain.views.DomainLink.link_domains', return_value=True),\\ patch('corehq.apps.linked_domain.views.user_has_admin_access_in_all_domains',", "= mock_handler link_domains(Mock(), self.upstream_domain, self.downstream_domain) def test_exception_raised_if_domain_link_already_exists(self): with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\", "self.upstream_domain, self.downstream_domain) def test_exception_raised_if_domain_link_error_raised(self): def mock_handler(downstream, upstream): raise DomainLinkError with", "link_domains(Mock(), self.upstream_domain, self.downstream_domain) def test_exception_raised_if_domain_link_error_raised(self): def mock_handler(downstream, upstream): raise DomainLinkError", "with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\\ patch('corehq.apps.linked_domain.views.user_has_admin_access_in_all_domains', return_value=False),\\ self.assertRaises(DomainLinkNotAllowed): link_domains(Mock(), self.upstream_domain,", "from corehq.apps.domain.exceptions import DomainDoesNotExist from corehq.apps.linked_domain.exceptions import ( DomainLinkAlreadyExists, DomainLinkError,", "= 'upstream' cls.downstream_domain = 'downstream' def test_exception_raised_if_domain_does_not_exist(self): def mock_handler(domain): return", "DomainLinkAlreadyExists, DomainLinkError, DomainLinkNotAllowed, ) from corehq.apps.linked_domain.views import link_domains class LinkDomainsTests(SimpleTestCase):", "patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=Mock()),\\ self.assertRaises(DomainLinkAlreadyExists): link_domains(Mock(), self.upstream_domain, self.downstream_domain) def 
test_exception_raised_if_domain_link_error_raised(self): def mock_handler(downstream,", "self.assertRaises(DomainLinkNotAllowed): link_domains(Mock(), self.upstream_domain, self.downstream_domain) def test_successful(self): with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link',", "return_value=False),\\ self.assertRaises(DomainLinkNotAllowed): link_domains(Mock(), self.upstream_domain, self.downstream_domain) def test_successful(self): with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\", "patch('corehq.apps.linked_domain.views.DomainLink.link_domains', return_value=True),\\ patch('corehq.apps.linked_domain.views.user_has_admin_access_in_all_domains', return_value=True): domain_link = link_domains(Mock(), self.upstream_domain, self.downstream_domain) self.assertIsNotNone(domain_link)", "test_exception_raised_if_user_is_not_admin_in_both_domains(self): with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\\ patch('corehq.apps.linked_domain.views.user_has_admin_access_in_all_domains', return_value=False),\\ self.assertRaises(DomainLinkNotAllowed): link_domains(Mock(),", "( DomainLinkAlreadyExists, DomainLinkError, DomainLinkNotAllowed, ) from corehq.apps.linked_domain.views import link_domains class", "def test_exception_raised_if_domain_link_already_exists(self): with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=Mock()),\\ self.assertRaises(DomainLinkAlreadyExists): link_domains(Mock(), self.upstream_domain,", "unittest.mock import Mock, patch from django.test import SimpleTestCase from corehq.apps.domain.exceptions", "from corehq.apps.linked_domain.exceptions import ( DomainLinkAlreadyExists, DomainLinkError, 
DomainLinkNotAllowed, ) from corehq.apps.linked_domain.views", "<filename>corehq/apps/linked_domain/tests/test_views.py from unittest.mock import Mock, patch from django.test import SimpleTestCase", "with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\\ patch('corehq.apps.linked_domain.views.DomainLink.link_domains', return_value=True),\\ patch('corehq.apps.linked_domain.views.user_has_admin_access_in_all_domains', return_value=True): domain_link", "cls.downstream_domain = 'downstream' def test_exception_raised_if_domain_does_not_exist(self): def mock_handler(domain): return domain !=", "test_exception_raised_if_domain_link_already_exists(self): with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=Mock()),\\ self.assertRaises(DomainLinkAlreadyExists): link_domains(Mock(), self.upstream_domain, self.downstream_domain)", "= mock_handler link_domains(Mock(), self.upstream_domain, self.downstream_domain) def test_exception_raised_if_user_is_not_admin_in_both_domains(self): with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\", "setUpClass(cls): super(LinkDomainsTests, cls).setUpClass() cls.upstream_domain = 'upstream' cls.downstream_domain = 'downstream' def", "patch from django.test import SimpleTestCase from corehq.apps.domain.exceptions import DomainDoesNotExist from", "upstream): raise DomainLinkError with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\\ patch('corehq.apps.linked_domain.views.DomainLink.link_domains') as", "mock_handler(domain): return domain != self.downstream_domain with patch('corehq.apps.linked_domain.views.domain_exists') as mock_domainexists,\\ self.assertRaises(DomainDoesNotExist):", 
"link_domains(Mock(), self.upstream_domain, self.downstream_domain) def test_exception_raised_if_domain_link_already_exists(self): with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=Mock()),\\", "return_value=None),\\ patch('corehq.apps.linked_domain.views.DomainLink.link_domains', return_value=True),\\ patch('corehq.apps.linked_domain.views.user_has_admin_access_in_all_domains', return_value=True): domain_link = link_domains(Mock(), self.upstream_domain, self.downstream_domain)", "patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\\ patch('corehq.apps.linked_domain.views.DomainLink.link_domains', return_value=True),\\ patch('corehq.apps.linked_domain.views.user_has_admin_access_in_all_domains', return_value=True): domain_link =", "DomainLinkError, DomainLinkNotAllowed, ) from corehq.apps.linked_domain.views import link_domains class LinkDomainsTests(SimpleTestCase): @classmethod", "cls.upstream_domain = 'upstream' cls.downstream_domain = 'downstream' def test_exception_raised_if_domain_does_not_exist(self): def mock_handler(domain):", "return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\\ patch('corehq.apps.linked_domain.views.DomainLink.link_domains', return_value=True),\\ patch('corehq.apps.linked_domain.views.user_has_admin_access_in_all_domains', return_value=True): domain_link = link_domains(Mock(),", "self.upstream_domain, self.downstream_domain) def test_exception_raised_if_user_is_not_admin_in_both_domains(self): with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\\ patch('corehq.apps.linked_domain.views.user_has_admin_access_in_all_domains',", "return domain != 
self.downstream_domain with patch('corehq.apps.linked_domain.views.domain_exists') as mock_domainexists,\\ self.assertRaises(DomainDoesNotExist): mock_domainexists.side_effect", "with patch('corehq.apps.linked_domain.views.domain_exists') as mock_domainexists,\\ self.assertRaises(DomainDoesNotExist): mock_domainexists.side_effect = mock_handler link_domains(Mock(), self.upstream_domain,", "as mock_linkdomains,\\ self.assertRaises(DomainLinkError): mock_linkdomains.side_effect = mock_handler link_domains(Mock(), self.upstream_domain, self.downstream_domain) def", "return_value=None),\\ patch('corehq.apps.linked_domain.views.user_has_admin_access_in_all_domains', return_value=False),\\ self.assertRaises(DomainLinkNotAllowed): link_domains(Mock(), self.upstream_domain, self.downstream_domain) def test_successful(self): with", "self.downstream_domain with patch('corehq.apps.linked_domain.views.domain_exists') as mock_domainexists,\\ self.assertRaises(DomainDoesNotExist): mock_domainexists.side_effect = mock_handler link_domains(Mock(),", "link_domains(Mock(), self.upstream_domain, self.downstream_domain) def test_exception_raised_if_user_is_not_admin_in_both_domains(self): with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\\", "import ( DomainLinkAlreadyExists, DomainLinkError, DomainLinkNotAllowed, ) from corehq.apps.linked_domain.views import link_domains", "return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\\ patch('corehq.apps.linked_domain.views.user_has_admin_access_in_all_domains', return_value=False),\\ self.assertRaises(DomainLinkNotAllowed): link_domains(Mock(), self.upstream_domain, self.downstream_domain) def", "with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\\ 
patch('corehq.apps.linked_domain.views.DomainLink.link_domains') as mock_linkdomains,\\ self.assertRaises(DomainLinkError): mock_linkdomains.side_effect", "return_value=None),\\ patch('corehq.apps.linked_domain.views.DomainLink.link_domains') as mock_linkdomains,\\ self.assertRaises(DomainLinkError): mock_linkdomains.side_effect = mock_handler link_domains(Mock(), self.upstream_domain,", "'upstream' cls.downstream_domain = 'downstream' def test_exception_raised_if_domain_does_not_exist(self): def mock_handler(domain): return domain", "test_exception_raised_if_domain_does_not_exist(self): def mock_handler(domain): return domain != self.downstream_domain with patch('corehq.apps.linked_domain.views.domain_exists') as", "def mock_handler(domain): return domain != self.downstream_domain with patch('corehq.apps.linked_domain.views.domain_exists') as mock_domainexists,\\", "corehq.apps.domain.exceptions import DomainDoesNotExist from corehq.apps.linked_domain.exceptions import ( DomainLinkAlreadyExists, DomainLinkError, DomainLinkNotAllowed,", "def setUpClass(cls): super(LinkDomainsTests, cls).setUpClass() cls.upstream_domain = 'upstream' cls.downstream_domain = 'downstream'", "return_value=Mock()),\\ self.assertRaises(DomainLinkAlreadyExists): link_domains(Mock(), self.upstream_domain, self.downstream_domain) def test_exception_raised_if_domain_link_error_raised(self): def mock_handler(downstream, upstream):", "link_domains(Mock(), self.upstream_domain, self.downstream_domain) def test_successful(self): with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\\", "def test_exception_raised_if_user_is_not_admin_in_both_domains(self): with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\\ 
patch('corehq.apps.linked_domain.views.user_has_admin_access_in_all_domains', return_value=False),\\ self.assertRaises(DomainLinkNotAllowed):", "self.assertRaises(DomainLinkAlreadyExists): link_domains(Mock(), self.upstream_domain, self.downstream_domain) def test_exception_raised_if_domain_link_error_raised(self): def mock_handler(downstream, upstream): raise", "@classmethod def setUpClass(cls): super(LinkDomainsTests, cls).setUpClass() cls.upstream_domain = 'upstream' cls.downstream_domain =", "as mock_domainexists,\\ self.assertRaises(DomainDoesNotExist): mock_domainexists.side_effect = mock_handler link_domains(Mock(), self.upstream_domain, self.downstream_domain) def", "def mock_handler(downstream, upstream): raise DomainLinkError with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\\", "self.downstream_domain) def test_exception_raised_if_user_is_not_admin_in_both_domains(self): with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\\ patch('corehq.apps.linked_domain.views.user_has_admin_access_in_all_domains', return_value=False),\\", "self.upstream_domain, self.downstream_domain) def test_successful(self): with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\\ patch('corehq.apps.linked_domain.views.DomainLink.link_domains',", "patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\\ patch('corehq.apps.linked_domain.views.DomainLink.link_domains', return_value=True),\\ patch('corehq.apps.linked_domain.views.user_has_admin_access_in_all_domains', return_value=True): domain_link = link_domains(Mock(), self.upstream_domain,", "DomainLinkError with patch('corehq.apps.linked_domain.views.domain_exists', 
return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\\ patch('corehq.apps.linked_domain.views.DomainLink.link_domains') as mock_linkdomains,\\ self.assertRaises(DomainLinkError):", "import DomainDoesNotExist from corehq.apps.linked_domain.exceptions import ( DomainLinkAlreadyExists, DomainLinkError, DomainLinkNotAllowed, )", "link_domains class LinkDomainsTests(SimpleTestCase): @classmethod def setUpClass(cls): super(LinkDomainsTests, cls).setUpClass() cls.upstream_domain =", "import link_domains class LinkDomainsTests(SimpleTestCase): @classmethod def setUpClass(cls): super(LinkDomainsTests, cls).setUpClass() cls.upstream_domain", "patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=Mock()),\\ self.assertRaises(DomainLinkAlreadyExists): link_domains(Mock(), self.upstream_domain, self.downstream_domain) def test_exception_raised_if_domain_link_error_raised(self):", "self.assertRaises(DomainLinkError): mock_linkdomains.side_effect = mock_handler link_domains(Mock(), self.upstream_domain, self.downstream_domain) def test_exception_raised_if_user_is_not_admin_in_both_domains(self): with", "corehq.apps.linked_domain.exceptions import ( DomainLinkAlreadyExists, DomainLinkError, DomainLinkNotAllowed, ) from corehq.apps.linked_domain.views import", "import Mock, patch from django.test import SimpleTestCase from corehq.apps.domain.exceptions import", "def test_exception_raised_if_domain_does_not_exist(self): def mock_handler(domain): return domain != self.downstream_domain with patch('corehq.apps.linked_domain.views.domain_exists')", "import SimpleTestCase from corehq.apps.domain.exceptions import DomainDoesNotExist from corehq.apps.linked_domain.exceptions import (", "DomainLinkNotAllowed, ) from corehq.apps.linked_domain.views import link_domains class LinkDomainsTests(SimpleTestCase): @classmethod def", 
"mock_domainexists,\\ self.assertRaises(DomainDoesNotExist): mock_domainexists.side_effect = mock_handler link_domains(Mock(), self.upstream_domain, self.downstream_domain) def test_exception_raised_if_domain_link_already_exists(self):", "def test_exception_raised_if_domain_link_error_raised(self): def mock_handler(downstream, upstream): raise DomainLinkError with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\", ") from corehq.apps.linked_domain.views import link_domains class LinkDomainsTests(SimpleTestCase): @classmethod def setUpClass(cls):", "self.assertRaises(DomainDoesNotExist): mock_domainexists.side_effect = mock_handler link_domains(Mock(), self.upstream_domain, self.downstream_domain) def test_exception_raised_if_domain_link_already_exists(self): with", "!= self.downstream_domain with patch('corehq.apps.linked_domain.views.domain_exists') as mock_domainexists,\\ self.assertRaises(DomainDoesNotExist): mock_domainexists.side_effect = mock_handler", "patch('corehq.apps.linked_domain.views.DomainLink.link_domains') as mock_linkdomains,\\ self.assertRaises(DomainLinkError): mock_linkdomains.side_effect = mock_handler link_domains(Mock(), self.upstream_domain, self.downstream_domain)", "mock_linkdomains,\\ self.assertRaises(DomainLinkError): mock_linkdomains.side_effect = mock_handler link_domains(Mock(), self.upstream_domain, self.downstream_domain) def test_exception_raised_if_user_is_not_admin_in_both_domains(self):", "test_successful(self): with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\\ patch('corehq.apps.linked_domain.views.DomainLink.link_domains', return_value=True),\\ patch('corehq.apps.linked_domain.views.user_has_admin_access_in_all_domains', return_value=True):", "= 'downstream' def test_exception_raised_if_domain_does_not_exist(self): def mock_handler(domain): return domain != self.downstream_domain", 
"DomainDoesNotExist from corehq.apps.linked_domain.exceptions import ( DomainLinkAlreadyExists, DomainLinkError, DomainLinkNotAllowed, ) from", "mock_handler link_domains(Mock(), self.upstream_domain, self.downstream_domain) def test_exception_raised_if_domain_link_already_exists(self): with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link',", "from unittest.mock import Mock, patch from django.test import SimpleTestCase from", "patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\\ patch('corehq.apps.linked_domain.views.DomainLink.link_domains') as mock_linkdomains,\\ self.assertRaises(DomainLinkError): mock_linkdomains.side_effect = mock_handler link_domains(Mock(),", "self.downstream_domain) def test_exception_raised_if_domain_link_already_exists(self): with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=Mock()),\\ self.assertRaises(DomainLinkAlreadyExists): link_domains(Mock(),", "from django.test import SimpleTestCase from corehq.apps.domain.exceptions import DomainDoesNotExist from corehq.apps.linked_domain.exceptions", "return_value=True),\\ patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=Mock()),\\ self.assertRaises(DomainLinkAlreadyExists): link_domains(Mock(), self.upstream_domain, self.downstream_domain) def test_exception_raised_if_domain_link_error_raised(self): def", "SimpleTestCase from corehq.apps.domain.exceptions import DomainDoesNotExist from corehq.apps.linked_domain.exceptions import ( DomainLinkAlreadyExists,", "mock_linkdomains.side_effect = mock_handler link_domains(Mock(), self.upstream_domain, self.downstream_domain) def test_exception_raised_if_user_is_not_admin_in_both_domains(self): with patch('corehq.apps.linked_domain.views.domain_exists'," ]
[]
[ "super(self.__class__, self).parse(fp, **kwargs) # r = self._resource return doc #", ".datacite import DataCiteParser class WrongPublisherException(Exception): pass class ZenodoParser(DataCiteParser): def get_references(self,", "so Lars is shoving the references # in a section", "in self._array(r.get('descriptions', {}).get('description', [])): t = s.get('@descriptionType') c = self._text(s)", "import DataCiteParser class WrongPublisherException(Exception): pass class ZenodoParser(DataCiteParser): def get_references(self, r):", "return references def get_abstract(self, r): abs = super(ZenodoParser, self).get_abstract(r) abs", "references = [] for s in self._array(r.get('descriptions', {}).get('description', [])): t", "re.sub(r'</p>\\s*$', '', abs) return abs def parse(self, fp, **kwargs): \"\"\"Parses", "doc['source'] = 'ZENODO' return doc # # if __name__ ==", "get_references(self, r): # as of version 3.1 of datacite schema,", "[])): t = s.get('@descriptionType') c = self._text(s) if t ==", "= super(self.__class__, self).parse(fp, **kwargs) # r = self._resource return doc", "section labeled as \"Other\" as a json structure references =", "pub = doc.get('source') if pub != 'Zenodo' and pub !=", "= self._resource return doc # publisher pub = doc.get('source') if", "output sensibly # import codecs # sys.stdout = codecs.getwriter('utf-8')(sys.stdout) #", "# import codecs # sys.stdout = codecs.getwriter('utf-8')(sys.stdout) # sys.stderr =", "= ZenodoParser() # for file in sys.argv[1:]: # d =", "\"__main__\": # # # allows program to print utf-8 encoded", "= doc.get('source') if pub != 'Zenodo' and pub != 'ZENODO':", "import json import re import logging from .datacite import DataCiteParser", "get_abstract(self, r): abs = super(ZenodoParser, self).get_abstract(r) abs = re.sub(r'\\s*<p>', '',", "abs) abs = re.sub(r'</p>\\s*$', '', abs) return abs def parse(self,", "re.sub(r'\\s*<p>', '', abs) abs = re.sub(r'</p>\\s*$', '', abs) return abs", "return abs def parse(self, 
fp, **kwargs): \"\"\"Parses Zenodo's flavor of", "sys.stdout = codecs.getwriter('utf-8')(sys.stdout) # sys.stderr = codecs.getwriter('utf-8')(sys.stderr) # # parser", "encoded output sensibly # import codecs # sys.stdout = codecs.getwriter('utf-8')(sys.stdout)", "r): abs = super(ZenodoParser, self).get_abstract(r) abs = re.sub(r'\\s*<p>', '', abs)", "returns ADS tagged format\"\"\" doc = super(self.__class__, self).parse(fp, **kwargs) #", "only hope... references = c.split('\\n') elif t == 'Other': try:", "in a section labeled as \"Other\" as a json structure", "# XXX not supported yet, but one can only hope...", "= codecs.getwriter('utf-8')(sys.stderr) # # parser = ZenodoParser() # for file", "Zenodo\" % pub) else: doc['source'] = 'ZENODO' return doc #", "parse(self, fp, **kwargs): \"\"\"Parses Zenodo's flavor of DataCite 3.1 schema,", "than Zenodo\" % pub) else: doc['source'] = 'ZENODO' return doc", "ZenodoParser() # for file in sys.argv[1:]: # d = None", "= json.loads(c) references = j.get('references', []) except ValueError: logging.warning(u'Ignoring unparsable", "of \\\"%s\\\" rather than Zenodo\" % pub) else: doc['source'] =", "= re.sub(r'\\s*<p>', '', abs) abs = re.sub(r'</p>\\s*$', '', abs) return", "unparsable \"Other\" description element: %s\\n' % c) return references def", "ADS tagged format\"\"\" doc = super(self.__class__, self).parse(fp, **kwargs) # r", "__name__ == \"__main__\": # # # allows program to print", "t == 'Other': try: j = json.loads(c) references = j.get('references',", "raise WrongPublisherException(\"Found publisher field of \\\"%s\\\" rather than Zenodo\" %", "= 'ZENODO' return doc # # if __name__ == \"__main__\":", "= s.get('@descriptionType') c = self._text(s) if t == 'References': #", "{}).get('description', [])): t = s.get('@descriptionType') c = self._text(s) if t", "abs = re.sub(r'\\s*<p>', '', abs) abs = re.sub(r'</p>\\s*$', '', abs)", "= c.split('\\n') elif t == 'Other': try: j = json.loads(c)", "doc = super(self.__class__, 
self).parse(fp, **kwargs) # r = self._resource return", "Zenodo's flavor of DataCite 3.1 schema, returns ADS tagged format\"\"\"", "pub != 'ZENODO': raise WrongPublisherException(\"Found publisher field of \\\"%s\\\" rather", "rather than Zenodo\" % pub) else: doc['source'] = 'ZENODO' return", "= [] for s in self._array(r.get('descriptions', {}).get('description', [])): t =", "as of version 3.1 of datacite schema, \"References\" is not", "== \"__main__\": # # # allows program to print utf-8", "# d = None # with open(file, 'r') as fp:", "if __name__ == \"__main__\": # # # allows program to", "can only hope... references = c.split('\\n') elif t == 'Other':", "schema, \"References\" is not an # allowed description type so", "% c) return references def get_abstract(self, r): abs = super(ZenodoParser,", "to print utf-8 encoded output sensibly # import codecs #", "abs = super(ZenodoParser, self).get_abstract(r) abs = re.sub(r'\\s*<p>', '', abs) abs", "d = None # with open(file, 'r') as fp: #", "3.1 of datacite schema, \"References\" is not an # allowed", "references def get_abstract(self, r): abs = super(ZenodoParser, self).get_abstract(r) abs =", "description type so Lars is shoving the references # in", "doc # # if __name__ == \"__main__\": # # #", "# if __name__ == \"__main__\": # # # allows program", "= None # with open(file, 'r') as fp: # d", "t == 'References': # XXX not supported yet, but one", "utf-8 encoded output sensibly # import codecs # sys.stdout =", "elif t == 'Other': try: j = json.loads(c) references =", "for file in sys.argv[1:]: # d = None # with", "json structure references = [] for s in self._array(r.get('descriptions', {}).get('description',", "# r = self._resource return doc # publisher pub =", "WrongPublisherException(Exception): pass class ZenodoParser(DataCiteParser): def get_references(self, r): # as of", "XXX not supported yet, but one can only hope... 
references", "abs def parse(self, fp, **kwargs): \"\"\"Parses Zenodo's flavor of DataCite", "of DataCite 3.1 schema, returns ADS tagged format\"\"\" doc =", "self).get_abstract(r) abs = re.sub(r'\\s*<p>', '', abs) abs = re.sub(r'</p>\\s*$', '',", "# as of version 3.1 of datacite schema, \"References\" is", "class WrongPublisherException(Exception): pass class ZenodoParser(DataCiteParser): def get_references(self, r): # as", "pub) else: doc['source'] = 'ZENODO' return doc # # if", "pass class ZenodoParser(DataCiteParser): def get_references(self, r): # as of version", "DataCiteParser class WrongPublisherException(Exception): pass class ZenodoParser(DataCiteParser): def get_references(self, r): #", "# allowed description type so Lars is shoving the references", "print utf-8 encoded output sensibly # import codecs # sys.stdout", "3.1 schema, returns ADS tagged format\"\"\" doc = super(self.__class__, self).parse(fp,", "return doc # publisher pub = doc.get('source') if pub !=", "= re.sub(r'</p>\\s*$', '', abs) return abs def parse(self, fp, **kwargs):", "'References': # XXX not supported yet, but one can only", "element: %s\\n' % c) return references def get_abstract(self, r): abs", "fp, **kwargs): \"\"\"Parses Zenodo's flavor of DataCite 3.1 schema, returns", "# from __future__ import absolute_import import json import re import", "datacite schema, \"References\" is not an # allowed description type", "!= 'Zenodo' and pub != 'ZENODO': raise WrongPublisherException(\"Found publisher field", "!= 'ZENODO': raise WrongPublisherException(\"Found publisher field of \\\"%s\\\" rather than", "'Other': try: j = json.loads(c) references = j.get('references', []) except", "field of \\\"%s\\\" rather than Zenodo\" % pub) else: doc['source']", "'ZENODO' return doc # # if __name__ == \"__main__\": #", "not an # allowed description type so Lars is shoving", "structure references = [] for s in self._array(r.get('descriptions', {}).get('description', [])):", "description element: 
%s\\n' % c) return references def get_abstract(self, r):", "one can only hope... references = c.split('\\n') elif t ==", "#!/usr/bin/python # # from __future__ import absolute_import import json import", "# # allows program to print utf-8 encoded output sensibly", "file in sys.argv[1:]: # d = None # with open(file,", "class ZenodoParser(DataCiteParser): def get_references(self, r): # as of version 3.1", "not supported yet, but one can only hope... references =", "sensibly # import codecs # sys.stdout = codecs.getwriter('utf-8')(sys.stdout) # sys.stderr", "import absolute_import import json import re import logging from .datacite", "references = j.get('references', []) except ValueError: logging.warning(u'Ignoring unparsable \"Other\" description", "else: doc['source'] = 'ZENODO' return doc # # if __name__", "%s\\n' % c) return references def get_abstract(self, r): abs =", "for s in self._array(r.get('descriptions', {}).get('description', [])): t = s.get('@descriptionType') c", "\"Other\" description element: %s\\n' % c) return references def get_abstract(self,", "r = self._resource return doc # publisher pub = doc.get('source')", "of datacite schema, \"References\" is not an # allowed description", "None # with open(file, 'r') as fp: # d =", "super(ZenodoParser, self).get_abstract(r) abs = re.sub(r'\\s*<p>', '', abs) abs = re.sub(r'</p>\\s*$',", "'', abs) return abs def parse(self, fp, **kwargs): \"\"\"Parses Zenodo's", "s in self._array(r.get('descriptions', {}).get('description', [])): t = s.get('@descriptionType') c =", "a section labeled as \"Other\" as a json structure references", "supported yet, but one can only hope... references = c.split('\\n')", "import codecs # sys.stdout = codecs.getwriter('utf-8')(sys.stdout) # sys.stderr = codecs.getwriter('utf-8')(sys.stderr)", "hope... 
references = c.split('\\n') elif t == 'Other': try: j", "DataCite 3.1 schema, returns ADS tagged format\"\"\" doc = super(self.__class__,", "**kwargs) # r = self._resource return doc # publisher pub", "is shoving the references # in a section labeled as", "but one can only hope... references = c.split('\\n') elif t", "= super(ZenodoParser, self).get_abstract(r) abs = re.sub(r'\\s*<p>', '', abs) abs =", "references # in a section labeled as \"Other\" as a", "# # # allows program to print utf-8 encoded output", "logging.warning(u'Ignoring unparsable \"Other\" description element: %s\\n' % c) return references", "is not an # allowed description type so Lars is", "tagged format\"\"\" doc = super(self.__class__, self).parse(fp, **kwargs) # r =", "\"\"\"Parses Zenodo's flavor of DataCite 3.1 schema, returns ADS tagged", "pub != 'Zenodo' and pub != 'ZENODO': raise WrongPublisherException(\"Found publisher", "logging from .datacite import DataCiteParser class WrongPublisherException(Exception): pass class ZenodoParser(DataCiteParser):", "parser = ZenodoParser() # for file in sys.argv[1:]: # d", "= codecs.getwriter('utf-8')(sys.stdout) # sys.stderr = codecs.getwriter('utf-8')(sys.stderr) # # parser =", "references = c.split('\\n') elif t == 'Other': try: j =", "json import re import logging from .datacite import DataCiteParser class", "type so Lars is shoving the references # in a", "c) return references def get_abstract(self, r): abs = super(ZenodoParser, self).get_abstract(r)", "the references # in a section labeled as \"Other\" as", "% pub) else: doc['source'] = 'ZENODO' return doc # #", "allows program to print utf-8 encoded output sensibly # import", "c.split('\\n') elif t == 'Other': try: j = json.loads(c) references", "\\\"%s\\\" rather than Zenodo\" % pub) else: doc['source'] = 'ZENODO'", "r): # as of version 3.1 of datacite schema, \"References\"", "in sys.argv[1:]: # d = None # with open(file, 'r')", "def get_abstract(self, r): abs = super(ZenodoParser, 
self).get_abstract(r) abs = re.sub(r'\\s*<p>',", "self).parse(fp, **kwargs) # r = self._resource return doc # publisher", "[]) except ValueError: logging.warning(u'Ignoring unparsable \"Other\" description element: %s\\n' %", "import logging from .datacite import DataCiteParser class WrongPublisherException(Exception): pass class", "\"References\" is not an # allowed description type so Lars", "# publisher pub = doc.get('source') if pub != 'Zenodo' and", "codecs # sys.stdout = codecs.getwriter('utf-8')(sys.stdout) # sys.stderr = codecs.getwriter('utf-8')(sys.stderr) #", "self._text(s) if t == 'References': # XXX not supported yet,", "# in a section labeled as \"Other\" as a json", "as a json structure references = [] for s in", "format\"\"\" doc = super(self.__class__, self).parse(fp, **kwargs) # r = self._resource", "# allows program to print utf-8 encoded output sensibly #", "codecs.getwriter('utf-8')(sys.stderr) # # parser = ZenodoParser() # for file in", "c = self._text(s) if t == 'References': # XXX not", "doc # publisher pub = doc.get('source') if pub != 'Zenodo'", "shoving the references # in a section labeled as \"Other\"", "ValueError: logging.warning(u'Ignoring unparsable \"Other\" description element: %s\\n' % c) return", "def get_references(self, r): # as of version 3.1 of datacite", "abs) return abs def parse(self, fp, **kwargs): \"\"\"Parses Zenodo's flavor", "abs = re.sub(r'</p>\\s*$', '', abs) return abs def parse(self, fp,", "__future__ import absolute_import import json import re import logging from", "**kwargs): \"\"\"Parses Zenodo's flavor of DataCite 3.1 schema, returns ADS", "WrongPublisherException(\"Found publisher field of \\\"%s\\\" rather than Zenodo\" % pub)", "def parse(self, fp, **kwargs): \"\"\"Parses Zenodo's flavor of DataCite 3.1", "as fp: # d = parser.parse(fp) # print json.dumps(d, indent=2)", "== 'Other': try: j = json.loads(c) references = j.get('references', [])", "json.loads(c) references = j.get('references', []) except 
ValueError: logging.warning(u'Ignoring unparsable \"Other\"", "if t == 'References': # XXX not supported yet, but", "absolute_import import json import re import logging from .datacite import", "version 3.1 of datacite schema, \"References\" is not an #", "'Zenodo' and pub != 'ZENODO': raise WrongPublisherException(\"Found publisher field of", "labeled as \"Other\" as a json structure references = []", "flavor of DataCite 3.1 schema, returns ADS tagged format\"\"\" doc", "doc.get('source') if pub != 'Zenodo' and pub != 'ZENODO': raise", "from __future__ import absolute_import import json import re import logging", "publisher field of \\\"%s\\\" rather than Zenodo\" % pub) else:", "program to print utf-8 encoded output sensibly # import codecs", "# # from __future__ import absolute_import import json import re", "j = json.loads(c) references = j.get('references', []) except ValueError: logging.warning(u'Ignoring", "codecs.getwriter('utf-8')(sys.stdout) # sys.stderr = codecs.getwriter('utf-8')(sys.stderr) # # parser = ZenodoParser()", "sys.stderr = codecs.getwriter('utf-8')(sys.stderr) # # parser = ZenodoParser() # for", "with open(file, 'r') as fp: # d = parser.parse(fp) #", "# sys.stderr = codecs.getwriter('utf-8')(sys.stderr) # # parser = ZenodoParser() #", "= j.get('references', []) except ValueError: logging.warning(u'Ignoring unparsable \"Other\" description element:", "# # if __name__ == \"__main__\": # # # allows", "t = s.get('@descriptionType') c = self._text(s) if t == 'References':", "<reponame>golnazads/adsabs-pyingest #!/usr/bin/python # # from __future__ import absolute_import import json", "of version 3.1 of datacite schema, \"References\" is not an", "yet, but one can only hope... 
references = c.split('\\n') elif", "self._resource return doc # publisher pub = doc.get('source') if pub", "as \"Other\" as a json structure references = [] for", "self._array(r.get('descriptions', {}).get('description', [])): t = s.get('@descriptionType') c = self._text(s) if", "'r') as fp: # d = parser.parse(fp) # print json.dumps(d,", "j.get('references', []) except ValueError: logging.warning(u'Ignoring unparsable \"Other\" description element: %s\\n'", "ZenodoParser(DataCiteParser): def get_references(self, r): # as of version 3.1 of", "\"Other\" as a json structure references = [] for s", "from .datacite import DataCiteParser class WrongPublisherException(Exception): pass class ZenodoParser(DataCiteParser): def", "s.get('@descriptionType') c = self._text(s) if t == 'References': # XXX", "and pub != 'ZENODO': raise WrongPublisherException(\"Found publisher field of \\\"%s\\\"", "allowed description type so Lars is shoving the references #", "# sys.stdout = codecs.getwriter('utf-8')(sys.stdout) # sys.stderr = codecs.getwriter('utf-8')(sys.stderr) # #", "except ValueError: logging.warning(u'Ignoring unparsable \"Other\" description element: %s\\n' % c)", "'', abs) abs = re.sub(r'</p>\\s*$', '', abs) return abs def", "# parser = ZenodoParser() # for file in sys.argv[1:]: #", "schema, returns ADS tagged format\"\"\" doc = super(self.__class__, self).parse(fp, **kwargs)", "try: j = json.loads(c) references = j.get('references', []) except ValueError:", "re import logging from .datacite import DataCiteParser class WrongPublisherException(Exception): pass", "an # allowed description type so Lars is shoving the", "# for file in sys.argv[1:]: # d = None #", "# with open(file, 'r') as fp: # d = parser.parse(fp)", "= self._text(s) if t == 'References': # XXX not supported", "== 'References': # XXX not supported yet, but one can", "publisher pub = doc.get('source') if pub != 'Zenodo' and pub", "'ZENODO': raise WrongPublisherException(\"Found publisher field of \\\"%s\\\" 
rather than Zenodo\"", "[] for s in self._array(r.get('descriptions', {}).get('description', [])): t = s.get('@descriptionType')", "# # parser = ZenodoParser() # for file in sys.argv[1:]:", "if pub != 'Zenodo' and pub != 'ZENODO': raise WrongPublisherException(\"Found", "a json structure references = [] for s in self._array(r.get('descriptions',", "return doc # # if __name__ == \"__main__\": # #", "import re import logging from .datacite import DataCiteParser class WrongPublisherException(Exception):", "sys.argv[1:]: # d = None # with open(file, 'r') as", "Lars is shoving the references # in a section labeled", "open(file, 'r') as fp: # d = parser.parse(fp) # print" ]
[ "from authority import Authority from utils.logger import logger, iplogger from", "in hdr_list: d = {} d[\"hash\"] = dhash(hdr)[-5:] d[\"time\"] =", "= requests.post(get_peer_url(peer) + \"/getblock\", data={\"headerhash\": header_hash}) return Block.from_json(decompress(r.text)).object() def check_block_with_peer(peer,", "hdr_list = BLOCKCHAIN.active_chain.header_list if len(hdr_list) > 200: hdr_list = BLOCKCHAIN.active_chain.header_list[:100]", "LineProfilerMiddleware with open(\"lineprof\" + str(consts.MINER_SERVER_PORT) + \".log\", \"w\") as f:", "requests.post( \"http://0.0.0.0:\" + str(consts.MINER_SERVER_PORT) + \"/newtransaction\", data=compress(transaction.to_json()), timeout=(5, 1), )", "Thread(target=mining_thread_task, name=\"Miner\", daemon=True).start() def fetch_peer_list() -> List[Dict[str, Any]]: try: r", "request.json public_key = data[\"public_key\"] logger.debug(public_key) current_balance = check_balance(public_key) return str(current_balance)", "consts.NO_MINING else \"MINING\" else: return \"Password Mismatch,\" + \"NOT MINING\"", "decompress(request_data) if transaction_json: try: tx = Transaction.from_json(transaction_json).object() # Add transaction", "+ \"NOT MINING\" if consts.NO_MINING else \"MINING\" @app.route(\"/<url:re:.+>\") @error(403) @error(404)", "@app.post(\"/getblockhashes\") def send_block_hashes(): log_ip(request, inspect.stack()[0][3]) peer_height = int(request.forms.get(\"myheight\")) hash_list =", "file: uuid_json = file.read() valid_ids = set(json.loads(uuid_json)) @app.post(\"/\") def puzzle():", "Hash\" @app.post(\"/getblock\") def getblock(): log_ip(request, inspect.stack()[0][3]) hhash = request.forms.get(\"headerhash\") return", "# else: # message = \"Some Error Occured, Contact Admin.\"", "1), ) if r.status_code == 400: response.status = 400 logger.error(\"Wallet:", "else: logger.debug(\"Main: Peer data does not have Block Height\") return", "PEER_LIST: if greet_peer(peer): new_peer_list.append(peer) PEER_LIST = 
new_peer_list if PEER_LIST: max_peer", "d[\"hash\"] = dhash(hdr)[-5:] d[\"time\"] = hdr.timestamp d[\"data\"] = render_block_header(hdr) headers.append(d)", "str(html) @app.get(\"/chains\") def visualize_chain(): log_ip(request, inspect.stack()[0][3]) data = [] start", "doing nothing\") return \"Block already Received Before\" if BLOCKCHAIN.add_block(block): logger.info(\"Server:", "# TODO Make new chain/ orphan set for Block that", "{} vin = {} current_amount = 0 total_amount = sum(amounts)", "= 400 return \"Invalid Receiver Public Key\" current_balance = check_balance(sender_public_key)", "from bottle import BaseTemplate, Bottle, request, response, static_file, template, error", "request.forms.get(\"port\") peer[\"ip\"] = request.remote_addr peer[\"time\"] = time.time() peer[\"version\"] = request.forms.get(\"version\")", "\"Well Done!\" if check_balance(MY_WALLET.public_key) >= sum(amounts): result = send_bounty([pubkey], amounts)", "<br>\" + str(get_wallet_from_db(consts.MINER_SERVER_PORT)[1]) ) return s def render_block_header(hdr): html =", "tx_hist=tx_hist, balance=balance, pubkey=pubkey) @app.post(\"/mining\") def mining(): log_ip(request, inspect.stack()[0][3]) password =", "as e: # logger.error(e) # message = \"Some Error Occured.", "return template(\"wallet.html\", message=message, message_type=message_type, pubkey=MY_WALLET.public_key) @app.get(\"/checkmybalance\") def checkblance(): log_ip(request, inspect.stack()[0][3])", "to be mined!\" # else: # message = \"Some Error", "headerhash = request.forms.get(\"headerhash\") if get_block_from_db(headerhash): return json.dumps(True) return json.dumps(False) @app.post(\"/getblockhashes\")", "= data[\"sender_public_key\"] message = \"No Message\" if \"message\" in data:", "\"/greetpeer\", data=data) data = json.loads(r.text) # Update the peer data", "\"version\": consts.MINER_VERSION, \"blockheight\": BLOCKCHAIN.active_chain.length} # Send a POST request to", "\"<td>\" + 
str(datetime.fromtimestamp(hdr.timestamp).strftime(\"%d-%m-%Y %H:%M:%S\")) + \" (\" + str(hdr.timestamp) +", "TxIn(payout=SingleOutput.from_json(so), pub_key=sender_public_key, sig=\"\") i += 1 for i, address in", "request.forms.get(\"uuid\") pubkey = request.forms.get(\"pubkey\") amounts = [300] if uuid in", "== sender_public_key: current_amount += tx_out.amount vin[i] = TxIn(payout=SingleOutput.from_json(so), pub_key=sender_public_key, sig=\"\")", "the Receiver Port ID, try again.\" # message_type = \"danger\"", "devil, the poor have it, the rich need it, and", "enumerate(receiver_public_keys): vout[i] = TxOut(amount=amounts[i], address=address) change = (current_amount - total_amount)", "greet peer\" + str(e)) return False def receive_block_from_peer(peer: Dict[str, Any],", "= {} current_amount = 0 total_amount = sum(amounts) i =", "Update the peer data in the peer list with the", "Exception as e: logger.error(\"Server: New Block: invalid block received \"", "\"</td></tr>\" html += \"<tr><th>\" + \"Merkle Root\" + \"</th>\" html", "= check_balance(sender_public_key) if current_balance < bounty: logger.debug(\"Insufficient Balance to make", "len(hdr_list) > 200: hdr_list = BLOCKCHAIN.active_chain.header_list[:100] + BLOCKCHAIN.active_chain.header_list[-100:] for hdr", "+ str(bounty - current_balance) elif sender_public_key == receiver_public_key: logger.debug(\"Someone trying", "my peers sync_with_peers() # Start mining Thread Thread(target=start_mining_thread, daemon=True).start() if", "Thread Thread(target=start_mining_thread, daemon=True).start() if consts.NO_MINING: logger.info(\"FullNode: Not Mining\") # Start", "= utxo_list[0] if tx_out.address == pub_key: current_balance += int(tx_out.amount) return", "+ str(BLOCKCHAIN.active_chain.length) + \"<br>\" + dhash(BLOCKCHAIN.active_chain.header_list[-1]) + \"<br>\" + \"Balance", "PEER_LIST: max_peer = max(PEER_LIST, key=lambda k: k[\"blockheight\"]) logger.debug(f\"Sync: Syncing with", "public_key = 
data[\"public_key\"] tx_hist = BLOCKCHAIN.active_chain.transaction_history.get(public_key) return json.dumps(tx_hist) @app.post(\"/greetpeer\") def", "utxo_list[0] if tx_out.address == pub_key: current_balance += int(tx_out.amount) return int(current_balance)", "dhash from wallet import Wallet app = Bottle() BaseTemplate.defaults[\"get_url\"] =", "pass data = {\"version\": consts.MINER_VERSION, \"blockheight\": BLOCKCHAIN.active_chain.length} response.content_type = \"application/json\"", "# message_type = \"warning\" # else: # message = \"You", "def process_new_block(request_data: bytes) -> str: global BLOCKCHAIN block_json = decompress(request_data)", "str(peer)) Process(target=request_task, args=(PEER_LIST, url, data), daemon=True).start() def start_mining_thread(): time.sleep(5) Thread(target=mining_thread_task,", "not consts.NO_MINING: miner.start_mining(BLOCKCHAIN.mempool, BLOCKCHAIN.active_chain, MY_WALLET) time.sleep(consts.MINING_INTERVAL_THRESHOLD // 2) def send_to_all_peers(url,", "= \"info\" uuid = request.forms.get(\"uuid\") pubkey = request.forms.get(\"pubkey\") amounts =", "json.loads(r.text) return peer_list except Exception as e: logger.error(\"Could not connect", "sync_with_peers(): try: PEER_LIST = fetch_peer_list() new_peer_list = [] for peer", "Peer data does not have Block Height\") return False return", "Again\" else: logger.info(\"Wallet: Transaction Sent, Wait for it to be", "def greet_peer(peer: Dict[str, Any]) -> bool: try: url = get_peer_url(peer)", "\" (\" + str(hdr.timestamp) + \")</td></tr>\" ) # get block", "t = Timer(1, miner.stop_mining) t.start() return \"Block Received\" logger.error(\"Server: Invalid", "\"Done\" # Transactions for all active chains @app.post(\"/newtransaction\") def received_new_transaction():", "Timer(consts.MINING_INTERVAL_THRESHOLD * 2, sync_with_peers).start() def check_balance(pub_key: str) -> int: current_balance", "being sent, please wait for it to be mined!\" valid_ids.remove(uuid)", "total_amount = 0 # 
for i in range(0, number): #", "def wallet(): # log_ip(request, inspect.stack()[0][3]) # return template(\"wallet.html\", message=\"\", message_type=\"\",", "+ str(e)) return template(\"error.html\") return template(\"transaction.html\", tx=tx, block=block) @app.route(\"/address/<pubkey:re:.+>\", name=\"account\")", "str: return \"http://\" + str(peer[\"ip\"]) + \":\" + str(peer[\"port\"]) def", "str) -> int: current_balance = 0 for x, utxo_list in", "Transactions for all active chains @app.post(\"/newtransaction\") def received_new_transaction(): log_ip(request, inspect.stack()[0][3])", "number): # receiver = str(request.forms.get(\"port\" + str(i))) # bounty =", "get_peer_url(peer: Dict[str, Any]) -> str: return \"http://\" + str(peer[\"ip\"]) +", "set(json.loads(uuid_json)) @app.post(\"/\") def puzzle(): log_ip(request, inspect.stack()[0][3]) message = \"\" message_type", "= Transaction(version=consts.MINER_VERSION, locktime=0, timestamp=int(time.time()), vin=vin, vout=vout, message=message) return tx def", "0 total_amount = sum(amounts) i = 0 for so, utxo_list", "to Send Transaction\") try: r = requests.post( \"http://0.0.0.0:\" + str(consts.MINER_SERVER_PORT)", "== receiver_public_key: logger.debug(\"Someone trying to send money to himself\") response.status", "data does not have Block Height\") return False return True", "greet_peer_f(): log_ip(request, inspect.stack()[0][3]) try: peer = {} peer[\"port\"] = request.forms.get(\"port\")", "data={\"headerhash\": hhash}) result = json.loads(r.text) if result: return True return", "+ pubkey) message = \"Well Done!\" if check_balance(MY_WALLET.public_key) >= sum(amounts):", "html += \"<tr><th>Transaction \" + str(i) + \"</th><td>\" + str(s)", "request to the peer r = requests.post(url + \"/greetpeer\", data=data)", "i < len(hdr_list)] blocks = [Block.from_json(get_block_from_db(dhash(hdr_list[i]))).object() for i in indexes]", "except Exception as e: logger.error(\"Server: New Block: invalid block received", 
"Sending new block to peers\") # Broadcast block to other", "block_json = decompress(request_data) if block_json: try: block = Block.from_json(block_json).object() #", "return str(check_balance(MY_WALLET.public_key)) @app.route(\"/static/<filename:path>\", name=\"static\") def serve_static(filename): log_ip(request, inspect.stack()[0][3]) return static_file(filename,", "= new_peer_list if PEER_LIST: max_peer = max(PEER_LIST, key=lambda k: k[\"blockheight\"])", "message_type=message_type, question=question) @app.get('/about') def about(): return template(\"about.html\") # @app.get(\"/wallet\") #", "the peer. if data.get(\"blockheight\", None): peer.update(data) else: logger.debug(\"Main: Peer data", "prev=prev) @app.route(\"/block/<blockhash>\", name=\"transaction\") def block(blockhash): log_ip(request, inspect.stack()[0][3]) try: block =", "\"Invalid Block\" @app.post(\"/newblock\") def received_new_block(): log_ip(request, inspect.stack()[0][3]) return process_new_block(request.body.read()) @lru_cache(maxsize=16)", "Block.from_json(get_block_from_db(blockhash)).object() tx = None for t in block.transactions: if t.hash()", "Public Key\") response.status = 400 return \"Invalid Receiver Public Key\"", "int(request.query.prev or 0) if prev < 0: prev = 0", "explorer(): log_ip(request, inspect.stack()[0][3]) prev = int(request.query.prev or 0) if prev", "utils.logger import logger, iplogger from utils.storage import get_block_from_db, get_wallet_from_db, read_header_list_from_db", "-> bool: try: url = get_peer_url(peer) data = {\"port\": consts.MINER_SERVER_PORT,", "block already exists if get_block_from_db(dhash(block.header)): logger.info(\"Server: Received block exists, doing", "or 0) if prev < 0: prev = 0 hdr_list", "hash_list.append(dhash(BLOCKCHAIN.active_chain.header_list[i])) return compress(json.dumps(hash_list)).decode() @lru_cache(maxsize=16) def process_new_block(request_data: bytes) -> str: global", "added to Mempool\") return False, \"Not Valid 
Transaction\" else: return", "peer list with the new data received from the peer.", "requests.post(get_peer_url(peer) + \"/checkblock\", data={\"headerhash\": hhash}) result = json.loads(r.text) if result:", "{} data[\"sign_this\"] = transaction.to_json() return json.dumps(data) @app.post(\"/sendTransaction\") def send_transaction(): log_ip(request,", "template(\"chains.html\", data=data, start=start) @app.get(\"/explorer\") def explorer(): log_ip(request, inspect.stack()[0][3]) prev =", "\"<td>\" + str(hdr.merkle_root) + \"</td></tr>\" html += \"<tr><th>\" + \"Timestamp\"", "logger.error(\"Sync: Block received is invalid, Cannot Sync\") break return #", "peer_list = json.loads(r.text) return peer_list except Exception as e: logger.error(\"Could", "def checkblance(): log_ip(request, inspect.stack()[0][3]) return str(check_balance(MY_WALLET.public_key)) @app.route(\"/static/<filename:path>\", name=\"static\") def serve_static(filename):", "Contact Admin.\" message_type = \"warning\" else: message = \"Invalid Unique", "html += \"</table>\" return str(html) @app.get(\"/chains\") def visualize_chain(): log_ip(request, inspect.stack()[0][3])", "\"Invalid Block Received\" # Kill Miner t = Timer(1, miner.stop_mining)", "utxo_list in BLOCKCHAIN.active_chain.utxo.utxo.items(): tx_out = utxo_list[0] if current_amount >= total_amount:", "address=sender_public_key) tx = Transaction(version=consts.MINER_VERSION, locktime=0, timestamp=int(time.time()), vin=vin, vout=vout, message=message) return", "= \"danger\" # return template(\"wallet.html\", message=message, message_type=message_type, pubkey=MY_WALLET.public_key) # else:", ") if r.status_code == 400: logger.info(\"Wallet: Could not Send Transaction.", "not greet peer\" + str(e)) return False def receive_block_from_peer(peer: Dict[str,", "+ str(peer[\"port\"]) def greet_peer(peer: Dict[str, Any]) -> bool: try: url", "than God, more evil than the devil, the poor have", "block(blockhash): log_ip(request, inspect.stack()[0][3]) 
try: block = Block.from_json(get_block_from_db(blockhash)).object() except Exception as", "log_ip(request, inspect.stack()[0][3]) data = [] start = BLOCKCHAIN.active_chain.length - 10", "it to be mined!\" # else: # message = \"Some", "+ str(hdr.prev_block_hash) + \"</td></tr>\" html += \"<tr><th>\" + \"Merkle Root\"", "Message\" if \"message\" in data: message = data[\"message\"] if len(receiver_public_key)", "# Send a POST request to the peer r =", "else 0 headers = [] hdr_list = BLOCKCHAIN.active_chain.header_list if len(hdr_list)", "True for entry in PEER_LIST: ip = entry[\"ip\"] port =", "request.forms.get(\"version\") peer[\"blockheight\"] = request.forms.get(\"blockheight\") ADD_ENTRY = True for entry in", "hdr_list = list(reversed(BLOCKCHAIN.active_chain.header_list)) indexes = [i for i in range(prev", "import utils.constants as consts from core import Block, BlockChain, SingleOutput,", "+ \"Transactions\" + \"</th>\" html += \"<td>\" + str(len(block.transactions)) +", "\"</th><td>\" + str(s) + \"</td></tr>\" html += \"</table>\" return str(html)", "inspect.stack()[0][3]) message = \"\" message_type = \"info\" uuid = request.forms.get(\"uuid\")", "+ \")</td></tr>\" ) # get block block = Block.from_json(get_block_from_db(dhash(hdr))).object() html", "i, address in enumerate(receiver_public_keys): vout[i] = TxOut(amount=amounts[i], address=address) change =", "\"Transaction Already received\" except Exception as e: logger.error(\"Server: New Transaction:", "consts.MINER_SERVER_PORT, \"version\": consts.MINER_VERSION, \"blockheight\": BLOCKCHAIN.active_chain.length} # Send a POST request", "List[Dict[str, Any]]: try: r = requests.post(consts.SEED_SERVER_URL, data={\"port\": consts.MINER_SERVER_PORT}) peer_list =", "if tx_out.address == sender_public_key: current_amount += tx_out.amount vin[i] = TxIn(payout=SingleOutput.from_json(so),", "def sendinfo(): log_ip(request, inspect.stack()[0][3]) s = ( \"No. 
of Blocks:", "\"Transactions\" + \"</th>\" html += \"<td>\" + str(len(block.transactions)) + \"</td></tr>\"", "utils.storage import get_block_from_db, get_wallet_from_db, read_header_list_from_db from utils.utils import compress, decompress,", "message = \"Well Done!\" if check_balance(MY_WALLET.public_key) >= sum(amounts): result =", "as e: logger.error(\"Could not connect to DNS Seed\") return []", "static_file(\"favicon.ico\", root=\"static\") @app.get(\"/info\") def sendinfo(): log_ip(request, inspect.stack()[0][3]) s = (", "List[str], amounts: List[int]): current_balance = check_balance(MY_WALLET.public_key) for key in receiver_public_keys:", "logger.error(\"Could not connect to DNS Seed\") return [] def get_peer_url(peer:", "- 10 if BLOCKCHAIN.active_chain.length > 10 else 0 headers =", "requests.post(url + \"/greetpeer\", data=data) data = json.loads(r.text) # Update the", "# receiver = str(request.forms.get(\"port\" + str(i))) # bounty = int(request.forms.get(\"amount\"", "Balance to make Transaction\") response.status = 400 return \"Insufficient Balance", "str(i))) # bounty = int(request.forms.get(\"amount\" + str(i))) # publickey =", "if db_block: return compress(db_block) else: logger.error(\"ERROR CALLED GETBLOCK FOR NON", "inspect.stack()[0][3]) s = ( \"No. 
of Blocks: \" + str(BLOCKCHAIN.active_chain.length)", "x, utxo_list in BLOCKCHAIN.active_chain.utxo.utxo.items(): tx_out = utxo_list[0] if tx_out.address ==", "as file: uuid_json = file.read() valid_ids = set(json.loads(uuid_json)) @app.post(\"/\") def", "message_type = \"danger\" # return template(\"wallet.html\", message=message, message_type=message_type, pubkey=MY_WALLET.public_key) @app.get(\"/checkmybalance\")", "= json.loads(r.text) if result: return True return False def get_block_header_hash(height):", "+ str(e)) return template(\"error.html\") return template(\"block.html\", block=block) @app.route(\"/transaction/<blockhash>/<txhash>\", name=\"transaction\") def", "= [] for i in range(peer_height, BLOCKCHAIN.active_chain.length): hash_list.append(dhash(BLOCKCHAIN.active_chain.header_list[i])) return compress(json.dumps(hash_list)).decode()", "hdr.timestamp d[\"data\"] = render_block_header(hdr) headers.append(d) data.append(headers) return template(\"chains.html\", data=data, start=start)", "change > 0: vout[i + 1] = TxOut(amount=change, address=sender_public_key) tx", "get_block_from_db, get_wallet_from_db, read_header_list_from_db from utils.utils import compress, decompress, dhash from", "data=data, start=start) @app.get(\"/explorer\") def explorer(): log_ip(request, inspect.stack()[0][3]) prev = int(request.query.prev", "= {} d[\"hash\"] = dhash(hdr)[-5:] d[\"time\"] = hdr.timestamp d[\"data\"] =", "+ str(peer)) Process(target=request_task, args=(PEER_LIST, url, data), daemon=True).start() def start_mining_thread(): time.sleep(5)", "ID, try again.\" # message_type = \"danger\" # return template(\"wallet.html\",", "@app.get(\"/chains\") def visualize_chain(): log_ip(request, inspect.stack()[0][3]) data = [] start =", "\":\" + str(peer[\"port\"]) def greet_peer(peer: Dict[str, Any]) -> bool: try:", "receiver_public_key: logger.debug(\"Someone trying to send money to himself\") response.status =", "@app.post(\"/transactionHistory\") def 
transaction_history(): log_ip(request, inspect.stack()[0][3]) data = request.json public_key =", "peers send_to_all_peers(\"/newblock\", request_data) # TODO Make new chain/ orphan set", "= {\"version\": consts.MINER_VERSION, \"blockheight\": BLOCKCHAIN.active_chain.length} response.content_type = \"application/json\" return json.dumps(data)", "str(check_balance(MY_WALLET.public_key)) + \"<br>Public Key: <br>\" + str(get_wallet_from_db(consts.MINER_SERVER_PORT)[1]) ) return s", "dk = hashlib.pbkdf2_hmac(\"sha512\", password.encode(\"utf-8\"), b\"<PASSWORD>\", 200000) if hashed == dk:", ">= total_amount: # result = send_bounty(receivers, amounts) # if result:", "range(prev * 8, (prev + 1) * 8) if i", "List\") except Exception as e: logger.debug(\"Server: Greet Error: \" +", "str(peer[\"port\"]) def greet_peer(peer: Dict[str, Any]) -> bool: try: url =", "Faucet Money\") transaction.sign(MY_WALLET) logger.info(\"Wallet: Attempting to Send Transaction\") try: r", "in range(prev * 8, (prev + 1) * 8) if", "total_amount = sum(amounts) i = 0 for so, utxo_list in", "template(\"account.html\", tx_hist=tx_hist, balance=balance, pubkey=pubkey) @app.post(\"/mining\") def mining(): log_ip(request, inspect.stack()[0][3]) password", "sig=\"\") i += 1 for i, address in enumerate(receiver_public_keys): vout[i]", "200: hdr_list = BLOCKCHAIN.active_chain.header_list[:100] + BLOCKCHAIN.active_chain.header_list[-100:] for hdr in hdr_list:", "check_balance(MY_WALLET.public_key) >= sum(amounts): result = send_bounty([pubkey], amounts) if result: message", "except Exception as e: # logger.error(e) # message = \"Some", "except Exception as e: logger.error(\"Wallet: Could not Send Transaction. 
Try", "serve_static(filename): log_ip(request, inspect.stack()[0][3]) return static_file(filename, root=\"static\") @app.get(\"/favicon.ico\") def get_favicon(): log_ip(request,", "BLOCKCHAIN.active_chain.length): hash_list.append(dhash(BLOCKCHAIN.active_chain.header_list[i])) return compress(json.dumps(hash_list)).decode() @lru_cache(maxsize=16) def process_new_block(request_data: bytes) -> str:", "try: url = get_peer_url(peer) data = {\"port\": consts.MINER_SERVER_PORT, \"version\": consts.MINER_VERSION,", "( \"<td>\" + str(datetime.fromtimestamp(hdr.timestamp).strftime(\"%d-%m-%Y %H:%M:%S\")) + \" (\" + str(hdr.timestamp)", "0 headers = [] hdr_list = BLOCKCHAIN.active_chain.header_list if len(hdr_list) >", "return template(\"explorer.html\", blocks=blocks, transactions=transactions, prev=prev) @app.route(\"/block/<blockhash>\", name=\"transaction\") def block(blockhash): log_ip(request,", "Block Height\") return False return True except Exception as e:", "* 8) if i < len(hdr_list)] blocks = [Block.from_json(get_block_from_db(dhash(hdr_list[i]))).object() for", "= 400 logger.error(\"Wallet: Could not Send Transaction. 
Invalid transaction\") return", "compress, decompress, dhash from wallet import Wallet app = Bottle()", "BLOCKCHAIN.mempool: if BLOCKCHAIN.active_chain.is_transaction_valid(tx): logger.debug(\"Valid Transaction received, Adding to Mempool\") BLOCKCHAIN.mempool.add(tx)", "True, \"Done\" # Transactions for all active chains @app.post(\"/newtransaction\") def", "Requests: Error while sending data in process\" + str(peer)) Process(target=request_task,", "def get_block_header_hash(height): return dhash(BLOCKCHAIN.active_chain.header_list[height]) def sync(max_peer): fork_height = BLOCKCHAIN.active_chain.length r", "logger.info(\"Wallet: Attempting to Send Transaction\") try: r = requests.post( \"http://0.0.0.0:\"", "i += 1 for i, address in enumerate(receiver_public_keys): vout[i] =", "import Authority from utils.logger import logger, iplogger from utils.storage import", "ADD_ENTRY = True for entry in PEER_LIST: ip = entry[\"ip\"]", "response.status = 400 logger.error(\"Wallet: Could not Send Transaction. Invalid transaction\")", "from wsgi_lineprof.middleware import LineProfilerMiddleware with open(\"lineprof\" + str(consts.MINER_SERVER_PORT) + \".log\",", "200 else: response.status = 400 return message question = '''What", "daemon=True).start() def start_mining_thread(): time.sleep(5) Thread(target=mining_thread_task, name=\"Miner\", daemon=True).start() def fetch_peer_list() ->", "checkingbalance(): log_ip(request, inspect.stack()[0][3]) data = request.json public_key = data[\"public_key\"] logger.debug(public_key)", "Invalid transaction\") return \"Try Again\" except Exception as e: response.status", "def transaction_history(): log_ip(request, inspect.stack()[0][3]) data = request.json public_key = data[\"public_key\"]", "r.status_code == 400: logger.info(\"Wallet: Could not Send Transaction. 
Invalid Transaction\")", "exists, doing nothing\") return \"Block already Received Before\" if BLOCKCHAIN.add_block(block):", "data = request.json bounty = int(data[\"bounty\"]) receiver_public_key = data[\"receiver_public_key\"] sender_public_key", "sender_public_key == receiver_public_key: logger.debug(\"Someone trying to send money to himself\")", "it to be mined!\" valid_ids.remove(uuid) else: message = \"Some Error", "data = {\"version\": consts.MINER_VERSION, \"blockheight\": BLOCKCHAIN.active_chain.length} response.content_type = \"application/json\" return", ": Called function {fname}\") @app.post(\"/checkBalance\") def checkingbalance(): log_ip(request, inspect.stack()[0][3]) data", "please wait for it to be mined!\" # else: #", "header_list = read_header_list_from_db() BLOCKCHAIN.build_from_header_list(header_list) # Sync with all my peers", "inspect.stack()[0][3]) # number = int(request.forms.get(\"number\")) # message = \"\" #", "message = \"Some Error Occured, Contact Admin.\" # message_type =", "= list(BLOCKCHAIN.mempool) return template(\"explorer.html\", blocks=blocks, transactions=transactions, prev=prev) @app.route(\"/block/<blockhash>\", name=\"transaction\") def", "= get_ip(request) iplogger.info(f\"{client_ip} : Called function {fname}\") @app.post(\"/checkBalance\") def checkingbalance():", "Mempool if tx not in BLOCKCHAIN.mempool: if BLOCKCHAIN.active_chain.is_transaction_valid(tx): logger.debug(\"Valid Transaction", "else: logger.error(\"ERROR CALLED GETBLOCK FOR NON EXISTENT BLOCK\") return \"Invalid", "import Pool, Process from threading import Thread, Timer from typing", "inspect.stack()[0][3]) balance = check_balance(pubkey) tx_hist = BLOCKCHAIN.active_chain.transaction_history.get(pubkey) return template(\"account.html\", tx_hist=tx_hist,", "True except Exception as e: logger.error(\"Wallet: Could not Send Transaction.", "break if tx_out.address == sender_public_key: current_amount += tx_out.amount vin[i] =", 
"BLOCKCHAIN.active_chain.is_transaction_valid(tx): logger.debug(\"Valid Transaction received, Adding to Mempool\") BLOCKCHAIN.mempool.add(tx) # Broadcast", "return \"Invalid Block Received\" # Kill Miner t = Timer(1,", "= \"Your reward is being sent, please wait for it", "def create_transaction(receiver_public_keys: List[str], amounts: List[int], sender_public_key, message=\"\") -> Transaction: vout", "Received Before\" if BLOCKCHAIN.add_block(block): logger.info(\"Server: Received a New Valid Block,", "name=\"Miner\", daemon=True).start() def fetch_peer_list() -> List[Dict[str, Any]]: try: r =", "+ str(e)) Timer(consts.MINING_INTERVAL_THRESHOLD * 2, sync_with_peers).start() def check_balance(pub_key: str) ->", "= decompress(request_data) if transaction_json: try: tx = Transaction.from_json(transaction_json).object() # Add", "= request.json transaction = Transaction.from_json(data[\"transaction\"]).object() sig = data[\"signature\"] transaction.add_sign(sig) logger.debug(transaction)", "def receive_block_from_peer(peer: Dict[str, Any], header_hash) -> Block: r = requests.post(get_peer_url(peer)", "False def receive_block_from_peer(peer: Dict[str, Any], header_hash) -> Block: r =", "import hashlib import inspect import requests import waitress from bottle", "= {} peer[\"port\"] = request.forms.get(\"port\") peer[\"ip\"] = request.remote_addr peer[\"time\"] =", "= data[\"public_key\"] logger.debug(public_key) current_balance = check_balance(public_key) return str(current_balance) @app.post(\"/makeTransaction\") def", "# amounts.append(bounty) # if check_balance(MY_WALLET.public_key) >= total_amount: # result =", "from utils.storage import get_block_from_db, get_wallet_from_db, read_header_list_from_db from utils.utils import compress,", "BLOCKCHAIN.active_chain.transaction_history.get(public_key) return json.dumps(tx_hist) @app.post(\"/greetpeer\") def greet_peer_f(): log_ip(request, inspect.stack()[0][3]) try: peer", "+= \"<td>\" + str(hdr.prev_block_hash) + 
\"</td></tr>\" html += \"<tr><th>\" +", "def process_new_transaction(request_data: bytes) -> str: global BLOCKCHAIN transaction_json = decompress(request_data)", "inspect.stack()[0][3]) return static_file(\"favicon.ico\", root=\"static\") @app.get(\"/info\") def sendinfo(): log_ip(request, inspect.stack()[0][3]) s", "to Mempool\") return False, \"Not Valid Transaction\" else: return True,", "BLOCKCHAIN.active_chain.length} response.content_type = \"application/json\" return json.dumps(data) @lru_cache(maxsize=128) def cached_get_block(headerhash: str)", "prev < 0: prev = 0 hdr_list = list(reversed(BLOCKCHAIN.active_chain.header_list)) indexes", "= int(request.forms.get(\"amount\" + str(i))) # publickey = \"\" # if", "@app.route(\"/<url:re:.+>\") @error(403) @error(404) @error(505) def error_handle(url=\"url\", error=\"404\"): log_ip(request, inspect.stack()[0][3]) return", "logger.debug(\"Insuficient balance\") elif MY_WALLET.public_key in receiver_public_keys: logger.debug(\"Cannot send to myself\")", "if result: message = \"Your reward is being sent, please", "New Transaction: Invalid tx received: \" + str(e)) return False,", "return peer_list except Exception as e: logger.error(\"Could not connect to", "str(e)) pass data = {\"version\": consts.MINER_VERSION, \"blockheight\": BLOCKCHAIN.active_chain.length} response.content_type =", "if \"message\" in data: message = data[\"message\"] if len(receiver_public_key) <", "if result: response.status = 200 else: response.status = 400 return", "sent, please wait for it to be mined!\" # else:", "\" + str(e)) return template(\"error.html\") return template(\"block.html\", block=block) @app.route(\"/transaction/<blockhash>/<txhash>\", name=\"transaction\")", "str(not consts.NO_MINING)) return \"Mining Toggled, \" + \"NOT MINING\" if", "message_type=message_type, pubkey=MY_WALLET.public_key) @app.get(\"/checkmybalance\") def checkblance(): log_ip(request, inspect.stack()[0][3]) return str(check_balance(MY_WALLET.public_key)) 
@app.route(\"/static/<filename:path>\",", "return cached_get_block(hhash) @app.post(\"/checkblock\") def checkblock(): log_ip(request, inspect.stack()[0][3]) headerhash = request.forms.get(\"headerhash\")", "logger.debug(\"Cannot send to myself\") else: transaction = create_transaction(receiver_public_keys, amounts, MY_WALLET.public_key,", "return s def render_block_header(hdr): html = \"<table>\" html += \"<tr><th>\"", "greet_peer(peer: Dict[str, Any]) -> bool: try: url = get_peer_url(peer) data", "import json import time from functools import lru_cache from multiprocessing", "received_new_block(): log_ip(request, inspect.stack()[0][3]) return process_new_block(request.body.read()) @lru_cache(maxsize=16) def process_new_transaction(request_data: bytes) ->", "ID!\" message_type = \"danger\" return template(\"index.html\", message=message, message_type=message_type, question=question) @app.get('/about')", "not in BLOCKCHAIN.mempool: if BLOCKCHAIN.active_chain.is_transaction_valid(tx): logger.debug(\"Valid Transaction received, Adding to", "template(\"wallet.html\", message=message, message_type=message_type, pubkey=MY_WALLET.public_key) # else: # publickey = receiver", "BLOCKCHAIN = BlockChain() PEER_LIST: List[Dict[str, Any]] = [] MY_WALLET =", "inspect import requests import waitress from bottle import BaseTemplate, Bottle,", "the peer data in the peer list with the new", "= requests.post(get_peer_url(peer) + \"/checkblock\", data={\"headerhash\": hhash}) result = json.loads(r.text) if", "Received\") return \"Invalid Block\" @app.post(\"/newblock\") def received_new_block(): log_ip(request, inspect.stack()[0][3]) return", "host=\"0.0.0.0\", threads=16, port=consts.MINER_SERVER_PORT) else: waitress.serve(app, host=\"0.0.0.0\", threads=16, port=consts.MINER_SERVER_PORT) except KeyboardInterrupt:", "@app.route(\"/transaction/<blockhash>/<txhash>\", name=\"transaction\") def transaction(blockhash, txhash): log_ip(request, inspect.stack()[0][3]) try: block =", "def 
get_peer_url(peer: Dict[str, Any]) -> str: return \"http://\" + str(peer[\"ip\"])", "Transaction\") response.status = 400 return \"Insufficient Balance to make Transaction,", "\"Insufficient Balance to make Transaction, need more \" + str(bounty", "request.json transaction = Transaction.from_json(data[\"transaction\"]).object() sig = data[\"signature\"] transaction.add_sign(sig) logger.debug(transaction) logger.info(\"Wallet:", "Exception as e: response.status = 400 logger.error(\"Wallet: Could not Send", "tx_out = utxo_list[0] if tx_out.address == pub_key: current_balance += int(tx_out.amount)", "receiver_public_keys: if len(key) < consts.PUBLIC_KEY_LENGTH: logger.debug(\"Invalid Public Key Length\") return", "vout=vout, message=message) return tx def get_ip(request): return request.environ.get(\"HTTP_X_FORWARDED_FOR\") or request.environ.get(\"REMOTE_ADDR\")", "= request.forms.get(\"headerhash\") if get_block_from_db(headerhash): return json.dumps(True) return json.dumps(False) @app.post(\"/getblockhashes\") def", "+ \"</th>\" html += \"<td>\" + str(len(block.transactions)) + \"</td></tr>\" #", "template(\"error.html\") if __name__ == \"__main__\": try: if consts.NEW_BLOCKCHAIN: logger.info(\"FullNode: Starting", "requests.post(get_peer_url(peer) + url, data=data, timeout=(5, 1)) except Exception as e:", "@app.route(\"/static/<filename:path>\", name=\"static\") def serve_static(filename): log_ip(request, inspect.stack()[0][3]) return static_file(filename, root=\"static\") @app.get(\"/favicon.ico\")", "\"blockheight\": BLOCKCHAIN.active_chain.length} response.content_type = \"application/json\" return json.dumps(data) @lru_cache(maxsize=128) def cached_get_block(headerhash:", "wallet(): # log_ip(request, inspect.stack()[0][3]) # return template(\"wallet.html\", message=\"\", message_type=\"\", pubkey=MY_WALLET.public_key)", "{} data[\"send_this\"] = transaction.to_json() transaction.vin = {} data[\"sign_this\"] = transaction.to_json()", "# message = \"Some Error 
Occured, Contact Admin.\" # message_type", "return int(current_balance) def send_bounty(receiver_public_keys: List[str], amounts: List[int]): current_balance = check_balance(MY_WALLET.public_key)", "inspect.stack()[0][3]) return process_new_block(request.body.read()) @lru_cache(maxsize=16) def process_new_transaction(request_data: bytes) -> str: global", "\"__main__\": try: if consts.NEW_BLOCKCHAIN: logger.info(\"FullNode: Starting New Chain from Genesis\")", "transaction.to_json() transaction.vin = {} data[\"sign_this\"] = transaction.to_json() return json.dumps(data) @app.post(\"/sendTransaction\")", "\"danger\" # return template(\"wallet.html\", message=message, message_type=message_type, pubkey=MY_WALLET.public_key) # else: #", "compress(db_block) else: logger.error(\"ERROR CALLED GETBLOCK FOR NON EXISTENT BLOCK\") return", "str(e)) return \"Try Again\" else: logger.info(\"Wallet: Transaction Sent, Wait for", "transaction(blockhash, txhash): log_ip(request, inspect.stack()[0][3]) try: block = Block.from_json(get_block_from_db(blockhash)).object() tx =", "+ \":\" + str(peer[\"port\"]) def greet_peer(peer: Dict[str, Any]) -> bool:", "def visualize_chain(): log_ip(request, inspect.stack()[0][3]) data = [] start = BLOCKCHAIN.active_chain.length", "= sum(amounts) i = 0 for so, utxo_list in BLOCKCHAIN.active_chain.utxo.utxo.items():", "def send_to_all_peers(url, data): def request_task(peers, url, data): for peer in", "400 return \"Invalid Receiver Public Key\" current_balance = check_balance(sender_public_key) if", "result = send_bounty([pubkey], amounts) if result: message = \"Your reward", "tx_out.amount vin[i] = TxIn(payout=SingleOutput.from_json(so), pub_key=sender_public_key, sig=\"\") i += 1 for", "hdr in hdr_list: d = {} d[\"hash\"] = dhash(hdr)[-5:] d[\"time\"]", "0: prev = 0 hdr_list = list(reversed(BLOCKCHAIN.active_chain.header_list)) indexes = [i", "= time.time() peer[\"version\"] = request.forms.get(\"version\") peer[\"blockheight\"] = 
request.forms.get(\"blockheight\") ADD_ENTRY =", "send money to himself\") response.status = 400 return \"Cannot send", "send_block_hashes(): log_ip(request, inspect.stack()[0][3]) peer_height = int(request.forms.get(\"myheight\")) hash_list = [] for", "b\"\\x11`\\x1e\\xdd\\xd1\\xb6\\x80\\x0f\\xd4\\xb0t\\x90\\x9b\\xd3]\\xa0\\xcc\\x1d\\x04$\\x8b\\xb1\\x19J\\xaa!T5-\\x9eJ\\xfcI5\\xc0\\xbb\\xf5\\xb1\\x9d\\xba\\xbef@\\xa1)\\xcf\\x9b]c(R\\x91\\x0e\\x9dMM\\xb6\\x94\\xa9\\xe2\\x94il\\x15\" dk = hashlib.pbkdf2_hmac(\"sha512\", password.encode(\"utf-8\"), b\"<PASSWORD>\", 200000) if hashed ==", "json.dumps(tx_hist) @app.post(\"/greetpeer\") def greet_peer_f(): log_ip(request, inspect.stack()[0][3]) try: peer = {}", "inspect.stack()[0][3]) data = request.json transaction = Transaction.from_json(data[\"transaction\"]).object() sig = data[\"signature\"]", "str) -> str: if headerhash: db_block = get_block_from_db(headerhash) if db_block:", "Adding to Chain\") logger.debug(\"Server: Sending new block to peers\") #", "message=message, message_type=message_type, question=question) with open('uuids.json', 'r') as file: uuid_json =", "def start_mining_thread(): time.sleep(5) Thread(target=mining_thread_task, name=\"Miner\", daemon=True).start() def fetch_peer_list() -> List[Dict[str,", "Bottle, request, response, static_file, template, error import utils.constants as consts", "current_balance < total_amount: logger.debug(\"Insuficient balance\") elif MY_WALLET.public_key in receiver_public_keys: logger.debug(\"Cannot", "publickey = wallet[1] # else: # message = \"Error with", "Thread(target=start_mining_thread, daemon=True).start() if consts.NO_MINING: logger.info(\"FullNode: Not Mining\") # Start server", "message_type = \"info\" # try: # receivers = [] #", "= int(request.forms.get(\"number\")) # message = \"\" # message_type = \"info\"", "str(get_wallet_from_db(consts.MINER_SERVER_PORT)[1]) ) return s def render_block_header(hdr): html = \"<table>\" html", "joined, Adding to 
List\") except Exception as e: logger.debug(\"Server: Greet", "\"info\" # try: # receivers = [] # amounts =", "mining Thread Thread(target=start_mining_thread, daemon=True).start() if consts.NO_MINING: logger.info(\"FullNode: Not Mining\") #", "# receivers.append(publickey) # amounts.append(bounty) # if check_balance(MY_WALLET.public_key) >= total_amount: #", "password.encode(\"utf-8\"), b\"<PASSWORD>\", 200000) if hashed == dk: consts.NO_MINING = not", "= \"Some Error Occured, Contact Admin.\" message_type = \"warning\" else:", "e: # logger.error(e) # message = \"Some Error Occured. Please", "BLOCKCHAIN.active_chain.length r = requests.post(get_peer_url(max_peer) + \"/getblockhashes\", data={\"myheight\": fork_height}) hash_list =", "logger.error(\"Wallet: Could not Send Transaction. Try Again.\" + str(e)) return", "= 400 return message question = '''What is greater than", "( \"No. of Blocks: \" + str(BLOCKCHAIN.active_chain.length) + \"<br>\" +", "to Mempool\") BLOCKCHAIN.mempool.add(tx) # Broadcast block to other peers send_to_all_peers(\"/newtransaction\",", "Transaction\") else: logger.info(\"Wallet: Transaction Sent, Wait for it to be", "str: if headerhash: db_block = get_block_from_db(headerhash) if db_block: return compress(db_block)", "# message_type = \"danger\" # return template(\"wallet.html\", message=message, message_type=message_type, pubkey=MY_WALLET.public_key)", "= request.json bounty = int(data[\"bounty\"]) receiver_public_key = data[\"receiver_public_key\"] sender_public_key =", "\"<td>\" + str(hdr.prev_block_hash) + \"</td></tr>\" html += \"<tr><th>\" + \"Merkle", "= str(request.forms.get(\"port\" + str(i))) # bounty = int(request.forms.get(\"amount\" + str(i)))", "send_to_all_peers(\"/newblock\", request_data) # TODO Make new chain/ orphan set for", "template(\"wallet.html\", message=message, message_type=message_type, pubkey=MY_WALLET.public_key) @app.get(\"/checkmybalance\") def checkblance(): log_ip(request, inspect.stack()[0][3]) return", 
"Any]) -> bool: try: url = get_peer_url(peer) data = {\"port\":", "mining(): log_ip(request, inspect.stack()[0][3]) password = request.body.read().decode(\"utf-8\") hashed = b\"\\x11`\\x1e\\xdd\\xd1\\xb6\\x80\\x0f\\xd4\\xb0t\\x90\\x9b\\xd3]\\xa0\\xcc\\x1d\\x04$\\x8b\\xb1\\x19J\\xaa!T5-\\x9eJ\\xfcI5\\xc0\\xbb\\xf5\\xb1\\x9d\\xba\\xbef@\\xa1)\\xcf\\x9b]c(R\\x91\\x0e\\x9dMM\\xb6\\x94\\xa9\\xe2\\x94il\\x15\" dk", "not added to Mempool\") return False, \"Not Valid Transaction\" else:", "for peer in PEER_LIST: if greet_peer(peer): new_peer_list.append(peer) PEER_LIST = new_peer_list", "response.status = 400 logger.error(\"Wallet: Could not Send Transaction. Try Again.\"", "= \"You have Insufficient Balance!\" # message_type = \"warning\" #", "else: # message = \"You have Insufficient Balance!\" # message_type", "sender_public_key = data[\"sender_public_key\"] message = \"No Message\" if \"message\" in", "Transaction\" else: return True, \"Transaction Already received\" except Exception as", "account(pubkey): log_ip(request, inspect.stack()[0][3]) balance = check_balance(pubkey) tx_hist = BLOCKCHAIN.active_chain.transaction_history.get(pubkey) return", "json.loads(r.text) if result: return True return False def get_block_header_hash(height): return", "return False, \"Not Valid Transaction\" else: return True, \"Transaction Already", "if current_amount >= total_amount: break if tx_out.address == sender_public_key: current_amount", "with open(\"lineprof\" + str(consts.MINER_SERVER_PORT) + \".log\", \"w\") as f: app", "json.loads(decompress(r.text.encode())) for hhash in hash_list: block = receive_block_from_peer(max_peer, hhash) if", "request.json bounty = int(data[\"bounty\"]) receiver_public_key = data[\"receiver_public_key\"] sender_public_key = data[\"sender_public_key\"]", "\"NOT MINING\" if consts.NO_MINING else \"MINING\" else: return \"Password Mismatch,\"", "// 2) def send_to_all_peers(url, data): def request_task(peers, url, data): for", "consts.NO_MINING: 
miner.start_mining(BLOCKCHAIN.mempool, BLOCKCHAIN.active_chain, MY_WALLET) time.sleep(consts.MINING_INTERVAL_THRESHOLD // 2) def send_to_all_peers(url, data):", "\"Timestamp\" + \"</th>\" html += ( \"<td>\" + str(datetime.fromtimestamp(hdr.timestamp).strftime(\"%d-%m-%Y %H:%M:%S\"))", "render_block_header(hdr): html = \"<table>\" html += \"<tr><th>\" + \"Height\" +", "message_type=message_type, pubkey=MY_WALLET.public_key) # else: # publickey = receiver # total_amount", "bytes) -> str: global BLOCKCHAIN block_json = decompress(request_data) if block_json:", "as e: logger.error(\"Server: New Block: invalid block received \" +", "logger.error(\"ERROR CALLED GETBLOCK FOR NON EXISTENT BLOCK\") return \"Invalid Hash\"", "\"Mining Toggled, \" + \"NOT MINING\" if consts.NO_MINING else \"MINING\"", "= fetch_peer_list() new_peer_list = [] for peer in PEER_LIST: if", "= \"danger\" # return template(\"wallet.html\", message=message, message_type=message_type, pubkey=MY_WALLET.public_key) @app.get(\"/checkmybalance\") def", "str(consts.MINER_SERVER_PORT) + \".log\", \"w\") as f: app = LineProfilerMiddleware(app, stream=f,", "int(tx_out.amount) return int(current_balance) def send_bounty(receiver_public_keys: List[str], amounts: List[int]): current_balance =", "Called function {fname}\") @app.post(\"/checkBalance\") def checkingbalance(): log_ip(request, inspect.stack()[0][3]) data =", "template(\"wallet.html\", message=message, message_type=message_type, pubkey=MY_WALLET.public_key) # except Exception as e: #", "Could not greet peer\" + str(e)) return False def receive_block_from_peer(peer:", "+= \"<tr><th>Transaction \" + str(i) + \"</th><td>\" + str(s) +", "= BLOCKCHAIN.active_chain.length r = requests.post(get_peer_url(max_peer) + \"/getblockhashes\", data={\"myheight\": fork_height}) hash_list", "tx_out.address == pub_key: current_balance += int(tx_out.amount) return int(current_balance) def send_bounty(receiver_public_keys:", "return template(\"wallet.html\", 
message=message, message_type=message_type, pubkey=MY_WALLET.public_key) # except Exception as e:", "enumerate(block.transactions): # s = \"coinbase: \" + str(transaction.is_coinbase) + \",", "app = Bottle() BaseTemplate.defaults[\"get_url\"] = app.get_url LINE_PROFILING = False BLOCKCHAIN", "def serve_static(filename): log_ip(request, inspect.stack()[0][3]) return static_file(filename, root=\"static\") @app.get(\"/favicon.ico\") def get_favicon():", "def about(): return template(\"about.html\") # @app.get(\"/wallet\") # def wallet(): #", "inspect.stack()[0][3]) data = request.json public_key = data[\"public_key\"] tx_hist = BLOCKCHAIN.active_chain.transaction_history.get(public_key)", "message_type = \"danger\" # return template(\"wallet.html\", message=message, message_type=message_type, pubkey=MY_WALLET.public_key) #", "data=compress(transaction.to_json()), timeout=(5, 1), ) if r.status_code == 400: logger.info(\"Wallet: Could", "myself\") else: transaction = create_transaction(receiver_public_keys, amounts, MY_WALLET.public_key, message=\"Authority: Faucet Money\")", "html += \"<td>\" + str(hdr.height) + \"</td></tr>\" html += \"<tr><th>\"", "utils.constants as consts from core import Block, BlockChain, SingleOutput, Transaction,", "request.forms.get(\"pubkey\") amounts = [300] if uuid in valid_ids: logger.debug(\"Valid Answer,", "+= bounty # receivers.append(publickey) # amounts.append(bounty) # if check_balance(MY_WALLET.public_key) >=", "sync_with_peers() # Start mining Thread Thread(target=start_mining_thread, daemon=True).start() if consts.NO_MINING: logger.info(\"FullNode:", "TxOut, genesis_block from authority import Authority from utils.logger import logger,", "1), ) if r.status_code == 400: logger.info(\"Wallet: Could not Send", "logger.info(\"Server: Received block exists, doing nothing\") return \"Block already Received", "be mined!\" valid_ids.remove(uuid) else: message = \"Some Error Occured, Contact", "if i < len(hdr_list)] blocks = 
[Block.from_json(get_block_from_db(dhash(hdr_list[i]))).object() for i in", "for it to be mined!\" valid_ids.remove(uuid) else: message = \"Some", "import BaseTemplate, Bottle, request, response, static_file, template, error import utils.constants", "send money to youself\" else: transaction = create_transaction([receiver_public_key], [bounty], sender_public_key,", "blocks = [Block.from_json(get_block_from_db(dhash(hdr_list[i]))).object() for i in indexes] transactions = list(BLOCKCHAIN.mempool)", "total_amount: break if tx_out.address == sender_public_key: current_amount += tx_out.amount vin[i]", "from functools import lru_cache from multiprocessing import Pool, Process from", "= [300] if uuid in valid_ids: logger.debug(\"Valid Answer, Rewarding \"", "get block block = Block.from_json(get_block_from_db(dhash(hdr))).object() html += \"<tr><th>\" + \"Transactions\"", "+ 1] = TxOut(amount=change, address=sender_public_key) tx = Transaction(version=consts.MINER_VERSION, locktime=0, timestamp=int(time.time()),", "str(request.forms.get(\"port\" + str(i))) # bounty = int(request.forms.get(\"amount\" + str(i))) #", "# receivers = [] # amounts = [] # total_amount", "import get_block_from_db, get_wallet_from_db, read_header_list_from_db from utils.utils import compress, decompress, dhash", "\" + pubkey) message = \"Well Done!\" if check_balance(MY_WALLET.public_key) >=", "\"</th>\" html += \"<td>\" + str(hdr.merkle_root) + \"</td></tr>\" html +=", "data = {\"port\": consts.MINER_SERVER_PORT, \"version\": consts.MINER_VERSION, \"blockheight\": BLOCKCHAIN.active_chain.length} # Send", "False def get_block_header_hash(height): return dhash(BLOCKCHAIN.active_chain.header_list[height]) def sync(max_peer): fork_height = BLOCKCHAIN.active_chain.length", "vout[i + 1] = TxOut(amount=change, address=sender_public_key) tx = Transaction(version=consts.MINER_VERSION, locktime=0,", "+ str(e)) return \"Try Again\" else: logger.info(\"Wallet: Transaction Sent, Wait", "False def 
create_transaction(receiver_public_keys: List[str], amounts: List[int], sender_public_key, message=\"\") -> Transaction:", "1)) except Exception as e: logger.debug(\"Server: Requests: Error while sending", "= create_transaction(receiver_public_keys, amounts, MY_WALLET.public_key, message=\"Authority: Faucet Money\") transaction.sign(MY_WALLET) logger.info(\"Wallet: Attempting", "# return template(\"wallet.html\", message=message, message_type=message_type, pubkey=MY_WALLET.public_key) # except Exception as", "GETBLOCK FOR NON EXISTENT BLOCK\") return \"Invalid Hash\" @app.post(\"/getblock\") def", "peer[\"port\"]: ADD_ENTRY = False if ADD_ENTRY: PEER_LIST.append(peer) logger.debug(\"Server: Greet, A", "not valid, not added to Mempool\") return False, \"Not Valid", "except Exception as e: logger.error(\"Sync: Error: \" + str(e)) Timer(consts.MINING_INTERVAL_THRESHOLD", "for it to be mined!\" # else: # message =", "* 8, (prev + 1) * 8) if i <", "return static_file(\"favicon.ico\", root=\"static\") @app.get(\"/info\") def sendinfo(): log_ip(request, inspect.stack()[0][3]) s =", "logger.info(\"FullNode: Not Mining\") # Start server if LINE_PROFILING: from wsgi_lineprof.middleware", "for key in receiver_public_keys: if len(key) < consts.PUBLIC_KEY_LENGTH: logger.debug(\"Invalid Public", "+ str(e)) return False def receive_block_from_peer(peer: Dict[str, Any], header_hash) ->", "valid_ids = set(json.loads(uuid_json)) @app.post(\"/\") def puzzle(): log_ip(request, inspect.stack()[0][3]) message =", "@app.get(\"/info\") def sendinfo(): log_ip(request, inspect.stack()[0][3]) s = ( \"No. 
of", "consts.MINER_VERSION, \"blockheight\": BLOCKCHAIN.active_chain.length} response.content_type = \"application/json\" return json.dumps(data) @lru_cache(maxsize=128) def", "\"Height\" + \"</th>\" html += \"<td>\" + str(hdr.height) + \"</td></tr>\"", "= hashlib.pbkdf2_hmac(\"sha512\", password.encode(\"utf-8\"), b\"<PASSWORD>\", 200000) if hashed == dk: consts.NO_MINING", "hashed == dk: consts.NO_MINING = not consts.NO_MINING logger.info(\"Mining: \" +", "\"<tr><th>\" + \"Merkle Root\" + \"</th>\" html += \"<td>\" +", "if hashed == dk: consts.NO_MINING = not consts.NO_MINING logger.info(\"Mining: \"", "try: block = Block.from_json(get_block_from_db(blockhash)).object() tx = None for t in", "= requests.post(consts.SEED_SERVER_URL, data={\"port\": consts.MINER_SERVER_PORT}) peer_list = json.loads(r.text) return peer_list except", "headerhash: db_block = get_block_from_db(headerhash) if db_block: return compress(db_block) else: logger.error(\"ERROR", "Received a New Valid Block, Adding to Chain\") logger.debug(\"Server: Sending", "message = \"Error with the Receiver Port ID, try again.\"", "Occured. 
Please try again later.\" # message_type = \"danger\" #", "else \"MINING\" else: return \"Password Mismatch,\" + \"NOT MINING\" if", "# Broadcast block to other peers send_to_all_peers(\"/newblock\", request_data) # TODO", "get_block_from_db(dhash(block.header)): logger.info(\"Server: Received block exists, doing nothing\") return \"Block already", "data[\"public_key\"] tx_hist = BLOCKCHAIN.active_chain.transaction_history.get(public_key) return json.dumps(tx_hist) @app.post(\"/greetpeer\") def greet_peer_f(): log_ip(request,", "return json.dumps(data) @lru_cache(maxsize=128) def cached_get_block(headerhash: str) -> str: if headerhash:", "password = request.body.read().decode(\"utf-8\") hashed = b\"\\x11`\\x1e\\xdd\\xd1\\xb6\\x80\\x0f\\xd4\\xb0t\\x90\\x9b\\xd3]\\xa0\\xcc\\x1d\\x04$\\x8b\\xb1\\x19J\\xaa!T5-\\x9eJ\\xfcI5\\xc0\\xbb\\xf5\\xb1\\x9d\\xba\\xbef@\\xa1)\\xcf\\x9b]c(R\\x91\\x0e\\x9dMM\\xb6\\x94\\xa9\\xe2\\x94il\\x15\" dk = hashlib.pbkdf2_hmac(\"sha512\", password.encode(\"utf-8\"),", "so, utxo_list in BLOCKCHAIN.active_chain.utxo.utxo.items(): tx_out = utxo_list[0] if current_amount >=", "\"Invalid Hash\" @app.post(\"/getblock\") def getblock(): log_ip(request, inspect.stack()[0][3]) hhash = request.forms.get(\"headerhash\")", "wait for it to be mined!\" valid_ids.remove(uuid) else: message =", "\"<br>\" + dhash(BLOCKCHAIN.active_chain.header_list[-1]) + \"<br>\" + \"Balance \" + str(check_balance(MY_WALLET.public_key))", "# if len(receiver) < 10: # wallet = get_wallet_from_db(receiver) #", "in BLOCKCHAIN.mempool: if BLOCKCHAIN.active_chain.is_transaction_valid(tx): logger.debug(\"Valid Transaction received, Adding to Mempool\")", "== dk: consts.NO_MINING = not consts.NO_MINING logger.info(\"Mining: \" + str(not", "Sent, Wait for it to be Mined\") return \"Done\" @app.post(\"/transactionHistory\")", "transactions = list(BLOCKCHAIN.mempool) return template(\"explorer.html\", blocks=blocks, transactions=transactions, prev=prev) 
@app.route(\"/block/<blockhash>\", name=\"transaction\")", "the peer r = requests.post(url + \"/greetpeer\", data=data) data =", "if len(hdr_list) > 200: hdr_list = BLOCKCHAIN.active_chain.header_list[:100] + BLOCKCHAIN.active_chain.header_list[-100:] for", "Send Transaction. Invalid transaction\") return \"Try Again\" except Exception as", "+ dhash(BLOCKCHAIN.active_chain.header_list[-1]) + \"<br>\" + \"Balance \" + str(check_balance(MY_WALLET.public_key)) +", "\"info\" return template(\"index.html\", message=message, message_type=message_type, question=question) with open('uuids.json', 'r') as", "Block.from_json(decompress(r.text)).object() def check_block_with_peer(peer, hhash): r = requests.post(get_peer_url(peer) + \"/checkblock\", data={\"headerhash\":", "Chain\") header_list = read_header_list_from_db() BLOCKCHAIN.build_from_header_list(header_list) # Sync with all my", "Any]] = [] MY_WALLET = Wallet() miner = Authority() def", "+ str(i))) # publickey = \"\" # if len(receiver) <", "getblock(): log_ip(request, inspect.stack()[0][3]) hhash = request.forms.get(\"headerhash\") return cached_get_block(hhash) @app.post(\"/checkblock\") def", "json import time from functools import lru_cache from multiprocessing import", "# get block block = Block.from_json(get_block_from_db(dhash(hdr))).object() html += \"<tr><th>\" +", "TxOut(amount=amounts[i], address=address) change = (current_amount - total_amount) if change >", "data=data, timeout=(5, 1)) except Exception as e: logger.debug(\"Server: Requests: Error", "{} d[\"hash\"] = dhash(hdr)[-5:] d[\"time\"] = hdr.timestamp d[\"data\"] = render_block_header(hdr)", "balance\") elif MY_WALLET.public_key in receiver_public_keys: logger.debug(\"Cannot send to myself\") else:", "False, \"Not Valid Transaction\" else: return True, \"Transaction Already received\"", "return tx def get_ip(request): return request.environ.get(\"HTTP_X_FORWARDED_FOR\") or request.environ.get(\"REMOTE_ADDR\") def log_ip(request,", "peer in 
PEER_LIST: if greet_peer(peer): new_peer_list.append(peer) PEER_LIST = new_peer_list if", "== pub_key: current_balance += int(tx_out.amount) return int(current_balance) def send_bounty(receiver_public_keys: List[str],", "with {get_peer_url(max_peer)}, he seems to have height {max_peer['blockheight']}\") sync(max_peer) except", "make Transaction\") response.status = 400 return \"Insufficient Balance to make", "received, Adding to Mempool\") BLOCKCHAIN.mempool.add(tx) # Broadcast block to other", "Could not Send Transaction. Try Again.\" + str(e)) return False", "root=\"static\") @app.get(\"/info\") def sendinfo(): log_ip(request, inspect.stack()[0][3]) s = ( \"No.", "uuid in valid_ids: logger.debug(\"Valid Answer, Rewarding \" + pubkey) message", "wallet_post(): # log_ip(request, inspect.stack()[0][3]) # number = int(request.forms.get(\"number\")) # message", "message = \"Some Error Occured, Contact Admin.\" message_type = \"warning\"", "# message = \"Your transaction is sent, please wait for", "current_balance = check_balance(sender_public_key) if current_balance < bounty: logger.debug(\"Insufficient Balance to", "pubkey=pubkey) @app.post(\"/mining\") def mining(): log_ip(request, inspect.stack()[0][3]) password = request.body.read().decode(\"utf-8\") hashed", "indexes] transactions = list(BLOCKCHAIN.mempool) return template(\"explorer.html\", blocks=blocks, transactions=transactions, prev=prev) @app.route(\"/block/<blockhash>\",", "else: transaction = create_transaction([receiver_public_key], [bounty], sender_public_key, message=message) data = {}", "not Send Transaction. 
Invalid transaction\") return \"Try Again\" except Exception", "inspect.stack()[0][3]) data = [] start = BLOCKCHAIN.active_chain.length - 10 if", "logger.info(\"Mining: \" + str(not consts.NO_MINING)) return \"Mining Toggled, \" +", "request.body.read().decode(\"utf-8\") hashed = b\"\\x11`\\x1e\\xdd\\xd1\\xb6\\x80\\x0f\\xd4\\xb0t\\x90\\x9b\\xd3]\\xa0\\xcc\\x1d\\x04$\\x8b\\xb1\\x19J\\xaa!T5-\\x9eJ\\xfcI5\\xc0\\xbb\\xf5\\xb1\\x9d\\xba\\xbef@\\xa1)\\xcf\\x9b]c(R\\x91\\x0e\\x9dMM\\xb6\\x94\\xa9\\xe2\\x94il\\x15\" dk = hashlib.pbkdf2_hmac(\"sha512\", password.encode(\"utf-8\"), b\"<PASSWORD>\", 200000)", "actual_answer = \"nothing\" @app.get(\"/\") def home(): log_ip(request, inspect.stack()[0][3]) message =", "= get_wallet_from_db(receiver) # if wallet is not None: # publickey", "read_header_list_from_db() BLOCKCHAIN.build_from_header_list(header_list) # Sync with all my peers sync_with_peers() #", "utxo_list in BLOCKCHAIN.active_chain.utxo.utxo.items(): tx_out = utxo_list[0] if tx_out.address == pub_key:", "dhash(hdr)[-5:] d[\"time\"] = hdr.timestamp d[\"data\"] = render_block_header(hdr) headers.append(d) data.append(headers) return", "lru_cache from multiprocessing import Pool, Process from threading import Thread,", "# if check_balance(MY_WALLET.public_key) >= total_amount: # result = send_bounty(receivers, amounts)", "received: \" + str(e)) return False, \"Not Valid Transaction\" return", "BlockChain() PEER_LIST: List[Dict[str, Any]] = [] MY_WALLET = Wallet() miner", "= set(json.loads(uuid_json)) @app.post(\"/\") def puzzle(): log_ip(request, inspect.stack()[0][3]) message = \"\"", "check_balance(public_key) return str(current_balance) @app.post(\"/makeTransaction\") def make_transaction(): log_ip(request, inspect.stack()[0][3]) data =", "Transaction\") try: r = requests.post( \"http://0.0.0.0:\" + str(consts.MINER_SERVER_PORT) + \"/newtransaction\",", "return \"Invalid Receiver Public Key\" current_balance = check_balance(sender_public_key) if current_balance", 
"+ BLOCKCHAIN.active_chain.header_list[-100:] for hdr in hdr_list: d = {} d[\"hash\"]", "consts.NO_MINING else \"MINING\" @app.route(\"/<url:re:.+>\") @error(403) @error(404) @error(505) def error_handle(url=\"url\", error=\"404\"):", "# message_type = \"info\" # try: # receivers = []", "log_ip(request, inspect.stack()[0][3]) prev = int(request.query.prev or 0) if prev <", "Receiver Public Key\") response.status = 400 return \"Invalid Receiver Public", "Please try again later.\" # message_type = \"danger\" # return", "e: logger.error(\"Could not connect to DNS Seed\") return [] def", "message_type = \"warning\" else: message = \"Invalid Unique ID!\" message_type", "send_to_all_peers(url, data): def request_task(peers, url, data): for peer in peers:", "again later.\" # message_type = \"danger\" # return template(\"wallet.html\", message=message,", "template(\"explorer.html\", blocks=blocks, transactions=transactions, prev=prev) @app.route(\"/block/<blockhash>\", name=\"transaction\") def block(blockhash): log_ip(request, inspect.stack()[0][3])", "already Received Before\" if BLOCKCHAIN.add_block(block): logger.info(\"Server: Received a New Valid", "else: return \"Password Mismatch,\" + \"NOT MINING\" if consts.NO_MINING else", "time.sleep(5) Thread(target=mining_thread_task, name=\"Miner\", daemon=True).start() def fetch_peer_list() -> List[Dict[str, Any]]: try:", "# if result: # message = \"Your transaction is sent,", "key in receiver_public_keys: if len(key) < consts.PUBLIC_KEY_LENGTH: logger.debug(\"Invalid Public Key", "\"Your transaction is sent, please wait for it to be", "consts.NEW_BLOCKCHAIN: logger.info(\"FullNode: Starting New Chain from Genesis\") BLOCKCHAIN.add_block(genesis_block) else: #", "str(e)) return template(\"error.html\") return template(\"block.html\", block=block) @app.route(\"/transaction/<blockhash>/<txhash>\", name=\"transaction\") def transaction(blockhash,", "open(\"lineprof\" + str(consts.MINER_SERVER_PORT) + \".log\", \"w\") as f: app 
=", "# return template(\"wallet.html\", message=message, message_type=message_type, pubkey=MY_WALLET.public_key) @app.get(\"/checkmybalance\") def checkblance(): log_ip(request,", "logger.info(\"FullNode: Starting New Chain from Genesis\") BLOCKCHAIN.add_block(genesis_block) else: # Restore", "\"Password Mismatch,\" + \"NOT MINING\" if consts.NO_MINING else \"MINING\" @app.route(\"/<url:re:.+>\")", "as e: logger.error(\"Server: New Transaction: Invalid tx received: \" +", "nothing\") return \"Block already Received Before\" if BLOCKCHAIN.add_block(block): logger.info(\"Server: Received", "\" + str(e)) return \"Invalid Block Received\" # Kill Miner", "valid_ids.remove(uuid) else: message = \"Some Error Occured, Contact Admin.\" message_type", "= \"\" message_type = \"info\" uuid = request.forms.get(\"uuid\") pubkey =", "@app.get(\"/checkmybalance\") def checkblance(): log_ip(request, inspect.stack()[0][3]) return str(check_balance(MY_WALLET.public_key)) @app.route(\"/static/<filename:path>\", name=\"static\") def", "consts.NO_MINING)) return \"Mining Toggled, \" + \"NOT MINING\" if consts.NO_MINING", "request.json public_key = data[\"public_key\"] tx_hist = BLOCKCHAIN.active_chain.transaction_history.get(public_key) return json.dumps(tx_hist) @app.post(\"/greetpeer\")", "+ \"<br>Public Key: <br>\" + str(get_wallet_from_db(consts.MINER_SERVER_PORT)[1]) ) return s def", "all active chains @app.post(\"/newtransaction\") def received_new_transaction(): log_ip(request, inspect.stack()[0][3]) result, message", "response.status = 200 else: response.status = 400 return message question", "bottle import BaseTemplate, Bottle, request, response, static_file, template, error import", "Wallet() miner = Authority() def mining_thread_task(): while True: if not", "balance = check_balance(pubkey) tx_hist = BLOCKCHAIN.active_chain.transaction_history.get(pubkey) return template(\"account.html\", tx_hist=tx_hist, balance=balance,", "Wallet app = Bottle() 
BaseTemplate.defaults[\"get_url\"] = app.get_url LINE_PROFILING = False", "public_key = data[\"public_key\"] logger.debug(public_key) current_balance = check_balance(public_key) return str(current_balance) @app.post(\"/makeTransaction\")", "total_amount) if change > 0: vout[i + 1] = TxOut(amount=change,", "\"Some Error Occured, Contact Admin.\" message_type = \"warning\" else: message", "Could not Send Transaction. Try Again.\" + str(e)) return \"Try", "0: vout[i + 1] = TxOut(amount=change, address=sender_public_key) tx = Transaction(version=consts.MINER_VERSION,", "= \"info\" # try: # receivers = [] # amounts", "+ \"</td></tr>\" # for i, transaction in enumerate(block.transactions): # s", "is sent, please wait for it to be mined!\" #", "+= \"<td>\" + str(hdr.merkle_root) + \"</td></tr>\" html += \"<tr><th>\" +", "< bounty: logger.debug(\"Insufficient Balance to make Transaction\") response.status = 400", "400 logger.error(\"Wallet: Could not Send Transaction. Invalid transaction\") return \"Try", "+ \"</th>\" html += \"<td>\" + str(hdr.merkle_root) + \"</td></tr>\" html", "publickey = receiver # total_amount += bounty # receivers.append(publickey) #", "return Block.from_json(decompress(r.text)).object() def check_block_with_peer(peer, hhash): r = requests.post(get_peer_url(peer) + \"/checkblock\",", "Transaction received, Adding to Mempool\") BLOCKCHAIN.mempool.add(tx) # Broadcast block to", "= \"Some Error Occured, Contact Admin.\" # message_type = \"warning\"", "total_amount: logger.debug(\"Insuficient balance\") elif MY_WALLET.public_key in receiver_public_keys: logger.debug(\"Cannot send to", "+ \"/newtransaction\", data=compress(transaction.to_json()), timeout=(5, 1), ) if r.status_code == 400:", "publickey = \"\" # if len(receiver) < 10: # wallet", "else: return True, \"Transaction Already received\" except Exception as e:", "return \"Block Received\" logger.error(\"Server: Invalid Block Received\") return \"Invalid Block\"", "@app.get(\"/explorer\") def 
explorer(): log_ip(request, inspect.stack()[0][3]) prev = int(request.query.prev or 0)", "logger.debug(\"Server: Greet, A new peer joined, Adding to List\") except", "= request.remote_addr peer[\"time\"] = time.time() peer[\"version\"] = request.forms.get(\"version\") peer[\"blockheight\"] =", "Money\") transaction.sign(MY_WALLET) logger.info(\"Wallet: Attempting to Send Transaction\") try: r =", "def block(blockhash): log_ip(request, inspect.stack()[0][3]) try: block = Block.from_json(get_block_from_db(blockhash)).object() except Exception", "check_balance(pubkey) tx_hist = BLOCKCHAIN.active_chain.transaction_history.get(pubkey) return template(\"account.html\", tx_hist=tx_hist, balance=balance, pubkey=pubkey) @app.post(\"/mining\")", "- current_balance) elif sender_public_key == receiver_public_key: logger.debug(\"Someone trying to send", "\" + str(bounty - current_balance) elif sender_public_key == receiver_public_key: logger.debug(\"Someone", "logger.debug(public_key) current_balance = check_balance(public_key) return str(current_balance) @app.post(\"/makeTransaction\") def make_transaction(): log_ip(request,", "peers send_to_all_peers(\"/newtransaction\", request_data) else: logger.debug(\"The transation is not valid, not", "return True return False def get_block_header_hash(height): return dhash(BLOCKCHAIN.active_chain.header_list[height]) def sync(max_peer):", "Received\" # Kill Miner t = Timer(1, miner.stop_mining) t.start() return", "html += \"<tr><th>\" + \"Timestamp\" + \"</th>\" html += (", "request_task(peers, url, data): for peer in peers: try: requests.post(get_peer_url(peer) +", "400 return \"Cannot send money to youself\" else: transaction =", "be Mined\") return \"Done\" @app.post(\"/transactionHistory\") def transaction_history(): log_ip(request, inspect.stack()[0][3]) data", "if prev < 0: prev = 0 hdr_list = list(reversed(BLOCKCHAIN.active_chain.header_list))", "Block\" @app.post(\"/newblock\") def received_new_block(): log_ip(request, 
inspect.stack()[0][3]) return process_new_block(request.body.read()) @lru_cache(maxsize=16) def", "message = data[\"message\"] if len(receiver_public_key) < consts.PUBLIC_KEY_LENGTH: logger.debug(\"Invalid Receiver Public", "it to be Mined\") return \"Done\" @app.post(\"/transactionHistory\") def transaction_history(): log_ip(request,", "\" + str(i) + \"</th><td>\" + str(s) + \"</td></tr>\" html", "Timer from typing import Any, Dict, List from datetime import", "received\" except Exception as e: logger.error(\"Server: New Transaction: Invalid tx", "inspect.stack()[0][3]) try: block = Block.from_json(get_block_from_db(blockhash)).object() tx = None for t", "again.\" # message_type = \"danger\" # return template(\"wallet.html\", message=message, message_type=message_type,", "< total_amount: logger.debug(\"Insuficient balance\") elif MY_WALLET.public_key in receiver_public_keys: logger.debug(\"Cannot send", "Hash\" + \"</th>\" html += \"<td>\" + dhash(hdr) + \"</td></tr>\"", "fname): client_ip = get_ip(request) iplogger.info(f\"{client_ip} : Called function {fname}\") @app.post(\"/checkBalance\")", "in receiver_public_keys: logger.debug(\"Cannot send to myself\") else: transaction = create_transaction(receiver_public_keys,", "\"\" message_type = \"info\" return template(\"index.html\", message=message, message_type=message_type, question=question) with", "template(\"about.html\") # @app.get(\"/wallet\") # def wallet(): # log_ip(request, inspect.stack()[0][3]) #", "core import Block, BlockChain, SingleOutput, Transaction, TxIn, TxOut, genesis_block from", "create_transaction(receiver_public_keys: List[str], amounts: List[int], sender_public_key, message=\"\") -> Transaction: vout =", "amounts.append(bounty) # if check_balance(MY_WALLET.public_key) >= total_amount: # result = send_bounty(receivers,", "Greet Error: \" + str(e)) pass data = {\"version\": consts.MINER_VERSION,", "die?''' actual_answer = \"nothing\" @app.get(\"/\") def home(): log_ip(request, 
inspect.stack()[0][3]) message", "iplogger from utils.storage import get_block_from_db, get_wallet_from_db, read_header_list_from_db from utils.utils import", "try: PEER_LIST = fetch_peer_list() new_peer_list = [] for peer in", "+ \"Merkle Root\" + \"</th>\" html += \"<td>\" + str(hdr.merkle_root)", "if tx_out.address == pub_key: current_balance += int(tx_out.amount) return int(current_balance) def", "else: message = \"Invalid Unique ID!\" message_type = \"danger\" return", "about(): return template(\"about.html\") # @app.get(\"/wallet\") # def wallet(): # log_ip(request,", "message=message, message_type=message_type, pubkey=MY_WALLET.public_key) # except Exception as e: # logger.error(e)", "return # Periodically sync with all the peers def sync_with_peers():", "transaction in enumerate(block.transactions): # s = \"coinbase: \" + str(transaction.is_coinbase)", "\" + str(transaction.is_coinbase) + \", fees: \" + str(transaction.fees) #", "except Exception as e: logger.error(\"Could not connect to DNS Seed\")", "str(consts.MINER_SERVER_PORT) + \"/newtransaction\", data=compress(transaction.to_json()), timeout=(5, 1), ) if r.status_code ==", "greater than God, more evil than the devil, the poor", "# if wallet is not None: # publickey = wallet[1]", "name=\"transaction\") def block(blockhash): log_ip(request, inspect.stack()[0][3]) try: block = Block.from_json(get_block_from_db(blockhash)).object() except", "new block to peers\") # Broadcast block to other peers", "Error Occured, Contact Admin.\" # message_type = \"warning\" # else:", "+ \"Block Hash\" + \"</th>\" html += \"<td>\" + dhash(hdr)", "transaction.add_sign(sig) logger.debug(transaction) logger.info(\"Wallet: Attempting to Send Transaction\") try: r =", "peers: try: requests.post(get_peer_url(peer) + url, data=data, timeout=(5, 1)) except Exception", "transaction_history(): log_ip(request, inspect.stack()[0][3]) data = request.json public_key = data[\"public_key\"] tx_hist", "Invalid Transaction\") else: 
logger.info(\"Wallet: Transaction Sent, Wait for it to", "str(e)) return False def create_transaction(receiver_public_keys: List[str], amounts: List[int], sender_public_key, message=\"\")", "str(e)) return \"Invalid Block Received\" # Kill Miner t =", "inspect.stack()[0][3]) message = \"\" message_type = \"info\" return template(\"index.html\", message=message,", "error=\"404\"): log_ip(request, inspect.stack()[0][3]) return template(\"error.html\") if __name__ == \"__main__\": try:", "the peers def sync_with_peers(): try: PEER_LIST = fetch_peer_list() new_peer_list =", "if block already exists if get_block_from_db(dhash(block.header)): logger.info(\"Server: Received block exists,", "ip == peer[\"ip\"] and port == peer[\"port\"]: ADD_ENTRY = False", "log_ip(request, inspect.stack()[0][3]) return process_new_block(request.body.read()) @lru_cache(maxsize=16) def process_new_transaction(request_data: bytes) -> str:", "message=message, message_type=message_type, question=question) @app.get('/about') def about(): return template(\"about.html\") # @app.get(\"/wallet\")", "\"</td></tr>\" html += \"</table>\" return str(html) @app.get(\"/chains\") def visualize_chain(): log_ip(request,", "str: global BLOCKCHAIN transaction_json = decompress(request_data) if transaction_json: try: tx", "is not added except Exception as e: logger.error(\"Server: New Block:", "Send Transaction. 
Try Again.\" + str(e)) return \"Try Again\" else:", "BLOCKCHAIN.active_chain.header_list if len(hdr_list) > 200: hdr_list = BLOCKCHAIN.active_chain.header_list[:100] + BLOCKCHAIN.active_chain.header_list[-100:]", "message_type=\"\", pubkey=MY_WALLET.public_key) # @app.post(\"/wallet\") # def wallet_post(): # log_ip(request, inspect.stack()[0][3])", "@error(404) @error(505) def error_handle(url=\"url\", error=\"404\"): log_ip(request, inspect.stack()[0][3]) return template(\"error.html\") if", "and port == peer[\"port\"]: ADD_ENTRY = False if ADD_ENTRY: PEER_LIST.append(peer)", "# Start mining Thread Thread(target=start_mining_thread, daemon=True).start() if consts.NO_MINING: logger.info(\"FullNode: Not", "log_ip(request, inspect.stack()[0][3]) data = request.json bounty = int(data[\"bounty\"]) receiver_public_key =", "data.get(\"blockheight\", None): peer.update(data) else: logger.debug(\"Main: Peer data does not have", "as e: logger.debug(\"Server: Greet Error: \" + str(e)) pass data", "True: if not miner.is_mining() and not consts.NO_MINING: miner.start_mining(BLOCKCHAIN.mempool, BLOCKCHAIN.active_chain, MY_WALLET)", "= dhash(hdr)[-5:] d[\"time\"] = hdr.timestamp d[\"data\"] = render_block_header(hdr) headers.append(d) data.append(headers)", "1) * 8) if i < len(hdr_list)] blocks = [Block.from_json(get_block_from_db(dhash(hdr_list[i]))).object()", "current_balance) elif sender_public_key == receiver_public_key: logger.debug(\"Someone trying to send money", "import lru_cache from multiprocessing import Pool, Process from threading import", "PEER_LIST.append(peer) logger.debug(\"Server: Greet, A new peer joined, Adding to List\")", "@app.route(\"/address/<pubkey:re:.+>\", name=\"account\") def account(pubkey): log_ip(request, inspect.stack()[0][3]) balance = check_balance(pubkey) tx_hist", "\" + str(e)) return False, \"Not Valid Transaction\" return True,", "for all active chains @app.post(\"/newtransaction\") def received_new_transaction(): log_ip(request, 
inspect.stack()[0][3]) result,", "app.get_url LINE_PROFILING = False BLOCKCHAIN = BlockChain() PEER_LIST: List[Dict[str, Any]]", "sum(amounts): result = send_bounty([pubkey], amounts) if result: message = \"Your", "def get_favicon(): log_ip(request, inspect.stack()[0][3]) return static_file(\"favicon.ico\", root=\"static\") @app.get(\"/info\") def sendinfo():", "\"coinbase: \" + str(transaction.is_coinbase) + \", fees: \" + str(transaction.fees)", "except Exception as e: logger.debug(\"Main: Could not greet peer\" +", "return False def get_block_header_hash(height): return dhash(BLOCKCHAIN.active_chain.header_list[height]) def sync(max_peer): fork_height =", "utxo_list[0] if current_amount >= total_amount: break if tx_out.address == sender_public_key:", "= 200 else: response.status = 400 return message question =", "to other peers send_to_all_peers(\"/newtransaction\", request_data) else: logger.debug(\"The transation is not", "import compress, decompress, dhash from wallet import Wallet app =", "genesis_block from authority import Authority from utils.logger import logger, iplogger", "mined!\" valid_ids.remove(uuid) else: message = \"Some Error Occured, Contact Admin.\"", "headers = [] hdr_list = BLOCKCHAIN.active_chain.header_list if len(hdr_list) > 200:", "@error(403) @error(404) @error(505) def error_handle(url=\"url\", error=\"404\"): log_ip(request, inspect.stack()[0][3]) return template(\"error.html\")", "Check if block already exists if get_block_from_db(dhash(block.header)): logger.info(\"Server: Received block", "if consts.NO_MINING else \"MINING\" else: return \"Password Mismatch,\" + \"NOT", "[] for i in range(peer_height, BLOCKCHAIN.active_chain.length): hash_list.append(dhash(BLOCKCHAIN.active_chain.header_list[i])) return compress(json.dumps(hash_list)).decode() @lru_cache(maxsize=16)", "template(\"error.html\") return template(\"block.html\", block=block) @app.route(\"/transaction/<blockhash>/<txhash>\", name=\"transaction\") def 
transaction(blockhash, txhash): log_ip(request,", "evil than the devil, the poor have it, the rich", "TxOut(amount=change, address=sender_public_key) tx = Transaction(version=consts.MINER_VERSION, locktime=0, timestamp=int(time.time()), vin=vin, vout=vout, message=message)", "= BLOCKCHAIN.active_chain.length - 10 if BLOCKCHAIN.active_chain.length > 10 else 0", "vout = {} vin = {} current_amount = 0 total_amount", "import inspect import requests import waitress from bottle import BaseTemplate,", "result = json.loads(r.text) if result: return True return False def", "r = requests.post( \"http://0.0.0.0:\" + str(consts.MINER_SERVER_PORT) + \"/newtransaction\", data=compress(transaction.to_json()), timeout=(5,", "for i in range(prev * 8, (prev + 1) *", "if BLOCKCHAIN.active_chain.is_transaction_valid(tx): logger.debug(\"Valid Transaction received, Adding to Mempool\") BLOCKCHAIN.mempool.add(tx) #", "@app.get(\"/\") def home(): log_ip(request, inspect.stack()[0][3]) message = \"\" message_type =", "Transaction, TxIn, TxOut, genesis_block from authority import Authority from utils.logger", "Sent, Wait for it to be Mined\") return True except", "BaseTemplate.defaults[\"get_url\"] = app.get_url LINE_PROFILING = False BLOCKCHAIN = BlockChain() PEER_LIST:", "consts.MINER_VERSION, \"blockheight\": BLOCKCHAIN.active_chain.length} # Send a POST request to the", "+= 1 for i, address in enumerate(receiver_public_keys): vout[i] = TxOut(amount=amounts[i],", "def render_block_header(hdr): html = \"<table>\" html += \"<tr><th>\" + \"Height\"", "vin = {} current_amount = 0 total_amount = sum(amounts) i", "< 10: # wallet = get_wallet_from_db(receiver) # if wallet is", "+ \"</td></tr>\" html += \"<tr><th>\" + \"Prev Block Hash\" +", "\", fees: \" + str(transaction.fees) # html += \"<tr><th>Transaction \"", "message = \"Your reward is being sent, please wait for", "cached_get_block(headerhash: str) -> str: if headerhash: db_block = get_block_from_db(headerhash) if", ">= sum(amounts): 
result = send_bounty([pubkey], amounts) if result: message =", "0 for x, utxo_list in BLOCKCHAIN.active_chain.utxo.utxo.items(): tx_out = utxo_list[0] if", "Any, Dict, List from datetime import datetime import hashlib import", "# log_ip(request, inspect.stack()[0][3]) # number = int(request.forms.get(\"number\")) # message =", "= int(request.forms.get(\"myheight\")) hash_list = [] for i in range(peer_height, BLOCKCHAIN.active_chain.length):", "List[Dict[str, Any]] = [] MY_WALLET = Wallet() miner = Authority()", "r = requests.post(url + \"/greetpeer\", data=data) data = json.loads(r.text) #", "return \"Block already Received Before\" if BLOCKCHAIN.add_block(block): logger.info(\"Server: Received a", "= data[\"message\"] if len(receiver_public_key) < consts.PUBLIC_KEY_LENGTH: logger.debug(\"Invalid Receiver Public Key\")", "def transaction(blockhash, txhash): log_ip(request, inspect.stack()[0][3]) try: block = Block.from_json(get_block_from_db(blockhash)).object() tx", "\"application/json\" return json.dumps(data) @lru_cache(maxsize=128) def cached_get_block(headerhash: str) -> str: if", "Block received is invalid, Cannot Sync\") break return # Periodically", "List[str], amounts: List[int], sender_public_key, message=\"\") -> Transaction: vout = {}", "= data[\"public_key\"] tx_hist = BLOCKCHAIN.active_chain.transaction_history.get(public_key) return json.dumps(tx_hist) @app.post(\"/greetpeer\") def greet_peer_f():", "eat it, you'll die?''' actual_answer = \"nothing\" @app.get(\"/\") def home():", "# Periodically sync with all the peers def sync_with_peers(): try:", "\"/getblock\", data={\"headerhash\": header_hash}) return Block.from_json(decompress(r.text)).object() def check_block_with_peer(peer, hhash): r =", "MY_WALLET.public_key, message=\"Authority: Faucet Money\") transaction.sign(MY_WALLET) logger.info(\"Wallet: Attempting to Send Transaction\")", "message=message) return tx def get_ip(request): return request.environ.get(\"HTTP_X_FORWARDED_FOR\") or 
request.environ.get(\"REMOTE_ADDR\") def", "logger.debug(\"Server: Sending new block to peers\") # Broadcast block to", "= '''What is greater than God, more evil than the", "TxIn, TxOut, genesis_block from authority import Authority from utils.logger import", "json.loads(r.text) # Update the peer data in the peer list", "hashlib import inspect import requests import waitress from bottle import", "if current_balance < bounty: logger.debug(\"Insufficient Balance to make Transaction\") response.status", "BLOCKCHAIN block_json = decompress(request_data) if block_json: try: block = Block.from_json(block_json).object()", "utils.utils import compress, decompress, dhash from wallet import Wallet app", "\"Block Hash\" + \"</th>\" html += \"<td>\" + dhash(hdr) +", "log_ip(request, inspect.stack()[0][3]) result, message = process_new_transaction(request.body.read()) if result: response.status =", "\"danger\" # return template(\"wallet.html\", message=message, message_type=message_type, pubkey=MY_WALLET.public_key) @app.get(\"/checkmybalance\") def checkblance():", "Genesis\") BLOCKCHAIN.add_block(genesis_block) else: # Restore Blockchain logger.info(\"FullNode: Restoring Existing Chain\")", "if ip == peer[\"ip\"] and port == peer[\"port\"]: ADD_ENTRY =", "port == peer[\"port\"]: ADD_ENTRY = False if ADD_ENTRY: PEER_LIST.append(peer) logger.debug(\"Server:", "Error Occured. 
Please try again later.\" # message_type = \"danger\"", "# number = int(request.forms.get(\"number\")) # message = \"\" # message_type", "return compress(db_block) else: logger.error(\"ERROR CALLED GETBLOCK FOR NON EXISTENT BLOCK\")", "= [Block.from_json(get_block_from_db(dhash(hdr_list[i]))).object() for i in indexes] transactions = list(BLOCKCHAIN.mempool) return", "+ \"</th>\" html += \"<td>\" + str(hdr.height) + \"</td></tr>\" html", "name=\"transaction\") def transaction(blockhash, txhash): log_ip(request, inspect.stack()[0][3]) try: block = Block.from_json(get_block_from_db(blockhash)).object()", "or request.environ.get(\"REMOTE_ADDR\") def log_ip(request, fname): client_ip = get_ip(request) iplogger.info(f\"{client_ip} :", "visualize_chain(): log_ip(request, inspect.stack()[0][3]) data = [] start = BLOCKCHAIN.active_chain.length -", "= render_block_header(hdr) headers.append(d) data.append(headers) return template(\"chains.html\", data=data, start=start) @app.get(\"/explorer\") def", "log_ip(request, inspect.stack()[0][3]) headerhash = request.forms.get(\"headerhash\") if get_block_from_db(headerhash): return json.dumps(True) return", "if len(receiver) < 10: # wallet = get_wallet_from_db(receiver) # if", "it, and if you eat it, you'll die?''' actual_answer =", "def fetch_peer_list() -> List[Dict[str, Any]]: try: r = requests.post(consts.SEED_SERVER_URL, data={\"port\":", "PEER_LIST = new_peer_list if PEER_LIST: max_peer = max(PEER_LIST, key=lambda k:", "False, \"Not Valid Transaction\" return True, \"Done\" # Transactions for", "TODO Make new chain/ orphan set for Block that is", "the rich need it, and if you eat it, you'll", "= create_transaction([receiver_public_key], [bounty], sender_public_key, message=message) data = {} data[\"send_this\"] =", "if __name__ == \"__main__\": try: if consts.NEW_BLOCKCHAIN: logger.info(\"FullNode: Starting New", "Hash\" + \"</th>\" html += \"<td>\" + str(hdr.prev_block_hash) + \"</td></tr>\"", "inspect.stack()[0][3]) 
result, message = process_new_transaction(request.body.read()) if result: response.status = 200", "POST request to the peer r = requests.post(url + \"/greetpeer\",", "Exception as e: logger.error(\"Sync: Error: \" + str(e)) Timer(consts.MINING_INTERVAL_THRESHOLD *", "= \"<table>\" html += \"<tr><th>\" + \"Height\" + \"</th>\" html", "message = \"You have Insufficient Balance!\" # message_type = \"warning\"", "vout[i] = TxOut(amount=amounts[i], address=address) change = (current_amount - total_amount) if", "json.dumps(True) return json.dumps(False) @app.post(\"/getblockhashes\") def send_block_hashes(): log_ip(request, inspect.stack()[0][3]) peer_height =", "global BLOCKCHAIN transaction_json = decompress(request_data) if transaction_json: try: tx =", "inspect.stack()[0][3]) try: block = Block.from_json(get_block_from_db(blockhash)).object() except Exception as e: logger.debug(\"BLOCK/blockhash:", "global BLOCKCHAIN block_json = decompress(request_data) if block_json: try: block =", "return True except Exception as e: logger.error(\"Wallet: Could not Send", "for t in block.transactions: if t.hash() == txhash: tx =", "+= ( \"<td>\" + str(datetime.fromtimestamp(hdr.timestamp).strftime(\"%d-%m-%Y %H:%M:%S\")) + \" (\" +", "+ \"Prev Block Hash\" + \"</th>\" html += \"<td>\" +", "10: # wallet = get_wallet_from_db(receiver) # if wallet is not", "else: response.status = 400 return message question = '''What is", "import requests import waitress from bottle import BaseTemplate, Bottle, request,", "= request.json public_key = data[\"public_key\"] tx_hist = BLOCKCHAIN.active_chain.transaction_history.get(public_key) return json.dumps(tx_hist)", "= {\"port\": consts.MINER_SERVER_PORT, \"version\": consts.MINER_VERSION, \"blockheight\": BLOCKCHAIN.active_chain.length} # Send a", "# total_amount = 0 # for i in range(0, number):", "logger.debug(\"Someone trying to send money to himself\") response.status = 400", "received \" + str(e)) return \"Invalid Block Received\" # Kill", "= 
entry[\"ip\"] port = entry[\"port\"] if ip == peer[\"ip\"] and", "+ str(hdr.merkle_root) + \"</td></tr>\" html += \"<tr><th>\" + \"Timestamp\" +", "= 0 hdr_list = list(reversed(BLOCKCHAIN.active_chain.header_list)) indexes = [i for i", "Cannot Sync\") break return # Periodically sync with all the", "sendinfo(): log_ip(request, inspect.stack()[0][3]) s = ( \"No. of Blocks: \"", "\"Try Again\" except Exception as e: response.status = 400 logger.error(\"Wallet:", "@app.get(\"/favicon.ico\") def get_favicon(): log_ip(request, inspect.stack()[0][3]) return static_file(\"favicon.ico\", root=\"static\") @app.get(\"/info\") def", "str(s) + \"</td></tr>\" html += \"</table>\" return str(html) @app.get(\"/chains\") def", "else: logger.debug(\"The transation is not valid, not added to Mempool\")", "valid_ids: logger.debug(\"Valid Answer, Rewarding \" + pubkey) message = \"Well", "e: logger.debug(\"Server: Requests: Error while sending data in process\" +", "> 10 else 0 headers = [] hdr_list = BLOCKCHAIN.active_chain.header_list", "transaction.sign(MY_WALLET) logger.info(\"Wallet: Attempting to Send Transaction\") try: r = requests.post(", "i, transaction in enumerate(block.transactions): # s = \"coinbase: \" +", "to himself\") response.status = 400 return \"Cannot send money to", "- total_amount) if change > 0: vout[i + 1] =", "+ str(e)) return False def create_transaction(receiver_public_keys: List[str], amounts: List[int], sender_public_key,", "\"Try Again\" else: logger.info(\"Wallet: Transaction Sent, Wait for it to", "time.time() peer[\"version\"] = request.forms.get(\"version\") peer[\"blockheight\"] = request.forms.get(\"blockheight\") ADD_ENTRY = True", "= app.get_url LINE_PROFILING = False BLOCKCHAIN = BlockChain() PEER_LIST: List[Dict[str,", "str(e)) return template(\"error.html\") return template(\"transaction.html\", tx=tx, block=block) @app.route(\"/address/<pubkey:re:.+>\", name=\"account\") def", "return \"Password Mismatch,\" + \"NOT MINING\" if 
consts.NO_MINING else \"MINING\"", "\"MINING\" @app.route(\"/<url:re:.+>\") @error(403) @error(404) @error(505) def error_handle(url=\"url\", error=\"404\"): log_ip(request, inspect.stack()[0][3])", "you'll die?''' actual_answer = \"nothing\" @app.get(\"/\") def home(): log_ip(request, inspect.stack()[0][3])", "return False def receive_block_from_peer(peer: Dict[str, Any], header_hash) -> Block: r", "return str(current_balance) @app.post(\"/makeTransaction\") def make_transaction(): log_ip(request, inspect.stack()[0][3]) data = request.json", "BLOCKCHAIN transaction_json = decompress(request_data) if transaction_json: try: tx = Transaction.from_json(transaction_json).object()", "logger.debug(\"Server: Greet Error: \" + str(e)) pass data = {\"version\":", "Transaction Sent, Wait for it to be Mined\") return \"Done\"", "r = requests.post(get_peer_url(peer) + \"/getblock\", data={\"headerhash\": header_hash}) return Block.from_json(decompress(r.text)).object() def", "elif MY_WALLET.public_key in receiver_public_keys: logger.debug(\"Cannot send to myself\") else: transaction", "= [i for i in range(prev * 8, (prev +", "create_transaction([receiver_public_key], [bounty], sender_public_key, message=message) data = {} data[\"send_this\"] = transaction.to_json()", "hhash}) result = json.loads(r.text) if result: return True return False", "Occured, Contact Admin.\" # message_type = \"warning\" # else: #", "-> Block: r = requests.post(get_peer_url(peer) + \"/getblock\", data={\"headerhash\": header_hash}) return", "@app.post(\"/greetpeer\") def greet_peer_f(): log_ip(request, inspect.stack()[0][3]) try: peer = {} peer[\"port\"]", "receiver # total_amount += bounty # receivers.append(publickey) # amounts.append(bounty) #", "data=data) data = json.loads(r.text) # Update the peer data in", "except Exception as e: logger.debug(\"Server: Requests: Error while sending data", "BLOCKCHAIN.active_chain.length - 10 if BLOCKCHAIN.active_chain.length > 10 else 0 headers", 
"sync_with_peers).start() def check_balance(pub_key: str) -> int: current_balance = 0 for", "peer[\"time\"] = time.time() peer[\"version\"] = request.forms.get(\"version\") peer[\"blockheight\"] = request.forms.get(\"blockheight\") ADD_ENTRY", "Root\" + \"</th>\" html += \"<td>\" + str(hdr.merkle_root) + \"</td></tr>\"", "from datetime import datetime import hashlib import inspect import requests", "url, data=data, timeout=(5, 1)) except Exception as e: logger.debug(\"Server: Requests:", "log_ip(request, inspect.stack()[0][3]) data = request.json public_key = data[\"public_key\"] logger.debug(public_key) current_balance", "if r.status_code == 400: logger.info(\"Wallet: Could not Send Transaction. Invalid", "to List\") except Exception as e: logger.debug(\"Server: Greet Error: \"", "Again\" except Exception as e: response.status = 400 logger.error(\"Wallet: Could", "if you eat it, you'll die?''' actual_answer = \"nothing\" @app.get(\"/\")", "1] = TxOut(amount=change, address=sender_public_key) tx = Transaction(version=consts.MINER_VERSION, locktime=0, timestamp=int(time.time()), vin=vin,", "Receiver Public Key\" current_balance = check_balance(sender_public_key) if current_balance < bounty:", "peer r = requests.post(url + \"/greetpeer\", data=data) data = json.loads(r.text)", "peer[\"ip\"] = request.remote_addr peer[\"time\"] = time.time() peer[\"version\"] = request.forms.get(\"version\") peer[\"blockheight\"]", "Exception as e: logger.error(\"Server: New Transaction: Invalid tx received: \"", "-> str: global BLOCKCHAIN transaction_json = decompress(request_data) if transaction_json: try:", "json.dumps(False) @app.post(\"/getblockhashes\") def send_block_hashes(): log_ip(request, inspect.stack()[0][3]) peer_height = int(request.forms.get(\"myheight\")) hash_list", "received_new_transaction(): log_ip(request, inspect.stack()[0][3]) result, message = process_new_transaction(request.body.read()) if result: response.status", "str(bounty - current_balance) elif 
sender_public_key == receiver_public_key: logger.debug(\"Someone trying to", "== 400: response.status = 400 logger.error(\"Wallet: Could not Send Transaction.", "does not have Block Height\") return False return True except", "# logger.error(e) # message = \"Some Error Occured. Please try", "Block.from_json(block_json).object() # Check if block already exists if get_block_from_db(dhash(block.header)): logger.info(\"Server:", "def check_balance(pub_key: str) -> int: current_balance = 0 for x,", "i in indexes] transactions = list(BLOCKCHAIN.mempool) return template(\"explorer.html\", blocks=blocks, transactions=transactions,", "Send a POST request to the peer r = requests.post(url", "key=lambda k: k[\"blockheight\"]) logger.debug(f\"Sync: Syncing with {get_peer_url(max_peer)}, he seems to", "for hdr in hdr_list: d = {} d[\"hash\"] = dhash(hdr)[-5:]", "log_ip(request, inspect.stack()[0][3]) try: block = Block.from_json(get_block_from_db(blockhash)).object() tx = None for", "if change > 0: vout[i + 1] = TxOut(amount=change, address=sender_public_key)", "i in range(peer_height, BLOCKCHAIN.active_chain.length): hash_list.append(dhash(BLOCKCHAIN.active_chain.header_list[i])) return compress(json.dumps(hash_list)).decode() @lru_cache(maxsize=16) def process_new_block(request_data:", "int(request.forms.get(\"number\")) # message = \"\" # message_type = \"info\" #", "= Authority() def mining_thread_task(): while True: if not miner.is_mining() and", "block = Block.from_json(get_block_from_db(blockhash)).object() except Exception as e: logger.debug(\"BLOCK/blockhash: \" +", "+= int(tx_out.amount) return int(current_balance) def send_bounty(receiver_public_keys: List[str], amounts: List[int]): current_balance", "\" + \"NOT MINING\" if consts.NO_MINING else \"MINING\" else: return", "Block: invalid block received \" + str(e)) return \"Invalid Block", "\" + str(check_balance(MY_WALLET.public_key)) + \"<br>Public Key: <br>\" + str(get_wallet_from_db(consts.MINER_SERVER_PORT)[1]) 
)", "def getblock(): log_ip(request, inspect.stack()[0][3]) hhash = request.forms.get(\"headerhash\") return cached_get_block(hhash) @app.post(\"/checkblock\")", "= Transaction.from_json(transaction_json).object() # Add transaction to Mempool if tx not", "\"<td>\" + dhash(hdr) + \"</td></tr>\" html += \"<tr><th>\" + \"Prev", "\"</td></tr>\" html += \"<tr><th>\" + \"Block Hash\" + \"</th>\" html", "not None: # publickey = wallet[1] # else: # message", "send_bounty(receiver_public_keys: List[str], amounts: List[int]): current_balance = check_balance(MY_WALLET.public_key) for key in", "+= \"<tr><th>\" + \"Transactions\" + \"</th>\" html += \"<td>\" +", "get_block_from_db(headerhash) if db_block: return compress(db_block) else: logger.error(\"ERROR CALLED GETBLOCK FOR", "def puzzle(): log_ip(request, inspect.stack()[0][3]) message = \"\" message_type = \"info\"", "import datetime import hashlib import inspect import requests import waitress", "MY_WALLET.public_key in receiver_public_keys: logger.debug(\"Cannot send to myself\") else: transaction =", "[Block.from_json(get_block_from_db(dhash(hdr_list[i]))).object() for i in indexes] transactions = list(BLOCKCHAIN.mempool) return template(\"explorer.html\",", "to peers\") # Broadcast block to other peers send_to_all_peers(\"/newblock\", request_data)", "Authority() def mining_thread_task(): while True: if not miner.is_mining() and not", "data = json.loads(r.text) # Update the peer data in the", "e: logger.debug(\"Main: Could not greet peer\" + str(e)) return False", "len(key) < consts.PUBLIC_KEY_LENGTH: logger.debug(\"Invalid Public Key Length\") return False total_amount", "inspect.stack()[0][3]) return static_file(filename, root=\"static\") @app.get(\"/favicon.ico\") def get_favicon(): log_ip(request, inspect.stack()[0][3]) return", "def mining(): log_ip(request, inspect.stack()[0][3]) password = request.body.read().decode(\"utf-8\") hashed = 
b\"\\x11`\\x1e\\xdd\\xd1\\xb6\\x80\\x0f\\xd4\\xb0t\\x90\\x9b\\xd3]\\xa0\\xcc\\x1d\\x04$\\x8b\\xb1\\x19J\\xaa!T5-\\x9eJ\\xfcI5\\xc0\\xbb\\xf5\\xb1\\x9d\\xba\\xbef@\\xa1)\\xcf\\x9b]c(R\\x91\\x0e\\x9dMM\\xb6\\x94\\xa9\\xe2\\x94il\\x15\"", "timeout=(5, 1)) except Exception as e: logger.debug(\"Server: Requests: Error while", "data=compress(transaction.to_json()), timeout=(5, 1), ) if r.status_code == 400: response.status =", "+ str(not consts.NO_MINING)) return \"Mining Toggled, \" + \"NOT MINING\"", "it, you'll die?''' actual_answer = \"nothing\" @app.get(\"/\") def home(): log_ip(request,", "dhash(BLOCKCHAIN.active_chain.header_list[height]) def sync(max_peer): fork_height = BLOCKCHAIN.active_chain.length r = requests.post(get_peer_url(max_peer) +", "Transaction Sent, Wait for it to be Mined\") return True", "= \"\" message_type = \"info\" return template(\"index.html\", message=message, message_type=message_type, question=question)", "+ \"</td></tr>\" html += \"<tr><th>\" + \"Block Hash\" + \"</th>\"", "= request.body.read().decode(\"utf-8\") hashed = b\"\\x11`\\x1e\\xdd\\xd1\\xb6\\x80\\x0f\\xd4\\xb0t\\x90\\x9b\\xd3]\\xa0\\xcc\\x1d\\x04$\\x8b\\xb1\\x19J\\xaa!T5-\\x9eJ\\xfcI5\\xc0\\xbb\\xf5\\xb1\\x9d\\xba\\xbef@\\xa1)\\xcf\\x9b]c(R\\x91\\x0e\\x9dMM\\xb6\\x94\\xa9\\xe2\\x94il\\x15\" dk = hashlib.pbkdf2_hmac(\"sha512\", password.encode(\"utf-8\"), b\"<PASSWORD>\",", "return request.environ.get(\"HTTP_X_FORWARDED_FOR\") or request.environ.get(\"REMOTE_ADDR\") def log_ip(request, fname): client_ip = get_ip(request)", "True return False def get_block_header_hash(height): return dhash(BLOCKCHAIN.active_chain.header_list[height]) def sync(max_peer): fork_height", "\"Invalid Unique ID!\" message_type = \"danger\" return template(\"index.html\", message=message, message_type=message_type,", "= 400 return \"Insufficient Balance to make Transaction, need more", "if get_block_from_db(headerhash): return json.dumps(True) return json.dumps(False) @app.post(\"/getblockhashes\") def 
send_block_hashes(): log_ip(request,", "Periodically sync with all the peers def sync_with_peers(): try: PEER_LIST", "log_ip(request, inspect.stack()[0][3]) message = \"\" message_type = \"info\" uuid =", "Starting New Chain from Genesis\") BLOCKCHAIN.add_block(genesis_block) else: # Restore Blockchain", "the poor have it, the rich need it, and if", "have it, the rich need it, and if you eat", "to the peer r = requests.post(url + \"/greetpeer\", data=data) data", "check_balance(MY_WALLET.public_key) >= total_amount: # result = send_bounty(receivers, amounts) # if", "log_ip(request, inspect.stack()[0][3]) return static_file(filename, root=\"static\") @app.get(\"/favicon.ico\") def get_favicon(): log_ip(request, inspect.stack()[0][3])", "= transaction.to_json() return json.dumps(data) @app.post(\"/sendTransaction\") def send_transaction(): log_ip(request, inspect.stack()[0][3]) data", "wallet import Wallet app = Bottle() BaseTemplate.defaults[\"get_url\"] = app.get_url LINE_PROFILING", "as e: logger.debug(\"BLOCK/blockhash: \" + str(e)) return template(\"error.html\") return template(\"block.html\",", "data[\"public_key\"] logger.debug(public_key) current_balance = check_balance(public_key) return str(current_balance) @app.post(\"/makeTransaction\") def make_transaction():", "pubkey=MY_WALLET.public_key) # except Exception as e: # logger.error(e) # message", "youself\" else: transaction = create_transaction([receiver_public_key], [bounty], sender_public_key, message=message) data =", "True, \"Transaction Already received\" except Exception as e: logger.error(\"Server: New", "= request.forms.get(\"pubkey\") amounts = [300] if uuid in valid_ids: logger.debug(\"Valid", "= \"warning\" # else: # message = \"You have Insufficient", "[300] if uuid in valid_ids: logger.debug(\"Valid Answer, Rewarding \" +", "\"</th>\" html += \"<td>\" + str(hdr.prev_block_hash) + \"</td></tr>\" html +=", "= [] for peer in PEER_LIST: if greet_peer(peer): new_peer_list.append(peer) PEER_LIST", 
"len(hdr_list)] blocks = [Block.from_json(get_block_from_db(dhash(hdr_list[i]))).object() for i in indexes] transactions =", "is not valid, not added to Mempool\") return False, \"Not", "# message = \"Some Error Occured. Please try again later.\"", "+ \"</th>\" html += \"<td>\" + dhash(hdr) + \"</td></tr>\" html", "{max_peer['blockheight']}\") sync(max_peer) except Exception as e: logger.error(\"Sync: Error: \" +", "r.status_code == 400: response.status = 400 logger.error(\"Wallet: Could not Send", "Contact Admin.\" # message_type = \"warning\" # else: # message", "error_handle(url=\"url\", error=\"404\"): log_ip(request, inspect.stack()[0][3]) return template(\"error.html\") if __name__ == \"__main__\":", "# for i in range(0, number): # receiver = str(request.forms.get(\"port\"", "Dict[str, Any]) -> bool: try: url = get_peer_url(peer) data =", "+ str(transaction.is_coinbase) + \", fees: \" + str(transaction.fees) # html", "message_type=message_type, pubkey=MY_WALLET.public_key) # except Exception as e: # logger.error(e) #", "False BLOCKCHAIN = BlockChain() PEER_LIST: List[Dict[str, Any]] = [] MY_WALLET", "Timer(1, miner.stop_mining) t.start() return \"Block Received\" logger.error(\"Server: Invalid Block Received\")", "if uuid in valid_ids: logger.debug(\"Valid Answer, Rewarding \" + pubkey)", "Bottle() BaseTemplate.defaults[\"get_url\"] = app.get_url LINE_PROFILING = False BLOCKCHAIN = BlockChain()", "is invalid, Cannot Sync\") break return # Periodically sync with", "@app.post(\"/\") def puzzle(): log_ip(request, inspect.stack()[0][3]) message = \"\" message_type =", "chain/ orphan set for Block that is not added except", "Exception as e: logger.error(\"Wallet: Could not Send Transaction. 
Try Again.\"", "more evil than the devil, the poor have it, the", "t.hash() == txhash: tx = t except Exception as e:", "r = requests.post(consts.SEED_SERVER_URL, data={\"port\": consts.MINER_SERVER_PORT}) peer_list = json.loads(r.text) return peer_list", "blocks=blocks, transactions=transactions, prev=prev) @app.route(\"/block/<blockhash>\", name=\"transaction\") def block(blockhash): log_ip(request, inspect.stack()[0][3]) try:", "as f: app = LineProfilerMiddleware(app, stream=f, async_stream=True) waitress.serve(app, host=\"0.0.0.0\", threads=16,", "400 logger.error(\"Wallet: Could not Send Transaction. Try Again.\" + str(e))", "@app.route(\"/block/<blockhash>\", name=\"transaction\") def block(blockhash): log_ip(request, inspect.stack()[0][3]) try: block = Block.from_json(get_block_from_db(blockhash)).object()", "import LineProfilerMiddleware with open(\"lineprof\" + str(consts.MINER_SERVER_PORT) + \".log\", \"w\") as", "\"Merkle Root\" + \"</th>\" html += \"<td>\" + str(hdr.merkle_root) +", "e: logger.error(\"Server: New Transaction: Invalid tx received: \" + str(e))", "in block.transactions: if t.hash() == txhash: tx = t except", "not have Block Height\") return False return True except Exception", "for Block that is not added except Exception as e:", "to Mempool if tx not in BLOCKCHAIN.mempool: if BLOCKCHAIN.active_chain.is_transaction_valid(tx): logger.debug(\"Valid", "Height\") return False return True except Exception as e: logger.debug(\"Main:", "process_new_block(request_data: bytes) -> str: global BLOCKCHAIN block_json = decompress(request_data) if", "json.dumps(data) @lru_cache(maxsize=128) def cached_get_block(headerhash: str) -> str: if headerhash: db_block", "inspect.stack()[0][3]) password = request.body.read().decode(\"utf-8\") hashed = 
b\"\\x11`\\x1e\\xdd\\xd1\\xb6\\x80\\x0f\\xd4\\xb0t\\x90\\x9b\\xd3]\\xa0\\xcc\\x1d\\x04$\\x8b\\xb1\\x19J\\xaa!T5-\\x9eJ\\xfcI5\\xc0\\xbb\\xf5\\xb1\\x9d\\xba\\xbef@\\xa1)\\xcf\\x9b]c(R\\x91\\x0e\\x9dMM\\xb6\\x94\\xa9\\xe2\\x94il\\x15\" dk = hashlib.pbkdf2_hmac(\"sha512\",", "Adding to Mempool\") BLOCKCHAIN.mempool.add(tx) # Broadcast block to other peers", "to have height {max_peer['blockheight']}\") sync(max_peer) except Exception as e: logger.error(\"Sync:", "+ 1) * 8) if i < len(hdr_list)] blocks =", "MINING\" if consts.NO_MINING else \"MINING\" @app.route(\"/<url:re:.+>\") @error(403) @error(404) @error(505) def", "BLOCKCHAIN.active_chain.length} # Send a POST request to the peer r", "= request.forms.get(\"headerhash\") return cached_get_block(hhash) @app.post(\"/checkblock\") def checkblock(): log_ip(request, inspect.stack()[0][3]) headerhash", "# Kill Miner t = Timer(1, miner.stop_mining) t.start() return \"Block", "Again.\" + str(e)) return \"Try Again\" else: logger.info(\"Wallet: Transaction Sent,", "True except Exception as e: logger.debug(\"Main: Could not greet peer\"", "and not consts.NO_MINING: miner.start_mining(BLOCKCHAIN.mempool, BLOCKCHAIN.active_chain, MY_WALLET) time.sleep(consts.MINING_INTERVAL_THRESHOLD // 2) def", "+ str(len(block.transactions)) + \"</td></tr>\" # for i, transaction in enumerate(block.transactions):", "== txhash: tx = t except Exception as e: logger.debug(\"Transaction/bhash/tx:", "logger.debug(\"Insufficient Balance to make Transaction\") response.status = 400 return \"Insufficient", "db_block = get_block_from_db(headerhash) if db_block: return compress(db_block) else: logger.error(\"ERROR CALLED", "Error: \" + str(e)) pass data = {\"version\": consts.MINER_VERSION, \"blockheight\":", "+= \"<td>\" + dhash(hdr) + \"</td></tr>\" html += \"<tr><th>\" +", "if wallet is not None: # publickey = wallet[1] #", "dhash(hdr) + \"</td></tr>\" html += \"<tr><th>\" + \"Prev Block Hash\"", "return [] def get_peer_url(peer: Dict[str, Any]) -> 
str: return \"http://\"", "peer_height = int(request.forms.get(\"myheight\")) hash_list = [] for i in range(peer_height,", "return True, \"Transaction Already received\" except Exception as e: logger.error(\"Server:", "= 400 return \"Cannot send money to youself\" else: transaction", "decompress, dhash from wallet import Wallet app = Bottle() BaseTemplate.defaults[\"get_url\"]", "pub_key: current_balance += int(tx_out.amount) return int(current_balance) def send_bounty(receiver_public_keys: List[str], amounts:", "= \"Your transaction is sent, please wait for it to", ") return s def render_block_header(hdr): html = \"<table>\" html +=", "in enumerate(block.transactions): # s = \"coinbase: \" + str(transaction.is_coinbase) +", "BLOCKCHAIN.active_chain.utxo.utxo.items(): tx_out = utxo_list[0] if tx_out.address == pub_key: current_balance +=", "request_data) # TODO Make new chain/ orphan set for Block", "with open('uuids.json', 'r') as file: uuid_json = file.read() valid_ids =", "pubkey) message = \"Well Done!\" if check_balance(MY_WALLET.public_key) >= sum(amounts): result", "+ str(transaction.fees) # html += \"<tr><th>Transaction \" + str(i) +", "Transaction. 
Invalid transaction\") return \"Try Again\" except Exception as e:", "@app.post(\"/wallet\") # def wallet_post(): # log_ip(request, inspect.stack()[0][3]) # number =", "transaction.to_json() return json.dumps(data) @app.post(\"/sendTransaction\") def send_transaction(): log_ip(request, inspect.stack()[0][3]) data =", "= json.loads(decompress(r.text.encode())) for hhash in hash_list: block = receive_block_from_peer(max_peer, hhash)", "peers sync_with_peers() # Start mining Thread Thread(target=start_mining_thread, daemon=True).start() if consts.NO_MINING:", "logger.debug(\"The transation is not valid, not added to Mempool\") return", "inspect.stack()[0][3]) try: peer = {} peer[\"port\"] = request.forms.get(\"port\") peer[\"ip\"] =", "transation is not valid, not added to Mempool\") return False,", "Restore Blockchain logger.info(\"FullNode: Restoring Existing Chain\") header_list = read_header_list_from_db() BLOCKCHAIN.build_from_header_list(header_list)", "if consts.NO_MINING: logger.info(\"FullNode: Not Mining\") # Start server if LINE_PROFILING:", "if len(key) < consts.PUBLIC_KEY_LENGTH: logger.debug(\"Invalid Public Key Length\") return False", "# @app.get(\"/wallet\") # def wallet(): # log_ip(request, inspect.stack()[0][3]) # return", "0 for so, utxo_list in BLOCKCHAIN.active_chain.utxo.utxo.items(): tx_out = utxo_list[0] if", "Mining\") # Start server if LINE_PROFILING: from wsgi_lineprof.middleware import LineProfilerMiddleware", "hhash) if not BLOCKCHAIN.add_block(block): logger.error(\"Sync: Block received is invalid, Cannot", "amounts) if result: message = \"Your reward is being sent,", "if result: # message = \"Your transaction is sent, please", "template(\"index.html\", message=message, message_type=message_type, question=question) with open('uuids.json', 'r') as file: uuid_json", "-> str: if headerhash: db_block = get_block_from_db(headerhash) if db_block: return", "= read_header_list_from_db() BLOCKCHAIN.build_from_header_list(header_list) # Sync with 
all my peers sync_with_peers()", "Receiver Port ID, try again.\" # message_type = \"danger\" #", "need it, and if you eat it, you'll die?''' actual_answer", "Attempting to Send Transaction\") try: r = requests.post( \"http://0.0.0.0:\" +", "def account(pubkey): log_ip(request, inspect.stack()[0][3]) balance = check_balance(pubkey) tx_hist = BLOCKCHAIN.active_chain.transaction_history.get(pubkey)", "for i in indexes] transactions = list(BLOCKCHAIN.mempool) return template(\"explorer.html\", blocks=blocks,", "BLOCK\") return \"Invalid Hash\" @app.post(\"/getblock\") def getblock(): log_ip(request, inspect.stack()[0][3]) hhash", "= file.read() valid_ids = set(json.loads(uuid_json)) @app.post(\"/\") def puzzle(): log_ip(request, inspect.stack()[0][3])", "sent, please wait for it to be mined!\" valid_ids.remove(uuid) else:", "Transaction.from_json(data[\"transaction\"]).object() sig = data[\"signature\"] transaction.add_sign(sig) logger.debug(transaction) logger.info(\"Wallet: Attempting to Send", "block exists, doing nothing\") return \"Block already Received Before\" if", "\"<td>\" + str(hdr.height) + \"</td></tr>\" html += \"<tr><th>\" + \"Block", "\"Block Received\" logger.error(\"Server: Invalid Block Received\") return \"Invalid Block\" @app.post(\"/newblock\")", "render_block_header(hdr) headers.append(d) data.append(headers) return template(\"chains.html\", data=data, start=start) @app.get(\"/explorer\") def explorer():", "transaction_json: try: tx = Transaction.from_json(transaction_json).object() # Add transaction to Mempool", "# Update the peer data in the peer list with", "\"blockheight\": BLOCKCHAIN.active_chain.length} # Send a POST request to the peer", "BLOCKCHAIN.add_block(block): logger.info(\"Server: Received a New Valid Block, Adding to Chain\")", "url, data): for peer in peers: try: requests.post(get_peer_url(peer) + url,", "= \"Error with the Receiver Port ID, try again.\" #", "Dict[str, Any], header_hash) -> Block: r = 
requests.post(get_peer_url(peer) + \"/getblock\",", "return True except Exception as e: logger.debug(\"Main: Could not greet", "\"MINING\" else: return \"Password Mismatch,\" + \"NOT MINING\" if consts.NO_MINING", "Valid Transaction\" return True, \"Done\" # Transactions for all active", "int(request.forms.get(\"amount\" + str(i))) # publickey = \"\" # if len(receiver)", "[] # total_amount = 0 # for i in range(0,", "s = \"coinbase: \" + str(transaction.is_coinbase) + \", fees: \"", "False total_amount = sum(amounts) if current_balance < total_amount: logger.debug(\"Insuficient balance\")", "get_ip(request) iplogger.info(f\"{client_ip} : Called function {fname}\") @app.post(\"/checkBalance\") def checkingbalance(): log_ip(request,", "as e: logger.debug(\"Transaction/bhash/tx: \" + str(e)) return template(\"error.html\") return template(\"transaction.html\",", "time from functools import lru_cache from multiprocessing import Pool, Process", "sum(amounts) if current_balance < total_amount: logger.debug(\"Insuficient balance\") elif MY_WALLET.public_key in", "tx received: \" + str(e)) return False, \"Not Valid Transaction\"", "for entry in PEER_LIST: ip = entry[\"ip\"] port = entry[\"port\"]", "for x, utxo_list in BLOCKCHAIN.active_chain.utxo.utxo.items(): tx_out = utxo_list[0] if tx_out.address", "from core import Block, BlockChain, SingleOutput, Transaction, TxIn, TxOut, genesis_block", "= 0 for so, utxo_list in BLOCKCHAIN.active_chain.utxo.utxo.items(): tx_out = utxo_list[0]", "@app.post(\"/sendTransaction\") def send_transaction(): log_ip(request, inspect.stack()[0][3]) data = request.json transaction =", "result: message = \"Your reward is being sent, please wait", "= \"Well Done!\" if check_balance(MY_WALLET.public_key) >= sum(amounts): result = send_bounty([pubkey],", "db_block: return compress(db_block) else: logger.error(\"ERROR CALLED GETBLOCK FOR NON EXISTENT", "get_wallet_from_db, read_header_list_from_db from utils.utils import compress, decompress, dhash 
from wallet", "# message = \"\" # message_type = \"info\" # try:", "total_amount: # result = send_bounty(receivers, amounts) # if result: #", "400: logger.info(\"Wallet: Could not Send Transaction. Invalid Transaction\") else: logger.info(\"Wallet:", "[i for i in range(prev * 8, (prev + 1)", "if result: return True return False def get_block_header_hash(height): return dhash(BLOCKCHAIN.active_chain.header_list[height])", "str(e)) Timer(consts.MINING_INTERVAL_THRESHOLD * 2, sync_with_peers).start() def check_balance(pub_key: str) -> int:", "current_balance = 0 for x, utxo_list in BLOCKCHAIN.active_chain.utxo.utxo.items(): tx_out =", "except Exception as e: logger.error(\"Server: New Transaction: Invalid tx received:", "if BLOCKCHAIN.active_chain.length > 10 else 0 headers = [] hdr_list", "wallet is not None: # publickey = wallet[1] # else:", "\"<tr><th>\" + \"Prev Block Hash\" + \"</th>\" html += \"<td>\"", "= check_balance(MY_WALLET.public_key) for key in receiver_public_keys: if len(key) < consts.PUBLIC_KEY_LENGTH:", "Before\" if BLOCKCHAIN.add_block(block): logger.info(\"Server: Received a New Valid Block, Adding", "block to other peers send_to_all_peers(\"/newtransaction\", request_data) else: logger.debug(\"The transation is", "block = Block.from_json(get_block_from_db(dhash(hdr))).object() html += \"<tr><th>\" + \"Transactions\" + \"</th>\"", "reward is being sent, please wait for it to be", "from utils.utils import compress, decompress, dhash from wallet import Wallet", "receiver = str(request.forms.get(\"port\" + str(i))) # bounty = int(request.forms.get(\"amount\" +", "log_ip(request, inspect.stack()[0][3]) return static_file(\"favicon.ico\", root=\"static\") @app.get(\"/info\") def sendinfo(): log_ip(request, inspect.stack()[0][3])", "headers.append(d) data.append(headers) return template(\"chains.html\", data=data, start=start) @app.get(\"/explorer\") def explorer(): log_ip(request,", "peer[\"version\"] = request.forms.get(\"version\") 
peer[\"blockheight\"] = request.forms.get(\"blockheight\") ADD_ENTRY = True for", "block block = Block.from_json(get_block_from_db(dhash(hdr))).object() html += \"<tr><th>\" + \"Transactions\" +", "rich need it, and if you eat it, you'll die?'''", "start=start) @app.get(\"/explorer\") def explorer(): log_ip(request, inspect.stack()[0][3]) prev = int(request.query.prev or", "inspect.stack()[0][3]) prev = int(request.query.prev or 0) if prev < 0:", "check_balance(pub_key: str) -> int: current_balance = 0 for x, utxo_list", "= \"Some Error Occured. Please try again later.\" # message_type", "result, message = process_new_transaction(request.body.read()) if result: response.status = 200 else:", "def sync(max_peer): fork_height = BLOCKCHAIN.active_chain.length r = requests.post(get_peer_url(max_peer) + \"/getblockhashes\",", "\"\" # if len(receiver) < 10: # wallet = get_wallet_from_db(receiver)", "Sync\") break return # Periodically sync with all the peers", "new peer joined, Adding to List\") except Exception as e:", "greet_peer(peer): new_peer_list.append(peer) PEER_LIST = new_peer_list if PEER_LIST: max_peer = max(PEER_LIST,", "d[\"data\"] = render_block_header(hdr) headers.append(d) data.append(headers) return template(\"chains.html\", data=data, start=start) @app.get(\"/explorer\")", "@app.post(\"/checkblock\") def checkblock(): log_ip(request, inspect.stack()[0][3]) headerhash = request.forms.get(\"headerhash\") if get_block_from_db(headerhash):", "Admin.\" message_type = \"warning\" else: message = \"Invalid Unique ID!\"", "Port ID, try again.\" # message_type = \"danger\" # return", "Start mining Thread Thread(target=start_mining_thread, daemon=True).start() if consts.NO_MINING: logger.info(\"FullNode: Not Mining\")", "message=message, message_type=message_type, pubkey=MY_WALLET.public_key) @app.get(\"/checkmybalance\") def checkblance(): log_ip(request, inspect.stack()[0][3]) return str(check_balance(MY_WALLET.public_key))", "Blockchain logger.info(\"FullNode: 
Restoring Existing Chain\") header_list = read_header_list_from_db() BLOCKCHAIN.build_from_header_list(header_list) #", "tx = t except Exception as e: logger.debug(\"Transaction/bhash/tx: \" +", "trying to send money to himself\") response.status = 400 return", "new_peer_list = [] for peer in PEER_LIST: if greet_peer(peer): new_peer_list.append(peer)", "html += \"<tr><th>\" + \"Merkle Root\" + \"</th>\" html +=", "in range(peer_height, BLOCKCHAIN.active_chain.length): hash_list.append(dhash(BLOCKCHAIN.active_chain.header_list[i])) return compress(json.dumps(hash_list)).decode() @lru_cache(maxsize=16) def process_new_block(request_data: bytes)", "change = (current_amount - total_amount) if change > 0: vout[i", "{get_peer_url(max_peer)}, he seems to have height {max_peer['blockheight']}\") sync(max_peer) except Exception", "Send Transaction. Invalid Transaction\") else: logger.info(\"Wallet: Transaction Sent, Wait for", "sync with all the peers def sync_with_peers(): try: PEER_LIST =", "html += \"<td>\" + str(hdr.prev_block_hash) + \"</td></tr>\" html += \"<tr><th>\"", "a New Valid Block, Adding to Chain\") logger.debug(\"Server: Sending new", "to DNS Seed\") return [] def get_peer_url(peer: Dict[str, Any]) ->", "try: tx = Transaction.from_json(transaction_json).object() # Add transaction to Mempool if", "for i in range(peer_height, BLOCKCHAIN.active_chain.length): hash_list.append(dhash(BLOCKCHAIN.active_chain.header_list[i])) return compress(json.dumps(hash_list)).decode() @lru_cache(maxsize=16) def", "Miner t = Timer(1, miner.stop_mining) t.start() return \"Block Received\" logger.error(\"Server:", "tx def get_ip(request): return request.environ.get(\"HTTP_X_FORWARDED_FOR\") or request.environ.get(\"REMOTE_ADDR\") def log_ip(request, fname):", "\"/newtransaction\", data=compress(transaction.to_json()), timeout=(5, 1), ) if r.status_code == 400: logger.info(\"Wallet:", "inspect.stack()[0][3]) data = request.json bounty = int(data[\"bounty\"]) receiver_public_key = 
data[\"receiver_public_key\"]", "Block.from_json(get_block_from_db(blockhash)).object() except Exception as e: logger.debug(\"BLOCK/blockhash: \" + str(e)) return", "+ \"</th>\" html += ( \"<td>\" + str(datetime.fromtimestamp(hdr.timestamp).strftime(\"%d-%m-%Y %H:%M:%S\")) +", "= \"\" # if len(receiver) < 10: # wallet =", "+ str(check_balance(MY_WALLET.public_key)) + \"<br>Public Key: <br>\" + str(get_wallet_from_db(consts.MINER_SERVER_PORT)[1]) ) return", "# Transactions for all active chains @app.post(\"/newtransaction\") def received_new_transaction(): log_ip(request,", "def error_handle(url=\"url\", error=\"404\"): log_ip(request, inspect.stack()[0][3]) return template(\"error.html\") if __name__ ==", "while True: if not miner.is_mining() and not consts.NO_MINING: miner.start_mining(BLOCKCHAIN.mempool, BLOCKCHAIN.active_chain,", "= [] start = BLOCKCHAIN.active_chain.length - 10 if BLOCKCHAIN.active_chain.length >", "try again later.\" # message_type = \"danger\" # return template(\"wallet.html\",", "Mempool\") return False, \"Not Valid Transaction\" else: return True, \"Transaction", "= \"No Message\" if \"message\" in data: message = data[\"message\"]", "else: logger.info(\"Wallet: Transaction Sent, Wait for it to be Mined\")", "+ str(consts.MINER_SERVER_PORT) + \"/newtransaction\", data=compress(transaction.to_json()), timeout=(5, 1), ) if r.status_code", "you eat it, you'll die?''' actual_answer = \"nothing\" @app.get(\"/\") def", "+= \"</table>\" return str(html) @app.get(\"/chains\") def visualize_chain(): log_ip(request, inspect.stack()[0][3]) data", "entry in PEER_LIST: ip = entry[\"ip\"] port = entry[\"port\"] if", "html += \"<td>\" + str(len(block.transactions)) + \"</td></tr>\" # for i,", "int(data[\"bounty\"]) receiver_public_key = data[\"receiver_public_key\"] sender_public_key = data[\"sender_public_key\"] message = \"No", "break return # Periodically sync with all the peers def", "request.remote_addr peer[\"time\"] = time.time() peer[\"version\"] 
= request.forms.get(\"version\") peer[\"blockheight\"] = request.forms.get(\"blockheight\")", "# publickey = \"\" # if len(receiver) < 10: #", "except Exception as e: logger.debug(\"Server: Greet Error: \" + str(e))", "Transaction: Invalid tx received: \" + str(e)) return False, \"Not", "than the devil, the poor have it, the rich need", ") if r.status_code == 400: response.status = 400 logger.error(\"Wallet: Could", "+ \"</th>\" html += \"<td>\" + str(hdr.prev_block_hash) + \"</td></tr>\" html", "request.environ.get(\"HTTP_X_FORWARDED_FOR\") or request.environ.get(\"REMOTE_ADDR\") def log_ip(request, fname): client_ip = get_ip(request) iplogger.info(f\"{client_ip}", "Could not Send Transaction. Invalid Transaction\") else: logger.info(\"Wallet: Transaction Sent,", "Valid Block, Adding to Chain\") logger.debug(\"Server: Sending new block to", "\")</td></tr>\" ) # get block block = Block.from_json(get_block_from_db(dhash(hdr))).object() html +=", "peer[\"ip\"] and port == peer[\"port\"]: ADD_ENTRY = False if ADD_ENTRY:", "+= \"<tr><th>\" + \"Block Hash\" + \"</th>\" html += \"<td>\"", "return \"Cannot send money to youself\" else: transaction = create_transaction([receiver_public_key],", "= True for entry in PEER_LIST: ip = entry[\"ip\"] port", "len(receiver) < 10: # wallet = get_wallet_from_db(receiver) # if wallet", "= \"danger\" return template(\"index.html\", message=message, message_type=message_type, question=question) @app.get('/about') def about():", "the new data received from the peer. if data.get(\"blockheight\", None):", "get_wallet_from_db(receiver) # if wallet is not None: # publickey =", "be mined!\" # else: # message = \"Some Error Occured,", "# Check if block already exists if get_block_from_db(dhash(block.header)): logger.info(\"Server: Received", "have Block Height\") return False return True except Exception as", "Transaction. 
Try Again.\" + str(e)) return False def create_transaction(receiver_public_keys: List[str],", "if LINE_PROFILING: from wsgi_lineprof.middleware import LineProfilerMiddleware with open(\"lineprof\" + str(consts.MINER_SERVER_PORT)", "return False def create_transaction(receiver_public_keys: List[str], amounts: List[int], sender_public_key, message=\"\") ->", "400 return \"Insufficient Balance to make Transaction, need more \"", "\"<br>Public Key: <br>\" + str(get_wallet_from_db(consts.MINER_SERVER_PORT)[1]) ) return s def render_block_header(hdr):", "str(check_balance(MY_WALLET.public_key)) @app.route(\"/static/<filename:path>\", name=\"static\") def serve_static(filename): log_ip(request, inspect.stack()[0][3]) return static_file(filename, root=\"static\")", "if not miner.is_mining() and not consts.NO_MINING: miner.start_mining(BLOCKCHAIN.mempool, BLOCKCHAIN.active_chain, MY_WALLET) time.sleep(consts.MINING_INTERVAL_THRESHOLD", "+= \"<tr><th>\" + \"Timestamp\" + \"</th>\" html += ( \"<td>\"", "range(0, number): # receiver = str(request.forms.get(\"port\" + str(i))) # bounty", "consts from core import Block, BlockChain, SingleOutput, Transaction, TxIn, TxOut,", "balance=balance, pubkey=pubkey) @app.post(\"/mining\") def mining(): log_ip(request, inspect.stack()[0][3]) password = request.body.read().decode(\"utf-8\")", "try: r = requests.post(consts.SEED_SERVER_URL, data={\"port\": consts.MINER_SERVER_PORT}) peer_list = json.loads(r.text) return", "if current_balance < total_amount: logger.debug(\"Insuficient balance\") elif MY_WALLET.public_key in receiver_public_keys:", "= BlockChain() PEER_LIST: List[Dict[str, Any]] = [] MY_WALLET = Wallet()", "wait for it to be mined!\" # else: # message", "d[\"time\"] = hdr.timestamp d[\"data\"] = render_block_header(hdr) headers.append(d) data.append(headers) return template(\"chains.html\",", "uuid_json = file.read() valid_ids = set(json.loads(uuid_json)) @app.post(\"/\") def puzzle(): log_ip(request,", "prev = 0 hdr_list = 
list(reversed(BLOCKCHAIN.active_chain.header_list)) indexes = [i for", "# html += \"<tr><th>Transaction \" + str(i) + \"</th><td>\" +", "that is not added except Exception as e: logger.error(\"Server: New", "s def render_block_header(hdr): html = \"<table>\" html += \"<tr><th>\" +", "Balance to make Transaction, need more \" + str(bounty -", "# Broadcast block to other peers send_to_all_peers(\"/newtransaction\", request_data) else: logger.debug(\"The", "html += ( \"<td>\" + str(datetime.fromtimestamp(hdr.timestamp).strftime(\"%d-%m-%Y %H:%M:%S\")) + \" (\"", "BLOCKCHAIN.add_block(genesis_block) else: # Restore Blockchain logger.info(\"FullNode: Restoring Existing Chain\") header_list", "to other peers send_to_all_peers(\"/newblock\", request_data) # TODO Make new chain/", "logger.debug(\"Invalid Receiver Public Key\") response.status = 400 return \"Invalid Receiver", "Transaction\" return True, \"Done\" # Transactions for all active chains", "ADD_ENTRY: PEER_LIST.append(peer) logger.debug(\"Server: Greet, A new peer joined, Adding to", "def get_ip(request): return request.environ.get(\"HTTP_X_FORWARDED_FOR\") or request.environ.get(\"REMOTE_ADDR\") def log_ip(request, fname): client_ip", "\"<tr><th>\" + \"Height\" + \"</th>\" html += \"<td>\" + str(hdr.height)", "Unique ID!\" message_type = \"danger\" return template(\"index.html\", message=message, message_type=message_type, question=question)", "html += \"<tr><th>\" + \"Block Hash\" + \"</th>\" html +=", "other peers send_to_all_peers(\"/newblock\", request_data) # TODO Make new chain/ orphan", "message = \"\" # message_type = \"info\" # try: #", "= False BLOCKCHAIN = BlockChain() PEER_LIST: List[Dict[str, Any]] = []", "if len(receiver_public_key) < consts.PUBLIC_KEY_LENGTH: logger.debug(\"Invalid Receiver Public Key\") response.status =", "PEER_LIST: List[Dict[str, Any]] = [] MY_WALLET = Wallet() miner =", "def send_bounty(receiver_public_keys: List[str], amounts: List[int]): current_balance = 
check_balance(MY_WALLET.public_key) for key", "not BLOCKCHAIN.add_block(block): logger.error(\"Sync: Block received is invalid, Cannot Sync\") break", "data), daemon=True).start() def start_mining_thread(): time.sleep(5) Thread(target=mining_thread_task, name=\"Miner\", daemon=True).start() def fetch_peer_list()", "logger.error(e) # message = \"Some Error Occured. Please try again", "str(len(block.transactions)) + \"</td></tr>\" # for i, transaction in enumerate(block.transactions): #", "data={\"myheight\": fork_height}) hash_list = json.loads(decompress(r.text.encode())) for hhash in hash_list: block", "\"http://\" + str(peer[\"ip\"]) + \":\" + str(peer[\"port\"]) def greet_peer(peer: Dict[str,", "if BLOCKCHAIN.add_block(block): logger.info(\"Server: Received a New Valid Block, Adding to", "Dict, List from datetime import datetime import hashlib import inspect", "message = process_new_transaction(request.body.read()) if result: response.status = 200 else: response.status", "e: logger.debug(\"Server: Greet Error: \" + str(e)) pass data =", "functools import lru_cache from multiprocessing import Pool, Process from threading", "tx not in BLOCKCHAIN.mempool: if BLOCKCHAIN.active_chain.is_transaction_valid(tx): logger.debug(\"Valid Transaction received, Adding", "html += \"<tr><th>\" + \"Transactions\" + \"</th>\" html += \"<td>\"", "logger.debug(transaction) logger.info(\"Wallet: Attempting to Send Transaction\") try: r = requests.post(", "try: if consts.NEW_BLOCKCHAIN: logger.info(\"FullNode: Starting New Chain from Genesis\") BLOCKCHAIN.add_block(genesis_block)", "template(\"block.html\", block=block) @app.route(\"/transaction/<blockhash>/<txhash>\", name=\"transaction\") def transaction(blockhash, txhash): log_ip(request, inspect.stack()[0][3]) try:", "template(\"index.html\", message=message, message_type=message_type, question=question) @app.get('/about') def about(): return template(\"about.html\") #", "pubkey=MY_WALLET.public_key) # @app.post(\"/wallet\") # def 
wallet_post(): # log_ip(request, inspect.stack()[0][3]) #", "BLOCKCHAIN.active_chain.header_list[-100:] for hdr in hdr_list: d = {} d[\"hash\"] =", "+ \"Height\" + \"</th>\" html += \"<td>\" + str(hdr.height) +", "peer\" + str(e)) return False def receive_block_from_peer(peer: Dict[str, Any], header_hash)", "Seed\") return [] def get_peer_url(peer: Dict[str, Any]) -> str: return", "\"No. of Blocks: \" + str(BLOCKCHAIN.active_chain.length) + \"<br>\" + dhash(BLOCKCHAIN.active_chain.header_list[-1])", "[] def get_peer_url(peer: Dict[str, Any]) -> str: return \"http://\" +", "e: logger.error(\"Server: New Block: invalid block received \" + str(e))", "8) if i < len(hdr_list)] blocks = [Block.from_json(get_block_from_db(dhash(hdr_list[i]))).object() for i", "\"Not Valid Transaction\" return True, \"Done\" # Transactions for all", "request.forms.get(\"blockheight\") ADD_ENTRY = True for entry in PEER_LIST: ip =", "sum(amounts) i = 0 for so, utxo_list in BLOCKCHAIN.active_chain.utxo.utxo.items(): tx_out", "fetch_peer_list() -> List[Dict[str, Any]]: try: r = requests.post(consts.SEED_SERVER_URL, data={\"port\": consts.MINER_SERVER_PORT})", "he seems to have height {max_peer['blockheight']}\") sync(max_peer) except Exception as", "str(i))) # publickey = \"\" # if len(receiver) < 10:", "vin=vin, vout=vout, message=message) return tx def get_ip(request): return request.environ.get(\"HTTP_X_FORWARDED_FOR\") or", "in BLOCKCHAIN.active_chain.utxo.utxo.items(): tx_out = utxo_list[0] if current_amount >= total_amount: break", "message=message) data = {} data[\"send_this\"] = transaction.to_json() transaction.vin = {}", "+ str(e)) return False, \"Not Valid Transaction\" return True, \"Done\"", "address in enumerate(receiver_public_keys): vout[i] = TxOut(amount=amounts[i], address=address) change = (current_amount", "\"</th>\" html += \"<td>\" + dhash(hdr) + \"</td></tr>\" html +=", "return template(\"transaction.html\", tx=tx, block=block) @app.route(\"/address/<pubkey:re:.+>\", 
name=\"account\") def account(pubkey): log_ip(request, inspect.stack()[0][3])", "\"</th>\" html += \"<td>\" + str(len(block.transactions)) + \"</td></tr>\" # for", "LINE_PROFILING: from wsgi_lineprof.middleware import LineProfilerMiddleware with open(\"lineprof\" + str(consts.MINER_SERVER_PORT) +", "Rewarding \" + pubkey) message = \"Well Done!\" if check_balance(MY_WALLET.public_key)", "= receive_block_from_peer(max_peer, hhash) if not BLOCKCHAIN.add_block(block): logger.error(\"Sync: Block received is", "def request_task(peers, url, data): for peer in peers: try: requests.post(get_peer_url(peer)", "\"You have Insufficient Balance!\" # message_type = \"warning\" # return", "message=\"\", message_type=\"\", pubkey=MY_WALLET.public_key) # @app.post(\"/wallet\") # def wallet_post(): # log_ip(request,", "return template(\"block.html\", block=block) @app.route(\"/transaction/<blockhash>/<txhash>\", name=\"transaction\") def transaction(blockhash, txhash): log_ip(request, inspect.stack()[0][3])", "exists if get_block_from_db(dhash(block.header)): logger.info(\"Server: Received block exists, doing nothing\") return", "return message question = '''What is greater than God, more", "inspect.stack()[0][3]) return template(\"error.html\") if __name__ == \"__main__\": try: if consts.NEW_BLOCKCHAIN:", "str(current_balance) @app.post(\"/makeTransaction\") def make_transaction(): log_ip(request, inspect.stack()[0][3]) data = request.json bounty", "Key\") response.status = 400 return \"Invalid Receiver Public Key\" current_balance", "PEER_LIST: ip = entry[\"ip\"] port = entry[\"port\"] if ip ==", "God, more evil than the devil, the poor have it,", "send_transaction(): log_ip(request, inspect.stack()[0][3]) data = request.json transaction = Transaction.from_json(data[\"transaction\"]).object() sig", "> 0: vout[i + 1] = TxOut(amount=change, address=sender_public_key) tx =", "block=block) @app.route(\"/address/<pubkey:re:.+>\", name=\"account\") def account(pubkey): log_ip(request, 
inspect.stack()[0][3]) balance = check_balance(pubkey)", "checkblock(): log_ip(request, inspect.stack()[0][3]) headerhash = request.forms.get(\"headerhash\") if get_block_from_db(headerhash): return json.dumps(True)", "waitress from bottle import BaseTemplate, Bottle, request, response, static_file, template,", "= (current_amount - total_amount) if change > 0: vout[i +", "hhash): r = requests.post(get_peer_url(peer) + \"/checkblock\", data={\"headerhash\": hhash}) result =", "-> str: return \"http://\" + str(peer[\"ip\"]) + \":\" + str(peer[\"port\"])", "0) if prev < 0: prev = 0 hdr_list =", "template(\"error.html\") return template(\"transaction.html\", tx=tx, block=block) @app.route(\"/address/<pubkey:re:.+>\", name=\"account\") def account(pubkey): log_ip(request,", "is being sent, please wait for it to be mined!\"", "url = get_peer_url(peer) data = {\"port\": consts.MINER_SERVER_PORT, \"version\": consts.MINER_VERSION, \"blockheight\":", "have Insufficient Balance!\" # message_type = \"warning\" # return template(\"wallet.html\",", "timeout=(5, 1), ) if r.status_code == 400: response.status = 400", "+ str(hdr.timestamp) + \")</td></tr>\" ) # get block block =", "== peer[\"port\"]: ADD_ENTRY = False if ADD_ENTRY: PEER_LIST.append(peer) logger.debug(\"Server: Greet,", "check_balance(sender_public_key) if current_balance < bounty: logger.debug(\"Insufficient Balance to make Transaction\")", "return compress(json.dumps(hash_list)).decode() @lru_cache(maxsize=16) def process_new_block(request_data: bytes) -> str: global BLOCKCHAIN", "# wallet = get_wallet_from_db(receiver) # if wallet is not None:", "logger.debug(\"Main: Peer data does not have Block Height\") return False", "Block, BlockChain, SingleOutput, Transaction, TxIn, TxOut, genesis_block from authority import", "logger.error(\"Wallet: Could not Send Transaction. 
Invalid transaction\") return \"Try Again\"", "@app.get(\"/wallet\") # def wallet(): # log_ip(request, inspect.stack()[0][3]) # return template(\"wallet.html\",", "with the new data received from the peer. if data.get(\"blockheight\",", "from utils.logger import logger, iplogger from utils.storage import get_block_from_db, get_wallet_from_db,", "< len(hdr_list)] blocks = [Block.from_json(get_block_from_db(dhash(hdr_list[i]))).object() for i in indexes] transactions", "transaction\") return \"Try Again\" except Exception as e: response.status =", "# message = \"Error with the Receiver Port ID, try", "Block: r = requests.post(get_peer_url(peer) + \"/getblock\", data={\"headerhash\": header_hash}) return Block.from_json(decompress(r.text)).object()", "amounts = [] # total_amount = 0 # for i", "{\"version\": consts.MINER_VERSION, \"blockheight\": BLOCKCHAIN.active_chain.length} response.content_type = \"application/json\" return json.dumps(data) @lru_cache(maxsize=128)", "\"</th>\" html += ( \"<td>\" + str(datetime.fromtimestamp(hdr.timestamp).strftime(\"%d-%m-%Y %H:%M:%S\")) + \"", "= requests.post(url + \"/greetpeer\", data=data) data = json.loads(r.text) # Update", "= Transaction.from_json(data[\"transaction\"]).object() sig = data[\"signature\"] transaction.add_sign(sig) logger.debug(transaction) logger.info(\"Wallet: Attempting to", "the peer list with the new data received from the", "hash_list = [] for i in range(peer_height, BLOCKCHAIN.active_chain.length): hash_list.append(dhash(BLOCKCHAIN.active_chain.header_list[i])) return", "Exception as e: logger.error(\"Could not connect to DNS Seed\") return", "mining_thread_task(): while True: if not miner.is_mining() and not consts.NO_MINING: miner.start_mining(BLOCKCHAIN.mempool,", "BlockChain, SingleOutput, Transaction, TxIn, TxOut, genesis_block from authority import Authority", "Try Again.\" + str(e)) return \"Try Again\" else: logger.info(\"Wallet: Transaction", "= TxOut(amount=amounts[i], address=address) change 
= (current_amount - total_amount) if change", "= {} vin = {} current_amount = 0 total_amount =", "iplogger.info(f\"{client_ip} : Called function {fname}\") @app.post(\"/checkBalance\") def checkingbalance(): log_ip(request, inspect.stack()[0][3])", "\"\" # message_type = \"info\" # try: # receivers =", "+ \"/getblock\", data={\"headerhash\": header_hash}) return Block.from_json(decompress(r.text)).object() def check_block_with_peer(peer, hhash): r", "send to myself\") else: transaction = create_transaction(receiver_public_keys, amounts, MY_WALLET.public_key, message=\"Authority:", "New Valid Block, Adding to Chain\") logger.debug(\"Server: Sending new block", "str(transaction.is_coinbase) + \", fees: \" + str(transaction.fees) # html +=", "sender_public_key: current_amount += tx_out.amount vin[i] = TxIn(payout=SingleOutput.from_json(so), pub_key=sender_public_key, sig=\"\") i", "= BLOCKCHAIN.active_chain.header_list if len(hdr_list) > 200: hdr_list = BLOCKCHAIN.active_chain.header_list[:100] +", "e: logger.error(\"Sync: Error: \" + str(e)) Timer(consts.MINING_INTERVAL_THRESHOLD * 2, sync_with_peers).start()", "time.sleep(consts.MINING_INTERVAL_THRESHOLD // 2) def send_to_all_peers(url, data): def request_task(peers, url, data):", "return template(\"wallet.html\", message=\"\", message_type=\"\", pubkey=MY_WALLET.public_key) # @app.post(\"/wallet\") # def wallet_post():", "b\"<PASSWORD>\", 200000) if hashed == dk: consts.NO_MINING = not consts.NO_MINING", "stream=f, async_stream=True) waitress.serve(app, host=\"0.0.0.0\", threads=16, port=consts.MINER_SERVER_PORT) else: waitress.serve(app, host=\"0.0.0.0\", threads=16,", "[] # amounts = [] # total_amount = 0 #", "Mined\") return \"Done\" @app.post(\"/transactionHistory\") def transaction_history(): log_ip(request, inspect.stack()[0][3]) data =", "as e: response.status = 400 logger.error(\"Wallet: Could not Send Transaction.", "peer joined, Adding to List\") except Exception as e: logger.debug(\"Server:", "other 
peers send_to_all_peers(\"/newtransaction\", request_data) else: logger.debug(\"The transation is not valid,", "height {max_peer['blockheight']}\") sync(max_peer) except Exception as e: logger.error(\"Sync: Error: \"", "Exception as e: logger.debug(\"Transaction/bhash/tx: \" + str(e)) return template(\"error.html\") return", "bounty = int(data[\"bounty\"]) receiver_public_key = data[\"receiver_public_key\"] sender_public_key = data[\"sender_public_key\"] message", "= Block.from_json(get_block_from_db(dhash(hdr))).object() html += \"<tr><th>\" + \"Transactions\" + \"</th>\" html", "= request.forms.get(\"version\") peer[\"blockheight\"] = request.forms.get(\"blockheight\") ADD_ENTRY = True for entry", "it, the rich need it, and if you eat it,", "= TxOut(amount=change, address=sender_public_key) tx = Transaction(version=consts.MINER_VERSION, locktime=0, timestamp=int(time.time()), vin=vin, vout=vout,", "to make Transaction, need more \" + str(bounty - current_balance)", "message = \"Invalid Unique ID!\" message_type = \"danger\" return template(\"index.html\",", "= \"coinbase: \" + str(transaction.is_coinbase) + \", fees: \" +", "= \"warning\" # return template(\"wallet.html\", message=message, message_type=message_type, pubkey=MY_WALLET.public_key) # except", "= max(PEER_LIST, key=lambda k: k[\"blockheight\"]) logger.debug(f\"Sync: Syncing with {get_peer_url(max_peer)}, he", "len(receiver_public_key) < consts.PUBLIC_KEY_LENGTH: logger.debug(\"Invalid Receiver Public Key\") response.status = 400", "args=(PEER_LIST, url, data), daemon=True).start() def start_mining_thread(): time.sleep(5) Thread(target=mining_thread_task, name=\"Miner\", daemon=True).start()", "Block Received\") return \"Invalid Block\" @app.post(\"/newblock\") def received_new_block(): log_ip(request, inspect.stack()[0][3])", "data[\"sender_public_key\"] message = \"No Message\" if \"message\" in data: message", "+= \"<td>\" + str(len(block.transactions)) + \"</td></tr>\" # for i, transaction", "DNS 
Seed\") return [] def get_peer_url(peer: Dict[str, Any]) -> str:", "log_ip(request, inspect.stack()[0][3]) data = request.json transaction = Transaction.from_json(data[\"transaction\"]).object() sig =", "a POST request to the peer r = requests.post(url +", "str(BLOCKCHAIN.active_chain.length) + \"<br>\" + dhash(BLOCKCHAIN.active_chain.header_list[-1]) + \"<br>\" + \"Balance \"", "it to be Mined\") return True except Exception as e:", "amounts: List[int], sender_public_key, message=\"\") -> Transaction: vout = {} vin", "A new peer joined, Adding to List\") except Exception as", "\" + str(transaction.fees) # html += \"<tr><th>Transaction \" + str(i)", "be Mined\") return True except Exception as e: logger.error(\"Wallet: Could", "Any]]: try: r = requests.post(consts.SEED_SERVER_URL, data={\"port\": consts.MINER_SERVER_PORT}) peer_list = json.loads(r.text)", "miner.is_mining() and not consts.NO_MINING: miner.start_mining(BLOCKCHAIN.mempool, BLOCKCHAIN.active_chain, MY_WALLET) time.sleep(consts.MINING_INTERVAL_THRESHOLD // 2)", "logger.debug(\"Transaction/bhash/tx: \" + str(e)) return template(\"error.html\") return template(\"transaction.html\", tx=tx, block=block)", "request.forms.get(\"headerhash\") return cached_get_block(hhash) @app.post(\"/checkblock\") def checkblock(): log_ip(request, inspect.stack()[0][3]) headerhash =", "Any]) -> str: return \"http://\" + str(peer[\"ip\"]) + \":\" +", "sync(max_peer): fork_height = BLOCKCHAIN.active_chain.length r = requests.post(get_peer_url(max_peer) + \"/getblockhashes\", data={\"myheight\":", "return template(\"wallet.html\", message=message, message_type=message_type, pubkey=MY_WALLET.public_key) # else: # publickey =", "from multiprocessing import Pool, Process from threading import Thread, Timer", "def sync_with_peers(): try: PEER_LIST = fetch_peer_list() new_peer_list = [] for", "receiver_public_key = data[\"receiver_public_key\"] sender_public_key = data[\"sender_public_key\"] message = \"No Message\"", "= 
sum(amounts) if current_balance < total_amount: logger.debug(\"Insuficient balance\") elif MY_WALLET.public_key", "active chains @app.post(\"/newtransaction\") def received_new_transaction(): log_ip(request, inspect.stack()[0][3]) result, message =", "BLOCKCHAIN.active_chain, MY_WALLET) time.sleep(consts.MINING_INTERVAL_THRESHOLD // 2) def send_to_all_peers(url, data): def request_task(peers,", "hash_list: block = receive_block_from_peer(max_peer, hhash) if not BLOCKCHAIN.add_block(block): logger.error(\"Sync: Block", "indexes = [i for i in range(prev * 8, (prev", "+ \" (\" + str(hdr.timestamp) + \")</td></tr>\" ) # get", "str(e)) return False, \"Not Valid Transaction\" return True, \"Done\" #", "\"nothing\" @app.get(\"/\") def home(): log_ip(request, inspect.stack()[0][3]) message = \"\" message_type", "result = send_bounty(receivers, amounts) # if result: # message =", "name=\"static\") def serve_static(filename): log_ip(request, inspect.stack()[0][3]) return static_file(filename, root=\"static\") @app.get(\"/favicon.ico\") def", "wallet[1] # else: # message = \"Error with the Receiver", "+ str(i))) # bounty = int(request.forms.get(\"amount\" + str(i))) # publickey", "chains @app.post(\"/newtransaction\") def received_new_transaction(): log_ip(request, inspect.stack()[0][3]) result, message = process_new_transaction(request.body.read())", "\"</th>\" html += \"<td>\" + str(hdr.height) + \"</td></tr>\" html +=", "response, static_file, template, error import utils.constants as consts from core", "for peer in peers: try: requests.post(get_peer_url(peer) + url, data=data, timeout=(5,", "\"Invalid Receiver Public Key\" current_balance = check_balance(sender_public_key) if current_balance <", "400: response.status = 400 logger.error(\"Wallet: Could not Send Transaction. 
Invalid", "@error(505) def error_handle(url=\"url\", error=\"404\"): log_ip(request, inspect.stack()[0][3]) return template(\"error.html\") if __name__", "in enumerate(receiver_public_keys): vout[i] = TxOut(amount=amounts[i], address=address) change = (current_amount -", "money to youself\" else: transaction = create_transaction([receiver_public_key], [bounty], sender_public_key, message=message)", "request, response, static_file, template, error import utils.constants as consts from", "i in range(0, number): # receiver = str(request.forms.get(\"port\" + str(i)))", "+ \"NOT MINING\" if consts.NO_MINING else \"MINING\" else: return \"Password", "Wait for it to be Mined\") return \"Done\" @app.post(\"/transactionHistory\") def", "{} peer[\"port\"] = request.forms.get(\"port\") peer[\"ip\"] = request.remote_addr peer[\"time\"] = time.time()", "Syncing with {get_peer_url(max_peer)}, he seems to have height {max_peer['blockheight']}\") sync(max_peer)", "json.dumps(data) @app.post(\"/sendTransaction\") def send_transaction(): log_ip(request, inspect.stack()[0][3]) data = request.json transaction", "List[int], sender_public_key, message=\"\") -> Transaction: vout = {} vin =", "List from datetime import datetime import hashlib import inspect import", "in range(0, number): # receiver = str(request.forms.get(\"port\" + str(i))) #", "else: # Restore Blockchain logger.info(\"FullNode: Restoring Existing Chain\") header_list =", "= BLOCKCHAIN.active_chain.transaction_history.get(pubkey) return template(\"account.html\", tx_hist=tx_hist, balance=balance, pubkey=pubkey) @app.post(\"/mining\") def mining():", "decompress(request_data) if block_json: try: block = Block.from_json(block_json).object() # Check if", "+ str(hdr.height) + \"</td></tr>\" html += \"<tr><th>\" + \"Block Hash\"", "return str(html) @app.get(\"/chains\") def visualize_chain(): log_ip(request, inspect.stack()[0][3]) data = []", "= TxIn(payout=SingleOutput.from_json(so), pub_key=sender_public_key, sig=\"\") i += 
1 for i, address", "new chain/ orphan set for Block that is not added", "Pool, Process from threading import Thread, Timer from typing import", "get_favicon(): log_ip(request, inspect.stack()[0][3]) return static_file(\"favicon.ico\", root=\"static\") @app.get(\"/info\") def sendinfo(): log_ip(request,", "Length\") return False total_amount = sum(amounts) if current_balance < total_amount:", "multiprocessing import Pool, Process from threading import Thread, Timer from", "result: response.status = 200 else: response.status = 400 return message", "message = \"No Message\" if \"message\" in data: message =", "start = BLOCKCHAIN.active_chain.length - 10 if BLOCKCHAIN.active_chain.length > 10 else", "make Transaction, need more \" + str(bounty - current_balance) elif", "Adding to List\") except Exception as e: logger.debug(\"Server: Greet Error:", "return False total_amount = sum(amounts) if current_balance < total_amount: logger.debug(\"Insuficient", "else: # message = \"Error with the Receiver Port ID,", "current_amount = 0 total_amount = sum(amounts) i = 0 for", "consts.PUBLIC_KEY_LENGTH: logger.debug(\"Invalid Public Key Length\") return False total_amount = sum(amounts)", "List[int]): current_balance = check_balance(MY_WALLET.public_key) for key in receiver_public_keys: if len(key)", "inspect.stack()[0][3]) data = request.json public_key = data[\"public_key\"] logger.debug(public_key) current_balance =", "= check_balance(public_key) return str(current_balance) @app.post(\"/makeTransaction\") def make_transaction(): log_ip(request, inspect.stack()[0][3]) data", "\"/newtransaction\", data=compress(transaction.to_json()), timeout=(5, 1), ) if r.status_code == 400: response.status", "in peers: try: requests.post(get_peer_url(peer) + url, data=data, timeout=(5, 1)) except", "Error: \" + str(e)) Timer(consts.MINING_INTERVAL_THRESHOLD * 2, sync_with_peers).start() def check_balance(pub_key:", "log_ip(request, inspect.stack()[0][3]) return template(\"error.html\") if 
__name__ == \"__main__\": try: if", "in receiver_public_keys: if len(key) < consts.PUBLIC_KEY_LENGTH: logger.debug(\"Invalid Public Key Length\")", "+= tx_out.amount vin[i] = TxIn(payout=SingleOutput.from_json(so), pub_key=sender_public_key, sig=\"\") i += 1", "start_mining_thread(): time.sleep(5) Thread(target=mining_thread_task, name=\"Miner\", daemon=True).start() def fetch_peer_list() -> List[Dict[str, Any]]:", "= Block.from_json(get_block_from_db(blockhash)).object() tx = None for t in block.transactions: if", "+ str(i) + \"</th><td>\" + str(s) + \"</td></tr>\" html +=", "+ \"/checkblock\", data={\"headerhash\": hhash}) result = json.loads(r.text) if result: return", "import Wallet app = Bottle() BaseTemplate.defaults[\"get_url\"] = app.get_url LINE_PROFILING =", "t.start() return \"Block Received\" logger.error(\"Server: Invalid Block Received\") return \"Invalid", "0 # for i in range(0, number): # receiver =", "html += \"<tr><th>\" + \"Height\" + \"</th>\" html += \"<td>\"", "to Chain\") logger.debug(\"Server: Sending new block to peers\") # Broadcast", "seems to have height {max_peer['blockheight']}\") sync(max_peer) except Exception as e:", "def send_block_hashes(): log_ip(request, inspect.stack()[0][3]) peer_height = int(request.forms.get(\"myheight\")) hash_list = []", "== \"__main__\": try: if consts.NEW_BLOCKCHAIN: logger.info(\"FullNode: Starting New Chain from", "else: message = \"Some Error Occured, Contact Admin.\" message_type =", "\"\" message_type = \"info\" uuid = request.forms.get(\"uuid\") pubkey = request.forms.get(\"pubkey\")", ">= total_amount: break if tx_out.address == sender_public_key: current_amount += tx_out.amount", "Block that is not added except Exception as e: logger.error(\"Server:", "\"</td></tr>\" html += \"<tr><th>\" + \"Timestamp\" + \"</th>\" html +=", "Invalid Block Received\") return \"Invalid Block\" @app.post(\"/newblock\") def received_new_block(): log_ip(request,", "= None for t in block.transactions: if t.hash() == 
txhash:", "Exception as e: logger.debug(\"Main: Could not greet peer\" + str(e))", "-> str: global BLOCKCHAIN block_json = decompress(request_data) if block_json: try:", "d = {} d[\"hash\"] = dhash(hdr)[-5:] d[\"time\"] = hdr.timestamp d[\"data\"]", "= Timer(1, miner.stop_mining) t.start() return \"Block Received\" logger.error(\"Server: Invalid Block", "requests.post(get_peer_url(peer) + \"/getblock\", data={\"headerhash\": header_hash}) return Block.from_json(decompress(r.text)).object() def check_block_with_peer(peer, hhash):", "LineProfilerMiddleware(app, stream=f, async_stream=True) waitress.serve(app, host=\"0.0.0.0\", threads=16, port=consts.MINER_SERVER_PORT) else: waitress.serve(app, host=\"0.0.0.0\",", "\"Not Valid Transaction\" else: return True, \"Transaction Already received\" except", "poor have it, the rich need it, and if you", "try: peer = {} peer[\"port\"] = request.forms.get(\"port\") peer[\"ip\"] = request.remote_addr", "log_ip(request, inspect.stack()[0][3]) data = request.json public_key = data[\"public_key\"] tx_hist =", "block to other peers send_to_all_peers(\"/newblock\", request_data) # TODO Make new", "= send_bounty([pubkey], amounts) if result: message = \"Your reward is", "BLOCKCHAIN.add_block(block): logger.error(\"Sync: Block received is invalid, Cannot Sync\") break return", "def checkblock(): log_ip(request, inspect.stack()[0][3]) headerhash = request.forms.get(\"headerhash\") if get_block_from_db(headerhash): return", "hhash = request.forms.get(\"headerhash\") return cached_get_block(hhash) @app.post(\"/checkblock\") def checkblock(): log_ip(request, inspect.stack()[0][3])", "= False if ADD_ENTRY: PEER_LIST.append(peer) logger.debug(\"Server: Greet, A new peer", "logger.debug(\"Invalid Public Key Length\") return False total_amount = sum(amounts) if", "Greet, A new peer joined, Adding to List\") except Exception", "\"<table>\" html += \"<tr><th>\" + \"Height\" + \"</th>\" html +=", "data.append(headers) return 
template(\"chains.html\", data=data, start=start) @app.get(\"/explorer\") def explorer(): log_ip(request, inspect.stack()[0][3])", "< consts.PUBLIC_KEY_LENGTH: logger.debug(\"Invalid Public Key Length\") return False total_amount =", "import Block, BlockChain, SingleOutput, Transaction, TxIn, TxOut, genesis_block from authority", "t in block.transactions: if t.hash() == txhash: tx = t", "peer = {} peer[\"port\"] = request.forms.get(\"port\") peer[\"ip\"] = request.remote_addr peer[\"time\"]", "puzzle(): log_ip(request, inspect.stack()[0][3]) message = \"\" message_type = \"info\" uuid", "'''What is greater than God, more evil than the devil,", "Block.from_json(get_block_from_db(dhash(hdr))).object() html += \"<tr><th>\" + \"Transactions\" + \"</th>\" html +=", "datetime import hashlib import inspect import requests import waitress from", "question=question) @app.get('/about') def about(): return template(\"about.html\") # @app.get(\"/wallet\") # def", "# message_type = \"warning\" # return template(\"wallet.html\", message=message, message_type=message_type, pubkey=MY_WALLET.public_key)", "prev = int(request.query.prev or 0) if prev < 0: prev", "200000) if hashed == dk: consts.NO_MINING = not consts.NO_MINING logger.info(\"Mining:", "try: requests.post(get_peer_url(peer) + url, data=data, timeout=(5, 1)) except Exception as", "return \"Try Again\" except Exception as e: response.status = 400", "hashed = b\"\\x11`\\x1e\\xdd\\xd1\\xb6\\x80\\x0f\\xd4\\xb0t\\x90\\x9b\\xd3]\\xa0\\xcc\\x1d\\x04$\\x8b\\xb1\\x19J\\xaa!T5-\\x9eJ\\xfcI5\\xc0\\xbb\\xf5\\xb1\\x9d\\xba\\xbef@\\xa1)\\xcf\\x9b]c(R\\x91\\x0e\\x9dMM\\xb6\\x94\\xa9\\xe2\\x94il\\x15\" dk = hashlib.pbkdf2_hmac(\"sha512\", password.encode(\"utf-8\"), b\"<PASSWORD>\", 200000) if", "FOR NON EXISTENT BLOCK\") return \"Invalid Hash\" @app.post(\"/getblock\") def getblock():", "hdr_list = BLOCKCHAIN.active_chain.header_list[:100] + BLOCKCHAIN.active_chain.header_list[-100:] for hdr in hdr_list: d", "8, (prev + 1) * 8) if i 
< len(hdr_list)]", "# else: # message = \"You have Insufficient Balance!\" #", "in hash_list: block = receive_block_from_peer(max_peer, hhash) if not BLOCKCHAIN.add_block(block): logger.error(\"Sync:", "tx = Transaction.from_json(transaction_json).object() # Add transaction to Mempool if tx", "\"Error with the Receiver Port ID, try again.\" # message_type", "peer[\"port\"] = request.forms.get(\"port\") peer[\"ip\"] = request.remote_addr peer[\"time\"] = time.time() peer[\"version\"]", "try: block = Block.from_json(get_block_from_db(blockhash)).object() except Exception as e: logger.debug(\"BLOCK/blockhash: \"", "return json.dumps(tx_hist) @app.post(\"/greetpeer\") def greet_peer_f(): log_ip(request, inspect.stack()[0][3]) try: peer =", "str(hdr.timestamp) + \")</td></tr>\" ) # get block block = Block.from_json(get_block_from_db(dhash(hdr))).object()", "Again.\" + str(e)) return False def create_transaction(receiver_public_keys: List[str], amounts: List[int],", ") # get block block = Block.from_json(get_block_from_db(dhash(hdr))).object() html += \"<tr><th>\"", "= get_peer_url(peer) data = {\"port\": consts.MINER_SERVER_PORT, \"version\": consts.MINER_VERSION, \"blockheight\": BLOCKCHAIN.active_chain.length}", "= \"nothing\" @app.get(\"/\") def home(): log_ip(request, inspect.stack()[0][3]) message = \"\"", "data = {} data[\"send_this\"] = transaction.to_json() transaction.vin = {} data[\"sign_this\"]", "# Restore Blockchain logger.info(\"FullNode: Restoring Existing Chain\") header_list = read_header_list_from_db()", "from wallet import Wallet app = Bottle() BaseTemplate.defaults[\"get_url\"] = app.get_url", "-> Transaction: vout = {} vin = {} current_amount =", "transaction = Transaction.from_json(data[\"transaction\"]).object() sig = data[\"signature\"] transaction.add_sign(sig) logger.debug(transaction) logger.info(\"Wallet: Attempting", "Error Occured, Contact Admin.\" message_type = \"warning\" else: message =", "Error while sending data in process\" + 
str(peer)) Process(target=request_task, args=(PEER_LIST,", "current_balance = check_balance(public_key) return str(current_balance) @app.post(\"/makeTransaction\") def make_transaction(): log_ip(request, inspect.stack()[0][3])", "received from the peer. if data.get(\"blockheight\", None): peer.update(data) else: logger.debug(\"Main:", "while sending data in process\" + str(peer)) Process(target=request_task, args=(PEER_LIST, url,", "= BLOCKCHAIN.active_chain.transaction_history.get(public_key) return json.dumps(tx_hist) @app.post(\"/greetpeer\") def greet_peer_f(): log_ip(request, inspect.stack()[0][3]) try:", "request.environ.get(\"REMOTE_ADDR\") def log_ip(request, fname): client_ip = get_ip(request) iplogger.info(f\"{client_ip} : Called", "Broadcast block to other peers send_to_all_peers(\"/newtransaction\", request_data) else: logger.debug(\"The transation", "txhash): log_ip(request, inspect.stack()[0][3]) try: block = Block.from_json(get_block_from_db(blockhash)).object() tx = None", "False if ADD_ENTRY: PEER_LIST.append(peer) logger.debug(\"Server: Greet, A new peer joined,", "Mismatch,\" + \"NOT MINING\" if consts.NO_MINING else \"MINING\" @app.route(\"/<url:re:.+>\") @error(403)", "[bounty], sender_public_key, message=message) data = {} data[\"send_this\"] = transaction.to_json() transaction.vin", "# result = send_bounty(receivers, amounts) # if result: # message", "read_header_list_from_db from utils.utils import compress, decompress, dhash from wallet import", "Blocks: \" + str(BLOCKCHAIN.active_chain.length) + \"<br>\" + dhash(BLOCKCHAIN.active_chain.header_list[-1]) + \"<br>\"", "peer data in the peer list with the new data", "= data[\"receiver_public_key\"] sender_public_key = data[\"sender_public_key\"] message = \"No Message\" if", "transaction.vin = {} data[\"sign_this\"] = transaction.to_json() return json.dumps(data) @app.post(\"/sendTransaction\") def", "if not BLOCKCHAIN.add_block(block): logger.error(\"Sync: Block received is invalid, Cannot 
Sync\")", "\"</td></tr>\" # for i, transaction in enumerate(block.transactions): # s =", "receive_block_from_peer(max_peer, hhash) if not BLOCKCHAIN.add_block(block): logger.error(\"Sync: Block received is invalid,", "data[\"send_this\"] = transaction.to_json() transaction.vin = {} data[\"sign_this\"] = transaction.to_json() return", "try: # receivers = [] # amounts = [] #", "entry[\"port\"] if ip == peer[\"ip\"] and port == peer[\"port\"]: ADD_ENTRY", "if consts.NEW_BLOCKCHAIN: logger.info(\"FullNode: Starting New Chain from Genesis\") BLOCKCHAIN.add_block(genesis_block) else:", "return \"Insufficient Balance to make Transaction, need more \" +", "10 else 0 headers = [] hdr_list = BLOCKCHAIN.active_chain.header_list if", "str(e)) return False def receive_block_from_peer(peer: Dict[str, Any], header_hash) -> Block:", "header_hash}) return Block.from_json(decompress(r.text)).object() def check_block_with_peer(peer, hhash): r = requests.post(get_peer_url(peer) +", "new data received from the peer. 
if data.get(\"blockheight\", None): peer.update(data)", "i in range(prev * 8, (prev + 1) * 8)", "if check_balance(MY_WALLET.public_key) >= sum(amounts): result = send_bounty([pubkey], amounts) if result:", "+ str(s) + \"</td></tr>\" html += \"</table>\" return str(html) @app.get(\"/chains\")", "\"message\" in data: message = data[\"message\"] if len(receiver_public_key) < consts.PUBLIC_KEY_LENGTH:", "Thread, Timer from typing import Any, Dict, List from datetime", "\"warning\" else: message = \"Invalid Unique ID!\" message_type = \"danger\"", "if transaction_json: try: tx = Transaction.from_json(transaction_json).object() # Add transaction to", "message = \"\" message_type = \"info\" uuid = request.forms.get(\"uuid\") pubkey", "max_peer = max(PEER_LIST, key=lambda k: k[\"blockheight\"]) logger.debug(f\"Sync: Syncing with {get_peer_url(max_peer)},", "log_ip(request, inspect.stack()[0][3]) hhash = request.forms.get(\"headerhash\") return cached_get_block(hhash) @app.post(\"/checkblock\") def checkblock():", "message_type = \"info\" uuid = request.forms.get(\"uuid\") pubkey = request.forms.get(\"pubkey\") amounts", "check_balance(MY_WALLET.public_key) for key in receiver_public_keys: if len(key) < consts.PUBLIC_KEY_LENGTH: logger.debug(\"Invalid", "pubkey=MY_WALLET.public_key) @app.get(\"/checkmybalance\") def checkblance(): log_ip(request, inspect.stack()[0][3]) return str(check_balance(MY_WALLET.public_key)) @app.route(\"/static/<filename:path>\", name=\"static\")", "Key: <br>\" + str(get_wallet_from_db(consts.MINER_SERVER_PORT)[1]) ) return s def render_block_header(hdr): html", "[] for peer in PEER_LIST: if greet_peer(peer): new_peer_list.append(peer) PEER_LIST =", "\" + str(e)) Timer(consts.MINING_INTERVAL_THRESHOLD * 2, sync_with_peers).start() def check_balance(pub_key: str)", "pubkey = request.forms.get(\"pubkey\") amounts = [300] if uuid in valid_ids:", "= 0 total_amount = sum(amounts) i = 0 for so,", "get_block_header_hash(height): return 
dhash(BLOCKCHAIN.active_chain.header_list[height]) def sync(max_peer): fork_height = BLOCKCHAIN.active_chain.length r =", "tx = None for t in block.transactions: if t.hash() ==", "Received\" logger.error(\"Server: Invalid Block Received\") return \"Invalid Block\" @app.post(\"/newblock\") def", "Send Transaction. Try Again.\" + str(e)) return False def create_transaction(receiver_public_keys:", "@app.post(\"/makeTransaction\") def make_transaction(): log_ip(request, inspect.stack()[0][3]) data = request.json bounty =", "message = \"Some Error Occured. Please try again later.\" #", "process_new_block(request.body.read()) @lru_cache(maxsize=16) def process_new_transaction(request_data: bytes) -> str: global BLOCKCHAIN transaction_json", "return \"Invalid Block\" @app.post(\"/newblock\") def received_new_block(): log_ip(request, inspect.stack()[0][3]) return process_new_block(request.body.read())", "(\" + str(hdr.timestamp) + \")</td></tr>\" ) # get block block", "New Block: invalid block received \" + str(e)) return \"Invalid", "with all my peers sync_with_peers() # Start mining Thread Thread(target=start_mining_thread,", "threads=16, port=consts.MINER_SERVER_PORT) else: waitress.serve(app, host=\"0.0.0.0\", threads=16, port=consts.MINER_SERVER_PORT) except KeyboardInterrupt: miner.stop_mining()", "to be mined!\" valid_ids.remove(uuid) else: message = \"Some Error Occured,", "def checkingbalance(): log_ip(request, inspect.stack()[0][3]) data = request.json public_key = data[\"public_key\"]", "fork_height = BLOCKCHAIN.active_chain.length r = requests.post(get_peer_url(max_peer) + \"/getblockhashes\", data={\"myheight\": fork_height})", "more \" + str(bounty - current_balance) elif sender_public_key == receiver_public_key:", "= json.loads(r.text) # Update the peer data in the peer", "Transaction: vout = {} vin = {} current_amount = 0", "url, data), daemon=True).start() def start_mining_thread(): time.sleep(5) Thread(target=mining_thread_task, name=\"Miner\", 
daemon=True).start() def", "if data.get(\"blockheight\", None): peer.update(data) else: logger.debug(\"Main: Peer data does not", "except Exception as e: logger.debug(\"BLOCK/blockhash: \" + str(e)) return template(\"error.html\")", "transaction to Mempool if tx not in BLOCKCHAIN.mempool: if BLOCKCHAIN.active_chain.is_transaction_valid(tx):", "\"<tr><th>\" + \"Block Hash\" + \"</th>\" html += \"<td>\" +", "def make_transaction(): log_ip(request, inspect.stack()[0][3]) data = request.json bounty = int(data[\"bounty\"])", "BLOCKCHAIN.active_chain.transaction_history.get(pubkey) return template(\"account.html\", tx_hist=tx_hist, balance=balance, pubkey=pubkey) @app.post(\"/mining\") def mining(): log_ip(request,", "Exception as e: logger.debug(\"BLOCK/blockhash: \" + str(e)) return template(\"error.html\") return", "(prev + 1) * 8) if i < len(hdr_list)] blocks", "if block_json: try: block = Block.from_json(block_json).object() # Check if block", "Chain from Genesis\") BLOCKCHAIN.add_block(genesis_block) else: # Restore Blockchain logger.info(\"FullNode: Restoring", "tx_out = utxo_list[0] if current_amount >= total_amount: break if tx_out.address", "name=\"account\") def account(pubkey): log_ip(request, inspect.stack()[0][3]) balance = check_balance(pubkey) tx_hist =", "= 400 logger.error(\"Wallet: Could not Send Transaction. 
Try Again.\" +", "+ \"Balance \" + str(check_balance(MY_WALLET.public_key)) + \"<br>Public Key: <br>\" +", "@lru_cache(maxsize=128) def cached_get_block(headerhash: str) -> str: if headerhash: db_block =", "+ str(datetime.fromtimestamp(hdr.timestamp).strftime(\"%d-%m-%Y %H:%M:%S\")) + \" (\" + str(hdr.timestamp) + \")</td></tr>\"", "make_transaction(): log_ip(request, inspect.stack()[0][3]) data = request.json bounty = int(data[\"bounty\"]) receiver_public_key", "logger, iplogger from utils.storage import get_block_from_db, get_wallet_from_db, read_header_list_from_db from utils.utils", "import time from functools import lru_cache from multiprocessing import Pool,", "hhash in hash_list: block = receive_block_from_peer(max_peer, hhash) if not BLOCKCHAIN.add_block(block):", "logger.error(\"Server: Invalid Block Received\") return \"Invalid Block\" @app.post(\"/newblock\") def received_new_block():", "peer.update(data) else: logger.debug(\"Main: Peer data does not have Block Height\")", "process_new_transaction(request_data: bytes) -> str: global BLOCKCHAIN transaction_json = decompress(request_data) if", "in data: message = data[\"message\"] if len(receiver_public_key) < consts.PUBLIC_KEY_LENGTH: logger.debug(\"Invalid", "inspect.stack()[0][3]) # return template(\"wallet.html\", message=\"\", message_type=\"\", pubkey=MY_WALLET.public_key) # @app.post(\"/wallet\") #", "current_amount += tx_out.amount vin[i] = TxIn(payout=SingleOutput.from_json(so), pub_key=sender_public_key, sig=\"\") i +=", "tx_hist = BLOCKCHAIN.active_chain.transaction_history.get(public_key) return json.dumps(tx_hist) @app.post(\"/greetpeer\") def greet_peer_f(): log_ip(request, inspect.stack()[0][3])", "if tx not in BLOCKCHAIN.mempool: if BLOCKCHAIN.active_chain.is_transaction_valid(tx): logger.debug(\"Valid Transaction received,", "# return template(\"wallet.html\", message=message, message_type=message_type, pubkey=MY_WALLET.public_key) # else: # publickey", "sender_public_key, message=message) 
data = {} data[\"send_this\"] = transaction.to_json() transaction.vin =", "= [] # amounts = [] # total_amount = 0", "# s = \"coinbase: \" + str(transaction.is_coinbase) + \", fees:", "+ \".log\", \"w\") as f: app = LineProfilerMiddleware(app, stream=f, async_stream=True)", "check_block_with_peer(peer, hhash): r = requests.post(get_peer_url(peer) + \"/checkblock\", data={\"headerhash\": hhash}) result", "def send_transaction(): log_ip(request, inspect.stack()[0][3]) data = request.json transaction = Transaction.from_json(data[\"transaction\"]).object()", "tx_out.address == sender_public_key: current_amount += tx_out.amount vin[i] = TxIn(payout=SingleOutput.from_json(so), pub_key=sender_public_key,", "log_ip(request, fname): client_ip = get_ip(request) iplogger.info(f\"{client_ip} : Called function {fname}\")", "logger.debug(\"Valid Answer, Rewarding \" + pubkey) message = \"Well Done!\"", "Transaction, need more \" + str(bounty - current_balance) elif sender_public_key", "= [] hdr_list = BLOCKCHAIN.active_chain.header_list if len(hdr_list) > 200: hdr_list", "%H:%M:%S\")) + \" (\" + str(hdr.timestamp) + \")</td></tr>\" ) #", "html += \"<td>\" + dhash(hdr) + \"</td></tr>\" html += \"<tr><th>\"", "\"<tr><th>Transaction \" + str(i) + \"</th><td>\" + str(s) + \"</td></tr>\"", "set for Block that is not added except Exception as", "data received from the peer. if data.get(\"blockheight\", None): peer.update(data) else:", "Start server if LINE_PROFILING: from wsgi_lineprof.middleware import LineProfilerMiddleware with open(\"lineprof\"", "peers\") # Broadcast block to other peers send_to_all_peers(\"/newblock\", request_data) #", "result: return True return False def get_block_header_hash(height): return dhash(BLOCKCHAIN.active_chain.header_list[height]) def", "Kill Miner t = Timer(1, miner.stop_mining) t.start() return \"Block Received\"", "= ( \"No. 
of Blocks: \" + str(BLOCKCHAIN.active_chain.length) + \"<br>\"", "= send_bounty(receivers, amounts) # if result: # message = \"Your", "not added except Exception as e: logger.error(\"Server: New Block: invalid", "+ str(get_wallet_from_db(consts.MINER_SERVER_PORT)[1]) ) return s def render_block_header(hdr): html = \"<table>\"", "Admin.\" # message_type = \"warning\" # else: # message =", "e: logger.debug(\"BLOCK/blockhash: \" + str(e)) return template(\"error.html\") return template(\"block.html\", block=block)", "function {fname}\") @app.post(\"/checkBalance\") def checkingbalance(): log_ip(request, inspect.stack()[0][3]) data = request.json", "message_type = \"warning\" # return template(\"wallet.html\", message=message, message_type=message_type, pubkey=MY_WALLET.public_key) #", "hdr_list: d = {} d[\"hash\"] = dhash(hdr)[-5:] d[\"time\"] = hdr.timestamp", "Exception as e: logger.debug(\"Server: Requests: Error while sending data in", "template, error import utils.constants as consts from core import Block,", "r = requests.post(get_peer_url(peer) + \"/checkblock\", data={\"headerhash\": hhash}) result = json.loads(r.text)", "response.status = 400 return \"Invalid Receiver Public Key\" current_balance =", "* 2, sync_with_peers).start() def check_balance(pub_key: str) -> int: current_balance =", "log_ip(request, inspect.stack()[0][3]) return str(check_balance(MY_WALLET.public_key)) @app.route(\"/static/<filename:path>\", name=\"static\") def serve_static(filename): log_ip(request, inspect.stack()[0][3])", "Public Key\" current_balance = check_balance(sender_public_key) if current_balance < bounty: logger.debug(\"Insufficient", "elif sender_public_key == receiver_public_key: logger.debug(\"Someone trying to send money to", "\"Your reward is being sent, please wait for it to", "def greet_peer_f(): log_ip(request, inspect.stack()[0][3]) try: peer = {} peer[\"port\"] =", "@lru_cache(maxsize=16) def process_new_block(request_data: bytes) -> str: global BLOCKCHAIN 
block_json =", "message=\"\") -> Transaction: vout = {} vin = {} current_amount", "# bounty = int(request.forms.get(\"amount\" + str(i))) # publickey = \"\"", "def received_new_transaction(): log_ip(request, inspect.stack()[0][3]) result, message = process_new_transaction(request.body.read()) if result:", "tx_hist = BLOCKCHAIN.active_chain.transaction_history.get(pubkey) return template(\"account.html\", tx_hist=tx_hist, balance=balance, pubkey=pubkey) @app.post(\"/mining\") def", "k: k[\"blockheight\"]) logger.debug(f\"Sync: Syncing with {get_peer_url(max_peer)}, he seems to have", "-> List[Dict[str, Any]]: try: r = requests.post(consts.SEED_SERVER_URL, data={\"port\": consts.MINER_SERVER_PORT}) peer_list", "max(PEER_LIST, key=lambda k: k[\"blockheight\"]) logger.debug(f\"Sync: Syncing with {get_peer_url(max_peer)}, he seems", "= 0 for x, utxo_list in BLOCKCHAIN.active_chain.utxo.utxo.items(): tx_out = utxo_list[0]", "static_file(filename, root=\"static\") @app.get(\"/favicon.ico\") def get_favicon(): log_ip(request, inspect.stack()[0][3]) return static_file(\"favicon.ico\", root=\"static\")", "2, sync_with_peers).start() def check_balance(pub_key: str) -> int: current_balance = 0", "log_ip(request, inspect.stack()[0][3]) try: block = Block.from_json(get_block_from_db(blockhash)).object() except Exception as e:", "# Sync with all my peers sync_with_peers() # Start mining", "+ \"</td></tr>\" html += \"<tr><th>\" + \"Merkle Root\" + \"</th>\"", "and if you eat it, you'll die?''' actual_answer = \"nothing\"", "all my peers sync_with_peers() # Start mining Thread Thread(target=start_mining_thread, daemon=True).start()", "pub_key=sender_public_key, sig=\"\") i += 1 for i, address in enumerate(receiver_public_keys):", "for i, transaction in enumerate(block.transactions): # s = \"coinbase: \"", "+ str(peer[\"ip\"]) + \":\" + str(peer[\"port\"]) def greet_peer(peer: Dict[str, Any])", "datetime import datetime import hashlib import inspect import requests import", 
"\"Done\" @app.post(\"/transactionHistory\") def transaction_history(): log_ip(request, inspect.stack()[0][3]) data = request.json public_key", "= t except Exception as e: logger.debug(\"Transaction/bhash/tx: \" + str(e))", "+ str(e)) pass data = {\"version\": consts.MINER_VERSION, \"blockheight\": BLOCKCHAIN.active_chain.length} response.content_type", "if t.hash() == txhash: tx = t except Exception as", "wsgi_lineprof.middleware import LineProfilerMiddleware with open(\"lineprof\" + str(consts.MINER_SERVER_PORT) + \".log\", \"w\")", "all the peers def sync_with_peers(): try: PEER_LIST = fetch_peer_list() new_peer_list", "return \"Mining Toggled, \" + \"NOT MINING\" if consts.NO_MINING else", "< 0: prev = 0 hdr_list = list(reversed(BLOCKCHAIN.active_chain.header_list)) indexes =", "Broadcast block to other peers send_to_all_peers(\"/newblock\", request_data) # TODO Make", "miner.stop_mining) t.start() return \"Block Received\" logger.error(\"Server: Invalid Block Received\") return", "return template(\"about.html\") # @app.get(\"/wallet\") # def wallet(): # log_ip(request, inspect.stack()[0][3])", "= request.json public_key = data[\"public_key\"] logger.debug(public_key) current_balance = check_balance(public_key) return", "block = receive_block_from_peer(max_peer, hhash) if not BLOCKCHAIN.add_block(block): logger.error(\"Sync: Block received", "block to peers\") # Broadcast block to other peers send_to_all_peers(\"/newblock\",", "return template(\"chains.html\", data=data, start=start) @app.get(\"/explorer\") def explorer(): log_ip(request, inspect.stack()[0][3]) prev", "fees: \" + str(transaction.fees) # html += \"<tr><th>Transaction \" +", "Chain\") logger.debug(\"Server: Sending new block to peers\") # Broadcast block", "later.\" # message_type = \"danger\" # return template(\"wallet.html\", message=message, message_type=message_type,", "def home(): log_ip(request, inspect.stack()[0][3]) message = \"\" message_type = \"info\"", "new_peer_list.append(peer) 
PEER_LIST = new_peer_list if PEER_LIST: max_peer = max(PEER_LIST, key=lambda", "total_amount += bounty # receivers.append(publickey) # amounts.append(bounty) # if check_balance(MY_WALLET.public_key)", "current_balance = check_balance(MY_WALLET.public_key) for key in receiver_public_keys: if len(key) <", "list(reversed(BLOCKCHAIN.active_chain.header_list)) indexes = [i for i in range(prev * 8,", "message=message, message_type=message_type, pubkey=MY_WALLET.public_key) # else: # publickey = receiver #", "PEER_LIST = fetch_peer_list() new_peer_list = [] for peer in PEER_LIST:", "[] start = BLOCKCHAIN.active_chain.length - 10 if BLOCKCHAIN.active_chain.length > 10", "html += \"<tr><th>\" + \"Prev Block Hash\" + \"</th>\" html", "\" + str(not consts.NO_MINING)) return \"Mining Toggled, \" + \"NOT", "app = LineProfilerMiddleware(app, stream=f, async_stream=True) waitress.serve(app, host=\"0.0.0.0\", threads=16, port=consts.MINER_SERVER_PORT) else:", "= list(reversed(BLOCKCHAIN.active_chain.header_list)) indexes = [i for i in range(prev *", "current_amount >= total_amount: break if tx_out.address == sender_public_key: current_amount +=", "= requests.post( \"http://0.0.0.0:\" + str(consts.MINER_SERVER_PORT) + \"/newtransaction\", data=compress(transaction.to_json()), timeout=(5, 1),", "t except Exception as e: logger.debug(\"Transaction/bhash/tx: \" + str(e)) return", "< consts.PUBLIC_KEY_LENGTH: logger.debug(\"Invalid Receiver Public Key\") response.status = 400 return", "message_type = \"danger\" return template(\"index.html\", message=message, message_type=message_type, question=question) @app.get('/about') def", "response.status = 400 return \"Insufficient Balance to make Transaction, need", "bool: try: url = get_peer_url(peer) data = {\"port\": consts.MINER_SERVER_PORT, \"version\":", "# Add transaction to Mempool if tx not in BLOCKCHAIN.mempool:", "MINING\" if consts.NO_MINING else \"MINING\" else: return \"Password Mismatch,\" +", "r = 
requests.post(get_peer_url(max_peer) + \"/getblockhashes\", data={\"myheight\": fork_height}) hash_list = json.loads(decompress(r.text.encode()))", "port = entry[\"port\"] if ip == peer[\"ip\"] and port ==", "= not consts.NO_MINING logger.info(\"Mining: \" + str(not consts.NO_MINING)) return \"Mining", "data in process\" + str(peer)) Process(target=request_task, args=(PEER_LIST, url, data), daemon=True).start()", "= int(data[\"bounty\"]) receiver_public_key = data[\"receiver_public_key\"] sender_public_key = data[\"sender_public_key\"] message =", "= process_new_transaction(request.body.read()) if result: response.status = 200 else: response.status =", "Valid Transaction\" else: return True, \"Transaction Already received\" except Exception", "in process\" + str(peer)) Process(target=request_task, args=(PEER_LIST, url, data), daemon=True).start() def", "'r') as file: uuid_json = file.read() valid_ids = set(json.loads(uuid_json)) @app.post(\"/\")", "receivers.append(publickey) # amounts.append(bounty) # if check_balance(MY_WALLET.public_key) >= total_amount: # result", "log_ip(request, inspect.stack()[0][3]) message = \"\" message_type = \"info\" return template(\"index.html\",", "+ dhash(hdr) + \"</td></tr>\" html += \"<tr><th>\" + \"Prev Block", "+ str(e)) return \"Invalid Block Received\" # Kill Miner t", "LINE_PROFILING = False BLOCKCHAIN = BlockChain() PEER_LIST: List[Dict[str, Any]] =", "Sync with all my peers sync_with_peers() # Start mining Thread", "i = 0 for so, utxo_list in BLOCKCHAIN.active_chain.utxo.utxo.items(): tx_out =", "BLOCKCHAIN.active_chain.utxo.utxo.items(): tx_out = utxo_list[0] if current_amount >= total_amount: break if", "checkblance(): log_ip(request, inspect.stack()[0][3]) return str(check_balance(MY_WALLET.public_key)) @app.route(\"/static/<filename:path>\", name=\"static\") def serve_static(filename): log_ip(request,", "Done!\" if check_balance(MY_WALLET.public_key) >= sum(amounts): result = send_bounty([pubkey], amounts) if", "+ \", 
fees: \" + str(transaction.fees) # html += \"<tr><th>Transaction", "if get_block_from_db(dhash(block.header)): logger.info(\"Server: Received block exists, doing nothing\") return \"Block", "open('uuids.json', 'r') as file: uuid_json = file.read() valid_ids = set(json.loads(uuid_json))", "logger.info(\"FullNode: Restoring Existing Chain\") header_list = read_header_list_from_db() BLOCKCHAIN.build_from_header_list(header_list) # Sync", "response.status = 400 return \"Cannot send money to youself\" else:", "Existing Chain\") header_list = read_header_list_from_db() BLOCKCHAIN.build_from_header_list(header_list) # Sync with all", "question=question) with open('uuids.json', 'r') as file: uuid_json = file.read() valid_ids", "please wait for it to be mined!\" valid_ids.remove(uuid) else: message", "= LineProfilerMiddleware(app, stream=f, async_stream=True) waitress.serve(app, host=\"0.0.0.0\", threads=16, port=consts.MINER_SERVER_PORT) else: waitress.serve(app,", "+= \"<tr><th>\" + \"Height\" + \"</th>\" html += \"<td>\" +", "# message = \"You have Insufficient Balance!\" # message_type =", "None for t in block.transactions: if t.hash() == txhash: tx", "def check_block_with_peer(peer, hhash): r = requests.post(get_peer_url(peer) + \"/checkblock\", data={\"headerhash\": hhash})", "as e: logger.error(\"Wallet: Could not Send Transaction. Try Again.\" +", "fetch_peer_list() new_peer_list = [] for peer in PEER_LIST: if greet_peer(peer):", "receive_block_from_peer(peer: Dict[str, Any], header_hash) -> Block: r = requests.post(get_peer_url(peer) +", "Could not Send Transaction. 
Invalid transaction\") return \"Try Again\" except", "to send money to himself\") response.status = 400 return \"Cannot", "MY_WALLET = Wallet() miner = Authority() def mining_thread_task(): while True:", "\"/checkblock\", data={\"headerhash\": hhash}) result = json.loads(r.text) if result: return True", "sig = data[\"signature\"] transaction.add_sign(sig) logger.debug(transaction) logger.info(\"Wallet: Attempting to Send Transaction\")", "Not Mining\") # Start server if LINE_PROFILING: from wsgi_lineprof.middleware import", "= {} data[\"send_this\"] = transaction.to_json() transaction.vin = {} data[\"sign_this\"] =", "# Start server if LINE_PROFILING: from wsgi_lineprof.middleware import LineProfilerMiddleware with", "__name__ == \"__main__\": try: if consts.NEW_BLOCKCHAIN: logger.info(\"FullNode: Starting New Chain", "home(): log_ip(request, inspect.stack()[0][3]) message = \"\" message_type = \"info\" return", "dhash(BLOCKCHAIN.active_chain.header_list[-1]) + \"<br>\" + \"Balance \" + str(check_balance(MY_WALLET.public_key)) + \"<br>Public", "= Wallet() miner = Authority() def mining_thread_task(): while True: if", "fork_height}) hash_list = json.loads(decompress(r.text.encode())) for hhash in hash_list: block =", "transaction_json = decompress(request_data) if transaction_json: try: tx = Transaction.from_json(transaction_json).object() #", "client_ip = get_ip(request) iplogger.info(f\"{client_ip} : Called function {fname}\") @app.post(\"/checkBalance\") def", "return False return True except Exception as e: logger.debug(\"Main: Could", "bounty: logger.debug(\"Insufficient Balance to make Transaction\") response.status = 400 return", "None: # publickey = wallet[1] # else: # message =", "\"</td></tr>\" html += \"<tr><th>\" + \"Prev Block Hash\" + \"</th>\"", "tx=tx, block=block) @app.route(\"/address/<pubkey:re:.+>\", name=\"account\") def account(pubkey): log_ip(request, inspect.stack()[0][3]) balance =", "wallet = get_wallet_from_db(receiver) # if wallet is not 
None: #", "SingleOutput, Transaction, TxIn, TxOut, genesis_block from authority import Authority from", "consts.NO_MINING logger.info(\"Mining: \" + str(not consts.NO_MINING)) return \"Mining Toggled, \"", "+ \"</td></tr>\" html += \"</table>\" return str(html) @app.get(\"/chains\") def visualize_chain():", "data={\"headerhash\": header_hash}) return Block.from_json(decompress(r.text)).object() def check_block_with_peer(peer, hhash): r = requests.post(get_peer_url(peer)", "10 if BLOCKCHAIN.active_chain.length > 10 else 0 headers = []", "return json.dumps(True) return json.dumps(False) @app.post(\"/getblockhashes\") def send_block_hashes(): log_ip(request, inspect.stack()[0][3]) peer_height", "consts.PUBLIC_KEY_LENGTH: logger.debug(\"Invalid Receiver Public Key\") response.status = 400 return \"Invalid", "\"danger\" return template(\"index.html\", message=message, message_type=message_type, question=question) @app.get('/about') def about(): return", "# publickey = receiver # total_amount += bounty # receivers.append(publickey)", "Exception as e: # logger.error(e) # message = \"Some Error", "+= \"<tr><th>\" + \"Merkle Root\" + \"</th>\" html += \"<td>\"", "valid, not added to Mempool\") return False, \"Not Valid Transaction\"", "= decompress(request_data) if block_json: try: block = Block.from_json(block_json).object() # Check", "if check_balance(MY_WALLET.public_key) >= total_amount: # result = send_bounty(receivers, amounts) #", "= Block.from_json(get_block_from_db(blockhash)).object() except Exception as e: logger.debug(\"BLOCK/blockhash: \" + str(e))", "= wallet[1] # else: # message = \"Error with the", "dk: consts.NO_MINING = not consts.NO_MINING logger.info(\"Mining: \" + str(not consts.NO_MINING))", "data = request.json public_key = data[\"public_key\"] logger.debug(public_key) current_balance = check_balance(public_key)", "Process from threading import Thread, Timer from typing import Any,", "Public Key Length\") return False total_amount = sum(amounts) if 
current_balance", "= data[\"signature\"] transaction.add_sign(sig) logger.debug(transaction) logger.info(\"Wallet: Attempting to Send Transaction\") try:", "number = int(request.forms.get(\"number\")) # message = \"\" # message_type =", "Block Received\" # Kill Miner t = Timer(1, miner.stop_mining) t.start()", "return template(\"index.html\", message=message, message_type=message_type, question=question) @app.get('/about') def about(): return template(\"about.html\")", "in BLOCKCHAIN.active_chain.utxo.utxo.items(): tx_out = utxo_list[0] if tx_out.address == pub_key: current_balance", "not Send Transaction. Invalid Transaction\") else: logger.info(\"Wallet: Transaction Sent, Wait", "Transaction. Invalid Transaction\") else: logger.info(\"Wallet: Transaction Sent, Wait for it", "{} current_amount = 0 total_amount = sum(amounts) i = 0", "block = Block.from_json(block_json).object() # Check if block already exists if", "need more \" + str(bounty - current_balance) elif sender_public_key ==", "timeout=(5, 1), ) if r.status_code == 400: logger.info(\"Wallet: Could not", "server if LINE_PROFILING: from wsgi_lineprof.middleware import LineProfilerMiddleware with open(\"lineprof\" +", "(current_amount - total_amount) if change > 0: vout[i + 1]", "peer_list except Exception as e: logger.error(\"Could not connect to DNS", "total_amount = sum(amounts) if current_balance < total_amount: logger.debug(\"Insuficient balance\") elif", "authority import Authority from utils.logger import logger, iplogger from utils.storage", "log_ip(request, inspect.stack()[0][3]) s = ( \"No. 
of Blocks: \" +", "ADD_ENTRY = False if ADD_ENTRY: PEER_LIST.append(peer) logger.debug(\"Server: Greet, A new", "if headerhash: db_block = get_block_from_db(headerhash) if db_block: return compress(db_block) else:", "{fname}\") @app.post(\"/checkBalance\") def checkingbalance(): log_ip(request, inspect.stack()[0][3]) data = request.json public_key", "+ \"Timestamp\" + \"</th>\" html += ( \"<td>\" + str(datetime.fromtimestamp(hdr.timestamp).strftime(\"%d-%m-%Y", "Balance!\" # message_type = \"warning\" # return template(\"wallet.html\", message=message, message_type=message_type,", "Send Transaction\") try: r = requests.post( \"http://0.0.0.0:\" + str(consts.MINER_SERVER_PORT) +", "already exists if get_block_from_db(dhash(block.header)): logger.info(\"Server: Received block exists, doing nothing\")", "invalid block received \" + str(e)) return \"Invalid Block Received\"", "e: response.status = 400 logger.error(\"Wallet: Could not Send Transaction. Try", "transactions=transactions, prev=prev) @app.route(\"/block/<blockhash>\", name=\"transaction\") def block(blockhash): log_ip(request, inspect.stack()[0][3]) try: block", "have height {max_peer['blockheight']}\") sync(max_peer) except Exception as e: logger.error(\"Sync: Error:", "Exception as e: logger.debug(\"Server: Greet Error: \" + str(e)) pass", "return \"Invalid Hash\" @app.post(\"/getblock\") def getblock(): log_ip(request, inspect.stack()[0][3]) hhash =", "received is invalid, Cannot Sync\") break return # Periodically sync", "Wait for it to be Mined\") return True except Exception", "peers def sync_with_peers(): try: PEER_LIST = fetch_peer_list() new_peer_list = []", "int(request.forms.get(\"myheight\")) hash_list = [] for i in range(peer_height, BLOCKCHAIN.active_chain.length): hash_list.append(dhash(BLOCKCHAIN.active_chain.header_list[i]))", "get_block_from_db(headerhash): return json.dumps(True) return json.dumps(False) @app.post(\"/getblockhashes\") def send_block_hashes(): log_ip(request, 
inspect.stack()[0][3])", "BLOCKCHAIN.mempool.add(tx) # Broadcast block to other peers send_to_all_peers(\"/newtransaction\", request_data) else:", "requests.post(get_peer_url(max_peer) + \"/getblockhashes\", data={\"myheight\": fork_height}) hash_list = json.loads(decompress(r.text.encode())) for hhash", "Occured, Contact Admin.\" message_type = \"warning\" else: message = \"Invalid", "in valid_ids: logger.debug(\"Valid Answer, Rewarding \" + pubkey) message =", "message_type=message_type, question=question) with open('uuids.json', 'r') as file: uuid_json = file.read()", "log_ip(request, inspect.stack()[0][3]) try: peer = {} peer[\"port\"] = request.forms.get(\"port\") peer[\"ip\"]", "new_peer_list if PEER_LIST: max_peer = max(PEER_LIST, key=lambda k: k[\"blockheight\"]) logger.debug(f\"Sync:", "request.forms.get(\"headerhash\") if get_block_from_db(headerhash): return json.dumps(True) return json.dumps(False) @app.post(\"/getblockhashes\") def send_block_hashes():", "html += \"<td>\" + str(hdr.merkle_root) + \"</td></tr>\" html += \"<tr><th>\"", "e: logger.debug(\"Transaction/bhash/tx: \" + str(e)) return template(\"error.html\") return template(\"transaction.html\", tx=tx,", "consts.NO_MINING: logger.info(\"FullNode: Not Mining\") # Start server if LINE_PROFILING: from", "if r.status_code == 400: response.status = 400 logger.error(\"Wallet: Could not", "data: message = data[\"message\"] if len(receiver_public_key) < consts.PUBLIC_KEY_LENGTH: logger.debug(\"Invalid Receiver", "# publickey = wallet[1] # else: # message = \"Error", "@app.post(\"/getblock\") def getblock(): log_ip(request, inspect.stack()[0][3]) hhash = request.forms.get(\"headerhash\") return cached_get_block(hhash)", "Insufficient Balance!\" # message_type = \"warning\" # return template(\"wallet.html\", message=message,", "> 200: hdr_list = BLOCKCHAIN.active_chain.header_list[:100] + BLOCKCHAIN.active_chain.header_list[-100:] for hdr in", "message_type = \"warning\" # else: # message = \"You 
have", "= check_balance(pubkey) tx_hist = BLOCKCHAIN.active_chain.transaction_history.get(pubkey) return template(\"account.html\", tx_hist=tx_hist, balance=balance, pubkey=pubkey)", "str(datetime.fromtimestamp(hdr.timestamp).strftime(\"%d-%m-%Y %H:%M:%S\")) + \" (\" + str(hdr.timestamp) + \")</td></tr>\" )", "Make new chain/ orphan set for Block that is not", "logger.debug(\"Main: Could not greet peer\" + str(e)) return False def", "Transaction(version=consts.MINER_VERSION, locktime=0, timestamp=int(time.time()), vin=vin, vout=vout, message=message) return tx def get_ip(request):", "import Any, Dict, List from datetime import datetime import hashlib", "in indexes] transactions = list(BLOCKCHAIN.mempool) return template(\"explorer.html\", blocks=blocks, transactions=transactions, prev=prev)", "added except Exception as e: logger.error(\"Server: New Block: invalid block", "\"warning\" # return template(\"wallet.html\", message=message, message_type=message_type, pubkey=MY_WALLET.public_key) # except Exception", "BLOCKCHAIN.active_chain.header_list[:100] + BLOCKCHAIN.active_chain.header_list[-100:] for hdr in hdr_list: d = {}", "= BLOCKCHAIN.active_chain.header_list[:100] + BLOCKCHAIN.active_chain.header_list[-100:] for hdr in hdr_list: d =", "get_ip(request): return request.environ.get(\"HTTP_X_FORWARDED_FOR\") or request.environ.get(\"REMOTE_ADDR\") def log_ip(request, fname): client_ip =", "Mempool\") BLOCKCHAIN.mempool.add(tx) # Broadcast block to other peers send_to_all_peers(\"/newtransaction\", request_data)", "= \"application/json\" return json.dumps(data) @lru_cache(maxsize=128) def cached_get_block(headerhash: str) -> str:", "Toggled, \" + \"NOT MINING\" if consts.NO_MINING else \"MINING\" else:", "question = '''What is greater than God, more evil than", "hashlib.pbkdf2_hmac(\"sha512\", password.encode(\"utf-8\"), b\"<PASSWORD>\", 200000) if hashed == dk: consts.NO_MINING =", "= get_block_from_db(headerhash) if db_block: return compress(db_block) else: 
logger.error(\"ERROR CALLED GETBLOCK", "amounts: List[int]): current_balance = check_balance(MY_WALLET.public_key) for key in receiver_public_keys: if", "data[\"signature\"] transaction.add_sign(sig) logger.debug(transaction) logger.info(\"Wallet: Attempting to Send Transaction\") try: r", "range(peer_height, BLOCKCHAIN.active_chain.length): hash_list.append(dhash(BLOCKCHAIN.active_chain.header_list[i])) return compress(json.dumps(hash_list)).decode() @lru_cache(maxsize=16) def process_new_block(request_data: bytes) ->", "\"w\") as f: app = LineProfilerMiddleware(app, stream=f, async_stream=True) waitress.serve(app, host=\"0.0.0.0\",", "\"</table>\" return str(html) @app.get(\"/chains\") def visualize_chain(): log_ip(request, inspect.stack()[0][3]) data =", "not Send Transaction. Try Again.\" + str(e)) return \"Try Again\"", "inspect.stack()[0][3]) headerhash = request.forms.get(\"headerhash\") if get_block_from_db(headerhash): return json.dumps(True) return json.dumps(False)", "= Bottle() BaseTemplate.defaults[\"get_url\"] = app.get_url LINE_PROFILING = False BLOCKCHAIN =", "invalid, Cannot Sync\") break return # Periodically sync with all", "log_ip(request, inspect.stack()[0][3]) # return template(\"wallet.html\", message=\"\", message_type=\"\", pubkey=MY_WALLET.public_key) # @app.post(\"/wallet\")", "\"<tr><th>\" + \"Transactions\" + \"</th>\" html += \"<td>\" + str(len(block.transactions))", "block = Block.from_json(get_block_from_db(blockhash)).object() tx = None for t in block.transactions:", "str(transaction.fees) # html += \"<tr><th>Transaction \" + str(i) + \"</th><td>\"", "transaction is sent, please wait for it to be mined!\"", "logger.debug(\"Valid Transaction received, Adding to Mempool\") BLOCKCHAIN.mempool.add(tx) # Broadcast block", "# else: # message = \"Error with the Receiver Port", "return \"Done\" @app.post(\"/transactionHistory\") def transaction_history(): log_ip(request, inspect.stack()[0][3]) data = request.json", "\" + str(e)) return 
template(\"error.html\") return template(\"transaction.html\", tx=tx, block=block) @app.route(\"/address/<pubkey:re:.+>\",", "hash_list = json.loads(decompress(r.text.encode())) for hhash in hash_list: block = receive_block_from_peer(max_peer,", "\"Balance \" + str(check_balance(MY_WALLET.public_key)) + \"<br>Public Key: <br>\" + str(get_wallet_from_db(consts.MINER_SERVER_PORT)[1])", "= hdr.timestamp d[\"data\"] = render_block_header(hdr) headers.append(d) data.append(headers) return template(\"chains.html\", data=data,", "= \"\" # message_type = \"info\" # try: # receivers", "\"<td>\" + str(len(block.transactions)) + \"</td></tr>\" # for i, transaction in", "if PEER_LIST: max_peer = max(PEER_LIST, key=lambda k: k[\"blockheight\"]) logger.debug(f\"Sync: Syncing", "pubkey=MY_WALLET.public_key) # else: # publickey = receiver # total_amount +=", "[] MY_WALLET = Wallet() miner = Authority() def mining_thread_task(): while", "k[\"blockheight\"]) logger.debug(f\"Sync: Syncing with {get_peer_url(max_peer)}, he seems to have height", "except Exception as e: logger.debug(\"Transaction/bhash/tx: \" + str(e)) return template(\"error.html\")", "data = [] start = BLOCKCHAIN.active_chain.length - 10 if BLOCKCHAIN.active_chain.length", "+ url, data=data, timeout=(5, 1)) except Exception as e: logger.debug(\"Server:", "= \"Invalid Unique ID!\" message_type = \"danger\" return template(\"index.html\", message=message,", "inspect.stack()[0][3]) hhash = request.forms.get(\"headerhash\") return cached_get_block(hhash) @app.post(\"/checkblock\") def checkblock(): log_ip(request,", "BaseTemplate, Bottle, request, response, static_file, template, error import utils.constants as", "NON EXISTENT BLOCK\") return \"Invalid Hash\" @app.post(\"/getblock\") def getblock(): log_ip(request,", "2) def send_to_all_peers(url, data): def request_task(peers, url, data): for peer", "logger.info(\"Wallet: Transaction Sent, Wait for it to be Mined\") return", "# return template(\"wallet.html\", 
message=\"\", message_type=\"\", pubkey=MY_WALLET.public_key) # @app.post(\"/wallet\") # def", "Block, Adding to Chain\") logger.debug(\"Server: Sending new block to peers\")", "to be Mined\") return True except Exception as e: logger.error(\"Wallet:", "orphan set for Block that is not added except Exception", "str(i) + \"</th><td>\" + str(s) + \"</td></tr>\" html += \"</table>\"", "-> int: current_balance = 0 for x, utxo_list in BLOCKCHAIN.active_chain.utxo.utxo.items():", "entry[\"ip\"] port = entry[\"port\"] if ip == peer[\"ip\"] and port", "block received \" + str(e)) return \"Invalid Block Received\" #", "# total_amount += bounty # receivers.append(publickey) # amounts.append(bounty) # if", "block_json: try: block = Block.from_json(block_json).object() # Check if block already", "message = \"Your transaction is sent, please wait for it", "\"Some Error Occured. Please try again later.\" # message_type =", "locktime=0, timestamp=int(time.time()), vin=vin, vout=vout, message=message) return tx def get_ip(request): return", "to myself\") else: transaction = create_transaction(receiver_public_keys, amounts, MY_WALLET.public_key, message=\"Authority: Faucet", "miner = Authority() def mining_thread_task(): while True: if not miner.is_mining()", "Any], header_hash) -> Block: r = requests.post(get_peer_url(peer) + \"/getblock\", data={\"headerhash\":", "Key Length\") return False total_amount = sum(amounts) if current_balance <", "process_new_transaction(request.body.read()) if result: response.status = 200 else: response.status = 400", "return template(\"index.html\", message=message, message_type=message_type, question=question) with open('uuids.json', 'r') as file:", "try again.\" # message_type = \"danger\" # return template(\"wallet.html\", message=message,", "return \"http://\" + str(peer[\"ip\"]) + \":\" + str(peer[\"port\"]) def greet_peer(peer:", "logger.debug(f\"Sync: Syncing with {get_peer_url(max_peer)}, he seems to have height 
{max_peer['blockheight']}\")", "himself\") response.status = 400 return \"Cannot send money to youself\"", "= {} data[\"sign_this\"] = transaction.to_json() return json.dumps(data) @app.post(\"/sendTransaction\") def send_transaction():", "request_data) else: logger.debug(\"The transation is not valid, not added to", "txhash: tx = t except Exception as e: logger.debug(\"Transaction/bhash/tx: \"", "+ \"</td></tr>\" html += \"<tr><th>\" + \"Timestamp\" + \"</th>\" html", "return json.dumps(data) @app.post(\"/sendTransaction\") def send_transaction(): log_ip(request, inspect.stack()[0][3]) data = request.json", "peer[\"blockheight\"] = request.forms.get(\"blockheight\") ADD_ENTRY = True for entry in PEER_LIST:", "async_stream=True) waitress.serve(app, host=\"0.0.0.0\", threads=16, port=consts.MINER_SERVER_PORT) else: waitress.serve(app, host=\"0.0.0.0\", threads=16, port=consts.MINER_SERVER_PORT)", "root=\"static\") @app.get(\"/favicon.ico\") def get_favicon(): log_ip(request, inspect.stack()[0][3]) return static_file(\"favicon.ico\", root=\"static\") @app.get(\"/info\")", "return static_file(filename, root=\"static\") @app.get(\"/favicon.ico\") def get_favicon(): log_ip(request, inspect.stack()[0][3]) return static_file(\"favicon.ico\",", "as e: logger.debug(\"Server: Requests: Error while sending data in process\"", "@app.post(\"/newblock\") def received_new_block(): log_ip(request, inspect.stack()[0][3]) return process_new_block(request.body.read()) @lru_cache(maxsize=16) def process_new_transaction(request_data:", "uuid = request.forms.get(\"uuid\") pubkey = request.forms.get(\"pubkey\") amounts = [300] if", "html = \"<table>\" html += \"<tr><th>\" + \"Height\" + \"</th>\"", "logger.debug(\"BLOCK/blockhash: \" + str(e)) return template(\"error.html\") return template(\"block.html\", block=block) @app.route(\"/transaction/<blockhash>/<txhash>\",", "try: block = Block.from_json(block_json).object() # Check if block already exists", "def received_new_block(): 
log_ip(request, inspect.stack()[0][3]) return process_new_block(request.body.read()) @lru_cache(maxsize=16) def process_new_transaction(request_data: bytes)", "+ \"/greetpeer\", data=data) data = json.loads(r.text) # Update the peer", "send_to_all_peers(\"/newtransaction\", request_data) else: logger.debug(\"The transation is not valid, not added", "def log_ip(request, fname): client_ip = get_ip(request) iplogger.info(f\"{client_ip} : Called function", "== peer[\"ip\"] and port == peer[\"port\"]: ADD_ENTRY = False if", "template(\"transaction.html\", tx=tx, block=block) @app.route(\"/address/<pubkey:re:.+>\", name=\"account\") def account(pubkey): log_ip(request, inspect.stack()[0][3]) balance", "data={\"port\": consts.MINER_SERVER_PORT}) peer_list = json.loads(r.text) return peer_list except Exception as", "template(\"wallet.html\", message=\"\", message_type=\"\", pubkey=MY_WALLET.public_key) # @app.post(\"/wallet\") # def wallet_post(): #", "@app.post(\"/mining\") def mining(): log_ip(request, inspect.stack()[0][3]) password = request.body.read().decode(\"utf-8\") hashed =", "current_balance < bounty: logger.debug(\"Insufficient Balance to make Transaction\") response.status =", "to youself\" else: transaction = create_transaction([receiver_public_key], [bounty], sender_public_key, message=message) data", "cached_get_block(hhash) @app.post(\"/checkblock\") def checkblock(): log_ip(request, inspect.stack()[0][3]) headerhash = request.forms.get(\"headerhash\") if", "ip = entry[\"ip\"] port = entry[\"port\"] if ip == peer[\"ip\"]", "# for i, transaction in enumerate(block.transactions): # s = \"coinbase:", "log_ip(request, inspect.stack()[0][3]) balance = check_balance(pubkey) tx_hist = BLOCKCHAIN.active_chain.transaction_history.get(pubkey) return template(\"account.html\",", "message_type = \"info\" return template(\"index.html\", message=message, message_type=message_type, question=question) with open('uuids.json',", "current_balance += int(tx_out.amount) return 
int(current_balance) def send_bounty(receiver_public_keys: List[str], amounts: List[int]):", "logger.error(\"Sync: Error: \" + str(e)) Timer(consts.MINING_INTERVAL_THRESHOLD * 2, sync_with_peers).start() def", "= receiver # total_amount += bounty # receivers.append(publickey) # amounts.append(bounty)", "return template(\"account.html\", tx_hist=tx_hist, balance=balance, pubkey=pubkey) @app.post(\"/mining\") def mining(): log_ip(request, inspect.stack()[0][3])", "\"NOT MINING\" if consts.NO_MINING else \"MINING\" @app.route(\"/<url:re:.+>\") @error(403) @error(404) @error(505)", "sending data in process\" + str(peer)) Process(target=request_task, args=(PEER_LIST, url, data),", "\"http://0.0.0.0:\" + str(consts.MINER_SERVER_PORT) + \"/newtransaction\", data=compress(transaction.to_json()), timeout=(5, 1), ) if", "else: # publickey = receiver # total_amount += bounty #", "of Blocks: \" + str(BLOCKCHAIN.active_chain.length) + \"<br>\" + dhash(BLOCKCHAIN.active_chain.header_list[-1]) +", "\"Block already Received Before\" if BLOCKCHAIN.add_block(block): logger.info(\"Server: Received a New", "= json.loads(r.text) return peer_list except Exception as e: logger.error(\"Could not", "else: # message = \"Some Error Occured, Contact Admin.\" #", "transaction = create_transaction(receiver_public_keys, amounts, MY_WALLET.public_key, message=\"Authority: Faucet Money\") transaction.sign(MY_WALLET) logger.info(\"Wallet:", "list(BLOCKCHAIN.mempool) return template(\"explorer.html\", blocks=blocks, transactions=transactions, prev=prev) @app.route(\"/block/<blockhash>\", name=\"transaction\") def block(blockhash):", "Add transaction to Mempool if tx not in BLOCKCHAIN.mempool: if", "log_ip(request, inspect.stack()[0][3]) # number = int(request.forms.get(\"number\")) # message = \"\"", "+ \"<br>\" + \"Balance \" + str(check_balance(MY_WALLET.public_key)) + \"<br>Public Key:", "\"Some Error Occured, Contact Admin.\" # message_type = \"warning\" #", "except Exception as e: 
response.status = 400 logger.error(\"Wallet: Could not", "# log_ip(request, inspect.stack()[0][3]) # return template(\"wallet.html\", message=\"\", message_type=\"\", pubkey=MY_WALLET.public_key) #", "= \"warning\" else: message = \"Invalid Unique ID!\" message_type =", "Dict[str, Any]) -> str: return \"http://\" + str(peer[\"ip\"]) + \":\"", "str(peer[\"ip\"]) + \":\" + str(peer[\"port\"]) def greet_peer(peer: Dict[str, Any]) ->", "error import utils.constants as consts from core import Block, BlockChain,", "to be Mined\") return \"Done\" @app.post(\"/transactionHistory\") def transaction_history(): log_ip(request, inspect.stack()[0][3])", "Transaction.from_json(transaction_json).object() # Add transaction to Mempool if tx not in", "Mined\") return True except Exception as e: logger.error(\"Wallet: Could not", "# amounts = [] # total_amount = 0 # for", "def mining_thread_task(): while True: if not miner.is_mining() and not consts.NO_MINING:", "\"/getblockhashes\", data={\"myheight\": fork_height}) hash_list = json.loads(decompress(r.text.encode())) for hhash in hash_list:", "requests import waitress from bottle import BaseTemplate, Bottle, request, response,", "connect to DNS Seed\") return [] def get_peer_url(peer: Dict[str, Any])", "= requests.post(get_peer_url(max_peer) + \"/getblockhashes\", data={\"myheight\": fork_height}) hash_list = json.loads(decompress(r.text.encode())) for", "process\" + str(peer)) Process(target=request_task, args=(PEER_LIST, url, data), daemon=True).start() def start_mining_thread():", "for i, address in enumerate(receiver_public_keys): vout[i] = TxOut(amount=amounts[i], address=address) change", "address=address) change = (current_amount - total_amount) if change > 0:", "def wallet_post(): # log_ip(request, inspect.stack()[0][3]) # number = int(request.forms.get(\"number\")) #", "tx = Transaction(version=consts.MINER_VERSION, locktime=0, timestamp=int(time.time()), vin=vin, vout=vout, message=message) return tx", "in the peer list 
with the new data received from", "= request.forms.get(\"port\") peer[\"ip\"] = request.remote_addr peer[\"time\"] = time.time() peer[\"version\"] =", "\" + str(e)) pass data = {\"version\": consts.MINER_VERSION, \"blockheight\": BLOCKCHAIN.active_chain.length}", "return template(\"error.html\") return template(\"block.html\", block=block) @app.route(\"/transaction/<blockhash>/<txhash>\", name=\"transaction\") def transaction(blockhash, txhash):", "mined!\" # else: # message = \"Some Error Occured, Contact", "import Thread, Timer from typing import Any, Dict, List from", "not connect to DNS Seed\") return [] def get_peer_url(peer: Dict[str,", "vin[i] = TxIn(payout=SingleOutput.from_json(so), pub_key=sender_public_key, sig=\"\") i += 1 for i,", "logger.debug(\"Server: Requests: Error while sending data in process\" + str(peer))", "+ \"<br>\" + dhash(BLOCKCHAIN.active_chain.header_list[-1]) + \"<br>\" + \"Balance \" +", "import logger, iplogger from utils.storage import get_block_from_db, get_wallet_from_db, read_header_list_from_db from", "amounts, MY_WALLET.public_key, message=\"Authority: Faucet Money\") transaction.sign(MY_WALLET) logger.info(\"Wallet: Attempting to Send", "Try Again.\" + str(e)) return False def create_transaction(receiver_public_keys: List[str], amounts:", "= request.forms.get(\"blockheight\") ADD_ENTRY = True for entry in PEER_LIST: ip", "+ \"/getblockhashes\", data={\"myheight\": fork_height}) hash_list = json.loads(decompress(r.text.encode())) for hhash in", "logger.info(\"Server: Received a New Valid Block, Adding to Chain\") logger.debug(\"Server:", "amounts) # if result: # message = \"Your transaction is", "Answer, Rewarding \" + pubkey) message = \"Well Done!\" if", "= utxo_list[0] if current_amount >= total_amount: break if tx_out.address ==", "return template(\"error.html\") return template(\"transaction.html\", tx=tx, block=block) @app.route(\"/address/<pubkey:re:.+>\", name=\"account\") def account(pubkey):", "# except Exception as 
e: # logger.error(e) # message =", "s = ( \"No. of Blocks: \" + str(BLOCKCHAIN.active_chain.length) +", "not consts.NO_MINING logger.info(\"Mining: \" + str(not consts.NO_MINING)) return \"Mining Toggled,", "\"<tr><th>\" + \"Timestamp\" + \"</th>\" html += ( \"<td>\" +", "= [] MY_WALLET = Wallet() miner = Authority() def mining_thread_task():", "\"warning\" # else: # message = \"You have Insufficient Balance!\"", "message = \"\" message_type = \"info\" return template(\"index.html\", message=message, message_type=message_type,", "in PEER_LIST: if greet_peer(peer): new_peer_list.append(peer) PEER_LIST = new_peer_list if PEER_LIST:", "list with the new data received from the peer. if", "if consts.NO_MINING else \"MINING\" @app.route(\"/<url:re:.+>\") @error(403) @error(404) @error(505) def error_handle(url=\"url\",", "Process(target=request_task, args=(PEER_LIST, url, data), daemon=True).start() def start_mining_thread(): time.sleep(5) Thread(target=mining_thread_task, name=\"Miner\",", "receiver_public_keys: logger.debug(\"Cannot send to myself\") else: transaction = create_transaction(receiver_public_keys, amounts,", "data[\"message\"] if len(receiver_public_key) < consts.PUBLIC_KEY_LENGTH: logger.debug(\"Invalid Receiver Public Key\") response.status", "miner.start_mining(BLOCKCHAIN.mempool, BLOCKCHAIN.active_chain, MY_WALLET) time.sleep(consts.MINING_INTERVAL_THRESHOLD // 2) def send_to_all_peers(url, data): def", "\" + str(BLOCKCHAIN.active_chain.length) + \"<br>\" + dhash(BLOCKCHAIN.active_chain.header_list[-1]) + \"<br>\" +", "+= \"<tr><th>\" + \"Prev Block Hash\" + \"</th>\" html +=", "as e: logger.debug(\"Main: Could not greet peer\" + str(e)) return", "if greet_peer(peer): new_peer_list.append(peer) PEER_LIST = new_peer_list if PEER_LIST: max_peer =", "data[\"sign_this\"] = transaction.to_json() return json.dumps(data) @app.post(\"/sendTransaction\") def send_transaction(): log_ip(request, inspect.stack()[0][3])", "peer in peers: try: 
requests.post(get_peer_url(peer) + url, data=data, timeout=(5, 1))", "@lru_cache(maxsize=16) def process_new_transaction(request_data: bytes) -> str: global BLOCKCHAIN transaction_json =", "BLOCKCHAIN.build_from_header_list(header_list) # Sync with all my peers sync_with_peers() # Start", "# try: # receivers = [] # amounts = []", "with all the peers def sync_with_peers(): try: PEER_LIST = fetch_peer_list()", "log_ip(request, inspect.stack()[0][3]) peer_height = int(request.forms.get(\"myheight\")) hash_list = [] for i", "Block Hash\" + \"</th>\" html += \"<td>\" + str(hdr.prev_block_hash) +", "Transaction. Try Again.\" + str(e)) return \"Try Again\" else: logger.info(\"Wallet:", "= transaction.to_json() transaction.vin = {} data[\"sign_this\"] = transaction.to_json() return json.dumps(data)", "response.status = 400 return message question = '''What is greater", "# @app.post(\"/wallet\") # def wallet_post(): # log_ip(request, inspect.stack()[0][3]) # number", "@app.get('/about') def about(): return template(\"about.html\") # @app.get(\"/wallet\") # def wallet():", "as consts from core import Block, BlockChain, SingleOutput, Transaction, TxIn,", "message=\"Authority: Faucet Money\") transaction.sign(MY_WALLET) logger.info(\"Wallet: Attempting to Send Transaction\") try:", "{\"port\": consts.MINER_SERVER_PORT, \"version\": consts.MINER_VERSION, \"blockheight\": BLOCKCHAIN.active_chain.length} # Send a POST", "data = request.json public_key = data[\"public_key\"] tx_hist = BLOCKCHAIN.active_chain.transaction_history.get(public_key) return", "CALLED GETBLOCK FOR NON EXISTENT BLOCK\") return \"Invalid Hash\" @app.post(\"/getblock\")", "return \"Try Again\" else: logger.info(\"Wallet: Transaction Sent, Wait for it", "is greater than God, more evil than the devil, the", "def cached_get_block(headerhash: str) -> str: if headerhash: db_block = get_block_from_db(headerhash)", "# def wallet(): # log_ip(request, inspect.stack()[0][3]) # return template(\"wallet.html\", 
message=\"\",", "create_transaction(receiver_public_keys, amounts, MY_WALLET.public_key, message=\"Authority: Faucet Money\") transaction.sign(MY_WALLET) logger.info(\"Wallet: Attempting to", "sender_public_key, message=\"\") -> Transaction: vout = {} vin = {}", "= b\"\\x11`\\x1e\\xdd\\xd1\\xb6\\x80\\x0f\\xd4\\xb0t\\x90\\x9b\\xd3]\\xa0\\xcc\\x1d\\x04$\\x8b\\xb1\\x19J\\xaa!T5-\\x9eJ\\xfcI5\\xc0\\xbb\\xf5\\xb1\\x9d\\xba\\xbef@\\xa1)\\xcf\\x9b]c(R\\x91\\x0e\\x9dMM\\xb6\\x94\\xa9\\xe2\\x94il\\x15\" dk = hashlib.pbkdf2_hmac(\"sha512\", password.encode(\"utf-8\"), b\"<PASSWORD>\", 200000) if hashed", "bounty # receivers.append(publickey) # amounts.append(bounty) # if check_balance(MY_WALLET.public_key) >= total_amount:", "return True, \"Done\" # Transactions for all active chains @app.post(\"/newtransaction\")", "for it to be Mined\") return \"Done\" @app.post(\"/transactionHistory\") def transaction_history():", "for so, utxo_list in BLOCKCHAIN.active_chain.utxo.utxo.items(): tx_out = utxo_list[0] if current_amount", "from the peer. 
if data.get(\"blockheight\", None): peer.update(data) else: logger.debug(\"Main: Peer", "\"info\" uuid = request.forms.get(\"uuid\") pubkey = request.forms.get(\"pubkey\") amounts = [300]", "for hhash in hash_list: block = receive_block_from_peer(max_peer, hhash) if not", "bytes) -> str: global BLOCKCHAIN transaction_json = decompress(request_data) if transaction_json:", "for it to be Mined\") return True except Exception as", "is not None: # publickey = wallet[1] # else: #", "typing import Any, Dict, List from datetime import datetime import", "= \"info\" return template(\"index.html\", message=message, message_type=message_type, question=question) with open('uuids.json', 'r')", "send_bounty(receivers, amounts) # if result: # message = \"Your transaction", "return False, \"Not Valid Transaction\" return True, \"Done\" # Transactions", "threading import Thread, Timer from typing import Any, Dict, List", "None): peer.update(data) else: logger.debug(\"Main: Peer data does not have Block", "in PEER_LIST: ip = entry[\"ip\"] port = entry[\"port\"] if ip", "str: global BLOCKCHAIN block_json = decompress(request_data) if block_json: try: block", "+ str(consts.MINER_SERVER_PORT) + \".log\", \"w\") as f: app = LineProfilerMiddleware(app,", "from threading import Thread, Timer from typing import Any, Dict,", "timestamp=int(time.time()), vin=vin, vout=vout, message=message) return tx def get_ip(request): return request.environ.get(\"HTTP_X_FORWARDED_FOR\")", "str(hdr.prev_block_hash) + \"</td></tr>\" html += \"<tr><th>\" + \"Merkle Root\" +", "logger.error(\"Server: New Transaction: Invalid tx received: \" + str(e)) return", "else: transaction = create_transaction(receiver_public_keys, amounts, MY_WALLET.public_key, message=\"Authority: Faucet Money\") transaction.sign(MY_WALLET)", "waitress.serve(app, host=\"0.0.0.0\", threads=16, port=consts.MINER_SERVER_PORT) else: waitress.serve(app, host=\"0.0.0.0\", threads=16, port=consts.MINER_SERVER_PORT) except", "peer. 
if data.get(\"blockheight\", None): peer.update(data) else: logger.debug(\"Main: Peer data does", "the devil, the poor have it, the rich need it,", "return process_new_block(request.body.read()) @lru_cache(maxsize=16) def process_new_transaction(request_data: bytes) -> str: global BLOCKCHAIN", "consts.NO_MINING = not consts.NO_MINING logger.info(\"Mining: \" + str(not consts.NO_MINING)) return", "requests.post(consts.SEED_SERVER_URL, data={\"port\": consts.MINER_SERVER_PORT}) peer_list = json.loads(r.text) return peer_list except Exception", "+ \"</th><td>\" + str(s) + \"</td></tr>\" html += \"</table>\" return", "= int(request.query.prev or 0) if prev < 0: prev =", "log_ip(request, inspect.stack()[0][3]) password = request.body.read().decode(\"utf-8\") hashed = b\"\\x11`\\x1e\\xdd\\xd1\\xb6\\x80\\x0f\\xd4\\xb0t\\x90\\x9b\\xd3]\\xa0\\xcc\\x1d\\x04$\\x8b\\xb1\\x19J\\xaa!T5-\\x9eJ\\xfcI5\\xc0\\xbb\\xf5\\xb1\\x9d\\xba\\xbef@\\xa1)\\xcf\\x9b]c(R\\x91\\x0e\\x9dMM\\xb6\\x94\\xa9\\xe2\\x94il\\x15\" dk =", "e: logger.error(\"Wallet: Could not Send Transaction. Try Again.\" + str(e))", "response.content_type = \"application/json\" return json.dumps(data) @lru_cache(maxsize=128) def cached_get_block(headerhash: str) ->", "if ADD_ENTRY: PEER_LIST.append(peer) logger.debug(\"Server: Greet, A new peer joined, Adding", "block.transactions: if t.hash() == txhash: tx = t except Exception", "not Send Transaction. 
Try Again.\" + str(e)) return False def", "header_hash) -> Block: r = requests.post(get_peer_url(peer) + \"/getblock\", data={\"headerhash\": header_hash})", "daemon=True).start() if consts.NO_MINING: logger.info(\"FullNode: Not Mining\") # Start server if", "\"<br>\" + \"Balance \" + str(check_balance(MY_WALLET.public_key)) + \"<br>Public Key: <br>\"", "EXISTENT BLOCK\") return \"Invalid Hash\" @app.post(\"/getblock\") def getblock(): log_ip(request, inspect.stack()[0][3])", "Received block exists, doing nothing\") return \"Block already Received Before\"", "logger.error(\"Server: New Block: invalid block received \" + str(e)) return", "str(hdr.merkle_root) + \"</td></tr>\" html += \"<tr><th>\" + \"Timestamp\" + \"</th>\"", "return template(\"error.html\") if __name__ == \"__main__\": try: if consts.NEW_BLOCKCHAIN: logger.info(\"FullNode:", "= entry[\"port\"] if ip == peer[\"ip\"] and port == peer[\"port\"]:", "import waitress from bottle import BaseTemplate, Bottle, request, response, static_file,", "Invalid tx received: \" + str(e)) return False, \"Not Valid", "send_bounty([pubkey], amounts) if result: message = \"Your reward is being", "data = request.json transaction = Transaction.from_json(data[\"transaction\"]).object() sig = data[\"signature\"] transaction.add_sign(sig)", "inspect.stack()[0][3]) peer_height = int(request.forms.get(\"myheight\")) hash_list = [] for i in", "f: app = LineProfilerMiddleware(app, stream=f, async_stream=True) waitress.serve(app, host=\"0.0.0.0\", threads=16, port=consts.MINER_SERVER_PORT)", "for i in range(0, number): # receiver = str(request.forms.get(\"port\" +", "file.read() valid_ids = set(json.loads(uuid_json)) @app.post(\"/\") def puzzle(): log_ip(request, inspect.stack()[0][3]) message", "receivers = [] # amounts = [] # total_amount =", "400 return message question = '''What is greater than God,", "+= \"<td>\" + str(hdr.height) + \"</td></tr>\" html += \"<tr><th>\" +", "data): for peer in peers: try: 
requests.post(get_peer_url(peer) + url, data=data,", "from typing import Any, Dict, List from datetime import datetime", "from Genesis\") BLOCKCHAIN.add_block(genesis_block) else: # Restore Blockchain logger.info(\"FullNode: Restoring Existing", "int: current_balance = 0 for x, utxo_list in BLOCKCHAIN.active_chain.utxo.utxo.items(): tx_out", "inspect.stack()[0][3]) return str(check_balance(MY_WALLET.public_key)) @app.route(\"/static/<filename:path>\", name=\"static\") def serve_static(filename): log_ip(request, inspect.stack()[0][3]) return", "get_peer_url(peer) data = {\"port\": consts.MINER_SERVER_PORT, \"version\": consts.MINER_VERSION, \"blockheight\": BLOCKCHAIN.active_chain.length} #", "@app.post(\"/newtransaction\") def received_new_transaction(): log_ip(request, inspect.stack()[0][3]) result, message = process_new_transaction(request.body.read()) if", "message question = '''What is greater than God, more evil", "# def wallet_post(): # log_ip(request, inspect.stack()[0][3]) # number = int(request.forms.get(\"number\"))", "MY_WALLET) time.sleep(consts.MINING_INTERVAL_THRESHOLD // 2) def send_to_all_peers(url, data): def request_task(peers, url,", "\".log\", \"w\") as f: app = LineProfilerMiddleware(app, stream=f, async_stream=True) waitress.serve(app,", "sync(max_peer) except Exception as e: logger.error(\"Sync: Error: \" + str(e))", "bounty = int(request.forms.get(\"amount\" + str(i))) # publickey = \"\" #", "BLOCKCHAIN.active_chain.length > 10 else 0 headers = [] hdr_list =", "money to himself\") response.status = 400 return \"Cannot send money", "Restoring Existing Chain\") header_list = read_header_list_from_db() BLOCKCHAIN.build_from_header_list(header_list) # Sync with", "data in the peer list with the new data received", "result: # message = \"Your transaction is sent, please wait", "compress(json.dumps(hash_list)).decode() @lru_cache(maxsize=16) def process_new_block(request_data: bytes) -> str: global BLOCKCHAIN block_json", "with the Receiver Port 
ID, try again.\" # message_type =", "= [] # total_amount = 0 # for i in", "= request.forms.get(\"uuid\") pubkey = request.forms.get(\"pubkey\") amounts = [300] if uuid", "1 for i, address in enumerate(receiver_public_keys): vout[i] = TxOut(amount=amounts[i], address=address)", "= Block.from_json(block_json).object() # Check if block already exists if get_block_from_db(dhash(block.header)):", "data[\"receiver_public_key\"] sender_public_key = data[\"sender_public_key\"] message = \"No Message\" if \"message\"", "return json.dumps(False) @app.post(\"/getblockhashes\") def send_block_hashes(): log_ip(request, inspect.stack()[0][3]) peer_height = int(request.forms.get(\"myheight\"))", "# else: # publickey = receiver # total_amount += bounty", "block=block) @app.route(\"/transaction/<blockhash>/<txhash>\", name=\"transaction\") def transaction(blockhash, txhash): log_ip(request, inspect.stack()[0][3]) try: block", "daemon=True).start() def fetch_peer_list() -> List[Dict[str, Any]]: try: r = requests.post(consts.SEED_SERVER_URL,", "consts.MINER_SERVER_PORT}) peer_list = json.loads(r.text) return peer_list except Exception as e:", "Key\" current_balance = check_balance(sender_public_key) if current_balance < bounty: logger.debug(\"Insufficient Balance", "0 hdr_list = list(reversed(BLOCKCHAIN.active_chain.header_list)) indexes = [i for i in", "return dhash(BLOCKCHAIN.active_chain.header_list[height]) def sync(max_peer): fork_height = BLOCKCHAIN.active_chain.length r = requests.post(get_peer_url(max_peer)", "[] hdr_list = BLOCKCHAIN.active_chain.header_list if len(hdr_list) > 200: hdr_list =", "False return True except Exception as e: logger.debug(\"Main: Could not", "int(current_balance) def send_bounty(receiver_public_keys: List[str], amounts: List[int]): current_balance = check_balance(MY_WALLET.public_key) for", "= 0 # for i in range(0, number): # receiver", "\"Prev Block Hash\" + \"</th>\" html += \"<td>\" + str(hdr.prev_block_hash)", "transaction = 
create_transaction([receiver_public_key], [bounty], sender_public_key, message=message) data = {} data[\"send_this\"]", "@app.post(\"/checkBalance\") def checkingbalance(): log_ip(request, inspect.stack()[0][3]) data = request.json public_key =", "str(hdr.height) + \"</td></tr>\" html += \"<tr><th>\" + \"Block Hash\" +", "else \"MINING\" @app.route(\"/<url:re:.+>\") @error(403) @error(404) @error(505) def error_handle(url=\"url\", error=\"404\"): log_ip(request,", "not miner.is_mining() and not consts.NO_MINING: miner.start_mining(BLOCKCHAIN.mempool, BLOCKCHAIN.active_chain, MY_WALLET) time.sleep(consts.MINING_INTERVAL_THRESHOLD //", "static_file, template, error import utils.constants as consts from core import", "== 400: logger.info(\"Wallet: Could not Send Transaction. Invalid Transaction\") else:", "\"No Message\" if \"message\" in data: message = data[\"message\"] if", "logger.info(\"Wallet: Could not Send Transaction. Invalid Transaction\") else: logger.info(\"Wallet: Transaction", "as e: logger.error(\"Sync: Error: \" + str(e)) Timer(consts.MINING_INTERVAL_THRESHOLD * 2,", "Authority from utils.logger import logger, iplogger from utils.storage import get_block_from_db,", "\"Cannot send money to youself\" else: transaction = create_transaction([receiver_public_key], [bounty],", "New Chain from Genesis\") BLOCKCHAIN.add_block(genesis_block) else: # Restore Blockchain logger.info(\"FullNode:", "to make Transaction\") response.status = 400 return \"Insufficient Balance to", "try: r = requests.post( \"http://0.0.0.0:\" + str(consts.MINER_SERVER_PORT) + \"/newtransaction\", data=compress(transaction.to_json()),", "amounts = [300] if uuid in valid_ids: logger.debug(\"Valid Answer, Rewarding", "def explorer(): log_ip(request, inspect.stack()[0][3]) prev = int(request.query.prev or 0) if", "data): def request_task(peers, url, data): for peer in peers: try:", "Already received\" except Exception as e: logger.error(\"Server: New Transaction: Invalid" ]
[ "list): for x in X: if 'tensor' not in str(type(x)).lower():", "= False def get_explainer(self, method, T, X, **kwargs): if not", "3), 'elrp': (EpsilonLRP, 4), 'deeplift': (DeepLIFTRescale, 5), 'occlusion': (Occlusion, 6),", "Results are not reliable!') constants._ENABLED_METHOD_CLASS = None constants._GRAD_OVERRIDE_CHECKFLAG = 0", "(issubclass(constants._ENABLED_METHOD_CLASS, DeepLIFTRescale) or issubclass(constants._ENABLED_METHOD_CLASS, EpsilonLRP)) \\ and constants._GRAD_OVERRIDE_CHECKFLAG == 0:", ":return: \"\"\" g = tf.compat.v1.get_default_graph() for op in g.get_operations(): if", "constants.UNSUPPORTED_ACTIVATIONS: warnings.warn('Detected unsupported activation (%s). ' 'This might lead to", "Keras and needs to be passed in feed_dict. :return: \"\"\"", "attribution method that requires ' 'gradient override but the original", "version = {tf.__version__}') @ops.RegisterGradient(\"DeepExplainGrad\") def deepexplain_grad(op, grad): # constants._ENABLED_METHOD_CLASS, _GRAD_OVERRIDE_CHECKFLAG", "def get_explainer(self, method, T, X, **kwargs): if not self.context_on: raise", "in constants.UNSUPPORTED_ACTIVATIONS: warnings.warn('Detected unsupported activation (%s). 
' 'This might lead", "0 and not op.name.startswith('gradients'): if op.type in constants.UNSUPPORTED_ACTIVATIONS: warnings.warn('Detected unsupported", "__init__(self, graph=None, session=tf.compat.v1.get_default_session()): self.method = None self.batch_size = None self.session", "be in %s' % list(attribution_methods.keys())) if isinstance(X, list): for x", "method, T, X, **kwargs): if not self.context_on: raise RuntimeError('Explain can", "Saliency, GradientXInput, IntegratedGradients, EpsilonLRP, DeepLIFTRescale, Occlusion, ShapleySampling attribution_methods = OrderedDict({", "cover all cases where explanation methods would fail, and must", "import DeepLIFTRescale, EpsilonLRP from deepexplain.tf.v1_x.utils import original_grad from deepexplain.tf.v1_x.methods import", "str(type(x)).lower(): raise RuntimeError('If a list, X must contain only Tensorflow", "be a Tensorflow Tensor object or a list of them')", "(GradientXInput, 2), 'intgrad': (IntegratedGradients, 3), 'elrp': (EpsilonLRP, 4), 'deeplift': (DeepLIFTRescale,", "attribution_methods[self.method] else: raise RuntimeError('Method must be in %s' % list(attribution_methods.keys()))", "self.method in attribution_methods: method_class, method_flag = attribution_methods[self.method] else: raise RuntimeError('Method", "deepexplain.tf.v1_x.methods import DeepLIFTRescale, EpsilonLRP from deepexplain.tf.v1_x.utils import original_grad from deepexplain.tf.v1_x.methods", "in str(type(X)).lower(): raise RuntimeError('X must be a Tensorflow Tensor object", "None else graph self.graph_context = self.graph.as_default() self.override_context = self.graph.gradient_override_map(self.get_override_map()) self.keras_phase_placeholder", "' 'This might lead to unexpected or wrong results.' 
%", "op in g.get_operations(): if len(op.inputs) > 0 and not op.name.startswith('gradients'):", "issubclass(constants._ENABLED_METHOD_CLASS, EpsilonLRP)) \\ and constants._GRAD_OVERRIDE_CHECKFLAG == 0: warnings.warn('DeepExplain detected you", "in attribution_methods: method_class, method_flag = attribution_methods[self.method] else: raise RuntimeError('Method must", "use an attribution method that requires ' 'gradient override but", "not retrieve a session. Use DeepExplain(session=your_session).') def __enter__(self): # Override", "This is used by Keras and needs to be passed", "if constants._ENABLED_METHOD_CLASS is not None \\ and issubclass(constants._ENABLED_METHOD_CLASS, GradientBasedMethod): return", "context.') # global constants._ENABLED_METHOD_CLASS, _GRAD_OVERRIDE_CHECKFLAG self.method = method if self.method", "must be a Tensorflow Tensor object') # logging.info('DeepExplain: running \"%s\"", "list(attribution_methods.keys())) if isinstance(X, list): for x in X: if 'tensor'", "exists in the graph. This is used by Keras and", "detected you are trying to use an attribution method that", "passed in feed_dict. :return: \"\"\" g = tf.compat.v1.get_default_graph() for op", "deepexplain.tf.v1_x.baseClasses import GradientBasedMethod from deepexplain.tf.v1_x.methods import DeepLIFTRescale, EpsilonLRP from deepexplain.tf.v1_x.utils", "self.method = None self.batch_size = None self.session = session self.graph", "' '(re)create your graph within the DeepExlain context. 
Results are", "def __exit__(self, type, value, traceback): self.graph_context.__exit__(type, value, traceback) self.override_context.__exit__(type, value,", "DeepExplain(session=your_session).') def __enter__(self): # Override gradient of all ops created", "{tf.__version__}') @ops.RegisterGradient(\"DeepExplainGrad\") def deepexplain_grad(op, grad): # constants._ENABLED_METHOD_CLASS, _GRAD_OVERRIDE_CHECKFLAG constants._GRAD_OVERRIDE_CHECKFLAG =", "_GRAD_OVERRIDE_CHECKFLAG self.method = method if self.method in attribution_methods: method_class, method_flag", "issubclass(constants._ENABLED_METHOD_CLASS, GradientBasedMethod): return constants._ENABLED_METHOD_CLASS.nonlinearity_grad_override(op, grad) else: return original_grad(op, grad) class", "self.override_context.__enter__() self.context_on = True return self def __exit__(self, type, value,", "T, X, **kwargs): if not self.context_on: raise RuntimeError('Explain can be", "Tensorflow Tensor object or a list of them') if 'tensor'", "'keras_learning_phase' exists in the graph. This is used by Keras", "Occlusion, ShapleySampling attribution_methods = OrderedDict({ 'zero': (DummyZero, 0), 'saliency': (Saliency,", "Tensorflow Tensor object') # logging.info('DeepExplain: running \"%s\" explanation method (%d)'", "import warnings, logging from deepexplain.tf.v1_x import constants from deepexplain.tf.v1_x.baseClasses import", "0 self.keras_phase_placeholder = None return method def explain(self, method, T,", "method_flag = attribution_methods[self.method] else: raise RuntimeError('Method must be in %s'", "feed_dict. :return: \"\"\" g = tf.compat.v1.get_default_graph() for op in g.get_operations():", "must contain only Tensorflow Tensor objects') else: if 'tensor' not", "0), 'saliency': (Saliency, 1), 'grad*input': (GradientXInput, 2), 'intgrad': (IntegratedGradients, 3),", "in the graph. This is used by Keras and needs", "from __future__ import absolute_import from __future__ import division from __future__", "the graph. 
This is used by Keras and needs to", "Tensorflow Tensor objects') else: if 'tensor' not in str(type(X)).lower(): raise", "self.context_on = True return self def __exit__(self, type, value, traceback):", "in g.get_operations(): if len(op.inputs) > 0 and not op.name.startswith('gradients'): if", "are not reliable!') constants._ENABLED_METHOD_CLASS = None constants._GRAD_OVERRIDE_CHECKFLAG = 0 self.keras_phase_placeholder", "from deepexplain.tf.v1_x.baseClasses import GradientBasedMethod from deepexplain.tf.v1_x.methods import DeepLIFTRescale, EpsilonLRP from", "def deepexplain_grad(op, grad): # constants._ENABLED_METHOD_CLASS, _GRAD_OVERRIDE_CHECKFLAG constants._GRAD_OVERRIDE_CHECKFLAG = 1 if", "return original_grad(op, grad) class DeepExplain(object): def __init__(self, graph=None, session=tf.compat.v1.get_default_session()): self.method", "used instead. You might have forgot to ' '(re)create your", "and needs to be passed in feed_dict. :return: \"\"\" g", "OrderedDict import warnings, logging from deepexplain.tf.v1_x import constants from deepexplain.tf.v1_x.baseClasses", "**kwargs): explainer = self.get_explainer(method, T, X, **kwargs) return explainer.run(xs, ys,", "in context self.graph_context.__enter__() self.override_context.__enter__() self.context_on = True return self def", "__future__ import division from __future__ import print_function import tensorflow as", "might lead to unexpected or wrong results.' % op.type) elif", "X, self.session, keras_learning_phase=self.keras_phase_placeholder, **kwargs) if (issubclass(constants._ENABLED_METHOD_CLASS, DeepLIFTRescale) or issubclass(constants._ENABLED_METHOD_CLASS, EpsilonLRP))", "in str(type(T)).lower(): raise RuntimeError('T must be a Tensorflow Tensor object')", "and must be improved in the future. 
Also, check if", "1), 'grad*input': (GradientXInput, 2), 'intgrad': (IntegratedGradients, 3), 'elrp': (EpsilonLRP, 4),", "not op.name.startswith('gradients'): if op.type in constants.UNSUPPORTED_ACTIVATIONS: warnings.warn('Detected unsupported activation (%s).", "all ops created in context self.graph_context.__enter__() self.override_context.__enter__() self.context_on = True", "your graph within the DeepExlain context. Results are not reliable!')", "explain(self, method, T, X, xs, ys=None, batch_size=None, **kwargs): explainer =", "isinstance(X, list): for x in X: if 'tensor' not in", "DeepLIFTRescale, EpsilonLRP from deepexplain.tf.v1_x.utils import original_grad from deepexplain.tf.v1_x.methods import DummyZero,", "or wrong results.' % op.type) elif 'keras_learning_phase' in op.name: self.keras_phase_placeholder", "}) print(f'Using tf version = {tf.__version__}') @ops.RegisterGradient(\"DeepExplainGrad\") def deepexplain_grad(op, grad):", "deepexplain.tf.v1_x.utils import original_grad from deepexplain.tf.v1_x.methods import DummyZero, Saliency, GradientXInput, IntegratedGradients,", "import OrderedDict import warnings, logging from deepexplain.tf.v1_x import constants from", "import original_grad from deepexplain.tf.v1_x.methods import DummyZero, Saliency, GradientXInput, IntegratedGradients, EpsilonLRP,", "= self.graph.as_default() self.override_context = self.graph.gradient_override_map(self.get_override_map()) self.keras_phase_placeholder = None self.context_on =", "'This might lead to unexpected or wrong results.' % op.type)", "of all ops created in context self.graph_context.__enter__() self.override_context.__enter__() self.context_on =", "a list, X must contain only Tensorflow Tensor objects') else:", "constants._ENABLED_METHOD_CLASS = method_class method = constants._ENABLED_METHOD_CLASS(T, X, self.session, keras_learning_phase=self.keras_phase_placeholder, **kwargs)", "context. 
Results are not reliable!') constants._ENABLED_METHOD_CLASS = None constants._GRAD_OVERRIDE_CHECKFLAG =", "the placeholder named 'keras_learning_phase' exists in the graph. This is", "be improved in the future. Also, check if the placeholder", "value, traceback): self.graph_context.__exit__(type, value, traceback) self.override_context.__exit__(type, value, traceback) self.context_on =", "False def get_explainer(self, method, T, X, **kwargs): if not self.context_on:", "for x in X: if 'tensor' not in str(type(x)).lower(): raise", "within the DeepExlain context. Results are not reliable!') constants._ENABLED_METHOD_CLASS =", "= 0 self.keras_phase_placeholder = None return method def explain(self, method,", "wrong results.' % op.type) elif 'keras_learning_phase' in op.name: self.keras_phase_placeholder =", "ops created in context self.graph_context.__enter__() self.override_context.__enter__() self.context_on = True return", "in feed_dict. :return: \"\"\" g = tf.compat.v1.get_default_graph() for op in", "original_grad from deepexplain.tf.v1_x.methods import DummyZero, Saliency, GradientXInput, IntegratedGradients, EpsilonLRP, DeepLIFTRescale,", "from deepexplain.tf.v1_x import constants from deepexplain.tf.v1_x.baseClasses import GradientBasedMethod from deepexplain.tf.v1_x.methods", "original gradient was used instead. You might have forgot to", "in the list of unsupported activation functions. This does not", "X, **kwargs): if not self.context_on: raise RuntimeError('Explain can be called", "the original gradient was used instead. You might have forgot", "override but the original gradient was used instead. 
You might", "<filename>deepexplain/tf/v1_x/main.py from __future__ import absolute_import from __future__ import division from", "self.session = session self.graph = session.graph if graph is None", "constants._ENABLED_METHOD_CLASS is not None \\ and issubclass(constants._ENABLED_METHOD_CLASS, GradientBasedMethod): return constants._ENABLED_METHOD_CLASS.nonlinearity_grad_override(op,", "deepexplain.tf.v1_x.methods import DummyZero, Saliency, GradientXInput, IntegratedGradients, EpsilonLRP, DeepLIFTRescale, Occlusion, ShapleySampling", "warnings.warn('Detected unsupported activation (%s). ' 'This might lead to unexpected", "= 1 if constants._ENABLED_METHOD_CLASS is not None \\ and issubclass(constants._ENABLED_METHOD_CLASS,", "DeepLIFTRescale) or issubclass(constants._ENABLED_METHOD_CLASS, EpsilonLRP)) \\ and constants._GRAD_OVERRIDE_CHECKFLAG == 0: warnings.warn('DeepExplain", "self.override_context = self.graph.gradient_override_map(self.get_override_map()) self.keras_phase_placeholder = None self.context_on = False if", "type, value, traceback): self.graph_context.__exit__(type, value, traceback) self.override_context.__exit__(type, value, traceback) self.context_on", "g = tf.compat.v1.get_default_graph() for op in g.get_operations(): if len(op.inputs) >", "Also, check if the placeholder named 'keras_learning_phase' exists in the", "= None return method def explain(self, method, T, X, xs,", "'tensor' not in str(type(x)).lower(): raise RuntimeError('If a list, X must", "__enter__(self): # Override gradient of all ops created in context", "(self.method, method_flag)) self._check_ops() constants._GRAD_OVERRIDE_CHECKFLAG = 0 constants._ENABLED_METHOD_CLASS = method_class method", "for op in g.get_operations(): if len(op.inputs) > 0 and not", "only within a DeepExplain context.') # global constants._ENABLED_METHOD_CLASS, _GRAD_OVERRIDE_CHECKFLAG self.method", "activation functions. 
This does not cover all cases where explanation", "0: warnings.warn('DeepExplain detected you are trying to use an attribution", "to unexpected or wrong results.' % op.type) elif 'keras_learning_phase' in", "if (issubclass(constants._ENABLED_METHOD_CLASS, DeepLIFTRescale) or issubclass(constants._ENABLED_METHOD_CLASS, EpsilonLRP)) \\ and constants._GRAD_OVERRIDE_CHECKFLAG ==", "str(type(X)).lower(): raise RuntimeError('X must be a Tensorflow Tensor object or", "self def __exit__(self, type, value, traceback): self.graph_context.__exit__(type, value, traceback) self.override_context.__exit__(type,", "that requires ' 'gradient override but the original gradient was", "is used by Keras and needs to be passed in", "DummyZero, Saliency, GradientXInput, IntegratedGradients, EpsilonLRP, DeepLIFTRescale, Occlusion, ShapleySampling attribution_methods =", "objects') else: if 'tensor' not in str(type(X)).lower(): raise RuntimeError('X must", "X: if 'tensor' not in str(type(x)).lower(): raise RuntimeError('If a list,", "results.' % op.type) elif 'keras_learning_phase' in op.name: self.keras_phase_placeholder = op.outputs[0]", "used by Keras and needs to be passed in feed_dict.", "traceback): self.graph_context.__exit__(type, value, traceback) self.override_context.__exit__(type, value, traceback) self.context_on = False", "explanation method (%d)' % (self.method, method_flag)) self._check_ops() constants._GRAD_OVERRIDE_CHECKFLAG = 0", "= None constants._GRAD_OVERRIDE_CHECKFLAG = 0 self.keras_phase_placeholder = None return method", "X must contain only Tensorflow Tensor objects') else: if 'tensor'", "import GradientBasedMethod from deepexplain.tf.v1_x.methods import DeepLIFTRescale, EpsilonLRP from deepexplain.tf.v1_x.utils import", "DeepExlain context. Results are not reliable!') constants._ENABLED_METHOD_CLASS = None constants._GRAD_OVERRIDE_CHECKFLAG", "unexpected or wrong results.' 
% op.type) elif 'keras_learning_phase' in op.name:", "is not None \\ and issubclass(constants._ENABLED_METHOD_CLASS, GradientBasedMethod): return constants._ENABLED_METHOD_CLASS.nonlinearity_grad_override(op, grad)", "EpsilonLRP from deepexplain.tf.v1_x.utils import original_grad from deepexplain.tf.v1_x.methods import DummyZero, Saliency,", "import ops from collections import OrderedDict import warnings, logging from", "self.session is None: raise RuntimeError('DeepExplain: could not retrieve a session.", "'shapley_sampling': (ShapleySampling, 7) }) print(f'Using tf version = {tf.__version__}') @ops.RegisterGradient(\"DeepExplainGrad\")", "return method def explain(self, method, T, X, xs, ys=None, batch_size=None,", "collections import OrderedDict import warnings, logging from deepexplain.tf.v1_x import constants", "a Tensorflow Tensor object or a list of them') if", "if any op is in the list of unsupported activation", "def get_override_map(): return dict((a, 'DeepExplainGrad') for a in constants.SUPPORTED_ACTIVATIONS) def", "= session self.graph = session.graph if graph is None else", "RuntimeError('Explain can be called only within a DeepExplain context.') #", "= self.graph.gradient_override_map(self.get_override_map()) self.keras_phase_placeholder = None self.context_on = False if self.session", "must be improved in the future. Also, check if the", "would fail, and must be improved in the future. Also,", "value, traceback) self.context_on = False def get_explainer(self, method, T, X,", "global constants._ENABLED_METHOD_CLASS, _GRAD_OVERRIDE_CHECKFLAG self.method = method if self.method in attribution_methods:", "= OrderedDict({ 'zero': (DummyZero, 0), 'saliency': (Saliency, 1), 'grad*input': (GradientXInput,", "if self.method in attribution_methods: method_class, method_flag = attribution_methods[self.method] else: raise", "functions. 
This does not cover all cases where explanation methods", "DeepExplain(object): def __init__(self, graph=None, session=tf.compat.v1.get_default_session()): self.method = None self.batch_size =", "None return method def explain(self, method, T, X, xs, ys=None,", "tf version = {tf.__version__}') @ops.RegisterGradient(\"DeepExplainGrad\") def deepexplain_grad(op, grad): # constants._ENABLED_METHOD_CLASS,", "not cover all cases where explanation methods would fail, and", "method (%d)' % (self.method, method_flag)) self._check_ops() constants._GRAD_OVERRIDE_CHECKFLAG = 0 constants._ENABLED_METHOD_CLASS", "This does not cover all cases where explanation methods would", "session. Use DeepExplain(session=your_session).') def __enter__(self): # Override gradient of all", "return explainer.run(xs, ys, batch_size) @staticmethod def get_override_map(): return dict((a, 'DeepExplainGrad')", "(EpsilonLRP, 4), 'deeplift': (DeepLIFTRescale, 5), 'occlusion': (Occlusion, 6), 'shapley_sampling': (ShapleySampling,", "# constants._ENABLED_METHOD_CLASS, _GRAD_OVERRIDE_CHECKFLAG constants._GRAD_OVERRIDE_CHECKFLAG = 1 if constants._ENABLED_METHOD_CLASS is not", "dict((a, 'DeepExplainGrad') for a in constants.SUPPORTED_ACTIVATIONS) def _check_ops(self): \"\"\" Heuristically", "= self.get_explainer(method, T, X, **kwargs) return explainer.run(xs, ys, batch_size) @staticmethod", "0 constants._ENABLED_METHOD_CLASS = method_class method = constants._ENABLED_METHOD_CLASS(T, X, self.session, keras_learning_phase=self.keras_phase_placeholder,", "not in str(type(X)).lower(): raise RuntimeError('X must be a Tensorflow Tensor", "GradientBasedMethod from deepexplain.tf.v1_x.methods import DeepLIFTRescale, EpsilonLRP from deepexplain.tf.v1_x.utils import original_grad", "self.method = method if self.method in attribution_methods: method_class, method_flag =", "method if self.method in attribution_methods: method_class, method_flag = attribution_methods[self.method] else:", "'tensor' not in 
str(type(X)).lower(): raise RuntimeError('X must be a Tensorflow", "method def explain(self, method, T, X, xs, ys=None, batch_size=None, **kwargs):", "4), 'deeplift': (DeepLIFTRescale, 5), 'occlusion': (Occlusion, 6), 'shapley_sampling': (ShapleySampling, 7)", "return dict((a, 'DeepExplainGrad') for a in constants.SUPPORTED_ACTIVATIONS) def _check_ops(self): \"\"\"", "contain only Tensorflow Tensor objects') else: if 'tensor' not in", "batch_size) @staticmethod def get_override_map(): return dict((a, 'DeepExplainGrad') for a in", "\"\"\" Heuristically check if any op is in the list", "future. Also, check if the placeholder named 'keras_learning_phase' exists in", "to use an attribution method that requires ' 'gradient override", "constants._GRAD_OVERRIDE_CHECKFLAG = 0 self.keras_phase_placeholder = None return method def explain(self,", "self.graph_context.__exit__(type, value, traceback) self.override_context.__exit__(type, value, traceback) self.context_on = False def", "'(re)create your graph within the DeepExlain context. Results are not", "None self.context_on = False if self.session is None: raise RuntimeError('DeepExplain:", "self.batch_size = None self.session = session self.graph = session.graph if", "if len(op.inputs) > 0 and not op.name.startswith('gradients'): if op.type in", "if 'tensor' not in str(type(x)).lower(): raise RuntimeError('If a list, X", "list of unsupported activation functions. This does not cover all", "class DeepExplain(object): def __init__(self, graph=None, session=tf.compat.v1.get_default_session()): self.method = None self.batch_size", "is in the list of unsupported activation functions. This does", "self.get_explainer(method, T, X, **kwargs) return explainer.run(xs, ys, batch_size) @staticmethod def", "original_grad(op, grad) class DeepExplain(object): def __init__(self, graph=None, session=tf.compat.v1.get_default_session()): self.method =", "the list of unsupported activation functions. 
This does not cover", "'intgrad': (IntegratedGradients, 3), 'elrp': (EpsilonLRP, 4), 'deeplift': (DeepLIFTRescale, 5), 'occlusion':", "_check_ops(self): \"\"\" Heuristically check if any op is in the", "are trying to use an attribution method that requires '", "DeepExplain context.') # global constants._ENABLED_METHOD_CLASS, _GRAD_OVERRIDE_CHECKFLAG self.method = method if", "import constants from deepexplain.tf.v1_x.baseClasses import GradientBasedMethod from deepexplain.tf.v1_x.methods import DeepLIFTRescale,", "cases where explanation methods would fail, and must be improved", "len(op.inputs) > 0 and not op.name.startswith('gradients'): if op.type in constants.UNSUPPORTED_ACTIVATIONS:", "X, xs, ys=None, batch_size=None, **kwargs): explainer = self.get_explainer(method, T, X,", "\\ and issubclass(constants._ENABLED_METHOD_CLASS, GradientBasedMethod): return constants._ENABLED_METHOD_CLASS.nonlinearity_grad_override(op, grad) else: return original_grad(op,", "raise RuntimeError('Method must be in %s' % list(attribution_methods.keys())) if isinstance(X,", "EpsilonLRP)) \\ and constants._GRAD_OVERRIDE_CHECKFLAG == 0: warnings.warn('DeepExplain detected you are", "grad) else: return original_grad(op, grad) class DeepExplain(object): def __init__(self, graph=None,", "and constants._GRAD_OVERRIDE_CHECKFLAG == 0: warnings.warn('DeepExplain detected you are trying to", "and issubclass(constants._ENABLED_METHOD_CLASS, GradientBasedMethod): return constants._ENABLED_METHOD_CLASS.nonlinearity_grad_override(op, grad) else: return original_grad(op, grad)", "created in context self.graph_context.__enter__() self.override_context.__enter__() self.context_on = True return self", "context self.graph_context.__enter__() self.override_context.__enter__() self.context_on = True return self def __exit__(self,", "raise RuntimeError('X must be a Tensorflow Tensor object or a", "in the future. 
Also, check if the placeholder named 'keras_learning_phase'", "= method if self.method in attribution_methods: method_class, method_flag = attribution_methods[self.method]", "value, traceback) self.override_context.__exit__(type, value, traceback) self.context_on = False def get_explainer(self,", "not in str(type(x)).lower(): raise RuntimeError('If a list, X must contain", "from deepexplain.tf.v1_x.utils import original_grad from deepexplain.tf.v1_x.methods import DummyZero, Saliency, GradientXInput,", "self.keras_phase_placeholder = None return method def explain(self, method, T, X,", "**kwargs) if (issubclass(constants._ENABLED_METHOD_CLASS, DeepLIFTRescale) or issubclass(constants._ENABLED_METHOD_CLASS, EpsilonLRP)) \\ and constants._GRAD_OVERRIDE_CHECKFLAG", "object') # logging.info('DeepExplain: running \"%s\" explanation method (%d)' % (self.method,", "ys, batch_size) @staticmethod def get_override_map(): return dict((a, 'DeepExplainGrad') for a", "= None self.context_on = False if self.session is None: raise", "self.graph.as_default() self.override_context = self.graph.gradient_override_map(self.get_override_map()) self.keras_phase_placeholder = None self.context_on = False", "fail, and must be improved in the future. Also, check", "grad) class DeepExplain(object): def __init__(self, graph=None, session=tf.compat.v1.get_default_session()): self.method = None", "graph within the DeepExlain context. Results are not reliable!') constants._ENABLED_METHOD_CLASS", "gradient was used instead. You might have forgot to '", "You might have forgot to ' '(re)create your graph within", "constants._ENABLED_METHOD_CLASS(T, X, self.session, keras_learning_phase=self.keras_phase_placeholder, **kwargs) if (issubclass(constants._ENABLED_METHOD_CLASS, DeepLIFTRescale) or issubclass(constants._ENABLED_METHOD_CLASS,", "graph. 
This is used by Keras and needs to be", "constants.SUPPORTED_ACTIVATIONS) def _check_ops(self): \"\"\" Heuristically check if any op is", "GradientXInput, IntegratedGradients, EpsilonLRP, DeepLIFTRescale, Occlusion, ShapleySampling attribution_methods = OrderedDict({ 'zero':", "(%s). ' 'This might lead to unexpected or wrong results.'", "of them') if 'tensor' not in str(type(T)).lower(): raise RuntimeError('T must", "import absolute_import from __future__ import division from __future__ import print_function", "= attribution_methods[self.method] else: raise RuntimeError('Method must be in %s' %", "'tensor' not in str(type(T)).lower(): raise RuntimeError('T must be a Tensorflow", "explainer.run(xs, ys, batch_size) @staticmethod def get_override_map(): return dict((a, 'DeepExplainGrad') for", "= False if self.session is None: raise RuntimeError('DeepExplain: could not", "if graph is None else graph self.graph_context = self.graph.as_default() self.override_context", "lead to unexpected or wrong results.' % op.type) elif 'keras_learning_phase'", "session=tf.compat.v1.get_default_session()): self.method = None self.batch_size = None self.session = session", "self.context_on = False if self.session is None: raise RuntimeError('DeepExplain: could", "from tensorflow.python.framework import ops from collections import OrderedDict import warnings,", "Heuristically check if any op is in the list of", "# Override gradient of all ops created in context self.graph_context.__enter__()", "else: if 'tensor' not in str(type(X)).lower(): raise RuntimeError('X must be", "constants from deepexplain.tf.v1_x.baseClasses import GradientBasedMethod from deepexplain.tf.v1_x.methods import DeepLIFTRescale, EpsilonLRP", "op is in the list of unsupported activation functions. This", "a in constants.SUPPORTED_ACTIVATIONS) def _check_ops(self): \"\"\" Heuristically check if any", "could not retrieve a session. 
Use DeepExplain(session=your_session).') def __enter__(self): #", "if 'tensor' not in str(type(T)).lower(): raise RuntimeError('T must be a", "by Keras and needs to be passed in feed_dict. :return:", "def __init__(self, graph=None, session=tf.compat.v1.get_default_session()): self.method = None self.batch_size = None", "raise RuntimeError('Explain can be called only within a DeepExplain context.')", "check if any op is in the list of unsupported", "Tensor object') # logging.info('DeepExplain: running \"%s\" explanation method (%d)' %", "print_function import tensorflow as tf from tensorflow.python.framework import ops from", "== 0: warnings.warn('DeepExplain detected you are trying to use an", "graph=None, session=tf.compat.v1.get_default_session()): self.method = None self.batch_size = None self.session =", "constants._GRAD_OVERRIDE_CHECKFLAG = 1 if constants._ENABLED_METHOD_CLASS is not None \\ and", "raise RuntimeError('If a list, X must contain only Tensorflow Tensor", "them') if 'tensor' not in str(type(T)).lower(): raise RuntimeError('T must be", "'grad*input': (GradientXInput, 2), 'intgrad': (IntegratedGradients, 3), 'elrp': (EpsilonLRP, 4), 'deeplift':", "6), 'shapley_sampling': (ShapleySampling, 7) }) print(f'Using tf version = {tf.__version__}')", "GradientBasedMethod): return constants._ENABLED_METHOD_CLASS.nonlinearity_grad_override(op, grad) else: return original_grad(op, grad) class DeepExplain(object):", "RuntimeError('T must be a Tensorflow Tensor object') # logging.info('DeepExplain: running", "the future. Also, check if the placeholder named 'keras_learning_phase' exists", "= True return self def __exit__(self, type, value, traceback): self.graph_context.__exit__(type,", "or a list of them') if 'tensor' not in str(type(T)).lower():", "(IntegratedGradients, 3), 'elrp': (EpsilonLRP, 4), 'deeplift': (DeepLIFTRescale, 5), 'occlusion': (Occlusion,", "raise RuntimeError('DeepExplain: could not retrieve a session. 
Use DeepExplain(session=your_session).') def", "# global constants._ENABLED_METHOD_CLASS, _GRAD_OVERRIDE_CHECKFLAG self.method = method if self.method in", "return self def __exit__(self, type, value, traceback): self.graph_context.__exit__(type, value, traceback)", "= method_class method = constants._ENABLED_METHOD_CLASS(T, X, self.session, keras_learning_phase=self.keras_phase_placeholder, **kwargs) if", "'elrp': (EpsilonLRP, 4), 'deeplift': (DeepLIFTRescale, 5), 'occlusion': (Occlusion, 6), 'shapley_sampling':", "gradient of all ops created in context self.graph_context.__enter__() self.override_context.__enter__() self.context_on", "in %s' % list(attribution_methods.keys())) if isinstance(X, list): for x in", "@staticmethod def get_override_map(): return dict((a, 'DeepExplainGrad') for a in constants.SUPPORTED_ACTIVATIONS)", "2), 'intgrad': (IntegratedGradients, 3), 'elrp': (EpsilonLRP, 4), 'deeplift': (DeepLIFTRescale, 5),", "and not op.name.startswith('gradients'): if op.type in constants.UNSUPPORTED_ACTIVATIONS: warnings.warn('Detected unsupported activation", "instead. You might have forgot to ' '(re)create your graph", "unsupported activation functions. This does not cover all cases where", "method_flag)) self._check_ops() constants._GRAD_OVERRIDE_CHECKFLAG = 0 constants._ENABLED_METHOD_CLASS = method_class method =", "must be in %s' % list(attribution_methods.keys())) if isinstance(X, list): for", "op.type in constants.UNSUPPORTED_ACTIVATIONS: warnings.warn('Detected unsupported activation (%s). ' 'This might", "X, **kwargs) return explainer.run(xs, ys, batch_size) @staticmethod def get_override_map(): return", "None: raise RuntimeError('DeepExplain: could not retrieve a session. 
Use DeepExplain(session=your_session).')", "'zero': (DummyZero, 0), 'saliency': (Saliency, 1), 'grad*input': (GradientXInput, 2), 'intgrad':", "T, X, **kwargs) return explainer.run(xs, ys, batch_size) @staticmethod def get_override_map():", "'saliency': (Saliency, 1), 'grad*input': (GradientXInput, 2), 'intgrad': (IntegratedGradients, 3), 'elrp':", "@ops.RegisterGradient(\"DeepExplainGrad\") def deepexplain_grad(op, grad): # constants._ENABLED_METHOD_CLASS, _GRAD_OVERRIDE_CHECKFLAG constants._GRAD_OVERRIDE_CHECKFLAG = 1", "\"%s\" explanation method (%d)' % (self.method, method_flag)) self._check_ops() constants._GRAD_OVERRIDE_CHECKFLAG =", "running \"%s\" explanation method (%d)' % (self.method, method_flag)) self._check_ops() constants._GRAD_OVERRIDE_CHECKFLAG", "import print_function import tensorflow as tf from tensorflow.python.framework import ops", "RuntimeError('If a list, X must contain only Tensorflow Tensor objects')", "is None else graph self.graph_context = self.graph.as_default() self.override_context = self.graph.gradient_override_map(self.get_override_map())", "if 'tensor' not in str(type(X)).lower(): raise RuntimeError('X must be a", "from collections import OrderedDict import warnings, logging from deepexplain.tf.v1_x import", "str(type(T)).lower(): raise RuntimeError('T must be a Tensorflow Tensor object') #", "constants._GRAD_OVERRIDE_CHECKFLAG = 0 constants._ENABLED_METHOD_CLASS = method_class method = constants._ENABLED_METHOD_CLASS(T, X,", "__future__ import print_function import tensorflow as tf from tensorflow.python.framework import", "OrderedDict({ 'zero': (DummyZero, 0), 'saliency': (Saliency, 1), 'grad*input': (GradientXInput, 2),", "(%d)' % (self.method, method_flag)) self._check_ops() constants._GRAD_OVERRIDE_CHECKFLAG = 0 constants._ENABLED_METHOD_CLASS =", "from __future__ import division from __future__ import print_function import tensorflow", "unsupported activation (%s). 
' 'This might lead to unexpected or", "to be passed in feed_dict. :return: \"\"\" g = tf.compat.v1.get_default_graph()", "list of them') if 'tensor' not in str(type(T)).lower(): raise RuntimeError('T", "'occlusion': (Occlusion, 6), 'shapley_sampling': (ShapleySampling, 7) }) print(f'Using tf version", "a DeepExplain context.') # global constants._ENABLED_METHOD_CLASS, _GRAD_OVERRIDE_CHECKFLAG self.method = method", "explanation methods would fail, and must be improved in the", "reliable!') constants._ENABLED_METHOD_CLASS = None constants._GRAD_OVERRIDE_CHECKFLAG = 0 self.keras_phase_placeholder = None", "T, X, xs, ys=None, batch_size=None, **kwargs): explainer = self.get_explainer(method, T,", "__future__ import absolute_import from __future__ import division from __future__ import", "tf from tensorflow.python.framework import ops from collections import OrderedDict import", "> 0 and not op.name.startswith('gradients'): if op.type in constants.UNSUPPORTED_ACTIVATIONS: warnings.warn('Detected", "= session.graph if graph is None else graph self.graph_context =", "ys=None, batch_size=None, **kwargs): explainer = self.get_explainer(method, T, X, **kwargs) return", "5), 'occlusion': (Occlusion, 6), 'shapley_sampling': (ShapleySampling, 7) }) print(f'Using tf", "in str(type(x)).lower(): raise RuntimeError('If a list, X must contain only", "but the original gradient was used instead. You might have", "else: return original_grad(op, grad) class DeepExplain(object): def __init__(self, graph=None, session=tf.compat.v1.get_default_session()):", "placeholder named 'keras_learning_phase' exists in the graph. 
This is used", "(Occlusion, 6), 'shapley_sampling': (ShapleySampling, 7) }) print(f'Using tf version =", "Tensor objects') else: if 'tensor' not in str(type(X)).lower(): raise RuntimeError('X", "= tf.compat.v1.get_default_graph() for op in g.get_operations(): if len(op.inputs) > 0", "method_class method = constants._ENABLED_METHOD_CLASS(T, X, self.session, keras_learning_phase=self.keras_phase_placeholder, **kwargs) if (issubclass(constants._ENABLED_METHOD_CLASS,", "' 'gradient override but the original gradient was used instead.", "RuntimeError('DeepExplain: could not retrieve a session. Use DeepExplain(session=your_session).') def __enter__(self):", "**kwargs): if not self.context_on: raise RuntimeError('Explain can be called only", "division from __future__ import print_function import tensorflow as tf from", "self.context_on = False def get_explainer(self, method, T, X, **kwargs): if", "None constants._GRAD_OVERRIDE_CHECKFLAG = 0 self.keras_phase_placeholder = None return method def", "# logging.info('DeepExplain: running \"%s\" explanation method (%d)' % (self.method, method_flag))", "traceback) self.override_context.__exit__(type, value, traceback) self.context_on = False def get_explainer(self, method,", "check if the placeholder named 'keras_learning_phase' exists in the graph.", "not reliable!') constants._ENABLED_METHOD_CLASS = None constants._GRAD_OVERRIDE_CHECKFLAG = 0 self.keras_phase_placeholder =", "attribution_methods: method_class, method_flag = attribution_methods[self.method] else: raise RuntimeError('Method must be", "= None self.batch_size = None self.session = session self.graph =", "Use DeepExplain(session=your_session).') def __enter__(self): # Override gradient of all ops", "'gradient override but the original gradient was used instead. 
You", "ops from collections import OrderedDict import warnings, logging from deepexplain.tf.v1_x", "self.session, keras_learning_phase=self.keras_phase_placeholder, **kwargs) if (issubclass(constants._ENABLED_METHOD_CLASS, DeepLIFTRescale) or issubclass(constants._ENABLED_METHOD_CLASS, EpsilonLRP)) \\", "tensorflow as tf from tensorflow.python.framework import ops from collections import", "return constants._ENABLED_METHOD_CLASS.nonlinearity_grad_override(op, grad) else: return original_grad(op, grad) class DeepExplain(object): def", "True return self def __exit__(self, type, value, traceback): self.graph_context.__exit__(type, value,", "= 0 constants._ENABLED_METHOD_CLASS = method_class method = constants._ENABLED_METHOD_CLASS(T, X, self.session,", "to ' '(re)create your graph within the DeepExlain context. Results", "activation (%s). ' 'This might lead to unexpected or wrong", "self.context_on: raise RuntimeError('Explain can be called only within a DeepExplain", "object or a list of them') if 'tensor' not in", "might have forgot to ' '(re)create your graph within the", "session self.graph = session.graph if graph is None else graph", "any op is in the list of unsupported activation functions.", "method, T, X, xs, ys=None, batch_size=None, **kwargs): explainer = self.get_explainer(method,", "batch_size=None, **kwargs): explainer = self.get_explainer(method, T, X, **kwargs) return explainer.run(xs,", "self.graph_context.__enter__() self.override_context.__enter__() self.context_on = True return self def __exit__(self, type,", "'deeplift': (DeepLIFTRescale, 5), 'occlusion': (Occlusion, 6), 'shapley_sampling': (ShapleySampling, 7) })", "op.name.startswith('gradients'): if op.type in constants.UNSUPPORTED_ACTIVATIONS: warnings.warn('Detected unsupported activation (%s). 
'", "def __enter__(self): # Override gradient of all ops created in", "__exit__(self, type, value, traceback): self.graph_context.__exit__(type, value, traceback) self.override_context.__exit__(type, value, traceback)", "\\ and constants._GRAD_OVERRIDE_CHECKFLAG == 0: warnings.warn('DeepExplain detected you are trying", "raise RuntimeError('T must be a Tensorflow Tensor object') # logging.info('DeepExplain:", "None self.batch_size = None self.session = session self.graph = session.graph", "import tensorflow as tf from tensorflow.python.framework import ops from collections", "from deepexplain.tf.v1_x.methods import DummyZero, Saliency, GradientXInput, IntegratedGradients, EpsilonLRP, DeepLIFTRescale, Occlusion,", "RuntimeError('Method must be in %s' % list(attribution_methods.keys())) if isinstance(X, list):", "must be a Tensorflow Tensor object or a list of", "method_class, method_flag = attribution_methods[self.method] else: raise RuntimeError('Method must be in", "7) }) print(f'Using tf version = {tf.__version__}') @ops.RegisterGradient(\"DeepExplainGrad\") def deepexplain_grad(op,", "not None \\ and issubclass(constants._ENABLED_METHOD_CLASS, GradientBasedMethod): return constants._ENABLED_METHOD_CLASS.nonlinearity_grad_override(op, grad) else:", "constants._ENABLED_METHOD_CLASS = None constants._GRAD_OVERRIDE_CHECKFLAG = 0 self.keras_phase_placeholder = None return", "keras_learning_phase=self.keras_phase_placeholder, **kwargs) if (issubclass(constants._ENABLED_METHOD_CLASS, DeepLIFTRescale) or issubclass(constants._ENABLED_METHOD_CLASS, EpsilonLRP)) \\ and", "forgot to ' '(re)create your graph within the DeepExlain context.", "a session. 
Use DeepExplain(session=your_session).') def __enter__(self): # Override gradient of", "Override gradient of all ops created in context self.graph_context.__enter__() self.override_context.__enter__()", "constants._GRAD_OVERRIDE_CHECKFLAG == 0: warnings.warn('DeepExplain detected you are trying to use", "self._check_ops() constants._GRAD_OVERRIDE_CHECKFLAG = 0 constants._ENABLED_METHOD_CLASS = method_class method = constants._ENABLED_METHOD_CLASS(T,", "self.graph = session.graph if graph is None else graph self.graph_context", "you are trying to use an attribution method that requires", "traceback) self.context_on = False def get_explainer(self, method, T, X, **kwargs):", "named 'keras_learning_phase' exists in the graph. This is used by", "logging from deepexplain.tf.v1_x import constants from deepexplain.tf.v1_x.baseClasses import GradientBasedMethod from", "of unsupported activation functions. This does not cover all cases", "for a in constants.SUPPORTED_ACTIVATIONS) def _check_ops(self): \"\"\" Heuristically check if", "if the placeholder named 'keras_learning_phase' exists in the graph. 
This", "graph self.graph_context = self.graph.as_default() self.override_context = self.graph.gradient_override_map(self.get_override_map()) self.keras_phase_placeholder = None", "or issubclass(constants._ENABLED_METHOD_CLASS, EpsilonLRP)) \\ and constants._GRAD_OVERRIDE_CHECKFLAG == 0: warnings.warn('DeepExplain detected", "None self.session = session self.graph = session.graph if graph is", "within a DeepExplain context.') # global constants._ENABLED_METHOD_CLASS, _GRAD_OVERRIDE_CHECKFLAG self.method =", "where explanation methods would fail, and must be improved in", "graph is None else graph self.graph_context = self.graph.as_default() self.override_context =", "g.get_operations(): if len(op.inputs) > 0 and not op.name.startswith('gradients'): if op.type", "= constants._ENABLED_METHOD_CLASS(T, X, self.session, keras_learning_phase=self.keras_phase_placeholder, **kwargs) if (issubclass(constants._ENABLED_METHOD_CLASS, DeepLIFTRescale) or", "RuntimeError('X must be a Tensorflow Tensor object or a list", "xs, ys=None, batch_size=None, **kwargs): explainer = self.get_explainer(method, T, X, **kwargs)", "(Saliency, 1), 'grad*input': (GradientXInput, 2), 'intgrad': (IntegratedGradients, 3), 'elrp': (EpsilonLRP,", "an attribution method that requires ' 'gradient override but the", "(ShapleySampling, 7) }) print(f'Using tf version = {tf.__version__}') @ops.RegisterGradient(\"DeepExplainGrad\") def", "deepexplain_grad(op, grad): # constants._ENABLED_METHOD_CLASS, _GRAD_OVERRIDE_CHECKFLAG constants._GRAD_OVERRIDE_CHECKFLAG = 1 if constants._ENABLED_METHOD_CLASS", "list, X must contain only Tensorflow Tensor objects') else: if", "in X: if 'tensor' not in str(type(x)).lower(): raise RuntimeError('If a", "does not cover all cases where explanation methods would fail,", "tensorflow.python.framework import ops from collections import OrderedDict import warnings, logging", "not in str(type(T)).lower(): raise RuntimeError('T must be a Tensorflow Tensor", 
"tf.compat.v1.get_default_graph() for op in g.get_operations(): if len(op.inputs) > 0 and", "constants._ENABLED_METHOD_CLASS.nonlinearity_grad_override(op, grad) else: return original_grad(op, grad) class DeepExplain(object): def __init__(self,", "in constants.SUPPORTED_ACTIVATIONS) def _check_ops(self): \"\"\" Heuristically check if any op", "a list of them') if 'tensor' not in str(type(T)).lower(): raise", "the DeepExlain context. Results are not reliable!') constants._ENABLED_METHOD_CLASS = None", "x in X: if 'tensor' not in str(type(x)).lower(): raise RuntimeError('If", "if self.session is None: raise RuntimeError('DeepExplain: could not retrieve a", "= None self.session = session self.graph = session.graph if graph", "retrieve a session. Use DeepExplain(session=your_session).') def __enter__(self): # Override gradient", "explainer = self.get_explainer(method, T, X, **kwargs) return explainer.run(xs, ys, batch_size)", "constants._ENABLED_METHOD_CLASS, _GRAD_OVERRIDE_CHECKFLAG constants._GRAD_OVERRIDE_CHECKFLAG = 1 if constants._ENABLED_METHOD_CLASS is not None", "'DeepExplainGrad') for a in constants.SUPPORTED_ACTIVATIONS) def _check_ops(self): \"\"\" Heuristically check", "**kwargs) return explainer.run(xs, ys, batch_size) @staticmethod def get_override_map(): return dict((a,", "constants._ENABLED_METHOD_CLASS, _GRAD_OVERRIDE_CHECKFLAG self.method = method if self.method in attribution_methods: method_class,", "EpsilonLRP, DeepLIFTRescale, Occlusion, ShapleySampling attribution_methods = OrderedDict({ 'zero': (DummyZero, 0),", "else graph self.graph_context = self.graph.as_default() self.override_context = self.graph.gradient_override_map(self.get_override_map()) self.keras_phase_placeholder =", "grad): # constants._ENABLED_METHOD_CLASS, _GRAD_OVERRIDE_CHECKFLAG constants._GRAD_OVERRIDE_CHECKFLAG = 1 if constants._ENABLED_METHOD_CLASS is", "method = constants._ENABLED_METHOD_CLASS(T, X, self.session, keras_learning_phase=self.keras_phase_placeholder, **kwargs) 
if (issubclass(constants._ENABLED_METHOD_CLASS, DeepLIFTRescale)", "have forgot to ' '(re)create your graph within the DeepExlain", "None \\ and issubclass(constants._ENABLED_METHOD_CLASS, GradientBasedMethod): return constants._ENABLED_METHOD_CLASS.nonlinearity_grad_override(op, grad) else: return", "(DummyZero, 0), 'saliency': (Saliency, 1), 'grad*input': (GradientXInput, 2), 'intgrad': (IntegratedGradients,", "be called only within a DeepExplain context.') # global constants._ENABLED_METHOD_CLASS,", "be passed in feed_dict. :return: \"\"\" g = tf.compat.v1.get_default_graph() for", "session.graph if graph is None else graph self.graph_context = self.graph.as_default()", "be a Tensorflow Tensor object') # logging.info('DeepExplain: running \"%s\" explanation", "Tensor object or a list of them') if 'tensor' not", "% (self.method, method_flag)) self._check_ops() constants._GRAD_OVERRIDE_CHECKFLAG = 0 constants._ENABLED_METHOD_CLASS = method_class", "warnings.warn('DeepExplain detected you are trying to use an attribution method", "can be called only within a DeepExplain context.') # global", "all cases where explanation methods would fail, and must be", "attribution_methods = OrderedDict({ 'zero': (DummyZero, 0), 'saliency': (Saliency, 1), 'grad*input':", "= {tf.__version__}') @ops.RegisterGradient(\"DeepExplainGrad\") def deepexplain_grad(op, grad): # constants._ENABLED_METHOD_CLASS, _GRAD_OVERRIDE_CHECKFLAG constants._GRAD_OVERRIDE_CHECKFLAG", "get_override_map(): return dict((a, 'DeepExplainGrad') for a in constants.SUPPORTED_ACTIVATIONS) def _check_ops(self):", "_GRAD_OVERRIDE_CHECKFLAG constants._GRAD_OVERRIDE_CHECKFLAG = 1 if constants._ENABLED_METHOD_CLASS is not None \\", "improved in the future. 
Also, check if the placeholder named", "absolute_import from __future__ import division from __future__ import print_function import", "from __future__ import print_function import tensorflow as tf from tensorflow.python.framework", "a Tensorflow Tensor object') # logging.info('DeepExplain: running \"%s\" explanation method", "needs to be passed in feed_dict. :return: \"\"\" g =", "self.keras_phase_placeholder = None self.context_on = False if self.session is None:", "if op.type in constants.UNSUPPORTED_ACTIVATIONS: warnings.warn('Detected unsupported activation (%s). ' 'This", "from deepexplain.tf.v1_x.methods import DeepLIFTRescale, EpsilonLRP from deepexplain.tf.v1_x.utils import original_grad from", "else: raise RuntimeError('Method must be in %s' % list(attribution_methods.keys())) if", "was used instead. You might have forgot to ' '(re)create", "is None: raise RuntimeError('DeepExplain: could not retrieve a session. Use", "get_explainer(self, method, T, X, **kwargs): if not self.context_on: raise RuntimeError('Explain", "logging.info('DeepExplain: running \"%s\" explanation method (%d)' % (self.method, method_flag)) self._check_ops()", "def _check_ops(self): \"\"\" Heuristically check if any op is in", "%s' % list(attribution_methods.keys())) if isinstance(X, list): for x in X:", "method that requires ' 'gradient override but the original gradient", "methods would fail, and must be improved in the future.", "self.graph_context = self.graph.as_default() self.override_context = self.graph.gradient_override_map(self.get_override_map()) self.keras_phase_placeholder = None self.context_on", "not self.context_on: raise RuntimeError('Explain can be called only within a", "self.graph.gradient_override_map(self.get_override_map()) self.keras_phase_placeholder = None self.context_on = False if self.session is", "deepexplain.tf.v1_x import constants from deepexplain.tf.v1_x.baseClasses import GradientBasedMethod from deepexplain.tf.v1_x.methods import", "False if 
self.session is None: raise RuntimeError('DeepExplain: could not retrieve", "import division from __future__ import print_function import tensorflow as tf", "called only within a DeepExplain context.') # global constants._ENABLED_METHOD_CLASS, _GRAD_OVERRIDE_CHECKFLAG", "requires ' 'gradient override but the original gradient was used", "import DummyZero, Saliency, GradientXInput, IntegratedGradients, EpsilonLRP, DeepLIFTRescale, Occlusion, ShapleySampling attribution_methods", "ShapleySampling attribution_methods = OrderedDict({ 'zero': (DummyZero, 0), 'saliency': (Saliency, 1),", "self.override_context.__exit__(type, value, traceback) self.context_on = False def get_explainer(self, method, T,", "(DeepLIFTRescale, 5), 'occlusion': (Occlusion, 6), 'shapley_sampling': (ShapleySampling, 7) }) print(f'Using", "1 if constants._ENABLED_METHOD_CLASS is not None \\ and issubclass(constants._ENABLED_METHOD_CLASS, GradientBasedMethod):", "\"\"\" g = tf.compat.v1.get_default_graph() for op in g.get_operations(): if len(op.inputs)", "only Tensorflow Tensor objects') else: if 'tensor' not in str(type(X)).lower():", "% list(attribution_methods.keys())) if isinstance(X, list): for x in X: if", "IntegratedGradients, EpsilonLRP, DeepLIFTRescale, Occlusion, ShapleySampling attribution_methods = OrderedDict({ 'zero': (DummyZero,", "if not self.context_on: raise RuntimeError('Explain can be called only within", "warnings, logging from deepexplain.tf.v1_x import constants from deepexplain.tf.v1_x.baseClasses import GradientBasedMethod", "as tf from tensorflow.python.framework import ops from collections import OrderedDict", "DeepLIFTRescale, Occlusion, ShapleySampling attribution_methods = OrderedDict({ 'zero': (DummyZero, 0), 'saliency':", "def explain(self, method, T, X, xs, ys=None, batch_size=None, **kwargs): explainer", "trying to use an attribution method that requires ' 'gradient", "print(f'Using tf version = {tf.__version__}') @ops.RegisterGradient(\"DeepExplainGrad\") def 
deepexplain_grad(op, grad): #", "if isinstance(X, list): for x in X: if 'tensor' not" ]
[ "in GB. From https://stackoverflow.com/a/46216013. \"\"\" import numpy as np try:", "4.0 if K.floatx() == 'float16': number_size = 2.0 if K.floatx()", "import backend as K shapes_mem_count = 0 internal_model_mem_count = 0", "trainable_count + non_trainable_count) gbytes = np.round(total_memory / (1024.0 ** 3),", "= 8.0 total_memory = number_size * (batch_size * shapes_mem_count +", "if K.floatx() == 'float64': number_size = 8.0 total_memory = number_size", "(batch_size * shapes_mem_count + trainable_count + non_trainable_count) gbytes = np.round(total_memory", "\"\"\" Get the memory usage of a Keras model in", "l in model.layers: layer_type = l.__class__.__name__ if layer_type == 'Model':", "for l in model.layers: layer_type = l.__class__.__name__ if layer_type ==", "total_memory = number_size * (batch_size * shapes_mem_count + trainable_count +", "number_size = 4.0 if K.floatx() == 'float16': number_size = 2.0", "p in model.trainable_weights]) non_trainable_count = np.sum([K.count_params(p) for p in model.non_trainable_weights])", "as K except ImportError: from tensorflow.keras import backend as K", "Keras model. From https://stackoverflow.com/a/46216013. \"\"\" def get_model_memory_usage(batch_size, model): \"\"\" Get", "+= single_layer_mem trainable_count = np.sum([K.count_params(p) for p in model.trainable_weights]) non_trainable_count", "backend as K except ImportError: from tensorflow.keras import backend as", "<gh_stars>0 \"\"\" Get the memory usage of a Keras model.", "GB. From https://stackoverflow.com/a/46216013. \"\"\" import numpy as np try: from", "internal_model_mem_count += get_model_memory_usage(batch_size, l) single_layer_mem = 1 out_shape = l.output_shape", "https://stackoverflow.com/a/46216013. 
\"\"\" import numpy as np try: from keras import", "np.sum([K.count_params(p) for p in model.non_trainable_weights]) number_size = 4.0 if K.floatx()", "number_size = 2.0 if K.floatx() == 'float64': number_size = 8.0", "import numpy as np try: from keras import backend as", "if K.floatx() == 'float16': number_size = 2.0 if K.floatx() ==", "keras import backend as K except ImportError: from tensorflow.keras import", "np.sum([K.count_params(p) for p in model.trainable_weights]) non_trainable_count = np.sum([K.count_params(p) for p", "in model.layers: layer_type = l.__class__.__name__ if layer_type == 'Model': internal_model_mem_count", "https://stackoverflow.com/a/46216013. \"\"\" def get_model_memory_usage(batch_size, model): \"\"\" Get the memory usage", "= l.output_shape if isinstance(out_shape, list): out_shape = out_shape[0] for s", "number_size * (batch_size * shapes_mem_count + trainable_count + non_trainable_count) gbytes", "= 4.0 if K.floatx() == 'float16': number_size = 2.0 if", "2.0 if K.floatx() == 'float64': number_size = 8.0 total_memory =", "import backend as K except ImportError: from tensorflow.keras import backend", "single_layer_mem = 1 out_shape = l.output_shape if isinstance(out_shape, list): out_shape", "'Model': internal_model_mem_count += get_model_memory_usage(batch_size, l) single_layer_mem = 1 out_shape =", "== 'Model': internal_model_mem_count += get_model_memory_usage(batch_size, l) single_layer_mem = 1 out_shape", "a Keras model. From https://stackoverflow.com/a/46216013. 
\"\"\" def get_model_memory_usage(batch_size, model): \"\"\"", "model.non_trainable_weights]) number_size = 4.0 if K.floatx() == 'float16': number_size =", "if s is None: continue single_layer_mem *= s shapes_mem_count +=", "= 2.0 if K.floatx() == 'float64': number_size = 8.0 total_memory", "tensorflow.keras import backend as K shapes_mem_count = 0 internal_model_mem_count =", "number_size = 8.0 total_memory = number_size * (batch_size * shapes_mem_count", "model.layers: layer_type = l.__class__.__name__ if layer_type == 'Model': internal_model_mem_count +=", "np try: from keras import backend as K except ImportError:", "out_shape = out_shape[0] for s in out_shape: if s is", "Get the memory usage of a Keras model. From https://stackoverflow.com/a/46216013.", "0 for l in model.layers: layer_type = l.__class__.__name__ if layer_type", "from tensorflow.keras import backend as K shapes_mem_count = 0 internal_model_mem_count", "= out_shape[0] for s in out_shape: if s is None:", "single_layer_mem *= s shapes_mem_count += single_layer_mem trainable_count = np.sum([K.count_params(p) for", "8.0 total_memory = number_size * (batch_size * shapes_mem_count + trainable_count", "the memory usage of a Keras model in GB. From", "+ non_trainable_count) gbytes = np.round(total_memory / (1024.0 ** 3), 3)", "From https://stackoverflow.com/a/46216013. 
\"\"\" def get_model_memory_usage(batch_size, model): \"\"\" Get the memory", "== 'float16': number_size = 2.0 if K.floatx() == 'float64': number_size", "= 0 for l in model.layers: layer_type = l.__class__.__name__ if", "backend as K shapes_mem_count = 0 internal_model_mem_count = 0 for", "K shapes_mem_count = 0 internal_model_mem_count = 0 for l in", "+= get_model_memory_usage(batch_size, l) single_layer_mem = 1 out_shape = l.output_shape if", "model): \"\"\" Get the memory usage of a Keras model", "s is None: continue single_layer_mem *= s shapes_mem_count += single_layer_mem", "s shapes_mem_count += single_layer_mem trainable_count = np.sum([K.count_params(p) for p in", "of a Keras model. From https://stackoverflow.com/a/46216013. \"\"\" def get_model_memory_usage(batch_size, model):", "def get_model_memory_usage(batch_size, model): \"\"\" Get the memory usage of a", "= np.round(total_memory / (1024.0 ** 3), 3) + internal_model_mem_count return", "trainable_count = np.sum([K.count_params(p) for p in model.trainable_weights]) non_trainable_count = np.sum([K.count_params(p)", "for s in out_shape: if s is None: continue single_layer_mem", "try: from keras import backend as K except ImportError: from", "memory usage of a Keras model. From https://stackoverflow.com/a/46216013. \"\"\" def", "shapes_mem_count += single_layer_mem trainable_count = np.sum([K.count_params(p) for p in model.trainable_weights])", "is None: continue single_layer_mem *= s shapes_mem_count += single_layer_mem trainable_count", "model in GB. From https://stackoverflow.com/a/46216013. 
\"\"\" import numpy as np", "shapes_mem_count = 0 internal_model_mem_count = 0 for l in model.layers:", "list): out_shape = out_shape[0] for s in out_shape: if s", "Get the memory usage of a Keras model in GB.", "'float64': number_size = 8.0 total_memory = number_size * (batch_size *", "\"\"\" import numpy as np try: from keras import backend", "if layer_type == 'Model': internal_model_mem_count += get_model_memory_usage(batch_size, l) single_layer_mem =", "get_model_memory_usage(batch_size, l) single_layer_mem = 1 out_shape = l.output_shape if isinstance(out_shape,", "= np.sum([K.count_params(p) for p in model.non_trainable_weights]) number_size = 4.0 if", "layer_type = l.__class__.__name__ if layer_type == 'Model': internal_model_mem_count += get_model_memory_usage(batch_size,", "= number_size * (batch_size * shapes_mem_count + trainable_count + non_trainable_count)", "in out_shape: if s is None: continue single_layer_mem *= s", "shapes_mem_count + trainable_count + non_trainable_count) gbytes = np.round(total_memory / (1024.0", "+ trainable_count + non_trainable_count) gbytes = np.round(total_memory / (1024.0 **", "* (batch_size * shapes_mem_count + trainable_count + non_trainable_count) gbytes =", "l) single_layer_mem = 1 out_shape = l.output_shape if isinstance(out_shape, list):", "s in out_shape: if s is None: continue single_layer_mem *=", "== 'float64': number_size = 8.0 total_memory = number_size * (batch_size", "in model.trainable_weights]) non_trainable_count = np.sum([K.count_params(p) for p in model.non_trainable_weights]) number_size", "= l.__class__.__name__ if layer_type == 'Model': internal_model_mem_count += get_model_memory_usage(batch_size, l)", "memory usage of a Keras model in GB. 
From https://stackoverflow.com/a/46216013.", "*= s shapes_mem_count += single_layer_mem trainable_count = np.sum([K.count_params(p) for p", "model.trainable_weights]) non_trainable_count = np.sum([K.count_params(p) for p in model.non_trainable_weights]) number_size =", "single_layer_mem trainable_count = np.sum([K.count_params(p) for p in model.trainable_weights]) non_trainable_count =", "0 internal_model_mem_count = 0 for l in model.layers: layer_type =", "layer_type == 'Model': internal_model_mem_count += get_model_memory_usage(batch_size, l) single_layer_mem = 1", "p in model.non_trainable_weights]) number_size = 4.0 if K.floatx() == 'float16':", "numpy as np try: from keras import backend as K", "non_trainable_count = np.sum([K.count_params(p) for p in model.non_trainable_weights]) number_size = 4.0", "except ImportError: from tensorflow.keras import backend as K shapes_mem_count =", "K except ImportError: from tensorflow.keras import backend as K shapes_mem_count", "Keras model in GB. From https://stackoverflow.com/a/46216013. \"\"\" import numpy as", "From https://stackoverflow.com/a/46216013. \"\"\" import numpy as np try: from keras", "as np try: from keras import backend as K except", "usage of a Keras model. From https://stackoverflow.com/a/46216013. \"\"\" def get_model_memory_usage(batch_size,", "non_trainable_count) gbytes = np.round(total_memory / (1024.0 ** 3), 3) +", "\"\"\" Get the memory usage of a Keras model. From", "from keras import backend as K except ImportError: from tensorflow.keras", "l.output_shape if isinstance(out_shape, list): out_shape = out_shape[0] for s in", "continue single_layer_mem *= s shapes_mem_count += single_layer_mem trainable_count = np.sum([K.count_params(p)", "model. From https://stackoverflow.com/a/46216013. \"\"\" def get_model_memory_usage(batch_size, model): \"\"\" Get the", "usage of a Keras model in GB. From https://stackoverflow.com/a/46216013. 
\"\"\"", "ImportError: from tensorflow.keras import backend as K shapes_mem_count = 0", "= np.sum([K.count_params(p) for p in model.trainable_weights]) non_trainable_count = np.sum([K.count_params(p) for", "1 out_shape = l.output_shape if isinstance(out_shape, list): out_shape = out_shape[0]", "= 0 internal_model_mem_count = 0 for l in model.layers: layer_type", "out_shape: if s is None: continue single_layer_mem *= s shapes_mem_count", "for p in model.trainable_weights]) non_trainable_count = np.sum([K.count_params(p) for p in", "out_shape = l.output_shape if isinstance(out_shape, list): out_shape = out_shape[0] for", "for p in model.non_trainable_weights]) number_size = 4.0 if K.floatx() ==", "None: continue single_layer_mem *= s shapes_mem_count += single_layer_mem trainable_count =", "= 1 out_shape = l.output_shape if isinstance(out_shape, list): out_shape =", "if isinstance(out_shape, list): out_shape = out_shape[0] for s in out_shape:", "in model.non_trainable_weights]) number_size = 4.0 if K.floatx() == 'float16': number_size", "* shapes_mem_count + trainable_count + non_trainable_count) gbytes = np.round(total_memory /", "a Keras model in GB. From https://stackoverflow.com/a/46216013. \"\"\" import numpy", "of a Keras model in GB. From https://stackoverflow.com/a/46216013. \"\"\" import", "'float16': number_size = 2.0 if K.floatx() == 'float64': number_size =", "l.__class__.__name__ if layer_type == 'Model': internal_model_mem_count += get_model_memory_usage(batch_size, l) single_layer_mem", "np.round(total_memory / (1024.0 ** 3), 3) + internal_model_mem_count return gbytes", "out_shape[0] for s in out_shape: if s is None: continue", "isinstance(out_shape, list): out_shape = out_shape[0] for s in out_shape: if", "the memory usage of a Keras model. From https://stackoverflow.com/a/46216013. 
\"\"\"", "gbytes = np.round(total_memory / (1024.0 ** 3), 3) + internal_model_mem_count", "\"\"\" def get_model_memory_usage(batch_size, model): \"\"\" Get the memory usage of", "as K shapes_mem_count = 0 internal_model_mem_count = 0 for l", "K.floatx() == 'float16': number_size = 2.0 if K.floatx() == 'float64':", "internal_model_mem_count = 0 for l in model.layers: layer_type = l.__class__.__name__", "K.floatx() == 'float64': number_size = 8.0 total_memory = number_size *", "get_model_memory_usage(batch_size, model): \"\"\" Get the memory usage of a Keras" ]
[ "apply(self, xy_in): \"\"\"Apply distortion mapping\"\"\" pass @abc.abstractmethod def apply_inverse(self, xy_in):", "None @abc.abstractmethod def apply(self, xy_in): \"\"\"Apply distortion mapping\"\"\" pass @abc.abstractmethod", "mapping\"\"\" pass @abc.abstractmethod def apply_inverse(self, xy_in): \"\"\"Apply inverse distortion mapping\"\"\"", "class DistortionABC(metaclass=abc.ABCMeta): maptype = None @abc.abstractmethod def apply(self, xy_in): \"\"\"Apply", "pass @abc.abstractmethod def apply_inverse(self, xy_in): \"\"\"Apply inverse distortion mapping\"\"\" pass", "def apply(self, xy_in): \"\"\"Apply distortion mapping\"\"\" pass @abc.abstractmethod def apply_inverse(self,", "xy_in): \"\"\"Apply distortion mapping\"\"\" pass @abc.abstractmethod def apply_inverse(self, xy_in): \"\"\"Apply", "import abc class DistortionABC(metaclass=abc.ABCMeta): maptype = None @abc.abstractmethod def apply(self,", "= None @abc.abstractmethod def apply(self, xy_in): \"\"\"Apply distortion mapping\"\"\" pass", "@abc.abstractmethod def apply(self, xy_in): \"\"\"Apply distortion mapping\"\"\" pass @abc.abstractmethod def", "\"\"\"Apply distortion mapping\"\"\" pass @abc.abstractmethod def apply_inverse(self, xy_in): \"\"\"Apply inverse", "distortion mapping\"\"\" pass @abc.abstractmethod def apply_inverse(self, xy_in): \"\"\"Apply inverse distortion", "maptype = None @abc.abstractmethod def apply(self, xy_in): \"\"\"Apply distortion mapping\"\"\"", "abc class DistortionABC(metaclass=abc.ABCMeta): maptype = None @abc.abstractmethod def apply(self, xy_in):", "DistortionABC(metaclass=abc.ABCMeta): maptype = None @abc.abstractmethod def apply(self, xy_in): \"\"\"Apply distortion" ]
[ "'scikit-learn', 'seaborn'], license='MIT', version='0.1.4', description='Evaluation metrics for recommender systems', long_description=read(\"README.md\"),", "a string.\"\"\" with io.open(os.path.join(os.path.dirname(__file__), file_name), encoding='utf-8') as f: return f.read()", "import setup def read(file_name): \"\"\"Read a text file and return", "setuptools import setup def read(file_name): \"\"\"Read a text file and", "with io.open(os.path.join(os.path.dirname(__file__), file_name), encoding='utf-8') as f: return f.read() setup( name='recmetrics',", "file and return the content as a string.\"\"\" with io.open(os.path.join(os.path.dirname(__file__),", "io import os from setuptools import setup def read(file_name): \"\"\"Read", "encoding='utf-8') as f: return f.read() setup( name='recmetrics', url='https://github.com/statisticianinstilettos/recommender_metrics', author='<NAME>', author_email='<EMAIL>',", "'plotly', 'scikit-learn', 'seaborn'], license='MIT', version='0.1.4', description='Evaluation metrics for recommender systems',", "return the content as a string.\"\"\" with io.open(os.path.join(os.path.dirname(__file__), file_name), encoding='utf-8')", "a text file and return the content as a string.\"\"\"", "as f: return f.read() setup( name='recmetrics', url='https://github.com/statisticianinstilettos/recommender_metrics', author='<NAME>', author_email='<EMAIL>', packages=['recmetrics'],", "from setuptools import setup def read(file_name): \"\"\"Read a text file", "read(file_name): \"\"\"Read a text file and return the content as", "author_email='<EMAIL>', packages=['recmetrics'], install_requires=['funcsigs', 'numpy', 'pandas', 'plotly', 'scikit-learn', 'seaborn'], license='MIT', version='0.1.4',", "setup def read(file_name): \"\"\"Read a text file and return the", "f: return f.read() setup( name='recmetrics', url='https://github.com/statisticianinstilettos/recommender_metrics', author='<NAME>', author_email='<EMAIL>', packages=['recmetrics'], 
install_requires=['funcsigs',", "name='recmetrics', url='https://github.com/statisticianinstilettos/recommender_metrics', author='<NAME>', author_email='<EMAIL>', packages=['recmetrics'], install_requires=['funcsigs', 'numpy', 'pandas', 'plotly', 'scikit-learn',", "as a string.\"\"\" with io.open(os.path.join(os.path.dirname(__file__), file_name), encoding='utf-8') as f: return", "import io import os from setuptools import setup def read(file_name):", "content as a string.\"\"\" with io.open(os.path.join(os.path.dirname(__file__), file_name), encoding='utf-8') as f:", "io.open(os.path.join(os.path.dirname(__file__), file_name), encoding='utf-8') as f: return f.read() setup( name='recmetrics', url='https://github.com/statisticianinstilettos/recommender_metrics',", "and return the content as a string.\"\"\" with io.open(os.path.join(os.path.dirname(__file__), file_name),", "def read(file_name): \"\"\"Read a text file and return the content", "install_requires=['funcsigs', 'numpy', 'pandas', 'plotly', 'scikit-learn', 'seaborn'], license='MIT', version='0.1.4', description='Evaluation metrics", "the content as a string.\"\"\" with io.open(os.path.join(os.path.dirname(__file__), file_name), encoding='utf-8') as", "string.\"\"\" with io.open(os.path.join(os.path.dirname(__file__), file_name), encoding='utf-8') as f: return f.read() setup(", "f.read() setup( name='recmetrics', url='https://github.com/statisticianinstilettos/recommender_metrics', author='<NAME>', author_email='<EMAIL>', packages=['recmetrics'], install_requires=['funcsigs', 'numpy', 'pandas',", "url='https://github.com/statisticianinstilettos/recommender_metrics', author='<NAME>', author_email='<EMAIL>', packages=['recmetrics'], install_requires=['funcsigs', 'numpy', 'pandas', 'plotly', 'scikit-learn', 'seaborn'],", "setup( name='recmetrics', url='https://github.com/statisticianinstilettos/recommender_metrics', author='<NAME>', author_email='<EMAIL>', packages=['recmetrics'], install_requires=['funcsigs', 
'numpy', 'pandas', 'plotly',", "import os from setuptools import setup def read(file_name): \"\"\"Read a", "author='<NAME>', author_email='<EMAIL>', packages=['recmetrics'], install_requires=['funcsigs', 'numpy', 'pandas', 'plotly', 'scikit-learn', 'seaborn'], license='MIT',", "'pandas', 'plotly', 'scikit-learn', 'seaborn'], license='MIT', version='0.1.4', description='Evaluation metrics for recommender", "'seaborn'], license='MIT', version='0.1.4', description='Evaluation metrics for recommender systems', long_description=read(\"README.md\"), long_description_content_type=\"text/markdown\",", "license='MIT', version='0.1.4', description='Evaluation metrics for recommender systems', long_description=read(\"README.md\"), long_description_content_type=\"text/markdown\", )", "os from setuptools import setup def read(file_name): \"\"\"Read a text", "'numpy', 'pandas', 'plotly', 'scikit-learn', 'seaborn'], license='MIT', version='0.1.4', description='Evaluation metrics for", "return f.read() setup( name='recmetrics', url='https://github.com/statisticianinstilettos/recommender_metrics', author='<NAME>', author_email='<EMAIL>', packages=['recmetrics'], install_requires=['funcsigs', 'numpy',", "packages=['recmetrics'], install_requires=['funcsigs', 'numpy', 'pandas', 'plotly', 'scikit-learn', 'seaborn'], license='MIT', version='0.1.4', description='Evaluation", "\"\"\"Read a text file and return the content as a", "file_name), encoding='utf-8') as f: return f.read() setup( name='recmetrics', url='https://github.com/statisticianinstilettos/recommender_metrics', author='<NAME>',", "text file and return the content as a string.\"\"\" with" ]
[ "and # limitations under the License. \"\"\"BERT finetuning runner.\"\"\" from", "logger.info(\"Writing example %d of %d\" % (ex_index, len(examples))) feature =", "drop_remainder=predict_drop_remainder) result = estimator.predict(input_fn=predict_input_fn) output_predict_file = os.path.join(config.output_dir, \"test_results.tsv\") with tf.gfile.GFile(output_predict_file,", "probabilities = prediction[\"probabilities\"] if i >= num_actual_predict_examples: break output_line =", "2.0 (the \"License\"); # you may not use this file", "Running training *****\") tf.logging.info(\" Num examples = %d\", len(train_examples)) tf.logging.info(\"", "= int(num_train_steps * config.warmup_proportion) model = create_model(config=config) training_arguments = TrainingArguments(", "== 0: logger.info(\"Writing example %d of %d\" % (ex_index, len(examples)))", "len(eval_examples), num_actual_eval_examples, len(eval_examples) - num_actual_eval_examples) tf.logging.info(\" Batch size = %d\",", "= os.path.join(config.output_dir, \"test_results.tsv\") with tf.gfile.GFile(output_predict_file, \"w\") as writer: num_written_lines =", "tf.logging.info(\" Num examples = %d\", len(train_examples)) tf.logging.info(\" Batch size =", "seq_length=config.max_seq_length, is_training=False, drop_remainder=eval_drop_remainder) result = estimator.evaluate(input_fn=eval_input_fn, steps=eval_steps) output_eval_file = os.path.join(config.output_dir,", "= file_based_input_fn_builder( input_file=eval_file, seq_length=config.max_seq_length, is_training=False, drop_remainder=eval_drop_remainder) result = estimator.evaluate(input_fn=eval_input_fn, steps=eval_steps)", "InputFeatures, Config ) from src.data_process import ( AgNewsDataProcessor ) from", "tf.logging.info(\" Batch size = %d\", config.predict_batch_size) predict_drop_remainder = True if", "0.0). 
while len(eval_examples) % config.eval_batch_size != 0: eval_examples.append(PaddingInputExample()) eval_file =", "eval_examples = processor.get_dev_examples(config.data_dir) num_actual_eval_examples = len(eval_examples) if config.use_tpu: # TPU", "eval_file = os.path.join(config.output_dir, \"eval.tf_record\") file_based_convert_examples_to_features( eval_examples, label_list, config.max_seq_length, tokenizer, eval_file)", "eval_steps = None # However, if running eval on the", "AutoTokenizer.from_pretrained(config.pretrained_model_name) train_examples = None num_train_steps = None num_warmup_steps = None", "== 0 eval_steps = int(len(eval_examples) // config.eval_batch_size) eval_drop_remainder = True", "} if example.text_b: parameters['text_pair'] = example.text_b feature = tokenizer(**parameters) input_feature", "tf.gfile.GFile(output_predict_file, \"w\") as writer: num_written_lines = 0 tf.logging.info(\"***** Predict results", "# of examples must be a multiple of the batch", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "len(examples))) feature = convert_single_example(ex_index, example, label2id, max_seq_length, tokenizer) features.append(feature) return", "import Dict, List from transformers import ( AutoTokenizer, BertTokenizer, BertForSequenceClassification,", "eval_steps = int(len(eval_examples) // config.eval_batch_size) eval_drop_remainder = True if config.use_tpu", "input_file=predict_file, seq_length=config.max_seq_length, is_training=False, drop_remainder=predict_drop_remainder) result = estimator.predict(input_fn=predict_input_fn) output_predict_file = os.path.join(config.output_dir,", "): \"\"\"Convert a set of `InputExample`s to a list of", "file_based_input_fn_builder( input_file=train_file, seq_length=config.max_seq_length, is_training=True, drop_remainder=True) estimator.train(input_fn=train_input_fn, max_steps=num_train_steps) if config.do_eval: eval_examples", "%s\\n\" % (key, str(result[key]))) if 
config.do_predict: predict_examples = processor.get_test_examples(config.data_dir) num_actual_predict_examples", "import ( AutoTokenizer, BertTokenizer, BertForSequenceClassification, BertConfig, Trainer, TrainingArguments, PreTrainedTokenizer )", "import os from typing import Dict, List from transformers import", "bert_config.num_labels = config.num_labels model = BertForSequenceClassification(bert_config) return model def create_model(config:", "the # number of steps. if config.use_tpu: assert len(eval_examples) %", "tells the estimator to run through the entire set. eval_steps", "train_examples: List[InputExample] = processor.get_train_examples(config.data_dir) train_dataset_loader = num_train_steps = int( len(train_examples)", "return_outputs=False): labels = inputs.pop(\"labels\") outputs = model(**inputs) return outputs.loss def", "estimator.train(input_fn=train_input_fn, max_steps=num_train_steps) if config.do_eval: eval_examples = processor.get_dev_examples(config.data_dir) num_actual_eval_examples = len(eval_examples)", "len(eval_examples) if config.use_tpu: # TPU requires a fixed batch size", "example_index: int, example: InputExample, label2id: Dict[str, int], max_seq_length: int, tokenizer:", "of examples must be a multiple of the batch size,", "{ \"text\":example.text_a, \"add_special_tokens\":True, \"padding\":True, \"max_length\":max_seq_length, \"return_attention_mask\":True, \"return_token_type_ids\":True, \"return_length\":True, \"verbose\":True }", "= processor.get_labels() tokenizer = AutoTokenizer.from_pretrained(config.pretrained_model_name) train_examples = None num_train_steps =", "use this file except in compliance with the License. 
#", "in probabilities) + \"\\n\" writer.write(output_line) num_written_lines += 1 assert num_written_lines", "\"\\t\".join( str(class_probability) for class_probability in probabilities) + \"\\n\" writer.write(output_line) num_written_lines", "class_probability in probabilities) + \"\\n\" writer.write(output_line) num_written_lines += 1 assert", "len(predict_examples) if config.use_tpu: # TPU requires a fixed batch size", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", ") -> InputFeatures: \"\"\"Converts a single `InputExample` into a single", "models[config.model_name](config) def convert_examples_to_features( examples, label_list: List[str], max_seq_length: int, tokenizer: PreTrainedTokenizer", "to a list of `InputFeatures`.\"\"\" label2id = {label: index for", "requires a fixed batch size for all batches, therefore the", "\"\"\"Creates a classification model.\"\"\" models = { \"bert-for-sequence-classification\": create_bert_for_sequence_classification_model, }", "License. # You may obtain a copy of the License", "model = BertForSequenceClassification(bert_config) return model def create_model(config: Config): \"\"\"Creates a", "not config.do_train and not config.do_eval and not config.do_predict: raise ValueError(", "examples = %d\", len(train_examples)) tf.logging.info(\" Batch size = %d\", config.train_batch_size)", "weight, and these get a weight of 0.0). 
while len(eval_examples)", "# coding=utf-8 # Copyright 2018 The Google AI Language Team", "trainer = SequenceClassificationTrainer( model=model, ) # If TPU is not", "config.eval_batch_size) eval_drop_remainder = True if config.use_tpu else False eval_input_fn =", "else False predict_input_fn = file_based_input_fn_builder( input_file=predict_file, seq_length=config.max_seq_length, is_training=False, drop_remainder=predict_drop_remainder) result", "if config.do_eval: eval_examples = processor.get_dev_examples(config.data_dir) num_actual_eval_examples = len(eval_examples) if config.use_tpu:", "under the License is distributed on an \"AS IS\" BASIS,", "if config.do_train: train_file = os.path.join(config.output_dir, \"train.tf_record\") file_based_convert_examples_to_features( train_examples, label_list, config.max_seq_length,", "License for the specific language governing permissions and # limitations", "`do_eval` or `do_predict' must be True.\") bert_config = PretrainedConfig.from_pretrained(config.pretrained_model_name) #", "model = create_model(config=config) training_arguments = TrainingArguments( output_dir=config.output_dir, overwrite_output_dir=True, ) trainer", "train_file = os.path.join(config.output_dir, \"train.tf_record\") file_based_convert_examples_to_features( train_examples, label_list, config.max_seq_length, tokenizer, train_file)", "number of steps. if config.use_tpu: assert len(eval_examples) % config.eval_batch_size ==", "key in sorted(result.keys()): tf.logging.info(\" %s = %s\", key, str(result[key])) writer.write(\"%s", "= processor.get_train_examples(config.data_dir) train_dataset_loader = num_train_steps = int( len(train_examples) / config.train_batch_size", "predict_drop_remainder = True if config.use_tpu else False predict_input_fn = file_based_input_fn_builder(", "Batch size = %d\", config.eval_batch_size) # This tells the estimator", "size, or else examples # will get dropped. 
So we", "training_arguments = TrainingArguments( output_dir=config.output_dir, overwrite_output_dir=True, ) trainer = SequenceClassificationTrainer( model=model,", "num_actual_predict_examples) tf.logging.info(\" Batch size = %d\", config.predict_batch_size) predict_drop_remainder = True", "__future__ import annotations, absolute_import import os from typing import Dict,", "the TPU, you will need to specify the # number", "to specify the # number of steps. if config.use_tpu: assert", "tokenizer, train_file) tf.logging.info(\"***** Running training *****\") tf.logging.info(\" Num examples =", "= BertConfig.from_pretrained(config.pretrained_model_name) bert_config.num_labels = config.num_labels model = BertForSequenceClassification(bert_config) return model", "example.text_b: parameters['text_pair'] = example.text_b feature = tokenizer(**parameters) input_feature = InputFeatures(", "writer: num_written_lines = 0 tf.logging.info(\"***** Predict results *****\") for (i,", "probabilities) + \"\\n\" writer.write(output_line) num_written_lines += 1 assert num_written_lines ==", "tf.logging.info(\"***** Eval results *****\") for key in sorted(result.keys()): tf.logging.info(\" %s", "`InputExample` into a single `InputFeatures`. 
example_index: 用于展示example中的前几例数据 \"\"\" parameters =", "\"\"\" parameters = { \"text\":example.text_a, \"add_special_tokens\":True, \"padding\":True, \"max_length\":max_seq_length, \"return_attention_mask\":True, \"return_token_type_ids\":True,", "return model def create_model(config: Config): \"\"\"Creates a classification model.\"\"\" models", "Example End ***************************') return input_feature def create_bert_for_sequence_classification_model(config: Config): bert_config: BertConfig", "Running evaluation *****\") tf.logging.info(\" Num examples = %d (%d actual,", "to normal Estimator on CPU # or GPUs if config.do_train:", "sorted(result.keys()): tf.logging.info(\" %s = %s\", key, str(result[key])) writer.write(\"%s = %s\\n\"", "( InputExample, InputFeatures, Config ) from src.data_process import ( AgNewsDataProcessor", "max_seq_length: int, tokenizer: BertTokenizer ) -> InputFeatures: \"\"\"Converts a single", "However, if running eval on the TPU, you will need", ") # If TPU is not available, this will fall", "len(train_examples)) tf.logging.info(\" Batch size = %d\", config.train_batch_size) tf.logging.info(\" Num steps", "fall back to normal Estimator on CPU # or GPUs", "= {label: index for index, label in enumerate(label_list)} features =", "in compliance with the License. # You may obtain a", "return outputs.loss def main(): # processors need to be updated", "examples # will get dropped. 
So we pad with fake", "estimator.evaluate(input_fn=eval_input_fn, steps=eval_steps) output_eval_file = os.path.join(config.output_dir, \"eval_results.txt\") with tf.gfile.GFile(output_eval_file, \"w\") as", "software # distributed under the License is distributed on an", "of the batch size, or else examples # will get", "\"add_special_tokens\":True, \"padding\":True, \"max_length\":max_seq_length, \"return_attention_mask\":True, \"return_token_type_ids\":True, \"return_length\":True, \"verbose\":True } if example.text_b:", "PretrainedConfig.from_pretrained(config.pretrained_model_name) # 根据不同的任务,处理不同的数据集 task_name = config.task_name.lower() if task_name not in", "inputs, return_outputs=False): labels = inputs.pop(\"labels\") outputs = model(**inputs) return outputs.loss", "a fixed batch size for all batches, therefore the number", "int(len(eval_examples) // config.eval_batch_size) eval_drop_remainder = True if config.use_tpu else False", "config.use_tpu else False predict_input_fn = file_based_input_fn_builder( input_file=predict_file, seq_length=config.max_seq_length, is_training=False, drop_remainder=predict_drop_remainder)", "Num examples = %d (%d actual, %d padding)\", len(predict_examples), num_actual_predict_examples,", "num_written_lines = 0 tf.logging.info(\"***** Predict results *****\") for (i, prediction)", "BertTokenizer ) -> InputFeatures: \"\"\"Converts a single `InputExample` into a", "AI Language Team Authors. # # Licensed under the Apache", "permissions and # limitations under the License. 
\"\"\"BERT finetuning runner.\"\"\"", "* config.epochs ) num_warmup_steps = int(num_train_steps * config.warmup_proportion) model =", "eval_examples, label_list, config.max_seq_length, tokenizer, eval_file) tf.logging.info(\"***** Running evaluation *****\") tf.logging.info(\"", "`InputExample`s to a list of `InputFeatures`.\"\"\" label2id = {label: index", "or GPUs if config.do_train: train_file = os.path.join(config.output_dir, \"train.tf_record\") file_based_convert_examples_to_features( train_examples,", "input_ids=feature['token_ids'], attention_mask=feature['attention_mask'], segment_ids=feature['token_type_ids'], label_id=label2id[example.label], is_real_example=True ) if example_index < 5:", "= InputFeatures( input_ids=feature['token_ids'], attention_mask=feature['attention_mask'], segment_ids=feature['token_type_ids'], label_id=label2id[example.label], is_real_example=True ) if example_index", "GPUs if config.do_train: train_file = os.path.join(config.output_dir, \"train.tf_record\") file_based_convert_examples_to_features( train_examples, label_list,", "for (ex_index, example) in enumerate(examples): if ex_index % 200 ==", "num_actual_predict_examples: break output_line = \"\\t\".join( str(class_probability) for class_probability in probabilities)", "num_train_steps = int( len(train_examples) / config.train_batch_size * config.epochs ) num_warmup_steps", "a list of `InputFeatures`.\"\"\" label2id = {label: index for index,", "= %d\", config.eval_batch_size) # This tells the estimator to run", "= file_based_input_fn_builder( input_file=predict_file, seq_length=config.max_seq_length, is_training=False, drop_remainder=predict_drop_remainder) result = estimator.predict(input_fn=predict_input_fn) output_predict_file", "of `do_train`, `do_eval` or `do_predict' must be True.\") bert_config =", "***************************') return input_feature def create_bert_for_sequence_classification_model(config: Config): bert_config: BertConfig = 
BertConfig.from_pretrained(config.pretrained_model_name)", "run through the entire set. eval_steps = None # However,", "fake examples which are ignored # later on. while len(predict_examples)", "config.do_train: train_examples: List[InputExample] = processor.get_train_examples(config.data_dir) train_dataset_loader = num_train_steps = int(", "AgNewsDataProcessor, } config: Config = Config.instance() if not config.do_train and", "PretrainedConfig from src.schema import ( InputExample, InputFeatures, Config ) from", "index, label in enumerate(label_list)} features = [] for (ex_index, example)", "# If TPU is not available, this will fall back", "import ( AgNewsDataProcessor ) from config import create_logger logger =", "finetuning runner.\"\"\" from __future__ import annotations, absolute_import import os from", "= %d (%d actual, %d padding)\", len(predict_examples), num_actual_predict_examples, len(predict_examples) -", "on. while len(predict_examples) % config.predict_batch_size != 0: predict_examples.append(PaddingInputExample()) predict_file =", "actual, %d padding)\", len(eval_examples), num_actual_eval_examples, len(eval_examples) - num_actual_eval_examples) tf.logging.info(\" Batch", "= TrainingArguments( output_dir=config.output_dir, overwrite_output_dir=True, ) trainer = SequenceClassificationTrainer( model=model, )", "with tf.gfile.GFile(output_predict_file, \"w\") as writer: num_written_lines = 0 tf.logging.info(\"***** Predict", "= model(**inputs) return outputs.loss def main(): # processors need to", "(all tf.metrics # support a per-instance weight, and these get", "if i >= num_actual_predict_examples: break output_line = \"\\t\".join( str(class_probability) for", "= %d\", num_train_steps) train_input_fn = file_based_input_fn_builder( input_file=train_file, seq_length=config.max_seq_length, is_training=True, drop_remainder=True)", "train_dataset_loader = num_train_steps = int( len(train_examples) / config.train_batch_size * config.epochs", "OF ANY KIND, 
either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "config.do_predict: raise ValueError( \"At least one of `do_train`, `do_eval` or", "enumerate(result): probabilities = prediction[\"probabilities\"] if i >= num_actual_predict_examples: break output_line", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "results *****\") for (i, prediction) in enumerate(result): probabilities = prediction[\"probabilities\"]", "def convert_examples_to_features( examples, label_list: List[str], max_seq_length: int, tokenizer: PreTrainedTokenizer ):", "the License. # You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "Team Authors. # # Licensed under the Apache License, Version", "into a single `InputFeatures`. example_index: 用于展示example中的前几例数据 \"\"\" parameters = {", "to in writing, software # distributed under the License is", "ValueError(\"Task not found: %s\" % (task_name)) processor = processors[task_name]() label_list", "200 == 0: logger.info(\"Writing example %d of %d\" % (ex_index,", "return features class SequenceClassificationTrainer(Trainer): def compute_loss(self, model, inputs, return_outputs=False): labels", "if config.do_predict: predict_examples = processor.get_test_examples(config.data_dir) num_actual_predict_examples = len(predict_examples) if config.use_tpu:", "# See the License for the specific language governing permissions", "Copyright 2018 The Google AI Language Team Authors. 
# #", "= %d\", config.predict_batch_size) predict_drop_remainder = True if config.use_tpu else False", "= processor.get_test_examples(config.data_dir) num_actual_predict_examples = len(predict_examples) if config.use_tpu: # TPU requires", "training *****\") tf.logging.info(\" Num examples = %d\", len(train_examples)) tf.logging.info(\" Batch", "# However, if running eval on the TPU, you will", "file_based_input_fn_builder( input_file=eval_file, seq_length=config.max_seq_length, is_training=False, drop_remainder=eval_drop_remainder) result = estimator.evaluate(input_fn=eval_input_fn, steps=eval_steps) output_eval_file", "or agreed to in writing, software # distributed under the", "len(eval_examples) % config.eval_batch_size != 0: eval_examples.append(PaddingInputExample()) eval_file = os.path.join(config.output_dir, \"eval.tf_record\")", "required by applicable law or agreed to in writing, software", "max_steps=num_train_steps) if config.do_eval: eval_examples = processor.get_dev_examples(config.data_dir) num_actual_eval_examples = len(eval_examples) if", "(ex_index, example) in enumerate(examples): if ex_index % 200 == 0:", "= processor.get_dev_examples(config.data_dir) num_actual_eval_examples = len(eval_examples) if config.use_tpu: # TPU requires", "= len(eval_examples) if config.use_tpu: # TPU requires a fixed batch", "these get a weight of 0.0). while len(eval_examples) % config.eval_batch_size", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "label in enumerate(label_list)} features = [] for (ex_index, example) in", "= 0 tf.logging.info(\"***** Predict results *****\") for (i, prediction) in", "example.text_b feature = tokenizer(**parameters) input_feature = InputFeatures( input_ids=feature['token_ids'], attention_mask=feature['attention_mask'], segment_ids=feature['token_type_ids'],", "with the License. 
# You may obtain a copy of", "seq_length=config.max_seq_length, is_training=True, drop_remainder=True) estimator.train(input_fn=train_input_fn, max_steps=num_train_steps) if config.do_eval: eval_examples = processor.get_dev_examples(config.data_dir)", "not config.do_predict: raise ValueError( \"At least one of `do_train`, `do_eval`", "batch size, or else examples # will get dropped. So", "%d\" % (ex_index, len(examples))) feature = convert_single_example(ex_index, example, label2id, max_seq_length,", "# will get dropped. So we pad with fake examples", "*****\") tf.logging.info(\" Num examples = %d\", len(train_examples)) tf.logging.info(\" Batch size", "the entire set. eval_steps = None # However, if running", "Example {example_index} ***************************') logger.info(example) logger.info(input_feature) logger.info('*************************** Example End ***************************') return", "through the entire set. eval_steps = None # However, if", "the License. \"\"\"BERT finetuning runner.\"\"\" from __future__ import annotations, absolute_import", "is not available, this will fall back to normal Estimator", "config.do_predict: predict_examples = processor.get_test_examples(config.data_dir) num_actual_predict_examples = len(predict_examples) if config.use_tpu: #", "for class_probability in probabilities) + \"\\n\" writer.write(output_line) num_written_lines += 1", "import PretrainedConfig from src.schema import ( InputExample, InputFeatures, Config )", "compliance with the License. 
# You may obtain a copy", "agreed to in writing, software # distributed under the License", "padding)\", len(eval_examples), num_actual_eval_examples, len(eval_examples) - num_actual_eval_examples) tf.logging.info(\" Batch size =", "config import create_logger logger = create_logger() def convert_single_example( example_index: int,", "parameters = { \"text\":example.text_a, \"add_special_tokens\":True, \"padding\":True, \"max_length\":max_seq_length, \"return_attention_mask\":True, \"return_token_type_ids\":True, \"return_length\":True,", "str(class_probability) for class_probability in probabilities) + \"\\n\" writer.write(output_line) num_written_lines +=", "distributed under the License is distributed on an \"AS IS\"", "tf.logging.info(\" Num steps = %d\", num_train_steps) train_input_fn = file_based_input_fn_builder( input_file=train_file,", "0 eval_steps = int(len(eval_examples) // config.eval_batch_size) eval_drop_remainder = True if", "Config ) from src.data_process import ( AgNewsDataProcessor ) from config", ") if example_index < 5: logger.info(f'*************************** Example {example_index} ***************************') logger.info(example)", "results *****\") for key in sorted(result.keys()): tf.logging.info(\" %s = %s\",", "Config): bert_config: BertConfig = BertConfig.from_pretrained(config.pretrained_model_name) bert_config.num_labels = config.num_labels model =", "tf.logging.info(\"***** Running training *****\") tf.logging.info(\" Num examples = %d\", len(train_examples))", "= example.text_b feature = tokenizer(**parameters) input_feature = InputFeatures( input_ids=feature['token_ids'], attention_mask=feature['attention_mask'],", "create_bert_for_sequence_classification_model(config: Config): bert_config: BertConfig = BertConfig.from_pretrained(config.pretrained_model_name) bert_config.num_labels = config.num_labels model", "raise ValueError( \"At least one of `do_train`, `do_eval` or `do_predict'", "// config.eval_batch_size) 
eval_drop_remainder = True if config.use_tpu else False eval_input_fn", "express or implied. # See the License for the specific", "%d padding)\", len(eval_examples), num_actual_eval_examples, len(eval_examples) - num_actual_eval_examples) tf.logging.info(\" Batch size", "except in compliance with the License. # You may obtain", "on the TPU, you will need to specify the #", "num_actual_eval_examples, len(eval_examples) - num_actual_eval_examples) tf.logging.info(\" Batch size = %d\", config.eval_batch_size)", "# limitations under the License. \"\"\"BERT finetuning runner.\"\"\" from __future__", "parameters['text_pair'] = example.text_b feature = tokenizer(**parameters) input_feature = InputFeatures( input_ids=feature['token_ids'],", "file_based_convert_examples_to_features( eval_examples, label_list, config.max_seq_length, tokenizer, eval_file) tf.logging.info(\"***** Running evaluation *****\")", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "from transformers import ( AutoTokenizer, BertTokenizer, BertForSequenceClassification, BertConfig, Trainer, TrainingArguments,", "if ex_index % 200 == 0: logger.info(\"Writing example %d of", "So we pad with fake examples which are ignored #", "not use this file except in compliance with the License.", "padding)\", len(predict_examples), num_actual_predict_examples, len(predict_examples) - num_actual_predict_examples) tf.logging.info(\" Batch size =", "BertForSequenceClassification, BertConfig, Trainer, TrainingArguments, PreTrainedTokenizer ) from transformers.configuration_utils import PretrainedConfig", "= SequenceClassificationTrainer( model=model, ) # If TPU is not available,", "writing, software # distributed under the License is distributed on", "# 根据不同的任务,处理不同的数据集 task_name = config.task_name.lower() if task_name not in processors:", "ex_index % 200 == 0: logger.info(\"Writing example %d of %d\"", "you may not use this file except in compliance with", "label2id, max_seq_length, tokenizer) 
def convert_single_example(
    example_index: int,
    example: InputExample,
    label2id: Dict[str, int],
    max_seq_length: int,
    tokenizer: BertTokenizer
) -> InputFeatures:
    """Converts a single `InputExample` into a single `InputFeatures`.

    :param example_index: index of the example; the first few examples are
        logged so the encoding can be inspected.
    :param example: raw example carrying ``text_a``, optional ``text_b`` and a label.
    :param label2id: mapping from label string to integer class id.
    :param max_seq_length: maximum tokenized sequence length.
    :param tokenizer: HuggingFace tokenizer used to encode the text.
    :return: the encoded `InputFeatures`.
    """
    parameters = {
        "text": example.text_a,
        "add_special_tokens": True,
        # NOTE(review): padding=True pads to the longest sequence in the batch,
        # which is a no-op for a single example; padding="max_length" is
        # probably what was intended — confirm against the data pipeline.
        "padding": True,
        "max_length": max_seq_length,
        "return_attention_mask": True,
        "return_token_type_ids": True,
        "return_length": True,
        "verbose": True
    }
    if example.text_b:
        parameters['text_pair'] = example.text_b
    feature = tokenizer(**parameters)
    input_feature = InputFeatures(
        # Fix: HuggingFace tokenizers expose the ids under "input_ids";
        # the original read feature['token_ids'], which raises KeyError.
        input_ids=feature['input_ids'],
        attention_mask=feature['attention_mask'],
        segment_ids=feature['token_type_ids'],
        label_id=label2id[example.label],
        is_real_example=True
    )
    if example_index < 5:
        # Log the first few encoded examples for manual inspection.
        logger.info(f'*************************** Example {example_index} ***************************')
        logger.info(example)
        logger.info(input_feature)
        logger.info('*************************** Example End ***************************')
    return input_feature
def create_bert_for_sequence_classification_model(config: Config):
    """Instantiate a `BertForSequenceClassification` from the configured
    pretrained model name, with the label count taken from `config`."""
    model_config: BertConfig = BertConfig.from_pretrained(config.pretrained_model_name)
    model_config.num_labels = config.num_labels
    return BertForSequenceClassification(model_config)
def create_model(config: Config):
    """Creates a classification model.

    Dispatches on `config.model_name`; a missing name raises `KeyError`.
    """
    factory_by_name = {
        "bert-for-sequence-classification": create_bert_for_sequence_classification_model,
    }
    factory = factory_by_name[config.model_name]
    return factory(config)
def convert_examples_to_features(
    examples,
    label_list: List[str],
    max_seq_length: int,
    tokenizer: PreTrainedTokenizer
):
    """Convert a set of `InputExample`s to a list of `InputFeatures`."""
    # Label ids are assigned by position in the label list.
    label2id = {name: position for position, name in enumerate(label_list)}
    features = []
    total = len(examples)
    for index, example in enumerate(examples):
        if index % 200 == 0:
            # Progress log every 200 examples.
            logger.info("Writing example %d of %d" % (index, total))
        features.append(
            convert_single_example(index, example, label2id, max_seq_length, tokenizer)
        )
    return features
class SequenceClassificationTrainer(Trainer):
    """`Trainer` subclass with an explicit loss hook for sequence classification."""

    def compute_loss(self, model, inputs, return_outputs=False):
        """Compute the classification loss for one batch.

        Fixes two defects in the original:
        - it popped "labels" out of `inputs` before the forward pass, so the
          model never received them and `outputs.loss` was `None`;
        - it ignored `return_outputs`, breaking the `Trainer.compute_loss`
          contract of returning `(loss, outputs)` when requested.

        :param model: the sequence-classification model.
        :param inputs: batch dict including "labels".
        :param return_outputs: when True, also return the model outputs.
        :return: the loss tensor, or `(loss, outputs)` if `return_outputs`.
        """
        # Keep "labels" in the inputs so the model computes its own loss.
        outputs = model(**inputs)
        loss = outputs.loss
        return (loss, outputs) if return_outputs else loss
def main():
    """Entry point: wire up the processor, tokenizer and model, then run the
    requested train / eval / predict phases.

    NOTE(review): this function mixes the `transformers` Trainer setup with
    leftover TF1 Estimator pipeline code. The names `tf`, `estimator`,
    `file_based_input_fn_builder`, `file_based_convert_examples_to_features`
    and `PaddingInputExample` are not defined or imported anywhere visible in
    this file — confirm where they are meant to come from. `bert_config`,
    `training_arguments` and `trainer` are built but never used afterwards.
    """
    # processors need to be updated
    processors = {
        'agnews-processor': AgNewsDataProcessor,
    }
    config: Config = Config.instance()
    if not config.do_train and not config.do_eval and not config.do_predict:
        raise ValueError(
            "At least one of `do_train`, `do_eval` or `do_predict' must be True.")
    bert_config = PretrainedConfig.from_pretrained(config.pretrained_model_name)
    # Pick the dataset processor for the configured task.
    task_name = config.task_name.lower()
    if task_name not in processors:
        raise ValueError("Task not found: %s" % (task_name))
    processor = processors[task_name]()
    label_list = processor.get_labels()
    tokenizer = AutoTokenizer.from_pretrained(config.pretrained_model_name)
    train_examples = None
    num_train_steps = None
    num_warmup_steps = None
    if config.do_train:
        train_examples: List[InputExample] = processor.get_train_examples(config.data_dir)
        # NOTE(review): chained assignment — train_dataset_loader ends up
        # holding the step count, not a data loader; this looks unfinished.
        train_dataset_loader = num_train_steps = int(
            len(train_examples) / config.train_batch_size * config.epochs
        )
        num_warmup_steps = int(num_train_steps * config.warmup_proportion)
    model = create_model(config=config)
    training_arguments = TrainingArguments(
        output_dir=config.output_dir,
        overwrite_output_dir=True,
    )
    trainer = SequenceClassificationTrainer(
        model=model,
    )
    # If TPU is not available, this will fall back to normal Estimator on CPU
    # or GPUs
    if config.do_train:
        train_file = os.path.join(config.output_dir, "train.tf_record")
        file_based_convert_examples_to_features(
            train_examples, label_list, config.max_seq_length, tokenizer, train_file)
        tf.logging.info("***** Running training *****")
        tf.logging.info("  Num examples = %d", len(train_examples))
        tf.logging.info("  Batch size = %d", config.train_batch_size)
        tf.logging.info("  Num steps = %d", num_train_steps)
        train_input_fn = file_based_input_fn_builder(
            input_file=train_file,
            seq_length=config.max_seq_length,
            is_training=True,
            drop_remainder=True)
        estimator.train(input_fn=train_input_fn, max_steps=num_train_steps)
    if config.do_eval:
        eval_examples = processor.get_dev_examples(config.data_dir)
        num_actual_eval_examples = len(eval_examples)
        if config.use_tpu:
            # TPU requires a fixed batch size for all batches, therefore the number
            # of examples must be a multiple of the batch size, or else examples
            # will get dropped. So we pad with fake examples which are ignored
            # later on. These do NOT count towards the metric (all tf.metrics
            # support a per-instance weight, and these get a weight of 0.0).
            while len(eval_examples) % config.eval_batch_size != 0:
                eval_examples.append(PaddingInputExample())
        eval_file = os.path.join(config.output_dir, "eval.tf_record")
        file_based_convert_examples_to_features(
            eval_examples, label_list, config.max_seq_length, tokenizer, eval_file)
        tf.logging.info("***** Running evaluation *****")
        tf.logging.info("  Num examples = %d (%d actual, %d padding)",
                        len(eval_examples), num_actual_eval_examples,
                        len(eval_examples) - num_actual_eval_examples)
        tf.logging.info("  Batch size = %d", config.eval_batch_size)
        # This tells the estimator to run through the entire set.
        eval_steps = None
        # However, if running eval on the TPU, you will need to specify the
        # number of steps.
        if config.use_tpu:
            assert len(eval_examples) % config.eval_batch_size == 0
            eval_steps = int(len(eval_examples) // config.eval_batch_size)
        eval_drop_remainder = True if config.use_tpu else False
        eval_input_fn = file_based_input_fn_builder(
            input_file=eval_file,
            seq_length=config.max_seq_length,
            is_training=False,
            drop_remainder=eval_drop_remainder)
        result = estimator.evaluate(input_fn=eval_input_fn, steps=eval_steps)
        output_eval_file = os.path.join(config.output_dir, "eval_results.txt")
        with tf.gfile.GFile(output_eval_file, "w") as writer:
            tf.logging.info("***** Eval results *****")
            for key in sorted(result.keys()):
                tf.logging.info("  %s = %s", key, str(result[key]))
                writer.write("%s = %s\n" % (key, str(result[key])))
    if config.do_predict:
        predict_examples = processor.get_test_examples(config.data_dir)
        num_actual_predict_examples = len(predict_examples)
        if config.use_tpu:
            # TPU requires a fixed batch size for all batches, therefore the number
            # of examples must be a multiple of the batch size, or else examples
            # will get dropped. So we pad with fake examples which are ignored
            # later on.
            while len(predict_examples) % config.predict_batch_size != 0:
                predict_examples.append(PaddingInputExample())
        predict_file = os.path.join(config.output_dir, "predict.tf_record")
        file_based_convert_examples_to_features(predict_examples, label_list,
                                                config.max_seq_length, tokenizer,
                                                predict_file)
        tf.logging.info("***** Running prediction*****")
        tf.logging.info("  Num examples = %d (%d actual, %d padding)",
                        len(predict_examples), num_actual_predict_examples,
                        len(predict_examples) - num_actual_predict_examples)
        tf.logging.info("  Batch size = %d", config.predict_batch_size)
        predict_drop_remainder = True if config.use_tpu else False
        predict_input_fn = file_based_input_fn_builder(
            input_file=predict_file,
            seq_length=config.max_seq_length,
            is_training=False,
            drop_remainder=predict_drop_remainder)
        result = estimator.predict(input_fn=predict_input_fn)
        output_predict_file = os.path.join(config.output_dir, "test_results.tsv")
        with tf.gfile.GFile(output_predict_file, "w") as writer:
            num_written_lines = 0
            tf.logging.info("***** Predict results *****")
            # Only the real (non-padding) examples are written out.
            for (i, prediction) in enumerate(result):
                probabilities = prediction["probabilities"]
                if i >= num_actual_predict_examples:
                    break
                output_line = "\t".join(
                    str(class_probability)
                    for class_probability in probabilities) + "\n"
                writer.write(output_line)
                num_written_lines += 1
        assert num_written_lines == num_actual_predict_examples


if __name__ == "__main__":
    main()
So we pad with fake examples which", "predict_file) tf.logging.info(\"***** Running prediction*****\") tf.logging.info(\" Num examples = %d (%d", "If TPU is not available, this will fall back to", "train_examples = None num_train_steps = None num_warmup_steps = None if", "\"\\n\" writer.write(output_line) num_written_lines += 1 assert num_written_lines == num_actual_predict_examples if", "src.data_process import ( AgNewsDataProcessor ) from config import create_logger logger", "num_warmup_steps = int(num_train_steps * config.warmup_proportion) model = create_model(config=config) training_arguments =", "PreTrainedTokenizer ) from transformers.configuration_utils import PretrainedConfig from src.schema import (", "input_file=train_file, seq_length=config.max_seq_length, is_training=True, drop_remainder=True) estimator.train(input_fn=train_input_fn, max_steps=num_train_steps) if config.do_eval: eval_examples =", "predict_examples = processor.get_test_examples(config.data_dir) num_actual_predict_examples = len(predict_examples) if config.use_tpu: # TPU", "num_actual_predict_examples, len(predict_examples) - num_actual_predict_examples) tf.logging.info(\" Batch size = %d\", config.predict_batch_size)", "config.max_seq_length, tokenizer, predict_file) tf.logging.info(\"***** Running prediction*****\") tf.logging.info(\" Num examples =", "TPU requires a fixed batch size for all batches, therefore", "typing import Dict, List from transformers import ( AutoTokenizer, BertTokenizer,", "Num examples = %d\", len(train_examples)) tf.logging.info(\" Batch size = %d\",", "a single `InputExample` into a single `InputFeatures`. 
example_index: 用于展示example中的前几例数据 \"\"\"", "= estimator.predict(input_fn=predict_input_fn) output_predict_file = os.path.join(config.output_dir, \"test_results.tsv\") with tf.gfile.GFile(output_predict_file, \"w\") as", "the batch size, or else examples # will get dropped.", "absolute_import import os from typing import Dict, List from transformers", "{ \"bert-for-sequence-classification\": create_bert_for_sequence_classification_model, } return models[config.model_name](config) def convert_examples_to_features( examples, label_list:", "Eval results *****\") for key in sorted(result.keys()): tf.logging.info(\" %s =", "eval on the TPU, you will need to specify the", "and these get a weight of 0.0). while len(eval_examples) %", "str(result[key])) writer.write(\"%s = %s\\n\" % (key, str(result[key]))) if config.do_predict: predict_examples", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "# number of steps. if config.use_tpu: assert len(eval_examples) % config.eval_batch_size", "5: logger.info(f'*************************** Example {example_index} ***************************') logger.info(example) logger.info(input_feature) logger.info('*************************** Example End", "model.\"\"\" models = { \"bert-for-sequence-classification\": create_bert_for_sequence_classification_model, } return models[config.model_name](config) def", "and not config.do_eval and not config.do_predict: raise ValueError( \"At least", "Unless required by applicable law or agreed to in writing,", "***************************') logger.info(example) logger.info(input_feature) logger.info('*************************** Example End ***************************') return input_feature def", "evaluation *****\") tf.logging.info(\" Num examples = %d (%d actual, %d", "\"\"\"Converts a single `InputExample` into a single `InputFeatures`. 
example_index: 用于展示example中的前几例数据", "and not config.do_predict: raise ValueError( \"At least one of `do_train`,", "\"\"\"Convert a set of `InputExample`s to a list of `InputFeatures`.\"\"\"", "per-instance weight, and these get a weight of 0.0). while", "the specific language governing permissions and # limitations under the", "example: InputExample, label2id: Dict[str, int], max_seq_length: int, tokenizer: BertTokenizer )", "this will fall back to normal Estimator on CPU #", "later on. These do NOT count towards the metric (all", "CPU # or GPUs if config.do_train: train_file = os.path.join(config.output_dir, \"train.tf_record\")", "ignored # later on. These do NOT count towards the", "available, this will fall back to normal Estimator on CPU", "are ignored # later on. These do NOT count towards", "config.eval_batch_size) # This tells the estimator to run through the", "with fake examples which are ignored # later on. while", "applicable law or agreed to in writing, software # distributed", "fixed batch size for all batches, therefore the number #", "seq_length=config.max_seq_length, is_training=False, drop_remainder=predict_drop_remainder) result = estimator.predict(input_fn=predict_input_fn) output_predict_file = os.path.join(config.output_dir, \"test_results.tsv\")", "tokenizer: BertTokenizer ) -> InputFeatures: \"\"\"Converts a single `InputExample` into", "def create_bert_for_sequence_classification_model(config: Config): bert_config: BertConfig = BertConfig.from_pretrained(config.pretrained_model_name) bert_config.num_labels = config.num_labels", "or else examples # will get dropped. 
So we pad", "output_predict_file = os.path.join(config.output_dir, \"test_results.tsv\") with tf.gfile.GFile(output_predict_file, \"w\") as writer: num_written_lines", "BertTokenizer, BertForSequenceClassification, BertConfig, Trainer, TrainingArguments, PreTrainedTokenizer ) from transformers.configuration_utils import", "file_based_convert_examples_to_features( train_examples, label_list, config.max_seq_length, tokenizer, train_file) tf.logging.info(\"***** Running training *****\")", "while len(eval_examples) % config.eval_batch_size != 0: eval_examples.append(PaddingInputExample()) eval_file = os.path.join(config.output_dir,", "tf.logging.info(\"***** Predict results *****\") for (i, prediction) in enumerate(result): probabilities", "config.eval_batch_size != 0: eval_examples.append(PaddingInputExample()) eval_file = os.path.join(config.output_dir, \"eval.tf_record\") file_based_convert_examples_to_features( eval_examples,", "PreTrainedTokenizer ): \"\"\"Convert a set of `InputExample`s to a list", "Dict[str, int], max_seq_length: int, tokenizer: BertTokenizer ) -> InputFeatures: \"\"\"Converts", "prediction*****\") tf.logging.info(\" Num examples = %d (%d actual, %d padding)\",", "def create_model(config: Config): \"\"\"Creates a classification model.\"\"\" models = {", "else examples # will get dropped. So we pad with", "in writing, software # distributed under the License is distributed", "label_list = processor.get_labels() tokenizer = AutoTokenizer.from_pretrained(config.pretrained_model_name) train_examples = None num_train_steps", "on CPU # or GPUs if config.do_train: train_file = os.path.join(config.output_dir,", "config.predict_batch_size) predict_drop_remainder = True if config.use_tpu else False predict_input_fn =", "tf.logging.info(\"***** Running prediction*****\") tf.logging.info(\" Num examples = %d (%d actual,", "limitations under the License. 
\"\"\"BERT finetuning runner.\"\"\" from __future__ import", "= prediction[\"probabilities\"] if i >= num_actual_predict_examples: break output_line = \"\\t\".join(", "= int( len(train_examples) / config.train_batch_size * config.epochs ) num_warmup_steps =", "= len(predict_examples) if config.use_tpu: # TPU requires a fixed batch", "need to specify the # number of steps. if config.use_tpu:", "False predict_input_fn = file_based_input_fn_builder( input_file=predict_file, seq_length=config.max_seq_length, is_training=False, drop_remainder=predict_drop_remainder) result =", "train_input_fn = file_based_input_fn_builder( input_file=train_file, seq_length=config.max_seq_length, is_training=True, drop_remainder=True) estimator.train(input_fn=train_input_fn, max_steps=num_train_steps) if", "Batch size = %d\", config.predict_batch_size) predict_drop_remainder = True if config.use_tpu", "num_warmup_steps = None if config.do_train: train_examples: List[InputExample] = processor.get_train_examples(config.data_dir) train_dataset_loader", "tf.gfile.GFile(output_eval_file, \"w\") as writer: tf.logging.info(\"***** Eval results *****\") for key", "config.use_tpu: # TPU requires a fixed batch size for all", "(i, prediction) in enumerate(result): probabilities = prediction[\"probabilities\"] if i >=", "% config.predict_batch_size != 0: predict_examples.append(PaddingInputExample()) predict_file = os.path.join(config.output_dir, \"predict.tf_record\") file_based_convert_examples_to_features(predict_examples,", "set. eval_steps = None # However, if running eval on", "the number # of examples must be a multiple of", "BertConfig.from_pretrained(config.pretrained_model_name) bert_config.num_labels = config.num_labels model = BertForSequenceClassification(bert_config) return model def", "% config.eval_batch_size == 0 eval_steps = int(len(eval_examples) // config.eval_batch_size) eval_drop_remainder", "Authors. 
# # Licensed under the Apache License, Version 2.0", "assert len(eval_examples) % config.eval_batch_size == 0 eval_steps = int(len(eval_examples) //", "outputs.loss def main(): # processors need to be updated processors", "will need to specify the # number of steps. if", "os from typing import Dict, List from transformers import (", "if config.use_tpu else False predict_input_fn = file_based_input_fn_builder( input_file=predict_file, seq_length=config.max_seq_length, is_training=False,", "a single `InputFeatures`. example_index: 用于展示example中的前几例数据 \"\"\" parameters = { \"text\":example.text_a,", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "in enumerate(examples): if ex_index % 200 == 0: logger.info(\"Writing example", "tf.logging.info(\" Batch size = %d\", config.train_batch_size) tf.logging.info(\" Num steps =", "License, Version 2.0 (the \"License\"); # you may not use", "import create_logger logger = create_logger() def convert_single_example( example_index: int, example:", "logger.info('*************************** Example End ***************************') return input_feature def create_bert_for_sequence_classification_model(config: Config): bert_config:", "if config.do_train: train_examples: List[InputExample] = processor.get_train_examples(config.data_dir) train_dataset_loader = num_train_steps =", "predict_examples.append(PaddingInputExample()) predict_file = os.path.join(config.output_dir, \"predict.tf_record\") file_based_convert_examples_to_features(predict_examples, label_list, config.max_seq_length, tokenizer, predict_file)", "# You may obtain a copy of the License at", "be True.\") bert_config = PretrainedConfig.from_pretrained(config.pretrained_model_name) # 根据不同的任务,处理不同的数据集 task_name = config.task_name.lower()", "to be updated processors = { 'agnews-processor': AgNewsDataProcessor, } config:", "if example.text_b: parameters['text_pair'] = example.text_b feature = tokenizer(**parameters) input_feature =", "NOT count towards the metric 
(all tf.metrics # support a", "example_index: 用于展示example中的前几例数据 \"\"\" parameters = { \"text\":example.text_a, \"add_special_tokens\":True, \"padding\":True, \"max_length\":max_seq_length,", "(ex_index, len(examples))) feature = convert_single_example(ex_index, example, label2id, max_seq_length, tokenizer) features.append(feature)", "features.append(feature) return features class SequenceClassificationTrainer(Trainer): def compute_loss(self, model, inputs, return_outputs=False):", "= AutoTokenizer.from_pretrained(config.pretrained_model_name) train_examples = None num_train_steps = None num_warmup_steps =", "tokenizer, eval_file) tf.logging.info(\"***** Running evaluation *****\") tf.logging.info(\" Num examples =", "config.use_tpu else False eval_input_fn = file_based_input_fn_builder( input_file=eval_file, seq_length=config.max_seq_length, is_training=False, drop_remainder=eval_drop_remainder)", "Google AI Language Team Authors. # # Licensed under the", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "- num_actual_predict_examples) tf.logging.info(\" Batch size = %d\", config.predict_batch_size) predict_drop_remainder =", "for index, label in enumerate(label_list)} features = [] for (ex_index,", "InputExample, InputFeatures, Config ) from src.data_process import ( AgNewsDataProcessor )", "if running eval on the TPU, you will need to", "is_training=True, drop_remainder=True) estimator.train(input_fn=train_input_fn, max_steps=num_train_steps) if config.do_eval: eval_examples = processor.get_dev_examples(config.data_dir) num_actual_eval_examples", "is_training=False, drop_remainder=predict_drop_remainder) result = estimator.predict(input_fn=predict_input_fn) output_predict_file = os.path.join(config.output_dir, \"test_results.tsv\") with", "Batch size = %d\", config.train_batch_size) tf.logging.info(\" Num steps = %d\",", "will fall back to normal Estimator on CPU # or", "writer.write(\"%s = %s\\n\" % (key, str(result[key]))) if 
config.do_predict: predict_examples =", "num_train_steps = None num_warmup_steps = None if config.do_train: train_examples: List[InputExample]", "= config.num_labels model = BertForSequenceClassification(bert_config) return model def create_model(config: Config):", "raise ValueError(\"Task not found: %s\" % (task_name)) processor = processors[task_name]()", "single `InputExample` into a single `InputFeatures`. example_index: 用于展示example中的前几例数据 \"\"\" parameters", "None num_warmup_steps = None if config.do_train: train_examples: List[InputExample] = processor.get_train_examples(config.data_dir)", "the License for the specific language governing permissions and #", "be a multiple of the batch size, or else examples", "TPU, you will need to specify the # number of", "Apache License, Version 2.0 (the \"License\"); # you may not", "a weight of 0.0). while len(eval_examples) % config.eval_batch_size != 0:", "either express or implied. # See the License for the", "tf.logging.info(\" Num examples = %d (%d actual, %d padding)\", len(predict_examples),", "\"return_attention_mask\":True, \"return_token_type_ids\":True, \"return_length\":True, \"verbose\":True } if example.text_b: parameters['text_pair'] = example.text_b", "SequenceClassificationTrainer( model=model, ) # If TPU is not available, this", "examples which are ignored # later on. while len(predict_examples) %", "= %d\", config.train_batch_size) tf.logging.info(\" Num steps = %d\", num_train_steps) train_input_fn", "entire set. eval_steps = None # However, if running eval", "set of `InputExample`s to a list of `InputFeatures`.\"\"\" label2id =", "InputFeatures: \"\"\"Converts a single `InputExample` into a single `InputFeatures`. 
example_index:", "Trainer, TrainingArguments, PreTrainedTokenizer ) from transformers.configuration_utils import PretrainedConfig from src.schema", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "= %d (%d actual, %d padding)\", len(eval_examples), num_actual_eval_examples, len(eval_examples) -", "Language Team Authors. # # Licensed under the Apache License,", "not config.do_eval and not config.do_predict: raise ValueError( \"At least one", "= os.path.join(config.output_dir, \"eval_results.txt\") with tf.gfile.GFile(output_eval_file, \"w\") as writer: tf.logging.info(\"***** Eval", "False eval_input_fn = file_based_input_fn_builder( input_file=eval_file, seq_length=config.max_seq_length, is_training=False, drop_remainder=eval_drop_remainder) result =", "from __future__ import annotations, absolute_import import os from typing import", "\"verbose\":True } if example.text_b: parameters['text_pair'] = example.text_b feature = tokenizer(**parameters)", ") num_warmup_steps = int(num_train_steps * config.warmup_proportion) model = create_model(config=config) training_arguments", "with fake examples which are ignored # later on. These", "estimator to run through the entire set. 
eval_steps = None", "drop_remainder=eval_drop_remainder) result = estimator.evaluate(input_fn=eval_input_fn, steps=eval_steps) output_eval_file = os.path.join(config.output_dir, \"eval_results.txt\") with", "config.epochs ) num_warmup_steps = int(num_train_steps * config.warmup_proportion) model = create_model(config=config)", "create_logger() def convert_single_example( example_index: int, example: InputExample, label2id: Dict[str, int],", "logger.info(input_feature) logger.info('*************************** Example End ***************************') return input_feature def create_bert_for_sequence_classification_model(config: Config):", "must be a multiple of the batch size, or else", "!= 0: eval_examples.append(PaddingInputExample()) eval_file = os.path.join(config.output_dir, \"eval.tf_record\") file_based_convert_examples_to_features( eval_examples, label_list,", "* config.warmup_proportion) model = create_model(config=config) training_arguments = TrainingArguments( output_dir=config.output_dir, overwrite_output_dir=True,", "= { \"bert-for-sequence-classification\": create_bert_for_sequence_classification_model, } return models[config.model_name](config) def convert_examples_to_features( examples,", "# This tells the estimator to run through the entire", "in sorted(result.keys()): tf.logging.info(\" %s = %s\", key, str(result[key])) writer.write(\"%s =", ">= num_actual_predict_examples: break output_line = \"\\t\".join( str(class_probability) for class_probability in", "file_based_input_fn_builder( input_file=predict_file, seq_length=config.max_seq_length, is_training=False, drop_remainder=predict_drop_remainder) result = estimator.predict(input_fn=predict_input_fn) output_predict_file =", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "from src.schema import ( InputExample, InputFeatures, Config ) from src.data_process", "least one of `do_train`, `do_eval` or `do_predict' must be True.\")", "None # However, if running eval on the 
TPU, you", "steps. if config.use_tpu: assert len(eval_examples) % config.eval_batch_size == 0 eval_steps", "# or GPUs if config.do_train: train_file = os.path.join(config.output_dir, \"train.tf_record\") file_based_convert_examples_to_features(", "(task_name)) processor = processors[task_name]() label_list = processor.get_labels() tokenizer = AutoTokenizer.from_pretrained(config.pretrained_model_name)", "= None num_warmup_steps = None if config.do_train: train_examples: List[InputExample] =", "(key, str(result[key]))) if config.do_predict: predict_examples = processor.get_test_examples(config.data_dir) num_actual_predict_examples = len(predict_examples)", "convert_single_example( example_index: int, example: InputExample, label2id: Dict[str, int], max_seq_length: int,", "(%d actual, %d padding)\", len(eval_examples), num_actual_eval_examples, len(eval_examples) - num_actual_eval_examples) tf.logging.info(\"", "List from transformers import ( AutoTokenizer, BertTokenizer, BertForSequenceClassification, BertConfig, Trainer,", "tokenizer, predict_file) tf.logging.info(\"***** Running prediction*****\") tf.logging.info(\" Num examples = %d", "!= 0: predict_examples.append(PaddingInputExample()) predict_file = os.path.join(config.output_dir, \"predict.tf_record\") file_based_convert_examples_to_features(predict_examples, label_list, config.max_seq_length,", "{label: index for index, label in enumerate(label_list)} features = []", "config.do_eval and not config.do_predict: raise ValueError( \"At least one of", "Dict, List from transformers import ( AutoTokenizer, BertTokenizer, BertForSequenceClassification, BertConfig,", "max_seq_length: int, tokenizer: PreTrainedTokenizer ): \"\"\"Convert a set of `InputExample`s", "task_name = config.task_name.lower() if task_name not in processors: raise ValueError(\"Task", "estimator.predict(input_fn=predict_input_fn) output_predict_file = os.path.join(config.output_dir, \"test_results.tsv\") with tf.gfile.GFile(output_predict_file, 
\"w\") as writer:", "convert_examples_to_features( examples, label_list: List[str], max_seq_length: int, tokenizer: PreTrainedTokenizer ): \"\"\"Convert", "= inputs.pop(\"labels\") outputs = model(**inputs) return outputs.loss def main(): #", "1 assert num_written_lines == num_actual_predict_examples if __name__ == \"__main__\": main()", "config.use_tpu: assert len(eval_examples) % config.eval_batch_size == 0 eval_steps = int(len(eval_examples)", "\"License\"); # you may not use this file except in", "= tokenizer(**parameters) input_feature = InputFeatures( input_ids=feature['token_ids'], attention_mask=feature['attention_mask'], segment_ids=feature['token_type_ids'], label_id=label2id[example.label], is_real_example=True", "True.\") bert_config = PretrainedConfig.from_pretrained(config.pretrained_model_name) # 根据不同的任务,处理不同的数据集 task_name = config.task_name.lower() if", "Num steps = %d\", num_train_steps) train_input_fn = file_based_input_fn_builder( input_file=train_file, seq_length=config.max_seq_length,", "a set of `InputExample`s to a list of `InputFeatures`.\"\"\" label2id", "= create_model(config=config) training_arguments = TrainingArguments( output_dir=config.output_dir, overwrite_output_dir=True, ) trainer =", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "config.warmup_proportion) model = create_model(config=config) training_arguments = TrainingArguments( output_dir=config.output_dir, overwrite_output_dir=True, )", "str(result[key]))) if config.do_predict: predict_examples = processor.get_test_examples(config.data_dir) num_actual_predict_examples = len(predict_examples) if", "count towards the metric (all tf.metrics # support a per-instance", "Estimator on CPU # or GPUs if config.do_train: train_file =", "`do_predict' must be True.\") bert_config = PretrainedConfig.from_pretrained(config.pretrained_model_name) # 根据不同的任务,处理不同的数据集 task_name", "we pad with fake examples which are ignored # later", "get a weight of 0.0). 
while len(eval_examples) % config.eval_batch_size !=", "# distributed under the License is distributed on an \"AS", "% config.eval_batch_size != 0: eval_examples.append(PaddingInputExample()) eval_file = os.path.join(config.output_dir, \"eval.tf_record\") file_based_convert_examples_to_features(", "= create_logger() def convert_single_example( example_index: int, example: InputExample, label2id: Dict[str,", "BertForSequenceClassification(bert_config) return model def create_model(config: Config): \"\"\"Creates a classification model.\"\"\"", "# Unless required by applicable law or agreed to in", "logger.info(example) logger.info(input_feature) logger.info('*************************** Example End ***************************') return input_feature def create_bert_for_sequence_classification_model(config:", "is_training=False, drop_remainder=eval_drop_remainder) result = estimator.evaluate(input_fn=eval_input_fn, steps=eval_steps) output_eval_file = os.path.join(config.output_dir, \"eval_results.txt\")", "as writer: tf.logging.info(\"***** Eval results *****\") for key in sorted(result.keys()):", "config.do_train and not config.do_eval and not config.do_predict: raise ValueError( \"At", "create_logger logger = create_logger() def convert_single_example( example_index: int, example: InputExample,", "not available, this will fall back to normal Estimator on", "\"At least one of `do_train`, `do_eval` or `do_predict' must be", "def main(): # processors need to be updated processors =", "which are ignored # later on. 
while len(predict_examples) % config.predict_batch_size", "actual, %d padding)\", len(predict_examples), num_actual_predict_examples, len(predict_examples) - num_actual_predict_examples) tf.logging.info(\" Batch", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "None num_train_steps = None num_warmup_steps = None if config.do_train: train_examples:", "create_bert_for_sequence_classification_model, } return models[config.model_name](config) def convert_examples_to_features( examples, label_list: List[str], max_seq_length:", "= %d\", len(train_examples)) tf.logging.info(\" Batch size = %d\", config.train_batch_size) tf.logging.info(\"", "batches, therefore the number # of examples must be a", "label_list, config.max_seq_length, tokenizer, predict_file) tf.logging.info(\"***** Running prediction*****\") tf.logging.info(\" Num examples", "as writer: num_written_lines = 0 tf.logging.info(\"***** Predict results *****\") for", "config.do_eval: eval_examples = processor.get_dev_examples(config.data_dir) num_actual_eval_examples = len(eval_examples) if config.use_tpu: #", "You may obtain a copy of the License at #", "-> InputFeatures: \"\"\"Converts a single `InputExample` into a single `InputFeatures`.", "enumerate(examples): if ex_index % 200 == 0: logger.info(\"Writing example %d", ") from src.data_process import ( AgNewsDataProcessor ) from config import", "%s = %s\", key, str(result[key])) writer.write(\"%s = %s\\n\" % (key,", "writer: tf.logging.info(\"***** Eval results *****\") for key in sorted(result.keys()): tf.logging.info(\"", "runner.\"\"\" from __future__ import annotations, absolute_import import os from typing", "int(num_train_steps * config.warmup_proportion) model = create_model(config=config) training_arguments = TrainingArguments( output_dir=config.output_dir,", "while len(predict_examples) % config.predict_batch_size != 0: predict_examples.append(PaddingInputExample()) predict_file = os.path.join(config.output_dir,", "need to be updated 
processors = { 'agnews-processor': AgNewsDataProcessor, }", "input_file=eval_file, seq_length=config.max_seq_length, is_training=False, drop_remainder=eval_drop_remainder) result = estimator.evaluate(input_fn=eval_input_fn, steps=eval_steps) output_eval_file =", "% (ex_index, len(examples))) feature = convert_single_example(ex_index, example, label2id, max_seq_length, tokenizer)", "%s\" % (task_name)) processor = processors[task_name]() label_list = processor.get_labels() tokenizer", "the Apache License, Version 2.0 (the \"License\"); # you may", "get dropped. So we pad with fake examples which are", "example) in enumerate(examples): if ex_index % 200 == 0: logger.info(\"Writing", "model, inputs, return_outputs=False): labels = inputs.pop(\"labels\") outputs = model(**inputs) return", "These do NOT count towards the metric (all tf.metrics #", "%d (%d actual, %d padding)\", len(predict_examples), num_actual_predict_examples, len(predict_examples) - num_actual_predict_examples)", "*****\") for key in sorted(result.keys()): tf.logging.info(\" %s = %s\", key," ]
[ "* FROM charactercreator_character LIMIT 10').fetchall() print(characters) create_character_table_query = ''' CREATE", "import os import psycopg2 from dotenv import load_dotenv load_dotenv() DB_NAME2", "DB_NAME2 = os.getenv(\"DB_NAME3\") DB_USER2 = os.getenv(\"DB_USER3\") DB_PASS2 = os.getenv(\"DB_PASS3\") DB_HOST2", "import load_dotenv load_dotenv() DB_NAME2 = os.getenv(\"DB_NAME3\") DB_USER2 = os.getenv(\"DB_USER3\") DB_PASS2", "cursor.execute(create_character_table_query) conn.commit() for character in characters: insert_query = f''' INSERT", "IF NOT EXISTS rpg_characters ( character_id SERIAL PRIMARY KEY, name", "psycopg2 from dotenv import load_dotenv load_dotenv() DB_NAME2 = os.getenv(\"DB_NAME3\") DB_USER2", "VARCHAR(30), level INT, exp INT, hp INT, strength INT, intelligence", "import psycopg2 from dotenv import load_dotenv load_dotenv() DB_NAME2 = os.getenv(\"DB_NAME3\")", "''' cursor.execute(create_character_table_query) conn.commit() for character in characters: insert_query = f'''", "INT, dexterity INT, wisdom INT ) ''' cursor.execute(create_character_table_query) conn.commit() for", "FROM charactercreator_character LIMIT 10').fetchall() print(characters) create_character_table_query = ''' CREATE TABLE", "DB_HOST2 = os.getenv(\"DB_HOST3\") conn = psycopg2.connect(dbname=DB_NAME2, user=DB_USER2, password=<PASSWORD>, host=DB_HOST2) cursor", "= os.getenv(\"DB_PASS3\") DB_HOST2 = os.getenv(\"DB_HOST3\") conn = psycopg2.connect(dbname=DB_NAME2, user=DB_USER2, password=<PASSWORD>,", "hp, strength, intelligence, dexterity, wisdom) VALUES {character} ''' cursor.execute(insert_query) conn.commit()", "conn = psycopg2.connect(dbname=DB_NAME2, user=DB_USER2, password=<PASSWORD>, host=DB_HOST2) cursor = conn.cursor() sl_conn", "dotenv import load_dotenv load_dotenv() DB_NAME2 = os.getenv(\"DB_NAME3\") DB_USER2 = os.getenv(\"DB_USER3\")", "sl_conn.cursor() characters = sl_cursor.execute('SELECT * FROM charactercreator_character LIMIT 10').fetchall() 
print(characters)", ") ''' cursor.execute(create_character_table_query) conn.commit() for character in characters: insert_query =", "DB_USER2 = os.getenv(\"DB_USER3\") DB_PASS2 = os.getenv(\"DB_PASS3\") DB_HOST2 = os.getenv(\"DB_HOST3\") conn", "INT, hp INT, strength INT, intelligence INT, dexterity INT, wisdom", "= f''' INSERT INTO rpg_characters (character_id, name, level, exp, hp,", "rpg_characters ( character_id SERIAL PRIMARY KEY, name VARCHAR(30), level INT,", "= sqlite3.connect(\"rpg_db.sqlite3\") sl_cursor = sl_conn.cursor() characters = sl_cursor.execute('SELECT * FROM", "= os.getenv(\"DB_USER3\") DB_PASS2 = os.getenv(\"DB_PASS3\") DB_HOST2 = os.getenv(\"DB_HOST3\") conn =", "sl_cursor = sl_conn.cursor() characters = sl_cursor.execute('SELECT * FROM charactercreator_character LIMIT", "INT, exp INT, hp INT, strength INT, intelligence INT, dexterity", "sqlite3.connect(\"rpg_db.sqlite3\") sl_cursor = sl_conn.cursor() characters = sl_cursor.execute('SELECT * FROM charactercreator_character", "<reponame>TobyChen320/DS-Unit-3-Sprint-2-SQL-and-Databases import sqlite3 import os import psycopg2 from dotenv import", "= os.getenv(\"DB_NAME3\") DB_USER2 = os.getenv(\"DB_USER3\") DB_PASS2 = os.getenv(\"DB_PASS3\") DB_HOST2 =", "os.getenv(\"DB_NAME3\") DB_USER2 = os.getenv(\"DB_USER3\") DB_PASS2 = os.getenv(\"DB_PASS3\") DB_HOST2 = os.getenv(\"DB_HOST3\")", "sqlite3 import os import psycopg2 from dotenv import load_dotenv load_dotenv()", "dexterity INT, wisdom INT ) ''' cursor.execute(create_character_table_query) conn.commit() for character", "create_character_table_query = ''' CREATE TABLE IF NOT EXISTS rpg_characters (", "characters = sl_cursor.execute('SELECT * FROM charactercreator_character LIMIT 10').fetchall() print(characters) create_character_table_query", "INSERT INTO rpg_characters (character_id, name, level, exp, hp, strength, intelligence,", "load_dotenv() DB_NAME2 = os.getenv(\"DB_NAME3\") DB_USER2 = os.getenv(\"DB_USER3\") DB_PASS2 = 
os.getenv(\"DB_PASS3\")", "character in characters: insert_query = f''' INSERT INTO rpg_characters (character_id,", "level INT, exp INT, hp INT, strength INT, intelligence INT,", "CREATE TABLE IF NOT EXISTS rpg_characters ( character_id SERIAL PRIMARY", "10').fetchall() print(characters) create_character_table_query = ''' CREATE TABLE IF NOT EXISTS", "TABLE IF NOT EXISTS rpg_characters ( character_id SERIAL PRIMARY KEY,", "KEY, name VARCHAR(30), level INT, exp INT, hp INT, strength", "INT, strength INT, intelligence INT, dexterity INT, wisdom INT )", "for character in characters: insert_query = f''' INSERT INTO rpg_characters", "= conn.cursor() sl_conn = sqlite3.connect(\"rpg_db.sqlite3\") sl_cursor = sl_conn.cursor() characters =", "user=DB_USER2, password=<PASSWORD>, host=DB_HOST2) cursor = conn.cursor() sl_conn = sqlite3.connect(\"rpg_db.sqlite3\") sl_cursor", "= os.getenv(\"DB_HOST3\") conn = psycopg2.connect(dbname=DB_NAME2, user=DB_USER2, password=<PASSWORD>, host=DB_HOST2) cursor =", "rpg_characters (character_id, name, level, exp, hp, strength, intelligence, dexterity, wisdom)", "INT, wisdom INT ) ''' cursor.execute(create_character_table_query) conn.commit() for character in", "wisdom INT ) ''' cursor.execute(create_character_table_query) conn.commit() for character in characters:", "name VARCHAR(30), level INT, exp INT, hp INT, strength INT,", "load_dotenv load_dotenv() DB_NAME2 = os.getenv(\"DB_NAME3\") DB_USER2 = os.getenv(\"DB_USER3\") DB_PASS2 =", "level, exp, hp, strength, intelligence, dexterity, wisdom) VALUES {character} '''", "SERIAL PRIMARY KEY, name VARCHAR(30), level INT, exp INT, hp", "sl_conn = sqlite3.connect(\"rpg_db.sqlite3\") sl_cursor = sl_conn.cursor() characters = sl_cursor.execute('SELECT *", "NOT EXISTS rpg_characters ( character_id SERIAL PRIMARY KEY, name VARCHAR(30),", "( character_id SERIAL PRIMARY KEY, name VARCHAR(30), level INT, exp", "hp INT, strength INT, intelligence INT, dexterity INT, wisdom INT", "= ''' CREATE TABLE IF 
NOT EXISTS rpg_characters ( character_id", "EXISTS rpg_characters ( character_id SERIAL PRIMARY KEY, name VARCHAR(30), level", "characters: insert_query = f''' INSERT INTO rpg_characters (character_id, name, level,", "insert_query = f''' INSERT INTO rpg_characters (character_id, name, level, exp,", "os.getenv(\"DB_USER3\") DB_PASS2 = os.getenv(\"DB_PASS3\") DB_HOST2 = os.getenv(\"DB_HOST3\") conn = psycopg2.connect(dbname=DB_NAME2,", "exp INT, hp INT, strength INT, intelligence INT, dexterity INT,", "name, level, exp, hp, strength, intelligence, dexterity, wisdom) VALUES {character}", "INT ) ''' cursor.execute(create_character_table_query) conn.commit() for character in characters: insert_query", "INT, intelligence INT, dexterity INT, wisdom INT ) ''' cursor.execute(create_character_table_query)", "os.getenv(\"DB_HOST3\") conn = psycopg2.connect(dbname=DB_NAME2, user=DB_USER2, password=<PASSWORD>, host=DB_HOST2) cursor = conn.cursor()", "from dotenv import load_dotenv load_dotenv() DB_NAME2 = os.getenv(\"DB_NAME3\") DB_USER2 =", "strength, intelligence, dexterity, wisdom) VALUES {character} ''' cursor.execute(insert_query) conn.commit() cursor.close()", "= sl_conn.cursor() characters = sl_cursor.execute('SELECT * FROM charactercreator_character LIMIT 10').fetchall()", "LIMIT 10').fetchall() print(characters) create_character_table_query = ''' CREATE TABLE IF NOT", "sl_cursor.execute('SELECT * FROM charactercreator_character LIMIT 10').fetchall() print(characters) create_character_table_query = '''", "intelligence, dexterity, wisdom) VALUES {character} ''' cursor.execute(insert_query) conn.commit() cursor.close() conn.close()", "import sqlite3 import os import psycopg2 from dotenv import load_dotenv", "host=DB_HOST2) cursor = conn.cursor() sl_conn = sqlite3.connect(\"rpg_db.sqlite3\") sl_cursor = sl_conn.cursor()", "= sl_cursor.execute('SELECT * FROM charactercreator_character LIMIT 10').fetchall() print(characters) create_character_table_query =", "conn.cursor() 
sl_conn = sqlite3.connect(\"rpg_db.sqlite3\") sl_cursor = sl_conn.cursor() characters = sl_cursor.execute('SELECT", "password=<PASSWORD>, host=DB_HOST2) cursor = conn.cursor() sl_conn = sqlite3.connect(\"rpg_db.sqlite3\") sl_cursor =", "(character_id, name, level, exp, hp, strength, intelligence, dexterity, wisdom) VALUES", "print(characters) create_character_table_query = ''' CREATE TABLE IF NOT EXISTS rpg_characters", "INTO rpg_characters (character_id, name, level, exp, hp, strength, intelligence, dexterity,", "psycopg2.connect(dbname=DB_NAME2, user=DB_USER2, password=<PASSWORD>, host=DB_HOST2) cursor = conn.cursor() sl_conn = sqlite3.connect(\"rpg_db.sqlite3\")", "f''' INSERT INTO rpg_characters (character_id, name, level, exp, hp, strength,", "''' CREATE TABLE IF NOT EXISTS rpg_characters ( character_id SERIAL", "os.getenv(\"DB_PASS3\") DB_HOST2 = os.getenv(\"DB_HOST3\") conn = psycopg2.connect(dbname=DB_NAME2, user=DB_USER2, password=<PASSWORD>, host=DB_HOST2)", "strength INT, intelligence INT, dexterity INT, wisdom INT ) '''", "conn.commit() for character in characters: insert_query = f''' INSERT INTO", "cursor = conn.cursor() sl_conn = sqlite3.connect(\"rpg_db.sqlite3\") sl_cursor = sl_conn.cursor() characters", "character_id SERIAL PRIMARY KEY, name VARCHAR(30), level INT, exp INT,", "exp, hp, strength, intelligence, dexterity, wisdom) VALUES {character} ''' cursor.execute(insert_query)", "intelligence INT, dexterity INT, wisdom INT ) ''' cursor.execute(create_character_table_query) conn.commit()", "in characters: insert_query = f''' INSERT INTO rpg_characters (character_id, name,", "PRIMARY KEY, name VARCHAR(30), level INT, exp INT, hp INT,", "= psycopg2.connect(dbname=DB_NAME2, user=DB_USER2, password=<PASSWORD>, host=DB_HOST2) cursor = conn.cursor() sl_conn =", "os import psycopg2 from dotenv import load_dotenv load_dotenv() DB_NAME2 =", "charactercreator_character LIMIT 10').fetchall() print(characters) create_character_table_query = ''' CREATE TABLE 
IF", "DB_PASS2 = os.getenv(\"DB_PASS3\") DB_HOST2 = os.getenv(\"DB_HOST3\") conn = psycopg2.connect(dbname=DB_NAME2, user=DB_USER2," ]
[ "|lastvsscore2=\\n' prize_pool += '|tbd |lastvs3= |lastscore3= |lastvsscore3=\\n' prize_pool += '|tbd", "up the D and W that way instead # Default", "key=itemgetter(4, 0)) return teams def create_participants(data, bw_players, bw_teams, dynamic=[], sort_place=True):", "match_line += '|date=\\n' if 'teamID' in match['top']: team_top = bw_teams.get_team_info(teams[match['top']['teamID']]['persistentTeamID'],", "'|image=' + team_info['image'] + '\\n' for idx, player in enumerate(data['teams'][team[0]]['players']):", "if not match_line: continue try: rounds[str(match['roundNumber'])].append(match_line) except KeyError: rounds[str(match['roundNumber'])] =", "'|map4=' + '\\n' sidebar += '|map5=' + '\\n' sidebar +=", "teams_table += '\\n' # teams_table += '|c= |cflag=\\n' # teams_table", "handle double elimination brackets # set up team number trackers", "import battlefy_data import battlefy_wiki_linkings from datetime import datetime from operator", "# {'tab_name': 'Top 16', # 'count': 16}, # {'tab_name': 'Top", "'D' if match['matchType'] == 'winner': round_match_offset = -2 * round_max_win_match_count[match['roundNumber']", "'{{MatchMaps\\n' match_line += '|date=\\n' if 'teamID' in match['top']: team_top =", "type is defined by match['next'] # Not exactly sure how", "'count': 32}, # {'tab_name': 'Other Notable Participants', # 'count': -1},", "stage['bracket']['type'] == \"elimination\": numGames = 0 rounds = 0 for", "data['stages']: for place, standing in enumerate(stage['standings']): if 'place' in standing:", "== 0: teams_ordered += '{{TeamCard columns start|cols=5|height=250}}\\n' teams_table = '{{TeamCard\\n'", "team_previous_round[match['bottom']['teamID']]: bracket_type = 'W' else: bracket_type = 'D' else: bracket_type", "create_swiss_table(stage, bw_teams): dropped_style = 'drop' swiss_table = '{{SwissTableLeague|rounds=' + str(stage['bracket']['roundsCount'])", "+= bracket_indicator + 'win=1 ' team_previous_round[match['top']['teamID']] = True else: 
team_previous_round[match['top']['teamID']]", "+= '|platform=' + data['platform'] + '\\n' sidebar += '|country=' +", "swiss_table += '\\n' for rank, record in enumerate(stage['standings']): if record['disqualified']:", "sidebar += '|team_number=' + str(len(data['teams'])) + '\\n' sidebar += '|previous='", "+= '|tbd |lastvs4= |lastscore4= |lastvsscore4=\\n' prize_pool += '}}\\n' prize_pool +=", "if team_previous_round[match['top']['teamID']]: bracket_type = 'W' else: bracket_type = 'D' else:", "'}}\\n' prize_pool += '{{prize pool slot |place=5-8 |usdprize=0\\n' prize_pool +=", "standing_id == standing['_id']: # if standing['disqualified']: # has_drop = True", "'|' + player_tag + '=' + player_info['name'] \\ + '", "+ str(rank+1) + '=' + \"{:7.3f}\".format(record['opponentsMatchWinPercentage']) + '\\n' swiss_table +=", "str(stage['bracket']['roundsCount']) + '|diff=false\\n' for i in range(stage['bracket']['teamsCount']): swiss_table += '|pbg'", "round_team_number, in a 8 team DE the third winners bracket", "str(pos + 1) + '=down' for standing in stage['standings']: if", "+= '|games1=' if match['top']['winner']: match_line += 'W' else: match_line +=", "= world_cup_wiki participant_tabs = [ # {'tab_name': 'Top 16', #", "'double': bracket = '{{' + str(stage['bracket']['teamsCount']) + 'DETeamBracket\\n' else: print('Unknown", "columns start|cols=5|height=250}}\\n' for team_num, team in enumerate(teams): if dynamic: if", "+ str(i) + ' Matches|matchsection=Round ' \\ + str(i) +", "https://liquipedia.net/rainbowsix/Template:4DETeamBracket/doc # https://liquipedia.net/rainbowsix/Template:8DETeamBracket/doc # https://liquipedia.net/rainbowsix/Template:16DETeamBracket/doc # if match['matchType'] == 'winner':", "sidebar += '|map1=' + '\\n' sidebar += '|map2=' + '\\n'", "sort_place=True): header = '{{TeamCardToggleButton}}\\n' teams_ordered = '' # Use prior", "set up round-match count trackers round_max_win_match_count = [1] * (len(stage['bracket']['series'])", "sidebar += 
'|series=' + '\\n' sidebar += '|organizer=' + data['organization']['name']", "teams, bw_teams, wiki_name, include_matches=True): tables = '' for idx, group", "= 'Global_Squadrons_League/2021/Season_1' tournament_id = world_cup_id wiki_name = world_cup_wiki participant_tabs =", "max(match['matchNumber'], # round_max_loss_match_count[match['roundNumber']]) if not 'teamID' in match['top']: continue if", "|lastscore4= |lastvsscore4=\\n' prize_pool += '}}\\n' prize_pool += '{{Prize pool end}}\\n'", "numGames = match['numGames'] else: rounds += 1 if rounds: event_format", "rounds += 1 if rounds: event_format += '** ' +", "f.write('===Format===\\n') event_format = create_event_format(event_data.tournament_data) f.write(event_format) f.write('===Broadcast Talent===\\n') f.write('===Prize Pool===\\n') prize_pool", "swiss_matches = create_swiss_matches(stage['matches'], event_data.tournament_data['teams'], bw_teams) f.write(swiss_matches) elif stage['bracket']['type'] == 'elimination':", "str(numGames) + '\\n' rounds = 1 numGames = match['numGames'] else:", "== 'loser': round_max_loss_match_count[match['roundNumber']] = max(match['matchNumber'], round_max_loss_match_count[match['roundNumber']]) bracket_indicator = '|R' +", "|place=1 |usdprize=0 |tbd |lastvs1= |lastscore1= |lastvsscore1=}}\\n' prize_pool += '{{prize pool", "for match in stage['bracket']['series']: if match['numGames'] != numGames: if rounds:", "+ str(i) + '|hide=false}}\\n' for match in rounds[str(i)]: swiss_match_table +=", "|lastvs3= |lastscore3= |lastvsscore3=\\n' prize_pool += '|tbd |lastvs4= |lastscore4= |lastvsscore4=\\n' prize_pool", "data['organization']['slug'] + '/' + data['slug'] + '/' \\ + data['_id']", "place = data['teams'][team_id]['place'] else: place = 0 team_info = bw_teams.get_team_info(data['teams'][team_id]['persistentTeamID'],", "R2W5 and R2D2 vs R2W6 # Might want to key", "'|map3=' + '\\n' sidebar += '|map4=' + '\\n' sidebar +=", "= battlefy_data.BattlefyData(tournament_id) 
event_data.load_tournament_data() # FORCE REDUCE TEAMS event_data.reduce_teams() event_path =", "the same place at the end teams = rank_teams(data, bw_teams,", "'|localcurrency=' + '\\n' sidebar += '|prizepool=' + data['prizes'] + '\\n'", "+ '' else: swiss_table += '|bg' + str(rank + 1)", "1) + '=down' for standing in stage['standings']: if standing_id ==", "1) + '=' + team_info['teamteamplate'] swiss_table += '|temp_tie' + str(rank+1)", "+ stage['name'] + '===\\n') round_robin_tables = create_round_robin_tables(stage, event_data.tournament_data['teams'], bw_teams, wiki_name,", "'\\n' header += '{{TeamCard columns start|cols=5|height=250}}\\n' for team_num, team in", "+ player_info['flag'] if player_info['link']: teams_table += ' |' + player_tag", "team number trackers team_previous_round = dict() # set up round-match", "# Might want to key off match['inConsolationBracket'] # May also", "prize_pool += '{{prize pool start}}\\n' prize_pool += '{{prize pool slot", "rounds where D vs L happen such as R2D1 vs", "+ '=down|team' + str(pos + 1) + \"=\" \\ +", "32}, # {'tab_name': 'Other Notable Participants', # 'count': -1}, ]", "bracket += bracket_indicator + 'win=1 ' team_previous_round[match['top']['teamID']] = True else:", "if 'teamID' in match['top']: team_top = bw_teams.get_team_info(teams[match['top']['teamID']]['persistentTeamID'], teams[match['top']['teamID']]['name']) elif match['isBye']:", "'' if not match['isComplete']: return match_line match_line = '{{MatchMaps\\n' match_line", "sidebar += '|discord=' + '\\n' sidebar += '|map1=' + '\\n'", "+ '\\n' teams_ordered += '{{TeamCard columns start|cols=5|height=250}}\\n' else: if team_num", "f.write('===Broadcast Talent===\\n') f.write('===Prize Pool===\\n') prize_pool = create_prize_pool(event_data.tournament_data['prizes']) f.write(prize_pool) f.write('==Participants==\\n') teams", "winners bracket round is # called the 4th round and", "'teamID' in match['bottom']: team_previous_round[match['bottom']['teamID']] = 
False bracket += '\\n' bracket", "match['top'] and match['top']['winner']: match_line += '|winner=1\\n' elif 'winner' in match['bottom']", "create_event_format(data): event_format = '' for stage in data['stages']: event_format +=", "'|previous=' + '\\n' sidebar += '|next=' + '\\n' sidebar +=", "swiss_match_table += '{{MatchListStart|width=450px|title=Round ' + str(i) + ' Matches|matchsection=Round '", "team_previous_round = dict() # set up round-match count trackers round_max_win_match_count", "columns end}}\\n' if dynamic: footer += '}}\\n' return header +", "KeyError: rounds[str(match['roundNumber'])] = list() rounds[str(match['roundNumber'])].append(match_line) for i in range(1, len(rounds)", "match_line += '|games1=' + str(match['top']['score']) match_line += '|games2=' + str(match['bottom']['score'])", "from datetime import datetime from operator import itemgetter from pathlib", "(len(stage['bracket']['series']) + 1) round_max_win_match_count[0] = 0 round_max_loss_match_count = [1] *", "'|edate=' + datetime.strptime(data['lastCompletedMatchAt'], '%Y-%m-%dT%H:%M:%S.%fZ').strftime( '%Y-%m-%d') + '\\n' except KeyError: sidebar", "= -2 * round_max_win_match_count[match['roundNumber'] - 1] else: round_match_offset = -2", "match_line += '|vod=\\n' match_line += '}}\\n' match_line += '}}\\n' return", "'|R' + str(match['roundNumber']) + bracket_type \\ + str(match['matchNumber'] * 2", "place teams = list() for team_id in data['teams']: if 'place'", "same place at the end teams = rank_teams(data, bw_teams, sort_place)", "team_name = bw_teams.get_team_info(teams[match['bottom']['teamID']]['persistentTeamID'], teams[match['bottom']['teamID']]['name'])['teamteamplate'] bracket += bracket_indicator + 'team=' +", "+ str(pos + 1) + '=down' for standing in stage['standings']:", "team_previous_round[match['bottom']['teamID']] = True elif 'teamID' in match['bottom']: team_previous_round[match['bottom']['teamID']] = False", "+ wiki_name + '}}\\n' return sidebar def 
create_event_format(data): event_format =", "|place=2 |usdprize=0 |tbd |lastvs1= |lastscore1= |lastvsscore1=}}\\n' prize_pool += '{{prize pool", "'=down' if (i + 1) % 8 == 0: swiss_table", "# round_max_win_match_count[match['roundNumber']] = max(match['matchNumber'], # round_max_win_match_count[match['roundNumber']]) # elif match['matchType'] ==", "team DE the third winners bracket round is # called", "'|qualifier=\\n' teams_table += '}}\\n' teams_ordered += teams_table footer = '{{TeamCard", "'BYE') match_line += '|team1=' + team_top['teamteamplate'] match_line += '|team2=' +", "(1 - 1 / data['teams'][standing['team']['_id']]['place']) else: data['teams'][standing['team']['_id']]['place'] = standing['place'] else:", "+= bracket_indicator + 'literal=BYE ' if 'score' in match['bottom']: bracket", "'}}\\n' return sidebar def create_event_format(data): event_format = '' for stage", "if 'place' in standing: if 'place' not in data['teams'][standing['team']['_id']]: data['teams'][standing['team']['_id']]['place']", "1) + '=' + dynamic[dynamic_idx]['tab_name'] + '\\n' teams_ordered += '|content'", "'{{box|end}}\\n' return swiss_match_table def create_elim_bracket(stage, teams, bw_teams): if stage['bracket']['style'] ==", "team_info = bw_teams.get_team_info(team[3], team[1]) teams_table += '|team=' + team_info['name'] +", "DE the third winners bracket round is # called the", "sidebar += '|icon=' + '\\n' sidebar += '|series=' + '\\n'", "= True elif 'teamID' in match['bottom']: team_previous_round[match['bottom']['teamID']] = False bracket", "'\\n' sidebar += '}}\\n' sidebar += '{{Upcoming matches tournament|' +", "'\\n' sidebar += '|name=' + data['name'] + '\\n' sidebar +=", "'\\n' sidebar += '|next=' + '\\n' sidebar += '}}\\n' sidebar", "= bw_players.get_player_info(player['userID'], player['inGameName']) teams_table += '|' + player_tag + '='", "sidebar += '|previous=' + '\\n' sidebar += '|next=' + '\\n'", "'teamID' in match['top']: team_name = 
bw_teams.get_team_info(teams[match['top']['teamID']]['persistentTeamID'], teams[match['top']['teamID']]['name'])['teamteamplate'] bracket += bracket_indicator", "if team_num == dynamic[dynamic_idx]['count']: teams_ordered += '{{TeamCard columns end}}\\n' dynamic_idx", "+= '====={{HiddenSort|Round ' + str(i) + '}}=====\\n' swiss_match_table += '{{MatchListStart|width=450px|title=Round", "bw_teams.get_team_info(teams[match['top']['teamID']]['persistentTeamID'], teams[match['top']['teamID']]['name'])['teamteamplate'] bracket += bracket_indicator + 'team=' + team_name +", "'win=1 ' team_previous_round[match['top']['teamID']] = True else: team_previous_round[match['top']['teamID']] = False bracket", "by match['next'] # Not exactly sure how to address round_team_number,", "match_table = '{{MatchListStart|title=Group ' + group['name'] + ' Matches|width=450px|hide=true}}\\n' for", "stage['bracket']['type'] + '\\n' elif stage['bracket']['type'] == \"elimination\": numGames = 0", "Results====\\n') swiss_matches = create_swiss_matches(stage['matches'], event_data.tournament_data['teams'], bw_teams) f.write(swiss_matches) elif stage['bracket']['type'] ==", "* 2 + round_match_offset) if 'teamID' in match['bottom']: team_name =", "match_table += match_line tables += match_table tables += '{{MatchListEnd}}\\n' tables", "group_table += '|bg' + str(pos + 1) + '=down|team' +", "matches = sorted(stage['matches'], key=itemgetter('matchNumber')) matches = stage['matches'] for match in", "dynamic[dynamic_idx]['tab_name'] + '\\n' teams_ordered += '|content' + str(dynamic_idx+1) + '='", "the 4th round and in a 16 team DE the", "+= '{{MatchListEnd}}\\n' tables += '{{box|end}}\\n' return tables def create_prize_pool(prize): prize_pool", "for match in group['matches']: match_line = create_match_maps(match, teams, bw_teams) match_table", "with open(filename, 'w+', newline='\\n', encoding='utf-8') as f: display = '{{DISPLAYTITLE:'", "the previous round # In DE brackest W means the", "+= 
'|details={{BracketMatchSummary\\n' match_line += '|date=|finished=true\\n' match_line += '|twitch= |youtube=\\n' match_line", "(len(stage['bracket']['series']) + 1) round_max_loss_match_count[0] = 0 # matches = sorted(stage['matches'],", "In DE brackest W means the team won the previous", "+ '\\n' sidebar += '|patch=' + '\\n' sidebar += '|sdate='", "' ' if 'winner' in match['bottom'] and match['bottom']['winner']: bracket +=", "== 'elimination': f.write('===Playoffs===\\n') bracket = create_elim_bracket(stage, event_data.tournament_data['teams'], bw_teams) f.write(bracket) elif", "REDUCE TEAMS event_data.reduce_teams() event_path = event_data.get_tournament_data_path() event_path.mkdir(parents=True, exist_ok=True) filename =", "end}}\\n' dynamic_idx += 1 teams_ordered += '|name' + str(dynamic_idx +", "and R2D2 vs R2W6 # Might want to key off", "W that way instead # Default first round to D", "'winner': round_max_win_match_count[match['roundNumber']] = max(match['matchNumber'], round_max_win_match_count[match['roundNumber']]) elif match['matchType'] == 'loser': round_max_loss_match_count[match['roundNumber']]", "'score' in match['bottom']: bracket += bracket_indicator + 'score=' + str(match['bottom']['score'])", "team_name = bw_teams.get_team_info(teams[match['top']['teamID']]['persistentTeamID'], teams[match['top']['teamID']]['name'])['teamteamplate'] bracket += bracket_indicator + 'team=' +", "|tbd |lastvs1= |lastscore1= |lastvsscore1=}}\\n' prize_pool += '{{prize pool slot |place=2", "stage['matches'] for match in matches: # TODO: this will need", "- 1] \\ + (round_max_win_match_count[match['roundNumber']] - round_max_win_match_count[match['roundNumber'] - 1]) *", "battlefy_data.BattlefyData(tournament_id) event_data.load_tournament_data() # FORCE REDUCE TEAMS event_data.reduce_teams() event_path = event_data.get_tournament_data_path()", "import datetime from operator import itemgetter from pathlib import Path", "# teams_table += '|qualifier=\\n' teams_table += 
'}}\\n' teams_ordered += teams_table", "in matches: # TODO: this will need to get updated", "match['matchType'] == 'loser': # round_max_loss_match_count[match['roundNumber']] = max(match['matchNumber'], # round_max_loss_match_count[match['roundNumber']]) if", "* 2 # Increment for next time if match['matchType'] ==", "# has_drop = True team_info = bw_teams.get_team_info(teams[standing['team']['_id']]['persistentTeamID'], teams[standing['team']['_id']]['name']) group_table +=", "return # todo handle double elimination brackets # set up", "bracket = create_elim_bracket(stage, event_data.tournament_data['teams'], bw_teams) f.write(bracket) elif stage['bracket']['type'] == 'roundrobin':", "+ 'literal=BYE ' if 'score' in match['bottom']: bracket += bracket_indicator", "if match['matchType'] == 'winner': round_max_win_match_count[match['roundNumber']] = max(match['matchNumber'], round_max_win_match_count[match['roundNumber']]) elif match['matchType']", "+ str(numGames) + '\\n' rounds = 1 numGames = match['numGames']", "+= '* ' + stage['name'] + '\\n' if stage['bracket']['type'] ==", "for idx, player in enumerate(data['teams'][team[0]]['players']): player_tag = 'p' + str(idx", "# TODO: this will need to get updated for non", "tables += '{{MatchListEnd}}\\n' tables += '{{box|end}}\\n' return tables def create_prize_pool(prize):", "'{{prize pool slot |place=1 |usdprize=0 |tbd |lastvs1= |lastscore1= |lastvsscore1=}}\\n' prize_pool", "'\\n' not in swiss_table[-1]: swiss_table += '\\n' for rank, record", "data['organization']['name'] + '\\n' sidebar += '|organizer-link=' + '\\n' sidebar +=", "in match['bottom']: team_previous_round[match['bottom']['teamID']] = False bracket += '\\n' bracket +=", "'}}\\n' teams_ordered += teams_table footer = '{{TeamCard columns end}}\\n' if", "number trackers team_previous_round = dict() # set up round-match count", "+ '/' + data['slug'] + '/' \\ + data['_id'] +", "place at the end teams = rank_teams(data, bw_teams, sort_place) dynamic_idx", 
"|lastscore3= |lastvsscore3=\\n' prize_pool += '|tbd |lastvs4= |lastscore4= |lastvsscore4=\\n' prize_pool +=", "(round_max_win_match_count[match['roundNumber']] - round_max_win_match_count[match['roundNumber'] - 1]) * 2 # Increment for", "= prize + '\\n' prize_pool += '{{prize pool start}}\\n' prize_pool", "round-match count trackers round_max_win_match_count = [1] * (len(stage['bracket']['series']) + 1)", "sidebar += '|liquipediatier=' + '\\n' sidebar += '|name=' + data['name']", "prize_pool += '{{prize pool slot |place=1 |usdprize=0 |tbd |lastvs1= |lastscore1=", "Path import calcup_roster_tracking def create_sidebar(data, wiki_name): sidebar = '{{Infobox league'", "+= '}}\\n' prize_pool += '{{Prize pool end}}\\n' return prize_pool def", "bracket += '\\n' if 'teamID' in match['bottom']: if match['bottom']['teamID'] in", "prize_pool += '{{prize pool slot |place=3-4 |usdprize=0\\n' prize_pool += '|tbd", "match in stage['bracket']['series']: if match['numGames'] != numGames: if rounds: event_format", "= max(match['matchNumber'], round_max_loss_match_count[match['roundNumber']]) bracket_indicator = '|R' + str(match['roundNumber']) + bracket_type", "'|pbg' + str(i + 1) + '=down' if (i +", "= 'drop' swiss_table = '{{SwissTableLeague|rounds=' + str(stage['bracket']['roundsCount']) + '|diff=false\\n' for", "+ '\\n' sidebar += '|prizepool=' + data['prizes'] + '\\n' sidebar", "+= '|walkover=1' match_line += '|games1=' if match['top']['winner']: match_line += 'W'", "else: rounds += 1 if rounds: event_format += '** '", "# In DE brackest W means the team won the", "+ '\\n' sidebar += '|rulebook=' + '\\n' sidebar += '|twitter='", "team_previous_round[match['top']['teamID']]: bracket_type = 'W' else: bracket_type = 'D' else: bracket_type", "match_line += '|team1=' + team_top['teamteamplate'] match_line += '|team2=' + team_bot['teamteamplate']", "bracket += bracket_indicator + 'score=' + str(match['top']['score']) + ' '", "+= \"}}\\n\" if include_matches: match_table = 
'{{MatchListStart|title=Group ' + group['name']", "+= '|map5=' + '\\n' sidebar += '|team_number=' + str(len(data['teams'])) +", "+ str(i) + '}}=====\\n' swiss_match_table += '{{MatchListStart|width=450px|title=Round ' + str(i)", "'' for stage in data['stages']: event_format += '* ' +", "team_num, team in enumerate(teams): if dynamic: if team_num == dynamic[dynamic_idx]['count']:", "+= '|winner=0\\n' elif 'winner' in match['top'] and match['top']['winner']: match_line +=", "in team_previous_round: if team_previous_round[match['bottom']['teamID']]: bracket_type = 'W' else: bracket_type =", "columns start|cols=5|height=250}}\\n' else: if team_num == 0: teams_ordered += '{{TeamCard", "event_data.tournament_data['name'] + '.wiki') with open(filename, 'w+', newline='\\n', encoding='utf-8') as f:", "+ round_match_offset) if 'teamID' in match['bottom']: team_name = bw_teams.get_team_info(teams[match['bottom']['teamID']]['persistentTeamID'], teams[match['bottom']['teamID']]['name'])['teamteamplate']", "round_max_win_match_count[match['roundNumber'] - 1] else: round_match_offset = -2 * round_max_loss_match_count[match['roundNumber'] -", "matches: # TODO: this will need to get updated for", "= '{{SwissTableLeague|rounds=' + str(stage['bracket']['roundsCount']) + '|diff=false\\n' for i in range(stage['bracket']['teamsCount']):", "'D' bracket_indicator = '|R' + str(match['roundNumber']) + bracket_type \\ +", "'|bg' + str(rank + 1) + '=down' team_info = bw_teams.get_team_info(record['team']['persistentTeamID'],", "'Calrissian_Cup/Spring/Major' ccs_summer_minor_id = '60b41961d35b1411a7b31d64' ccs_summer_minor_wiki = 'Calrissian_Cup/Summer/Minor' ccs_summer_major_id = '60dd319012cb9c33c2f63868'", "need to get updated for non SE16 templates # In", "third winners bracket round is # called the 4th round", "prize_pool += '|tbd |lastvs2= |lastscore2= |lastvsscore2=\\n' prize_pool += '}}\\n' prize_pool", "return tables def create_prize_pool(prize): prize_pool = prize + '\\n' 
prize_pool", "event_data.tournament_data['teams'], bw_teams, wiki_name, include_matches=True) f.write(round_robin_tables) else: print('Unsupported bracket type of:", "+= '|pbg' + str(pos + 1) + '=down' for standing", "f.write(bracket) elif stage['bracket']['type'] == 'roundrobin': f.write('===' + stage['name'] + '===\\n')", "+ data['platform'] + '\\n' sidebar += '|country=' + '\\n' sidebar", "= '' rounds = dict() for match in matches: match_line", "'FF' match_line += '|games2=' if 'winner' in match['bottom'] and match['bottom']['winner']:", "set up team number trackers team_previous_round = dict() # set", "str(stage['bracket']['teamsCount']) + 'SETeamBracket\\n' elif stage['bracket']['style'] == 'double': bracket = '{{'", "+= '{{TeamCard columns end}}\\n' dynamic_idx += 1 teams_ordered += '|name'", "in standing: if 'place' not in data['teams'][standing['team']['_id']]: data['teams'][standing['team']['_id']]['place'] = len(stage['standings'])", "+ 1) + '=down|team' + str(pos + 1) + \"=\"", "create_match_maps(match, teams, bw_teams) if not match_line: continue try: rounds[str(match['roundNumber'])].append(match_line) except", "'Top 32', # 'count': 32}, # {'tab_name': 'Other Notable Participants',", "= '' for pos, standing_id in enumerate(group['standingIDs']): group_header += '|pbg'", "1]) * 2 # Increment for next time if match['matchType']", "+ '\\n' if stage['bracket']['type'] == \"swiss\": event_format += '** '", "range(1, len(rounds) + 1): if i == 1: swiss_match_table +=", "+= 'W' else: match_line += 'FF' match_line += '|games2=' if", "'\\n' sidebar += '|platform=' + data['platform'] + '\\n' sidebar +=", "prior rounds as a tiebreaker for when multiple teams have", "create_participants(event_data.tournament_data, bw_players, bw_teams, dynamic=participant_tabs, sort_place=True) f.write(teams) f.write('==Results==\\n') for stage in", "event_format = create_event_format(event_data.tournament_data) f.write(event_format) f.write('===Broadcast Talent===\\n') 
f.write('===Prize Pool===\\n') prize_pool =", "'W' else: bracket_type = 'D' else: bracket_type = 'D' if", "DE brackets D means the team dropped down from the", "group['name'] + '}}====\\n' tables += '{{GroupTableLeague|title=Group ' + group['name'] +", "continue if match['top']['teamID'] in team_previous_round: if team_previous_round[match['top']['teamID']]: bracket_type = 'W'", "f.write('===Playoffs===\\n') bracket = create_elim_bracket(stage, event_data.tournament_data['teams'], bw_teams) f.write(bracket) elif stage['bracket']['type'] ==", "'/' \\ + data['_id'] + '/bracket-list' + '\\n' sidebar +=", "'\\n' if '\\n' not in swiss_table[-1]: swiss_table += '\\n' for", "team_num == 0: teams_ordered += '{{TeamCard columns start|cols=5|height=250}}\\n' teams_table =", "open(filename, 'w+', newline='\\n', encoding='utf-8') as f: display = '{{DISPLAYTITLE:' +", "in enumerate(stage['standings']): if 'place' in standing: if 'place' not in", "brackest W means the team won the previous round #", "if rounds: event_format += '** ' + str(rounds) + '-round", "bracket_indicator + 'win=2 ' team_previous_round[match['bottom']['teamID']] = True elif 'teamID' in", "return event_format def rank_teams(data, bw_teams, sort_place=True, break_ties=False): for stage in", "f.write('==Results==\\n') for stage in event_data.tournament_data['stages']: if stage['bracket']['type'] == 'swiss': f.write('===Swiss", "sidebar += '|next=' + '\\n' sidebar += '}}\\n' sidebar +=", "return sidebar def create_event_format(data): event_format = '' for stage in", "+ str(stage['bracket']['roundsCount']) + '-round ' + stage['bracket']['type'] + '\\n' elif", "'Calrissian_Cup/Fall/Minor' ccs_fall_major_id = '61314505635fe17a14eafe03' ccs_fall_major_wiki = 'Calrissian_Cup/Fall/Major' ccs_championship_id = '6150dd2b0dd060282bebb0eb'", "= world_cup_id wiki_name = world_cup_wiki participant_tabs = [ # {'tab_name':", "teams_table += '|team=' + team_info['name'] + '\\n' teams_table += '|image='", "+= '|tickername=' + 
data['name'] + '\\n' sidebar += '|image=' +", "== 'loser': # round_max_loss_match_count[match['roundNumber']] = max(match['matchNumber'], # round_max_loss_match_count[match['roundNumber']]) if not", "'}}\\n' return match_line def create_round_robin_tables(stage, teams, bw_teams, wiki_name, include_matches=True): tables", "+ '=' + dropped_style + '' else: swiss_table += '|bg'", "+ 1) + '=' + dynamic[dynamic_idx]['tab_name'] + '\\n' teams_ordered +=", "+= '|team_number=' + str(len(data['teams'])) + '\\n' sidebar += '|previous=' +", "+= '|tbd |lastvs1= |lastscore1= |lastvsscore1=\\n' prize_pool += '|tbd |lastvs2= |lastscore2=", "= bw_teams.get_team_info(record['team']['persistentTeamID'], record['team']['name']) swiss_table += '|team' + str(rank + 1)", "+ '=' + \"{:7.3f}\".format(record['opponentsMatchWinPercentage']) + '\\n' swiss_table += '}}\\n' return", "player['_id'] in calcup_roster_tracking.eventid_to_missing_userid: player['userID'] = calcup_roster_tracking.eventid_to_missing_userid[player['_id']] player_info = bw_players.get_player_info(player['userID'], player['inGameName'])", "Pool===\\n') prize_pool = create_prize_pool(event_data.tournament_data['prizes']) f.write(prize_pool) f.write('==Participants==\\n') teams = create_participants(event_data.tournament_data, bw_players,", "{'tab_name': 'Other Notable Participants', # 'count': -1}, ] bw_teams =", "16 team DE the 4th winners bracket round is called", "+ 1) % 8 == 0: swiss_table += '\\n' if", "calcup_roster_tracking def create_sidebar(data, wiki_name): sidebar = '{{Infobox league' + '\\n'", "# if match['matchType'] == 'winner': # round_max_win_match_count[match['roundNumber']] = max(match['matchNumber'], #", "'|content' + str(dynamic_idx+1) + '=' + '\\n' header += '{{TeamCard", "if team_previous_round[match['bottom']['teamID']]: bracket_type = 'W' else: bracket_type = 'D' else:", "+= '|country=' + '\\n' sidebar += '|format=' + '\\n' sidebar", "participant_tabs = [ # {'tab_name': 'Top 16', # 'count': 16},", 
"event_data = battlefy_data.BattlefyData(tournament_id) event_data.load_tournament_data() # FORCE REDUCE TEAMS event_data.reduce_teams() event_path", "'|team1=' + team_top['teamteamplate'] match_line += '|team2=' + team_bot['teamteamplate'] if 'isTie'", "round_max_win_match_count[match['roundNumber']] = max(match['matchNumber'], round_max_win_match_count[match['roundNumber']]) elif match['matchType'] == 'loser': round_max_loss_match_count[match['roundNumber']] =", "32', # 'count': 32}, # {'tab_name': 'Other Notable Participants', #", "sidebar += '|country=' + '\\n' sidebar += '|format=' + '\\n'", "False bracket += '\\n' if 'teamID' in match['bottom']: if match['bottom']['teamID']", "data['slug'] + '/' \\ + data['_id'] + '/bracket-list' + '\\n'", "'|games2=' + str(match['bottom']['score']) + '\\n' match_line += '|details={{BracketMatchSummary\\n' match_line +=", "for when multiple teams have the same place at the", "# set up team number trackers team_previous_round = dict() #", "list() for team_id in data['teams']: if 'place' in data['teams'][team_id]: place", "swiss_match_table += match swiss_match_table += '{{MatchListEnd}}\\n' swiss_match_table += '{{box|end}}\\n' return", "+ 'score=' + str(match['bottom']['score']) + ' ' if 'winner' in", "in swiss_table[-1]: swiss_table += '\\n' for rank, record in enumerate(stage['standings']):", "'|tbd |lastvs2= |lastscore2= |lastvsscore2=\\n' prize_pool += '|tbd |lastvs3= |lastscore3= |lastvsscore3=\\n'", "|lastvsscore2=\\n' prize_pool += '}}\\n' prize_pool += '{{prize pool slot |place=5-8", "'' rounds = dict() for match in matches: match_line =", "4, 0)) else: teams = sorted(teams, key=itemgetter(4, 0)) return teams", "+ place teams = list() for team_id in data['teams']: if", "columns end}}\\n' dynamic_idx += 1 teams_ordered += '|name' + str(dynamic_idx", "bw_players, bw_teams, dynamic=participant_tabs, sort_place=True) f.write(teams) f.write('==Results==\\n') for stage in event_data.tournament_data['stages']:", "in 
match['bottom']: team_bot = bw_teams.get_team_info(teams[match['bottom']['teamID']]['persistentTeamID'], teams[match['bottom']['teamID']]['name']) elif match['isBye']: team_bot =", "'6150dd2b0dd060282bebb0eb' ccs_championship_wiki = 'Calrissian_Cup/Championship' world_cup_id = '611dac6ecb6f6260d5f30b6e' world_cup_wiki = 'World_Cup'", "str(numGames) + '\\n' return event_format def rank_teams(data, bw_teams, sort_place=True, break_ties=False):", "+= '|team=' + team_info['name'] + '\\n' teams_table += '|image=' +", "sidebar += '|localcurrency=' + '\\n' sidebar += '|prizepool=' + data['prizes']", "'|type=Online' + '\\n' sidebar += '|platform=' + data['platform'] + '\\n'", "in match['top'] and match['top']['winner']: match_line += '|winner=1\\n' elif 'winner' in", ")) if sort_place: teams = sorted(teams, key=itemgetter(2, 4, 0)) else:", "+ team_info['teamteamplate'] + '\\n' group_header += '|tiebreaker1=series\\n' tables += group_header", "else: if team_num == 0: teams_ordered += '{{TeamCard columns start|cols=5|height=250}}\\n'", "def main(): ccs_winter_minor_id = '5ff3354193edb53839d44d55' ccs_winter_minor_wiki = 'Calrissian_Cup/Winter/Minor' ccs_winter_major_id =", "round_max_win_match_count[match['roundNumber']]) elif match['matchType'] == 'loser': round_max_loss_match_count[match['roundNumber']] = max(match['matchNumber'], round_max_loss_match_count[match['roundNumber']]) bracket_indicator", "16', # 'count': 16}, # {'tab_name': 'Top 32', # 'count':", "has_drop = True team_info = bw_teams.get_team_info(teams[standing['team']['_id']]['persistentTeamID'], teams[standing['team']['_id']]['name']) group_table += '|bg'", "bw_teams.get_team_info(team[3], team[1]) teams_table += '|team=' + team_info['name'] + '\\n' teams_table", "stage['bracket']['seriesStyle'] + str(numGames) + '\\n' return event_format def rank_teams(data, bw_teams,", "way instead # Default first round to D and then", "= 'p' + str(idx + 1) if player['_id'] in 
calcup_roster_tracking.eventid_to_missing_userid:", "bracket = '{{' + str(stage['bracket']['teamsCount']) + 'DETeamBracket\\n' else: print('Unknown stage", "+= '|This=1\\n' header += '|content' + str(dynamic_idx+1) + '=' +", "+ data['slug'] + '/' \\ + data['_id'] + '/bracket-list' +", "tables def create_prize_pool(prize): prize_pool = prize + '\\n' prize_pool +=", "+ data['name'] + '\\n' sidebar += '|shortname=' + data['name'] +", "# May also just need to keep track of match['next']", "True else: team_previous_round[match['top']['teamID']] = False bracket += '\\n' if 'teamID'", "+ stage['bracket']['style']) return # todo handle double elimination brackets #", "|tbd |lastvs1= |lastscore1= |lastvsscore1=}}\\n' prize_pool += '{{prize pool slot |place=3-4", "match['matchType'] == 'winner': # round_max_win_match_count[match['roundNumber']] = max(match['matchNumber'], # round_max_win_match_count[match['roundNumber']]) #", "def create_sidebar(data, wiki_name): sidebar = '{{Infobox league' + '\\n' sidebar", "str(len(data['teams'])) + '\\n' sidebar += '|previous=' + '\\n' sidebar +=", "happen such as R2D1 vs R2W5 and R2D2 vs R2W6", "+ player_tag + 'flag=' + player_info['flag'] if player_info['link']: teams_table +=", "rounds[str(match['roundNumber'])] = list() rounds[str(match['roundNumber'])].append(match_line) for i in range(1, len(rounds) +", "tables += '|tournament=' + wiki_name + '\\n' group_header = ''", "= create_sidebar(event_data.tournament_data, wiki_name) f.write(sidebar) f.write('==About==\\n') f.write('===Format===\\n') event_format = create_event_format(event_data.tournament_data) f.write(event_format)", "' + stage['name'] + '\\n' if stage['bracket']['type'] == \"swiss\": event_format", "swiss_match_table += '====={{HiddenSort|Round ' + str(i) + '}}=====\\n' swiss_match_table +=", "'|winner=1\\n' elif 'winner' in match['bottom'] and match['bottom']['winner']: match_line += '|winner=2\\n'", "'|patch=' + '\\n' sidebar += '|sdate=' + 
datetime.strptime(data['checkInStartTime'], '%Y-%m-%dT%H:%M:%S.%fZ').strftime( '%Y-%m-%d')", "f.write(event_format) f.write('===Broadcast Talent===\\n') f.write('===Prize Pool===\\n') prize_pool = create_prize_pool(event_data.tournament_data['prizes']) f.write(prize_pool) f.write('==Participants==\\n')", "bw_teams.get_team_info('0', 'BYE') match_line += '|team1=' + team_top['teamteamplate'] match_line += '|team2='", "+= '|rulebook=' + '\\n' sidebar += '|twitter=' + '\\n' sidebar", "match['bottom'] and match['bottom']['winner']: bracket += bracket_indicator + 'win=2 ' team_previous_round[match['bottom']['teamID']]", "'drop' swiss_table = '{{SwissTableLeague|rounds=' + str(stage['bracket']['roundsCount']) + '|diff=false\\n' for i", "ccs_fall_major_id = '61314505635fe17a14eafe03' ccs_fall_major_wiki = 'Calrissian_Cup/Fall/Major' ccs_championship_id = '6150dd2b0dd060282bebb0eb' ccs_championship_wiki", "for team_id in data['teams']: if 'place' in data['teams'][team_id]: place =", "rounds[str(match['roundNumber'])].append(match_line) for i in range(1, len(rounds) + 1): if i", "+ 'score=' + str(match['top']['score']) + ' ' if 'winner' in", "round_max_win_match_count[match['roundNumber']]) # elif match['matchType'] == 'loser': # round_max_loss_match_count[match['roundNumber']] = max(match['matchNumber'],", "str(match['roundNumber']) + bracket_type \\ + str(match['matchNumber'] * 2 - 1", "+ str(match['bottom']['score']) + ' ' if 'winner' in match['bottom'] and", "|lastscore1= |lastvsscore1=}}\\n' prize_pool += '{{prize pool slot |place=3-4 |usdprize=0\\n' prize_pool", "'|games1=' + str(match['top']['score']) match_line += '|games2=' + str(match['bottom']['score']) + '\\n'", "team_previous_round[match['bottom']['teamID']] = False bracket += '\\n' bracket += '}}\\n' return", "and build up the D and W that way instead", "team_previous_round[match['top']['teamID']] = False bracket += '\\n' if 'teamID' in match['bottom']:", "'\\n' sidebar += '|previous=' + '\\n' sidebar += 
'|next=' +", "+= '|date=\\n' if 'teamID' in match['top']: team_top = bw_teams.get_team_info(teams[match['top']['teamID']]['persistentTeamID'], teams[match['top']['teamID']]['name'])", "data['name'] + '\\n' sidebar += '|shortname=' + data['name'] + '\\n'", "teams_ordered += '|name' + str(dynamic_idx + 1) + '=' +", "if standing['disqualified']: # has_drop = True team_info = bw_teams.get_team_info(teams[standing['team']['_id']]['persistentTeamID'], teams[standing['team']['_id']]['name'])", "count trackers round_max_win_match_count = [1] * (len(stage['bracket']['series']) + 1) round_max_win_match_count[0]", "+ data['name'] + '\\n' sidebar += '|image=' + '\\n' sidebar", "group_table tables += \"}}\\n\" if include_matches: match_table = '{{MatchListStart|title=Group '", "'|details={{BracketMatchSummary\\n' match_line += '|date=|finished=true\\n' match_line += '|twitch= |youtube=\\n' match_line +=", "start|cols=5|height=250}}\\n' else: if team_num == 0: teams_ordered += '{{TeamCard columns", "'\\n' bracket += '}}\\n' return bracket def create_match_maps(match, teams, bw_teams):", "= bw_teams.get_team_info(teams[match['top']['teamID']]['persistentTeamID'], teams[match['top']['teamID']]['name'])['teamteamplate'] bracket += bracket_indicator + 'team=' + team_name", "print('Unknown stage style: ' + stage['bracket']['style']) return # todo handle", "+ datetime.strptime(data['checkInStartTime'], '%Y-%m-%dT%H:%M:%S.%fZ').strftime( '%Y-%m-%d') + '\\n' try: sidebar += '|edate='", "bw_teams, sort_place=True, break_ties=False): for stage in data['stages']: for place, standing", "round # In DE brackest W means the team won", "pool slot |place=3-4 |usdprize=0\\n' prize_pool += '|tbd |lastvs1= |lastscore1= |lastvsscore1=\\n'", "ccs_winter_minor_id = '5ff3354193edb53839d44d55' ccs_winter_minor_wiki = 'Calrissian_Cup/Winter/Minor' ccs_winter_major_id = '60019f8ebcc5ed46373408a1' ccs_winter_major_wiki", "encoding='utf-8') as f: display = '{{DISPLAYTITLE:' + 
event_data.tournament_data['name'] + '}}\\n'", "= bw_teams.get_team_info('0', 'BYE') if 'teamID' in match['bottom']: team_bot = bw_teams.get_team_info(teams[match['bottom']['teamID']]['persistentTeamID'],", "def create_round_robin_tables(stage, teams, bw_teams, wiki_name, include_matches=True): tables = '' for", "= 'Calrissian_Cup/Winter/Minor' ccs_winter_major_id = '60019f8ebcc5ed46373408a1' ccs_winter_major_wiki = 'Calrissian_Cup/Winter/Major' ccs_spring_minor_id =", "'Calrissian_Cup/Winter/Major' ccs_spring_minor_id = '603c00fbfe4fb811b3168f5b' ccs_spring_minor_wiki = 'Calrissian_Cup/Spring/Minor' ccs_spring_major_id = '6061b764f68d8733c8455fcf'", "numGames = 0 rounds = 0 for match in stage['bracket']['series']:", "= 'D' else: bracket_type = 'D' else: bracket_type = 'D'", "+ '=down' team_info = bw_teams.get_team_info(record['team']['persistentTeamID'], record['team']['name']) swiss_table += '|team' +", "if team_num == 0: teams_ordered += '{{TeamCard columns start|cols=5|height=250}}\\n' teams_table", "data['teams'][team_id]: place = data['teams'][team_id]['place'] else: place = 0 team_info =", "header += '{{TeamCard columns start|cols=5|height=250}}\\n' for team_num, team in enumerate(teams):", "= bw_teams.get_team_info('0', 'BYE') match_line += '|team1=' + team_top['teamteamplate'] match_line +=", "match['top']: continue if match['top']['teamID'] in team_previous_round: if team_previous_round[match['top']['teamID']]: bracket_type =", "ccs_summer_major_wiki = 'Calrissian_Cup/Summer/Major' ccs_fall_minor_id = '60fa26043ba15d73719669bd' ccs_fall_minor_wiki = 'Calrissian_Cup/Fall/Minor' ccs_fall_major_id", "bracket_type = 'D' else: bracket_type = 'D' else: bracket_type =", "and match['bottom']['winner']: match_line += 'W' else: match_line += 'FF' else:", "'|country=' + '\\n' sidebar += '|format=' + '\\n' sidebar +=", "* (len(stage['bracket']['series']) + 1) round_max_loss_match_count[0] = 0 # matches =", "+ '\\n' sidebar += '|team_number=' + str(len(data['teams'])) 
+ '\\n' sidebar", "calcup_roster_tracking.eventid_to_missing_userid: player['userID'] = calcup_roster_tracking.eventid_to_missing_userid[player['_id']] player_info = bw_players.get_player_info(player['userID'], player['inGameName']) teams_table +=", "+ stage['bracket']['seriesStyle'] + str(numGames) + '\\n' rounds = 1 numGames", "'place' not in data['teams'][standing['team']['_id']]: data['teams'][standing['team']['_id']]['place'] = len(stage['standings']) + place else:", "player_info = bw_players.get_player_info(player['userID'], player['inGameName']) teams_table += '|' + player_tag +", "+= '|bg' + str(rank + 1) + '=' + dropped_style", "+= '}}\\n' return match_line def create_round_robin_tables(stage, teams, bw_teams, wiki_name, include_matches=True):", "== 0: swiss_table += '\\n' if '\\n' not in swiss_table[-1]:", "= '60fa26043ba15d73719669bd' ccs_fall_minor_wiki = 'Calrissian_Cup/Fall/Minor' ccs_fall_major_id = '61314505635fe17a14eafe03' ccs_fall_major_wiki =", "'\\n' sidebar += '|map5=' + '\\n' sidebar += '|team_number=' +", "return match_line def create_round_robin_tables(stage, teams, bw_teams, wiki_name, include_matches=True): tables =", "+ (round_max_win_match_count[match['roundNumber']] - round_max_win_match_count[match['roundNumber'] - 1]) * 2 # Increment", "if 'place' not in data['teams'][standing['team']['_id']]: data['teams'][standing['team']['_id']]['place'] = len(stage['standings']) + place", "str(match['top']['score']) + ' ' if 'winner' in match['top'] and match['top']['winner']:", "for idx, group in enumerate(stage['groups']): if idx == 1: tables", "teams = create_participants(event_data.tournament_data, bw_players, bw_teams, dynamic=participant_tabs, sort_place=True) f.write(teams) f.write('==Results==\\n') for", "'|This=1\\n' header += '|content' + str(dynamic_idx+1) + '=' + '\\n'", "= \\ standing['place'] + (1 - 1 / data['teams'][standing['team']['_id']]['place']) else:", "+= '|series=' + '\\n' sidebar += '|organizer=' + 
data['organization']['name'] +", "'{{prize pool slot |place=5-8 |usdprize=0\\n' prize_pool += '|tbd |lastvs1= |lastscore1=", "for i in range(1, len(rounds) + 1): if i ==", "bracket += '}}\\n' return bracket def create_match_maps(match, teams, bw_teams): match_line", "bw_players = battlefy_wiki_linkings.BattlefyWikiPlayerLinkings() event_data = battlefy_data.BattlefyData(tournament_id) event_data.load_tournament_data() # FORCE REDUCE", "swiss_table += '|bg' + str(rank + 1) + '=down' team_info", "'Calrissian_Cup/Summer/Minor' ccs_summer_major_id = '60dd319012cb9c33c2f63868' ccs_summer_major_wiki = 'Calrissian_Cup/Summer/Major' ccs_fall_minor_id = '60fa26043ba15d73719669bd'", "= len(stage['standings']) + place else: if break_ties: data['teams'][standing['team']['_id']]['place'] = \\", "'=' + \"{:7.3f}\".format(record['opponentsMatchWinPercentage']) + '\\n' swiss_table += '}}\\n' return swiss_table", "== standing['_id']: # if standing['disqualified']: # has_drop = True team_info", "'\\n' sidebar += '|twitter=' + '\\n' sidebar += '|twitch=' +", "itemgetter from pathlib import Path import calcup_roster_tracking def create_sidebar(data, wiki_name):", "if player['_id'] in calcup_roster_tracking.eventid_to_missing_userid: player['userID'] = calcup_roster_tracking.eventid_to_missing_userid[player['_id']] player_info = bw_players.get_player_info(player['userID'],", "data['name'] + '\\n' sidebar += '|tickername=' + data['name'] + '\\n'", "+ 'win=1 ' team_previous_round[match['top']['teamID']] = True else: team_previous_round[match['top']['teamID']] = False", "= '' group_table = '' for pos, standing_id in enumerate(group['standingIDs']):", "'World_Cup' twin_suns_tourny_id = '60806876938bed74f6edea9e' twin_suns_wiki = 'Twin_Suns_Tournament' gsl_s1_id = '5ff4b388fd124e11b18e185d'", "'\\n' sidebar += '|country=' + '\\n' sidebar += '|format=' +", "range(stage['bracket']['teamsCount']): swiss_table += '|pbg' + str(i + 1) + '=down'", "'|map5=' + '\\n' sidebar += '|team_number=' + 
str(len(data['teams'])) + '\\n'", "dynamic_idx = 0 if dynamic: header += '{{tabs dynamic\\n' header", "Default first round to D and then future bracket type", "trackers round_max_win_match_count = [1] * (len(stage['bracket']['series']) + 1) round_max_win_match_count[0] =", "for rank, record in enumerate(stage['standings']): if record['disqualified']: swiss_table += '|bg'", "player_tag = 'p' + str(idx + 1) if player['_id'] in", "|lastvs1= |lastscore1= |lastvsscore1=}}\\n' prize_pool += '{{prize pool slot |place=2 |usdprize=0", "!= numGames: if rounds: event_format += '** ' + str(rounds)", "ccs_winter_minor_wiki = 'Calrissian_Cup/Winter/Minor' ccs_winter_major_id = '60019f8ebcc5ed46373408a1' ccs_winter_major_wiki = 'Calrissian_Cup/Winter/Major' ccs_spring_minor_id", "bracket_type = 'D' else: bracket_type = 'D' if match['matchType'] ==", "sorted(stage['matches'], key=itemgetter('matchNumber')) matches = stage['matches'] for match in matches: #", "'winner': # round_max_win_match_count[match['roundNumber']] = max(match['matchNumber'], # round_max_win_match_count[match['roundNumber']]) # elif match['matchType']", "import battlefy_wiki_linkings from datetime import datetime from operator import itemgetter", "ccs_summer_minor_wiki = 'Calrissian_Cup/Summer/Minor' ccs_summer_major_id = '60dd319012cb9c33c2f63868' ccs_summer_major_wiki = 'Calrissian_Cup/Summer/Major' ccs_fall_minor_id", "event_path.mkdir(parents=True, exist_ok=True) filename = Path.joinpath(event_path, event_data.tournament_data['name'] + '.wiki') with open(filename,", "group in enumerate(stage['groups']): if idx == 1: tables += '{{box|start|padding=2em}}\\n'", "+ '-round ' + stage['bracket']['type'] + '\\n' elif stage['bracket']['type'] ==", "standing['disqualified']: # has_drop = True team_info = bw_teams.get_team_info(teams[standing['team']['_id']]['persistentTeamID'], teams[standing['team']['_id']]['name']) group_table", "= '{{TeamCard\\n' team_info = bw_teams.get_team_info(team[3], team[1]) teams_table += 
'|team=' +", "+= '|games2=' if 'winner' in match['bottom'] and match['bottom']['winner']: match_line +=", "# In DE brackets D means the team dropped down", "+ 1) + '=down' for standing in stage['standings']: if standing_id", "elif stage['bracket']['type'] == 'elimination': f.write('===Playoffs===\\n') bracket = create_elim_bracket(stage, event_data.tournament_data['teams'], bw_teams)", "key=itemgetter('matchNumber')) matches = stage['matches'] for match in matches: # TODO:", "standing['_id']: # if standing['disqualified']: # has_drop = True team_info =", "else: bracket_type = 'D' else: bracket_type = 'D' if match['matchType']", "if include_matches: match_table = '{{MatchListStart|title=Group ' + group['name'] + '", "'|date=\\n' if 'teamID' in match['top']: team_top = bw_teams.get_team_info(teams[match['top']['teamID']]['persistentTeamID'], teams[match['top']['teamID']]['name']) elif", "8 team DE the third winners bracket round is #", "battlefy_wiki_linkings.BattlefyWikiTeamLinkings() bw_players = battlefy_wiki_linkings.BattlefyWikiPlayerLinkings() event_data = battlefy_data.BattlefyData(tournament_id) event_data.load_tournament_data() # FORCE", "include_matches=True) f.write(round_robin_tables) else: print('Unsupported bracket type of: ' + stage['bracket']['type'])", "+= '{{box|end}}\\n' return swiss_match_table def create_elim_bracket(stage, teams, bw_teams): if stage['bracket']['style']", "prize_pool = create_prize_pool(event_data.tournament_data['prizes']) f.write(prize_pool) f.write('==Participants==\\n') teams = create_participants(event_data.tournament_data, bw_players, bw_teams,", "str(rank + 1) + '=' + dropped_style + '' else:", "= '' for stage in data['stages']: event_format += '* '", "battlefy_wiki_linkings.BattlefyWikiPlayerLinkings() event_data = battlefy_data.BattlefyData(tournament_id) event_data.load_tournament_data() # FORCE REDUCE TEAMS event_data.reduce_teams()", "+ str(rank + 1) + '=' + team_info['teamteamplate'] swiss_table +=", "match_line += 
'|twitch= |youtube=\\n' match_line += '|vod=\\n' match_line += '}}\\n'", "1) + '=down' if (i + 1) % 8 ==", "dynamic\\n' header += '|name' + str(dynamic_idx+1) + '=' + dynamic[dynamic_idx]['tab_name']", "there are rounds where D vs L happen such as", "match in matches: match_line = create_match_maps(match, teams, bw_teams) if not", "'=down' for standing in stage['standings']: if standing_id == standing['_id']: #", "'.wiki') with open(filename, 'w+', newline='\\n', encoding='utf-8') as f: display =", "'D' else: bracket_type = 'D' bracket_indicator = '|R' + str(match['roundNumber'])", "+ 1) round_max_loss_match_count[0] = 0 # matches = sorted(stage['matches'], key=itemgetter('matchNumber'))", "True team_info = bw_teams.get_team_info(teams[standing['team']['_id']]['persistentTeamID'], teams[standing['team']['_id']]['name']) group_table += '|bg' + str(pos", "include_matches=True): tables = '' for idx, group in enumerate(stage['groups']): if", "'=down|team' + str(pos + 1) + \"=\" \\ + team_info['teamteamplate']", "'\\n' group_header = '' group_table = '' for pos, standing_id", "+= '}}\\n' teams_ordered += teams_table footer = '{{TeamCard columns end}}\\n'", "match['matchType'] == 'winner': round_match_offset = -2 * round_max_win_match_count[match['roundNumber'] - 1]", "+ '\\n' sidebar += '|instagram=' + '\\n' sidebar += '|discord='", "+= 'FF' else: match_line += '|games1=' + str(match['top']['score']) match_line +=", "bracket += bracket_indicator + 'team=' + team_name + ' '", "match['top']: team_name = bw_teams.get_team_info(teams[match['top']['teamID']]['persistentTeamID'], teams[match['top']['teamID']]['name'])['teamteamplate'] bracket += bracket_indicator + 'team='", "In DE brackets D means the team dropped down from", "team_info['teamteamplate'] swiss_table += '|temp_tie' + str(rank+1) + '=' + \"{:7.3f}\".format(record['opponentsMatchWinPercentage'])", "type of: ' + stage['bracket']['type']) if __name__ == '__main__': main()", "swiss_match_table += 
'{{MatchListEnd}}\\n' swiss_match_table += '{{box|end}}\\n' return swiss_match_table def create_elim_bracket(stage,", "create_prize_pool(prize): prize_pool = prize + '\\n' prize_pool += '{{prize pool", "instead # Default first round to D and then future", "'-round ' \\ + stage['bracket']['seriesStyle'] + str(numGames) + '\\n' rounds", "+ '\\n' teams_table += '|image=' + team_info['image'] + '\\n' for", "match['inConsolationBracket'] # May also just need to keep track of", "ccs_summer_minor_id = '60b41961d35b1411a7b31d64' ccs_summer_minor_wiki = 'Calrissian_Cup/Summer/Minor' ccs_summer_major_id = '60dd319012cb9c33c2f63868' ccs_summer_major_wiki", "'|sponsor=' + '\\n' sidebar += '|localcurrency=' + '\\n' sidebar +=", "group_header += '|tiebreaker1=series\\n' tables += group_header tables += group_table tables", "data['prizes'] + '\\n' sidebar += '|type=Online' + '\\n' sidebar +=", "create_match_maps(match, teams, bw_teams) match_table += match_line tables += match_table tables", "in stage['standings']: if standing_id == standing['_id']: # if standing['disqualified']: #", "'D' else: bracket_type = 'D' if match['matchType'] == 'winner': round_match_offset", "teams = rank_teams(data, bw_teams, sort_place) dynamic_idx = 0 if dynamic:", "+ team_info['teamteamplate'] swiss_table += '|temp_tie' + str(rank+1) + '=' +", "# Not exactly sure how to address round_team_number, in a", "2 # Increment for next time if match['matchType'] == 'winner':", "round # So there are rounds where D vs L", "+= '}}\\n' match_line += '}}\\n' return match_line def create_round_robin_tables(stage, teams,", "event_format += '** ' + str(rounds) + '-round ' \\", "sidebar = create_sidebar(event_data.tournament_data, wiki_name) f.write(sidebar) f.write('==About==\\n') f.write('===Format===\\n') event_format = create_event_format(event_data.tournament_data)", "8 == 0: swiss_table += '\\n' if '\\n' not in", "start|cols=5|height=250}}\\n' for team_num, team in enumerate(teams): if dynamic: if team_num", 
"'|bracket=https://battlefy.com/' + data['organization']['slug'] + '/' + data['slug'] + '/' \\", "match in matches: # TODO: this will need to get", "teams_table += '|qualifier=\\n' teams_table += '}}\\n' teams_ordered += teams_table footer", "end}}\\n' if dynamic: footer += '}}\\n' return header + teams_ordered", "elif 'teamID' in match['bottom']: team_previous_round[match['bottom']['teamID']] = False bracket += '\\n'", "dropped_style = 'drop' swiss_table = '{{SwissTableLeague|rounds=' + str(stage['bracket']['roundsCount']) + '|diff=false\\n'", "'|organizer-link=' + '\\n' sidebar += '|sponsor=' + '\\n' sidebar +=", "for match in matches: # TODO: this will need to", "in match['top']: team_top = bw_teams.get_team_info(teams[match['top']['teamID']]['persistentTeamID'], teams[match['top']['teamID']]['name']) elif match['isBye']: team_top =", "in stage['bracket']['series']: if match['numGames'] != numGames: if rounds: event_format +=", "in team_previous_round: if team_previous_round[match['top']['teamID']]: bracket_type = 'W' else: bracket_type =", "'6061b764f68d8733c8455fcf' ccs_spring_major_wiki = 'Calrissian_Cup/Spring/Major' ccs_summer_minor_id = '60b41961d35b1411a7b31d64' ccs_summer_minor_wiki = 'Calrissian_Cup/Summer/Minor'", "in enumerate(stage['groups']): if idx == 1: tables += '{{box|start|padding=2em}}\\n' else:", "header + teams_ordered + footer def create_swiss_table(stage, bw_teams): dropped_style =", "def create_swiss_matches(matches, teams, bw_teams): swiss_match_table = '' rounds = dict()", "'|twitter=' + '\\n' sidebar += '|twitch=' + '\\n' sidebar +=", "+= '|map3=' + '\\n' sidebar += '|map4=' + '\\n' sidebar", "|lastscore2= |lastvsscore2=\\n' prize_pool += '}}\\n' prize_pool += '{{prize pool slot", "str(pos + 1) + \"=\" \\ + team_info['teamteamplate'] + '\\n'", "+ '\\n' sidebar += '|map5=' + '\\n' sidebar += '|team_number='", "+ '\\n' sidebar += '|platform=' + data['platform'] + '\\n' sidebar", "+= '{{TeamCard columns start|cols=5|height=250}}\\n' for 
team_num, team in enumerate(teams): if", "import calcup_roster_tracking def create_sidebar(data, wiki_name): sidebar = '{{Infobox league' +", "ccs_spring_major_wiki = 'Calrissian_Cup/Spring/Major' ccs_summer_minor_id = '60b41961d35b1411a7b31d64' ccs_summer_minor_wiki = 'Calrissian_Cup/Summer/Minor' ccs_summer_major_id", "'}}\\n' prize_pool += '{{Prize pool end}}\\n' return prize_pool def main():", "'\\n' teams_table += '|image=' + team_info['image'] + '\\n' for idx,", "bw_teams.get_team_info('0', 'BYE') if 'teamID' in match['bottom']: team_bot = bw_teams.get_team_info(teams[match['bottom']['teamID']]['persistentTeamID'], teams[match['bottom']['teamID']]['name'])", "'|platform=' + data['platform'] + '\\n' sidebar += '|country=' + '\\n'", "'\\n' sidebar += '|organizer-link=' + '\\n' sidebar += '|sponsor=' +", "+ str(numGames) + '\\n' return event_format def rank_teams(data, bw_teams, sort_place=True,", "1) + '=down' team_info = bw_teams.get_team_info(record['team']['persistentTeamID'], record['team']['name']) swiss_table += '|team'", "[1] * (len(stage['bracket']['series']) + 1) round_max_loss_match_count[0] = 0 # matches", "team_id in data['teams']: if 'place' in data['teams'][team_id]: place = data['teams'][team_id]['place']", "# So there are rounds where D vs L happen", "tables += '===={{HiddenSort|Group ' + group['name'] + '}}====\\n' tables +=", "for match in matches: match_line = create_match_maps(match, teams, bw_teams) if", "'\\n' sidebar += '|series=' + '\\n' sidebar += '|organizer=' +", "bracket_type = 'W' else: bracket_type = 'D' else: bracket_type =", "bw_teams, wiki_name, include_matches=True): tables = '' for idx, group in", "'{{SwissTableLeague|rounds=' + str(stage['bracket']['roundsCount']) + '|diff=false\\n' for i in range(stage['bracket']['teamsCount']): swiss_table", "bracket_type = 'D' bracket_indicator = '|R' + str(match['roundNumber']) + bracket_type", "if match['isBye']: match_line += '|walkover=1' match_line += '|games1=' if 
match['top']['winner']:", "if stage['bracket']['style'] == 'single': bracket = '{{' + str(stage['bracket']['teamsCount']) +", "if sort_place: teams = sorted(teams, key=itemgetter(2, 4, 0)) else: teams", "'|liquipediatier=' + '\\n' sidebar += '|name=' + data['name'] + '\\n'", "' if 'score' in match['top']: bracket += bracket_indicator + 'score='", "brackets D means the team dropped down from the previous", "= 0 for match in stage['bracket']['series']: if match['numGames'] != numGames:", "the team won the previous round # So there are", "= 'Calrissian_Cup/Championship' world_cup_id = '611dac6ecb6f6260d5f30b6e' world_cup_wiki = 'World_Cup' twin_suns_tourny_id =", "- 1] else: round_match_offset = -2 * round_max_loss_match_count[match['roundNumber'] - 1]", "elif match['isBye']: team_bot = bw_teams.get_team_info('0', 'BYE') match_line += '|team1=' +", "'winner' in match['top'] and match['top']['winner']: bracket += bracket_indicator + 'win=1", "in match['top'] and match['top']['winner']: bracket += bracket_indicator + 'win=1 '", "'{{box|end}}\\n' return tables def create_prize_pool(prize): prize_pool = prize + '\\n'", "'\\n' sidebar += '|map3=' + '\\n' sidebar += '|map4=' +", "Path.joinpath(event_path, event_data.tournament_data['name'] + '.wiki') with open(filename, 'w+', newline='\\n', encoding='utf-8') as", "match_line += '}}\\n' match_line += '}}\\n' return match_line def create_round_robin_tables(stage,", "if standing_id == standing['_id']: # if standing['disqualified']: # has_drop =", "match in rounds[str(i)]: swiss_match_table += match swiss_match_table += '{{MatchListEnd}}\\n' swiss_match_table", "# round_max_loss_match_count[match['roundNumber']] = max(match['matchNumber'], # round_max_loss_match_count[match['roundNumber']]) if not 'teamID' in", "+= '}}\\n' return swiss_table def create_swiss_matches(matches, teams, bw_teams): swiss_match_table =", "match_line += '|winner=2\\n' else: match_line += '|winner=0\\n' if match['isBye']: match_line", "+ 
str(dynamic_idx+1) + '=' + '\\n' header += '{{TeamCard columns", "== 'swiss': f.write('===Swiss Stage===\\n') f.write('====Swiss Standings====\\n') swiss_table = create_swiss_table(stage, bw_teams)", "ccs_winter_major_wiki = 'Calrissian_Cup/Winter/Major' ccs_spring_minor_id = '603c00fbfe4fb811b3168f5b' ccs_spring_minor_wiki = 'Calrissian_Cup/Spring/Minor' ccs_spring_major_id", "'|instagram=' + '\\n' sidebar += '|discord=' + '\\n' sidebar +=", "+ 1) + '=' + dropped_style + '' else: swiss_table", "'\\n' sidebar += '|patch=' + '\\n' sidebar += '|sdate=' +", "'|map2=' + '\\n' sidebar += '|map3=' + '\\n' sidebar +=", "+= bracket_indicator + 'literal=BYE ' if 'score' in match['top']: bracket", "def create_prize_pool(prize): prize_pool = prize + '\\n' prize_pool += '{{prize", "0 for match in stage['bracket']['series']: if match['numGames'] != numGames: if", "+ str(pos + 1) + '=down|team' + str(pos + 1)", "str(match['top']['score']) match_line += '|games2=' + str(match['bottom']['score']) + '\\n' match_line +=", "<reponame>moff-wildfire/sws-battlefy import battlefy_data import battlefy_wiki_linkings from datetime import datetime from", "len(rounds) + 1): if i == 1: swiss_match_table += '{{box|start|padding=2em}}\\n'", "KeyError: sidebar += '|edate=\\n' sidebar += '|web=' + '\\n' sidebar", "dynamic[dynamic_idx]['count']: teams_ordered += '{{TeamCard columns end}}\\n' dynamic_idx += 1 teams_ordered", "'Calrissian_Cup/Championship' world_cup_id = '611dac6ecb6f6260d5f30b6e' world_cup_wiki = 'World_Cup' twin_suns_tourny_id = '60806876938bed74f6edea9e'", "+ ' ' else: bracket += bracket_indicator + 'literal=BYE '", "in match['bottom']: team_name = bw_teams.get_team_info(teams[match['bottom']['teamID']]['persistentTeamID'], teams[match['bottom']['teamID']]['name'])['teamteamplate'] bracket += bracket_indicator +", "+ 'SETeamBracket\\n' elif stage['bracket']['style'] == 'double': bracket = '{{' +", "teams have the same place at the end teams =", "= '' if not match['isComplete']: 
return match_line match_line = '{{MatchMaps\\n'", "+= '}}\\n' return bracket def create_match_maps(match, teams, bw_teams): match_line =", "templates # In DE brackets D means the team dropped", "'\\n' sidebar += '|liquipediatier=' + '\\n' sidebar += '|name=' +", "match_line += '|team2=' + team_bot['teamteamplate'] if 'isTie' in match and", "bw_teams = battlefy_wiki_linkings.BattlefyWikiTeamLinkings() bw_players = battlefy_wiki_linkings.BattlefyWikiPlayerLinkings() event_data = battlefy_data.BattlefyData(tournament_id) event_data.load_tournament_data()", "else: data['teams'][standing['team']['_id']]['place'] = len(stage['standings']) + place teams = list() for", "+ str(rank + 1) + '=down' team_info = bw_teams.get_team_info(record['team']['persistentTeamID'], record['team']['name'])", "sort_place: teams = sorted(teams, key=itemgetter(2, 4, 0)) else: teams =", "\\ + stage['bracket']['seriesStyle'] + str(numGames) + '\\n' return event_format def", "= bw_teams.get_team_info(teams[match['top']['teamID']]['persistentTeamID'], teams[match['top']['teamID']]['name']) elif match['isBye']: team_top = bw_teams.get_team_info('0', 'BYE') if", "== 1: swiss_match_table += '{{box|start|padding=2em}}\\n' else: swiss_match_table += '{{box|break|padding=2em}}\\n' swiss_match_table", "for pos, standing_id in enumerate(group['standingIDs']): group_header += '|pbg' + str(pos", "stage['name'] + '===\\n') round_robin_tables = create_round_robin_tables(stage, event_data.tournament_data['teams'], bw_teams, wiki_name, include_matches=True)", "and in a 16 team DE the 4th winners bracket", "+= '|liquipediatier=' + '\\n' sidebar += '|name=' + data['name'] +", "round_max_win_match_count = [1] * (len(stage['bracket']['series']) + 1) round_max_win_match_count[0] = 0", "\\ + str(match['matchNumber'] * 2 - 1 + round_match_offset) if", "match['bottom']: bracket += bracket_indicator + 'score=' + str(match['bottom']['score']) + '", "event_data.tournament_data['stages']: if stage['bracket']['type'] == 
'swiss': f.write('===Swiss Stage===\\n') f.write('====Swiss Standings====\\n') swiss_table", "prize_pool += '|tbd |lastvs1= |lastscore1= |lastvsscore1=\\n' prize_pool += '|tbd |lastvs2=", "+ '=' + '\\n' header += '{{TeamCard columns start|cols=5|height=250}}\\n' for", "dynamic: header += '{{tabs dynamic\\n' header += '|name' + str(dynamic_idx+1)", "D vs L happen such as R2D1 vs R2W5 and", "tournament_id = world_cup_id wiki_name = world_cup_wiki participant_tabs = [ #", "bw_teams.get_team_info(teams[standing['team']['_id']]['persistentTeamID'], teams[standing['team']['_id']]['name']) group_table += '|bg' + str(pos + 1) +", "[ # {'tab_name': 'Top 16', # 'count': 16}, # {'tab_name':", "create_swiss_matches(stage['matches'], event_data.tournament_data['teams'], bw_teams) f.write(swiss_matches) elif stage['bracket']['type'] == 'elimination': f.write('===Playoffs===\\n') bracket", "slot |place=2 |usdprize=0 |tbd |lastvs1= |lastscore1= |lastvsscore1=}}\\n' prize_pool += '{{prize", "bw_teams, dynamic=participant_tabs, sort_place=True) f.write(teams) f.write('==Results==\\n') for stage in event_data.tournament_data['stages']: if", "+ player_info['name'] \\ + ' |' + player_tag + 'flag='", "'|temp_tie' + str(rank+1) + '=' + \"{:7.3f}\".format(record['opponentsMatchWinPercentage']) + '\\n' swiss_table", "f.write(swiss_table) f.write('====Swiss Match Results====\\n') swiss_matches = create_swiss_matches(stage['matches'], event_data.tournament_data['teams'], bw_teams) f.write(swiss_matches)", "round_match_offset = -2 * round_max_win_match_count[match['roundNumber'] - 1] else: round_match_offset =", "sort_place=True, break_ties=False): for stage in data['stages']: for place, standing in", "end teams = rank_teams(data, bw_teams, sort_place) dynamic_idx = 0 if", "'elimination': f.write('===Playoffs===\\n') bracket = create_elim_bracket(stage, event_data.tournament_data['teams'], bw_teams) f.write(bracket) elif stage['bracket']['type']", "else: place = 0 team_info = 
bw_teams.get_team_info(data['teams'][team_id]['persistentTeamID'], data['teams'][team_id]['name']) teams.append((team_id, data['teams'][team_id]['name'],", "True elif 'teamID' in match['bottom']: team_previous_round[match['bottom']['teamID']] = False bracket +=", "'{{Upcoming matches tournament|' + wiki_name + '}}\\n' return sidebar def", "|lastvs4= |lastscore4= |lastvsscore4=\\n' prize_pool += '}}\\n' prize_pool += '{{Prize pool", "tournament|' + wiki_name + '}}\\n' return sidebar def create_event_format(data): event_format", "bw_teams) match_table += match_line tables += match_table tables += '{{MatchListEnd}}\\n'", "'=' + team_info['teamteamplate'] swiss_table += '|temp_tie' + str(rank+1) + '='", "get updated for non SE16 templates # In DE brackets", "match['bottom']['teamID'] in team_previous_round: if team_previous_round[match['bottom']['teamID']]: bracket_type = 'W' else: bracket_type", "def create_swiss_table(stage, bw_teams): dropped_style = 'drop' swiss_table = '{{SwissTableLeague|rounds=' +", "the previous round # So there are rounds where D", "+= '|team2=' + team_bot['teamteamplate'] if 'isTie' in match and match['isTie']:", "else: match_line += '|winner=0\\n' if match['isBye']: match_line += '|walkover=1' match_line", "+ team_info['name'] + '\\n' teams_table += '|image=' + team_info['image'] +", "'\\n' sidebar += '|rulebook=' + '\\n' sidebar += '|twitter=' +", "for standing in stage['standings']: if standing_id == standing['_id']: # if", "print('Unsupported bracket type of: ' + stage['bracket']['type']) if __name__ ==", "player_tag + 'flag=' + player_info['flag'] if player_info['link']: teams_table += '", "prize_pool = prize + '\\n' prize_pool += '{{prize pool start}}\\n'", "teams.append((team_id, data['teams'][team_id]['name'], place, data['teams'][team_id]['persistentTeamID'], team_info['name'] )) if sort_place: teams =", "previous round # In DE brackest W means the team", "is called the 6th round # 
https://liquipedia.net/rainbowsix/Template:4DETeamBracket/doc # https://liquipedia.net/rainbowsix/Template:8DETeamBracket/doc #", "match_line += '}}\\n' return match_line def create_round_robin_tables(stage, teams, bw_teams, wiki_name,", "rounds = 0 for match in stage['bracket']['series']: if match['numGames'] !=", "1: tables += '{{box|start|padding=2em}}\\n' else: tables += '{{box|break|padding=2em}}\\n' tables +=", "+ '-round ' \\ + stage['bracket']['seriesStyle'] + str(numGames) + '\\n'", "' team_previous_round[match['bottom']['teamID']] = True elif 'teamID' in match['bottom']: team_previous_round[match['bottom']['teamID']] =", "round_max_loss_match_count[match['roundNumber'] - 1] \\ + (round_max_win_match_count[match['roundNumber']] - round_max_win_match_count[match['roundNumber'] - 1])", "1) % 8 == 0: swiss_table += '\\n' if '\\n'", "stage in data['stages']: for place, standing in enumerate(stage['standings']): if 'place'", "teams_ordered = '' # Use prior rounds as a tiebreaker", "from pathlib import Path import calcup_roster_tracking def create_sidebar(data, wiki_name): sidebar", "+ 1) + '=down' team_info = bw_teams.get_team_info(record['team']['persistentTeamID'], record['team']['name']) swiss_table +=", "+ '\\n' sidebar += '|next=' + '\\n' sidebar += '}}\\n'", "round_max_loss_match_count[match['roundNumber']] = max(match['matchNumber'], # round_max_loss_match_count[match['roundNumber']]) if not 'teamID' in match['top']:", "# elif match['matchType'] == 'loser': # round_max_loss_match_count[match['roundNumber']] = max(match['matchNumber'], #", "try: sidebar += '|edate=' + datetime.strptime(data['lastCompletedMatchAt'], '%Y-%m-%dT%H:%M:%S.%fZ').strftime( '%Y-%m-%d') + '\\n'", "'|bg' + str(pos + 1) + '=down|team' + str(pos +", "i in range(stage['bracket']['teamsCount']): swiss_table += '|pbg' + str(i + 1)", "= dict() # set up round-match count trackers round_max_win_match_count =", "if match['matchType'] == 'winner': round_match_offset = -2 * 
round_max_win_match_count[match['roundNumber'] -", "= 0 if dynamic: header += '{{tabs dynamic\\n' header +=", "'60806876938bed74f6edea9e' twin_suns_wiki = 'Twin_Suns_Tournament' gsl_s1_id = '5ff4b388fd124e11b18e185d' gsl_s1_wiki = 'Global_Squadrons_League/2021/Season_1'", "match['matchType'] == 'winner': round_max_win_match_count[match['roundNumber']] = max(match['matchNumber'], round_max_win_match_count[match['roundNumber']]) elif match['matchType'] ==", "stage['bracket']['type'] == 'swiss': f.write('===Swiss Stage===\\n') f.write('====Swiss Standings====\\n') swiss_table = create_swiss_table(stage,", "+ str(idx + 1) if player['_id'] in calcup_roster_tracking.eventid_to_missing_userid: player['userID'] =", "= 'World_Cup' twin_suns_tourny_id = '60806876938bed74f6edea9e' twin_suns_wiki = 'Twin_Suns_Tournament' gsl_s1_id =", "'\\n' header += '|This=1\\n' header += '|content' + str(dynamic_idx+1) +", "header += '|content' + str(dynamic_idx+1) + '=' + '\\n' header", "'{{prize pool slot |place=3-4 |usdprize=0\\n' prize_pool += '|tbd |lastvs1= |lastscore1=", "+= match_table tables += '{{MatchListEnd}}\\n' tables += '{{box|end}}\\n' return tables", "means the team won the previous round # So there", "+ '\\n' group_header = '' group_table = '' for pos,", "match_line tables += match_table tables += '{{MatchListEnd}}\\n' tables += '{{box|end}}\\n'", "else: bracket += bracket_indicator + 'literal=BYE ' if 'score' in", "# {'tab_name': 'Other Notable Participants', # 'count': -1}, ] bw_teams", "for i in range(stage['bracket']['teamsCount']): swiss_table += '|pbg' + str(i +", "record['disqualified']: swiss_table += '|bg' + str(rank + 1) + '='", "+ data['name'] + '\\n' sidebar += '|tickername=' + data['name'] +", "+= '|map4=' + '\\n' sidebar += '|map5=' + '\\n' sidebar", "+= '}}\\n' return header + teams_ordered + footer def create_swiss_table(stage,", "player_info['flag'] if player_info['link']: teams_table += ' |' + player_tag +", "event_data.tournament_data['name'] + '}}\\n' 
f.write(display) sidebar = create_sidebar(event_data.tournament_data, wiki_name) f.write(sidebar) f.write('==About==\\n')", "sort_place) dynamic_idx = 0 if dynamic: header += '{{tabs dynamic\\n'", "create_sidebar(data, wiki_name): sidebar = '{{Infobox league' + '\\n' sidebar +=", "in a 16 team DE the 4th winners bracket round", "match['bottom']: team_previous_round[match['bottom']['teamID']] = False bracket += '\\n' bracket += '}}\\n'", "'|content' + str(dynamic_idx+1) + '=' + '\\n' teams_ordered += '{{TeamCard", "a tiebreaker for when multiple teams have the same place", "'5ff3354193edb53839d44d55' ccs_winter_minor_wiki = 'Calrissian_Cup/Winter/Minor' ccs_winter_major_id = '60019f8ebcc5ed46373408a1' ccs_winter_major_wiki = 'Calrissian_Cup/Winter/Major'", "'|name' + str(dynamic_idx+1) + '=' + dynamic[dynamic_idx]['tab_name'] + '\\n' header", "'}}====\\n' tables += '{{GroupTableLeague|title=Group ' + group['name'] + '|width=450px|show_p=false|date=|ties=true\\n' tables", "not 'teamID' in match['top']: continue if match['top']['teamID'] in team_previous_round: if", "such as R2D1 vs R2W5 and R2D2 vs R2W6 #", "sidebar += '{{Upcoming matches tournament|' + wiki_name + '}}\\n' return", "+ '=' + dynamic[dynamic_idx]['tab_name'] + '\\n' teams_ordered += '|content' +", "== 'double': bracket = '{{' + str(stage['bracket']['teamsCount']) + 'DETeamBracket\\n' else:", "+= '|name' + str(dynamic_idx + 1) + '=' + dynamic[dynamic_idx]['tab_name']", "+= bracket_indicator + 'win=2 ' team_previous_round[match['bottom']['teamID']] = True elif 'teamID'", "match_line += '|winner=0\\n' if match['isBye']: match_line += '|walkover=1' match_line +=", "W means the team won the previous round # So", "'loser': # round_max_loss_match_count[match['roundNumber']] = max(match['matchNumber'], # round_max_loss_match_count[match['roundNumber']]) if not 'teamID'", "= len(stage['standings']) + place teams = list() for team_id in", "else: match_line += 'FF' match_line += '|games2=' if 'winner' in", 
"sorted(teams, key=itemgetter(4, 0)) return teams def create_participants(data, bw_players, bw_teams, dynamic=[],", "'|diff=false\\n' for i in range(stage['bracket']['teamsCount']): swiss_table += '|pbg' + str(i", "= create_prize_pool(event_data.tournament_data['prizes']) f.write(prize_pool) f.write('==Participants==\\n') teams = create_participants(event_data.tournament_data, bw_players, bw_teams, dynamic=participant_tabs,", "' \\ + stage['bracket']['seriesStyle'] + str(numGames) + '\\n' return event_format", "'|twitch=' + '\\n' sidebar += '|instagram=' + '\\n' sidebar +=", "# FORCE REDUCE TEAMS event_data.reduce_teams() event_path = event_data.get_tournament_data_path() event_path.mkdir(parents=True, exist_ok=True)", "'' group_table = '' for pos, standing_id in enumerate(group['standingIDs']): group_header", "if 'winner' in match['top'] and match['top']['winner']: bracket += bracket_indicator +", "sort_place=True) f.write(teams) f.write('==Results==\\n') for stage in event_data.tournament_data['stages']: if stage['bracket']['type'] ==", "sidebar += '|map2=' + '\\n' sidebar += '|map3=' + '\\n'", "f.write(swiss_matches) elif stage['bracket']['type'] == 'elimination': f.write('===Playoffs===\\n') bracket = create_elim_bracket(stage, event_data.tournament_data['teams'],", "stage style: ' + stage['bracket']['style']) return # todo handle double", "'literal=BYE ' if 'score' in match['top']: bracket += bracket_indicator +", "player['inGameName']) teams_table += '|' + player_tag + '=' + player_info['name']", "+= '|image=' + '\\n' sidebar += '|icon=' + '\\n' sidebar", "1 numGames = match['numGames'] else: rounds += 1 if rounds:", "'|tournament=' + wiki_name + '\\n' group_header = '' group_table =", "team_bot = bw_teams.get_team_info(teams[match['bottom']['teamID']]['persistentTeamID'], teams[match['bottom']['teamID']]['name']) elif match['isBye']: team_bot = bw_teams.get_team_info('0', 'BYE')", "bw_teams): dropped_style = 'drop' swiss_table = '{{SwissTableLeague|rounds=' 
+ str(stage['bracket']['roundsCount']) +", "'teamID' in match['bottom']: if match['bottom']['teamID'] in team_previous_round: if team_previous_round[match['bottom']['teamID']]: bracket_type", "= 'Twin_Suns_Tournament' gsl_s1_id = '5ff4b388fd124e11b18e185d' gsl_s1_wiki = 'Global_Squadrons_League/2021/Season_1' tournament_id =", "place, standing in enumerate(stage['standings']): if 'place' in standing: if 'place'", "data['teams'][team_id]['name']) teams.append((team_id, data['teams'][team_id]['name'], place, data['teams'][team_id]['persistentTeamID'], team_info['name'] )) if sort_place: teams", "teams_ordered += '{{TeamCard columns end}}\\n' dynamic_idx += 1 teams_ordered +=", "0: swiss_table += '\\n' if '\\n' not in swiss_table[-1]: swiss_table", "standing in stage['standings']: if standing_id == standing['_id']: # if standing['disqualified']:", "match_line = '{{MatchMaps\\n' match_line += '|date=\\n' if 'teamID' in match['top']:", "f.write('==Participants==\\n') teams = create_participants(event_data.tournament_data, bw_players, bw_teams, dynamic=participant_tabs, sort_place=True) f.write(teams) f.write('==Results==\\n')", "footer def create_swiss_table(stage, bw_teams): dropped_style = 'drop' swiss_table = '{{SwissTableLeague|rounds='", "bw_teams, dynamic=[], sort_place=True): header = '{{TeamCardToggleButton}}\\n' teams_ordered = '' #", "data['teams'][team_id]['persistentTeamID'], team_info['name'] )) if sort_place: teams = sorted(teams, key=itemgetter(2, 4,", "= '60b41961d35b1411a7b31d64' ccs_summer_minor_wiki = 'Calrissian_Cup/Summer/Minor' ccs_summer_major_id = '60dd319012cb9c33c2f63868' ccs_summer_major_wiki =", "swiss_match_table def create_elim_bracket(stage, teams, bw_teams): if stage['bracket']['style'] == 'single': bracket", "teams_table += '}}\\n' teams_ordered += teams_table footer = '{{TeamCard columns", "player['userID'] = calcup_roster_tracking.eventid_to_missing_userid[player['_id']] player_info = bw_players.get_player_info(player['userID'], 
player['inGameName']) teams_table += '|'", "'count': -1}, ] bw_teams = battlefy_wiki_linkings.BattlefyWikiTeamLinkings() bw_players = battlefy_wiki_linkings.BattlefyWikiPlayerLinkings() event_data", "create_round_robin_tables(stage, event_data.tournament_data['teams'], bw_teams, wiki_name, include_matches=True) f.write(round_robin_tables) else: print('Unsupported bracket type", "+ '\\n' sidebar += '|map4=' + '\\n' sidebar += '|map5='", "' Matches|width=450px|hide=true}}\\n' for match in group['matches']: match_line = create_match_maps(match, teams,", "https://liquipedia.net/rainbowsix/Template:16DETeamBracket/doc # if match['matchType'] == 'winner': # round_max_win_match_count[match['roundNumber']] = max(match['matchNumber'],", "1 + round_match_offset) if 'teamID' in match['top']: team_name = bw_teams.get_team_info(teams[match['top']['teamID']]['persistentTeamID'],", "if player_info['link']: teams_table += ' |' + player_tag + 'link='", "+= '|team' + str(rank + 1) + '=' + team_info['teamteamplate']", "team_name + ' ' else: bracket += bracket_indicator + 'literal=BYE", "+= '|shortname=' + data['name'] + '\\n' sidebar += '|tickername=' +", "'|map1=' + '\\n' sidebar += '|map2=' + '\\n' sidebar +=", "+ '\\n' teams_ordered += '|content' + str(dynamic_idx+1) + '=' +", "prize + '\\n' prize_pool += '{{prize pool start}}\\n' prize_pool +=", "rank_teams(data, bw_teams, sort_place) dynamic_idx = 0 if dynamic: header +=", "elimination brackets # set up team number trackers team_previous_round =", "= 0 team_info = bw_teams.get_team_info(data['teams'][team_id]['persistentTeamID'], data['teams'][team_id]['name']) teams.append((team_id, data['teams'][team_id]['name'], place, data['teams'][team_id]['persistentTeamID'],", "in calcup_roster_tracking.eventid_to_missing_userid: player['userID'] = calcup_roster_tracking.eventid_to_missing_userid[player['_id']] player_info = bw_players.get_player_info(player['userID'], player['inGameName']) teams_table", "swiss_table[-1]: swiss_table += 
'\\n' for rank, record in enumerate(stage['standings']): if", "'{{prize pool slot |place=2 |usdprize=0 |tbd |lastvs1= |lastscore1= |lastvsscore1=}}\\n' prize_pool", "bw_teams, sort_place) dynamic_idx = 0 if dynamic: header += '{{tabs", "team dropped down from the previous round # In DE", "need to keep track of match['next'] and build up the", "the 4th winners bracket round is called the 6th round", "= '{{DISPLAYTITLE:' + event_data.tournament_data['name'] + '}}\\n' f.write(display) sidebar = create_sidebar(event_data.tournament_data,", "+ '=down' for standing in stage['standings']: if standing_id == standing['_id']:", "if dynamic: header += '{{tabs dynamic\\n' header += '|name' +", "'{{TeamCard columns start|cols=5|height=250}}\\n' teams_table = '{{TeamCard\\n' team_info = bw_teams.get_team_info(team[3], team[1])", "' ' else: bracket += bracket_indicator + 'literal=BYE ' if", "match_line += '|winner=1\\n' elif 'winner' in match['bottom'] and match['bottom']['winner']: match_line", "match_line += '|games2=' if 'winner' in match['bottom'] and match['bottom']['winner']: match_line", "'|sdate=' + datetime.strptime(data['checkInStartTime'], '%Y-%m-%dT%H:%M:%S.%fZ').strftime( '%Y-%m-%d') + '\\n' try: sidebar +=", "+ '\\n' sidebar += '|organizer-link=' + '\\n' sidebar += '|sponsor='", "stage['name'] + '\\n' if stage['bracket']['type'] == \"swiss\": event_format += '**", "'|format=' + '\\n' sidebar += '|patch=' + '\\n' sidebar +=", "create_swiss_table(stage, bw_teams) f.write(swiss_table) f.write('====Swiss Match Results====\\n') swiss_matches = create_swiss_matches(stage['matches'], event_data.tournament_data['teams'],", "pool end}}\\n' return prize_pool def main(): ccs_winter_minor_id = '5ff3354193edb53839d44d55' ccs_winter_minor_wiki", "twin_suns_tourny_id = '60806876938bed74f6edea9e' twin_suns_wiki = 'Twin_Suns_Tournament' gsl_s1_id = '5ff4b388fd124e11b18e185d' gsl_s1_wiki", "+= '|localcurrency=' + '\\n' sidebar += '|prizepool=' + data['prizes'] +", "|lastscore1= 
|lastvsscore1=\\n' prize_pool += '|tbd |lastvs2= |lastscore2= |lastvsscore2=\\n' prize_pool +=", "in event_data.tournament_data['stages']: if stage['bracket']['type'] == 'swiss': f.write('===Swiss Stage===\\n') f.write('====Swiss Standings====\\n')", "bw_players, bw_teams, dynamic=[], sort_place=True): header = '{{TeamCardToggleButton}}\\n' teams_ordered = ''", "+ datetime.strptime(data['lastCompletedMatchAt'], '%Y-%m-%dT%H:%M:%S.%fZ').strftime( '%Y-%m-%d') + '\\n' except KeyError: sidebar +=", "team_top = bw_teams.get_team_info('0', 'BYE') if 'teamID' in match['bottom']: team_bot =", "= event_data.get_tournament_data_path() event_path.mkdir(parents=True, exist_ok=True) filename = Path.joinpath(event_path, event_data.tournament_data['name'] + '.wiki')", "style: ' + stage['bracket']['style']) return # todo handle double elimination", "bw_teams.get_team_info(teams[match['bottom']['teamID']]['persistentTeamID'], teams[match['bottom']['teamID']]['name']) elif match['isBye']: team_bot = bw_teams.get_team_info('0', 'BYE') match_line +=", "in match['top']: continue if match['top']['teamID'] in team_previous_round: if team_previous_round[match['top']['teamID']]: bracket_type", "sidebar def create_event_format(data): event_format = '' for stage in data['stages']:", "match_line def create_round_robin_tables(stage, teams, bw_teams, wiki_name, include_matches=True): tables = ''", "stage['bracket']['type'] == 'elimination': f.write('===Playoffs===\\n') bracket = create_elim_bracket(stage, event_data.tournament_data['teams'], bw_teams) f.write(bracket)", "= match['numGames'] else: rounds += 1 if rounds: event_format +=", "|place=5-8 |usdprize=0\\n' prize_pool += '|tbd |lastvs1= |lastscore1= |lastvsscore1=\\n' prize_pool +=", "'\\n' sidebar += '|twitch=' + '\\n' sidebar += '|instagram=' +", "in data['stages']: for place, standing in enumerate(stage['standings']): if 'place' in", "match['matchType'] == 'loser': round_max_loss_match_count[match['roundNumber']] = 
max(match['matchNumber'], round_max_loss_match_count[match['roundNumber']]) bracket_indicator = '|R'", "FORCE REDUCE TEAMS event_data.reduce_teams() event_path = event_data.get_tournament_data_path() event_path.mkdir(parents=True, exist_ok=True) filename", "= False bracket += '\\n' if 'teamID' in match['bottom']: if", "match['top'] and match['top']['winner']: bracket += bracket_indicator + 'win=1 ' team_previous_round[match['top']['teamID']]", "R2D1 vs R2W5 and R2D2 vs R2W6 # Might want", "'swiss': f.write('===Swiss Stage===\\n') f.write('====Swiss Standings====\\n') swiss_table = create_swiss_table(stage, bw_teams) f.write(swiss_table)", "wiki_name, include_matches=True): tables = '' for idx, group in enumerate(stage['groups']):", "== 'winner': round_match_offset = -2 * round_max_win_match_count[match['roundNumber'] - 1] else:", "datetime.strptime(data['checkInStartTime'], '%Y-%m-%dT%H:%M:%S.%fZ').strftime( '%Y-%m-%d') + '\\n' try: sidebar += '|edate=' +", "+ '\\n' sidebar += '|liquipediatier=' + '\\n' sidebar += '|name='", "str(match['matchNumber'] * 2 - 1 + round_match_offset) if 'teamID' in", "+= '|sdate=' + datetime.strptime(data['checkInStartTime'], '%Y-%m-%dT%H:%M:%S.%fZ').strftime( '%Y-%m-%d') + '\\n' try: sidebar", "'-round ' \\ + stage['bracket']['seriesStyle'] + str(numGames) + '\\n' return", "enumerate(teams): if dynamic: if team_num == dynamic[dynamic_idx]['count']: teams_ordered += '{{TeamCard", "'\\n' sidebar += '|map2=' + '\\n' sidebar += '|map3=' +", "match_line = '' if not match['isComplete']: return match_line match_line =", "+ '\\n' sidebar += '|country=' + '\\n' sidebar += '|format='", "= 0 round_max_loss_match_count = [1] * (len(stage['bracket']['series']) + 1) round_max_loss_match_count[0]", "winners bracket round is called the 6th round # https://liquipedia.net/rainbowsix/Template:4DETeamBracket/doc", "rounds = dict() for match in matches: match_line = create_match_maps(match,", "'}}\\n' return bracket def create_match_maps(match, teams, 
bw_teams): match_line = ''", "ccs_winter_major_id = '60019f8ebcc5ed46373408a1' ccs_winter_major_wiki = 'Calrissian_Cup/Winter/Major' ccs_spring_minor_id = '603c00fbfe4fb811b3168f5b' ccs_spring_minor_wiki", "bw_teams.get_team_info(record['team']['persistentTeamID'], record['team']['name']) swiss_table += '|team' + str(rank + 1) +", "# https://liquipedia.net/rainbowsix/Template:4DETeamBracket/doc # https://liquipedia.net/rainbowsix/Template:8DETeamBracket/doc # https://liquipedia.net/rainbowsix/Template:16DETeamBracket/doc # if match['matchType'] ==", "' + stage['bracket']['style']) return # todo handle double elimination brackets", "+= '|' + player_tag + '=' + player_info['name'] \\ +", "'link=' + player_info['link'] teams_table += '\\n' # teams_table += '|c=", "'}}\\n' f.write(display) sidebar = create_sidebar(event_data.tournament_data, wiki_name) f.write(sidebar) f.write('==About==\\n') f.write('===Format===\\n') event_format", "data['teams'][team_id]['name'], place, data['teams'][team_id]['persistentTeamID'], team_info['name'] )) if sort_place: teams = sorted(teams,", "+= '|organizer=' + data['organization']['name'] + '\\n' sidebar += '|organizer-link=' +", "'|width=450px|show_p=false|date=|ties=true\\n' tables += '|tournament=' + wiki_name + '\\n' group_header =", "not match_line: continue try: rounds[str(match['roundNumber'])].append(match_line) except KeyError: rounds[str(match['roundNumber'])] = list()", "matches tournament|' + wiki_name + '}}\\n' return sidebar def create_event_format(data):", "round is # called the 4th round and in a", "'%Y-%m-%dT%H:%M:%S.%fZ').strftime( '%Y-%m-%d') + '\\n' try: sidebar += '|edate=' + datetime.strptime(data['lastCompletedMatchAt'],", "+= '|prizepool=' + data['prizes'] + '\\n' sidebar += '|type=Online' +", "team won the previous round # So there are rounds", "match['isBye']: match_line += '|walkover=1' match_line += '|games1=' if match['top']['winner']: match_line", "+ '\\n' return event_format def rank_teams(data, 
bw_teams, sort_place=True, break_ties=False): for", "'{{Infobox league' + '\\n' sidebar += '|liquipediatier=' + '\\n' sidebar", "1: swiss_match_table += '{{box|start|padding=2em}}\\n' else: swiss_match_table += '{{box|break|padding=2em}}\\n' swiss_match_table +=", "bracket_type \\ + str(match['matchNumber'] * 2 - 1 + round_match_offset)", "-1}, ] bw_teams = battlefy_wiki_linkings.BattlefyWikiTeamLinkings() bw_players = battlefy_wiki_linkings.BattlefyWikiPlayerLinkings() event_data =", "'W' else: match_line += 'FF' match_line += '|games2=' if 'winner'", "1): if i == 1: swiss_match_table += '{{box|start|padding=2em}}\\n' else: swiss_match_table", "= bw_teams.get_team_info(teams[standing['team']['_id']]['persistentTeamID'], teams[standing['team']['_id']]['name']) group_table += '|bg' + str(pos + 1)", "+ '\\n' sidebar += '|image=' + '\\n' sidebar += '|icon='", "+ '\\n' sidebar += '|organizer=' + data['organization']['name'] + '\\n' sidebar", "'603c00fbfe4fb811b3168f5b' ccs_spring_minor_wiki = 'Calrissian_Cup/Spring/Minor' ccs_spring_major_id = '6061b764f68d8733c8455fcf' ccs_spring_major_wiki = 'Calrissian_Cup/Spring/Major'", "else: bracket_type = 'D' bracket_indicator = '|R' + str(match['roundNumber']) +", "'|icon=' + '\\n' sidebar += '|series=' + '\\n' sidebar +=", "header += '|This=1\\n' header += '|content' + str(dynamic_idx+1) + '='", "+= '** ' + str(rounds) + '-round ' \\ +", "slot |place=5-8 |usdprize=0\\n' prize_pool += '|tbd |lastvs1= |lastscore1= |lastvsscore1=\\n' prize_pool", "\\ + stage['bracket']['seriesStyle'] + str(numGames) + '\\n' rounds = 1", "'\\n' sidebar += '|prizepool=' + data['prizes'] + '\\n' sidebar +=", "bracket_indicator + 'team=' + team_name + ' ' else: bracket", "'Calrissian_Cup/Spring/Minor' ccs_spring_major_id = '6061b764f68d8733c8455fcf' ccs_spring_major_wiki = 'Calrissian_Cup/Spring/Major' ccs_summer_minor_id = '60b41961d35b1411a7b31d64'", "- 1 / data['teams'][standing['team']['_id']]['place']) else: 
data['teams'][standing['team']['_id']]['place'] = standing['place'] else: data['teams'][standing['team']['_id']]['place']", "f.write(display) sidebar = create_sidebar(event_data.tournament_data, wiki_name) f.write(sidebar) f.write('==About==\\n') f.write('===Format===\\n') event_format =", "return prize_pool def main(): ccs_winter_minor_id = '5ff3354193edb53839d44d55' ccs_winter_minor_wiki = 'Calrissian_Cup/Winter/Minor'", "data['teams'][standing['team']['_id']]['place'] = standing['place'] else: data['teams'][standing['team']['_id']]['place'] = len(stage['standings']) + place teams", "+ wiki_name + '\\n' group_header = '' group_table = ''", "'Top 16', # 'count': 16}, # {'tab_name': 'Top 32', #", "1 / data['teams'][standing['team']['_id']]['place']) else: data['teams'][standing['team']['_id']]['place'] = standing['place'] else: data['teams'][standing['team']['_id']]['place'] =", "'{{tabs dynamic\\n' header += '|name' + str(dynamic_idx+1) + '=' +", "in match and match['isTie']: match_line += '|winner=0\\n' elif 'winner' in", "|lastvs1= |lastscore1= |lastvsscore1=\\n' prize_pool += '|tbd |lastvs2= |lastscore2= |lastvsscore2=\\n' prize_pool", "'score' in match['top']: bracket += bracket_indicator + 'score=' + str(match['top']['score'])", "data['platform'] + '\\n' sidebar += '|country=' + '\\n' sidebar +=", "world_cup_wiki participant_tabs = [ # {'tab_name': 'Top 16', # 'count':", "footer += '}}\\n' return header + teams_ordered + footer def", "player in enumerate(data['teams'][team[0]]['players']): player_tag = 'p' + str(idx + 1)", "if not 'teamID' in match['top']: continue if match['top']['teamID'] in team_previous_round:", "for stage in data['stages']: event_format += '* ' + stage['name']", "+= '===={{HiddenSort|Group ' + group['name'] + '}}====\\n' tables += '{{GroupTableLeague|title=Group", "'|edate=\\n' sidebar += '|web=' + '\\n' sidebar += '|bracket=https://battlefy.com/' +", "want to key off match['inConsolationBracket'] # May also just need", 
"battlefy_wiki_linkings from datetime import datetime from operator import itemgetter from", "sidebar += '}}\\n' sidebar += '{{Upcoming matches tournament|' + wiki_name", "not in data['teams'][standing['team']['_id']]: data['teams'][standing['team']['_id']]['place'] = len(stage['standings']) + place else: if", "+= '|edate=\\n' sidebar += '|web=' + '\\n' sidebar += '|bracket=https://battlefy.com/'", "'5ff4b388fd124e11b18e185d' gsl_s1_wiki = 'Global_Squadrons_League/2021/Season_1' tournament_id = world_cup_id wiki_name = world_cup_wiki", "1 teams_ordered += '|name' + str(dynamic_idx + 1) + '='", "is defined by match['next'] # Not exactly sure how to", "+= '{{Prize pool end}}\\n' return prize_pool def main(): ccs_winter_minor_id =", "'\\n' sidebar += '|tickername=' + data['name'] + '\\n' sidebar +=", "for next time if match['matchType'] == 'winner': round_max_win_match_count[match['roundNumber']] = max(match['matchNumber'],", "= 'Calrissian_Cup/Spring/Minor' ccs_spring_major_id = '6061b764f68d8733c8455fcf' ccs_spring_major_wiki = 'Calrissian_Cup/Spring/Major' ccs_summer_minor_id =", "Stage===\\n') f.write('====Swiss Standings====\\n') swiss_table = create_swiss_table(stage, bw_teams) f.write(swiss_table) f.write('====Swiss Match", "else: tables += '{{box|break|padding=2em}}\\n' tables += '===={{HiddenSort|Group ' + group['name']", "+= match_line tables += match_table tables += '{{MatchListEnd}}\\n' tables +=", "# teams_table += '|c= |cflag=\\n' # teams_table += '|qualifier=\\n' teams_table", "'* ' + stage['name'] + '\\n' if stage['bracket']['type'] == \"swiss\":", "for place, standing in enumerate(stage['standings']): if 'place' in standing: if", "' + group['name'] + '|width=450px|show_p=false|date=|ties=true\\n' tables += '|tournament=' + wiki_name", "bracket_indicator + 'win=1 ' team_previous_round[match['top']['teamID']] = True else: team_previous_round[match['top']['teamID']] =", "match_line += 'FF' match_line += '|games2=' if 'winner' in match['bottom']", "+= 
'|tbd |lastvs3= |lastscore3= |lastvsscore3=\\n' prize_pool += '|tbd |lastvs4= |lastscore4=", "+ team_name + ' ' else: bracket += bracket_indicator +", "+ stage['name'] + '\\n' if stage['bracket']['type'] == \"swiss\": event_format +=", "are rounds where D vs L happen such as R2D1", "matches = stage['matches'] for match in matches: # TODO: this", "+= '{{box|end}}\\n' return tables def create_prize_pool(prize): prize_pool = prize +", "'isTie' in match and match['isTie']: match_line += '|winner=0\\n' elif 'winner'", "gsl_s1_wiki = 'Global_Squadrons_League/2021/Season_1' tournament_id = world_cup_id wiki_name = world_cup_wiki participant_tabs", "'61314505635fe17a14eafe03' ccs_fall_major_wiki = 'Calrissian_Cup/Fall/Major' ccs_championship_id = '6150dd2b0dd060282bebb0eb' ccs_championship_wiki = 'Calrissian_Cup/Championship'", "swiss_table = '{{SwissTableLeague|rounds=' + str(stage['bracket']['roundsCount']) + '|diff=false\\n' for i in", "= '6150dd2b0dd060282bebb0eb' ccs_championship_wiki = 'Calrissian_Cup/Championship' world_cup_id = '611dac6ecb6f6260d5f30b6e' world_cup_wiki =", "tables += \"}}\\n\" if include_matches: match_table = '{{MatchListStart|title=Group ' +", "stage['bracket']['type'] == 'roundrobin': f.write('===' + stage['name'] + '===\\n') round_robin_tables =", "= '{{' + str(stage['bracket']['teamsCount']) + 'DETeamBracket\\n' else: print('Unknown stage style:", "'}}\\n' return swiss_table def create_swiss_matches(matches, teams, bw_teams): swiss_match_table = ''", "create_round_robin_tables(stage, teams, bw_teams, wiki_name, include_matches=True): tables = '' for idx,", "teams_table += '|' + player_tag + '=' + player_info['name'] \\", "'\\n' sidebar += '|localcurrency=' + '\\n' sidebar += '|prizepool=' +", "bracket_type \\ + str(match['matchNumber'] * 2 + round_match_offset) if 'teamID'", "= create_swiss_matches(stage['matches'], event_data.tournament_data['teams'], bw_teams) f.write(swiss_matches) elif stage['bracket']['type'] == 'elimination': 
f.write('===Playoffs===\\n')", "'|tiebreaker1=series\\n' tables += group_header tables += group_table tables += \"}}\\n\"", "won the previous round # So there are rounds where", "'roundrobin': f.write('===' + stage['name'] + '===\\n') round_robin_tables = create_round_robin_tables(stage, event_data.tournament_data['teams'],", "0)) return teams def create_participants(data, bw_players, bw_teams, dynamic=[], sort_place=True): header", "-2 * round_max_win_match_count[match['roundNumber'] - 1] else: round_match_offset = -2 *", "+= 1 if rounds: event_format += '** ' + str(rounds)", "'\\n' elif stage['bracket']['type'] == \"elimination\": numGames = 0 rounds =", "+= '|icon=' + '\\n' sidebar += '|series=' + '\\n' sidebar", "'====={{HiddenSort|Round ' + str(i) + '}}=====\\n' swiss_match_table += '{{MatchListStart|width=450px|title=Round '", "event_format def rank_teams(data, bw_teams, sort_place=True, break_ties=False): for stage in data['stages']:", "'\\n' for idx, player in enumerate(data['teams'][team[0]]['players']): player_tag = 'p' +", "' \\ + str(i) + '|hide=false}}\\n' for match in rounds[str(i)]:", "match['next'] and build up the D and W that way", "round_max_loss_match_count[match['roundNumber']]) bracket_indicator = '|R' + str(match['roundNumber']) + bracket_type \\ +", "ccs_fall_minor_id = '60fa26043ba15d73719669bd' ccs_fall_minor_wiki = 'Calrissian_Cup/Fall/Minor' ccs_fall_major_id = '61314505635fe17a14eafe03' ccs_fall_major_wiki", "= max(match['matchNumber'], # round_max_loss_match_count[match['roundNumber']]) if not 'teamID' in match['top']: continue", "# Increment for next time if match['matchType'] == 'winner': round_max_win_match_count[match['roundNumber']]", "'|games2=' if 'winner' in match['bottom'] and match['bottom']['winner']: match_line += 'W'", "+ teams_ordered + footer def create_swiss_table(stage, bw_teams): dropped_style = 'drop'", "= create_match_maps(match, teams, bw_teams) if not match_line: continue try: 
rounds[str(match['roundNumber'])].append(match_line)", "+ str(stage['bracket']['teamsCount']) + 'DETeamBracket\\n' else: print('Unknown stage style: ' +", "'%Y-%m-%dT%H:%M:%S.%fZ').strftime( '%Y-%m-%d') + '\\n' except KeyError: sidebar += '|edate=\\n' sidebar", "'single': bracket = '{{' + str(stage['bracket']['teamsCount']) + 'SETeamBracket\\n' elif stage['bracket']['style']", "sidebar += '|sponsor=' + '\\n' sidebar += '|localcurrency=' + '\\n'", "'}}\\n' sidebar += '{{Upcoming matches tournament|' + wiki_name + '}}\\n'", "+= '|pbg' + str(i + 1) + '=down' if (i", "str(match['bottom']['score']) + '\\n' match_line += '|details={{BracketMatchSummary\\n' match_line += '|date=|finished=true\\n' match_line", "\"elimination\": numGames = 0 rounds = 0 for match in", "R2W6 # Might want to key off match['inConsolationBracket'] # May", "'{{TeamCard columns start|cols=5|height=250}}\\n' else: if team_num == 0: teams_ordered +=", "match_line += '|winner=0\\n' elif 'winner' in match['top'] and match['top']['winner']: match_line", "1) if player['_id'] in calcup_roster_tracking.eventid_to_missing_userid: player['userID'] = calcup_roster_tracking.eventid_to_missing_userid[player['_id']] player_info =", "+ '/' \\ + data['_id'] + '/bracket-list' + '\\n' sidebar", "down from the previous round # In DE brackest W", "idx, player in enumerate(data['teams'][team[0]]['players']): player_tag = 'p' + str(idx +", "Match Results====\\n') swiss_matches = create_swiss_matches(stage['matches'], event_data.tournament_data['teams'], bw_teams) f.write(swiss_matches) elif stage['bracket']['type']", "Notable Participants', # 'count': -1}, ] bw_teams = battlefy_wiki_linkings.BattlefyWikiTeamLinkings() bw_players", "dynamic: if team_num == dynamic[dynamic_idx]['count']: teams_ordered += '{{TeamCard columns end}}\\n'", "'\\n' for rank, record in enumerate(stage['standings']): if record['disqualified']: swiss_table +=", "'' # Use prior rounds as a tiebreaker for when", 
"event_data.tournament_data['teams'], bw_teams) f.write(swiss_matches) elif stage['bracket']['type'] == 'elimination': f.write('===Playoffs===\\n') bracket =", "+ '/bracket-list' + '\\n' sidebar += '|rulebook=' + '\\n' sidebar", "+ str(dynamic_idx+1) + '=' + '\\n' teams_ordered += '{{TeamCard columns", "teams_ordered += '{{TeamCard columns start|cols=5|height=250}}\\n' else: if team_num == 0:", "if 'isTie' in match and match['isTie']: match_line += '|winner=0\\n' elif", "== 'winner': round_max_win_match_count[match['roundNumber']] = max(match['matchNumber'], round_max_win_match_count[match['roundNumber']]) elif match['matchType'] == 'loser':", "swiss_match_table += '{{box|start|padding=2em}}\\n' else: swiss_match_table += '{{box|break|padding=2em}}\\n' swiss_match_table += '====={{HiddenSort|Round", "i in range(1, len(rounds) + 1): if i == 1:", "next time if match['matchType'] == 'winner': round_max_win_match_count[match['roundNumber']] = max(match['matchNumber'], round_max_win_match_count[match['roundNumber']])", "+= '{{TeamCard columns start|cols=5|height=250}}\\n' teams_table = '{{TeamCard\\n' team_info = bw_teams.get_team_info(team[3],", "slot |place=3-4 |usdprize=0\\n' prize_pool += '|tbd |lastvs1= |lastscore1= |lastvsscore1=\\n' prize_pool", "+ '|width=450px|show_p=false|date=|ties=true\\n' tables += '|tournament=' + wiki_name + '\\n' group_header", "'\\n' sidebar += '|bracket=https://battlefy.com/' + data['organization']['slug'] + '/' + data['slug']", "+ '\\n' match_line += '|details={{BracketMatchSummary\\n' match_line += '|date=|finished=true\\n' match_line +=", "round_robin_tables = create_round_robin_tables(stage, event_data.tournament_data['teams'], bw_teams, wiki_name, include_matches=True) f.write(round_robin_tables) else: print('Unsupported", "+= '{{GroupTableLeague|title=Group ' + group['name'] + '|width=450px|show_p=false|date=|ties=true\\n' tables += '|tournament='", "twin_suns_wiki = 'Twin_Suns_Tournament' gsl_s1_id = '5ff4b388fd124e11b18e185d' 
gsl_s1_wiki = 'Global_Squadrons_League/2021/Season_1' tournament_id", "= 1 numGames = match['numGames'] else: rounds += 1 if", "if match['bottom']['teamID'] in team_previous_round: if team_previous_round[match['bottom']['teamID']]: bracket_type = 'W' else:", "'|team_number=' + str(len(data['teams'])) + '\\n' sidebar += '|previous=' + '\\n'", "data['teams'][team_id]['place'] else: place = 0 team_info = bw_teams.get_team_info(data['teams'][team_id]['persistentTeamID'], data['teams'][team_id]['name']) teams.append((team_id,", "team in enumerate(teams): if dynamic: if team_num == dynamic[dynamic_idx]['count']: teams_ordered", "bracket_indicator + 'score=' + str(match['bottom']['score']) + ' ' if 'winner'", "'{{DISPLAYTITLE:' + event_data.tournament_data['name'] + '}}\\n' f.write(display) sidebar = create_sidebar(event_data.tournament_data, wiki_name)", "match_line += 'FF' else: match_line += '|games1=' + str(match['top']['score']) match_line", "= '61314505635fe17a14eafe03' ccs_fall_major_wiki = 'Calrissian_Cup/Fall/Major' ccs_championship_id = '6150dd2b0dd060282bebb0eb' ccs_championship_wiki =", "f.write('===Swiss Stage===\\n') f.write('====Swiss Standings====\\n') swiss_table = create_swiss_table(stage, bw_teams) f.write(swiss_table) f.write('====Swiss", "match_table tables += '{{MatchListEnd}}\\n' tables += '{{box|end}}\\n' return tables def", "tables += match_table tables += '{{MatchListEnd}}\\n' tables += '{{box|end}}\\n' return", "in enumerate(stage['standings']): if record['disqualified']: swiss_table += '|bg' + str(rank +", "called the 4th round and in a 16 team DE", "track of match['next'] and build up the D and W", "bw_teams) f.write(swiss_matches) elif stage['bracket']['type'] == 'elimination': f.write('===Playoffs===\\n') bracket = create_elim_bracket(stage,", "f.write(prize_pool) f.write('==Participants==\\n') teams = create_participants(event_data.tournament_data, bw_players, bw_teams, dynamic=participant_tabs, sort_place=True) f.write(teams)", "sidebar += 
'|rulebook=' + '\\n' sidebar += '|twitter=' + '\\n'", "+ '\\n' sidebar += '|map1=' + '\\n' sidebar += '|map2='", "tables = '' for idx, group in enumerate(stage['groups']): if idx", "'\\n' sidebar += '|organizer=' + data['organization']['name'] + '\\n' sidebar +=", "= data['teams'][team_id]['place'] else: place = 0 team_info = bw_teams.get_team_info(data['teams'][team_id]['persistentTeamID'], data['teams'][team_id]['name'])", "+ 'team=' + team_name + ' ' else: bracket +=", "str(dynamic_idx+1) + '=' + dynamic[dynamic_idx]['tab_name'] + '\\n' header += '|This=1\\n'", "'{{Prize pool end}}\\n' return prize_pool def main(): ccs_winter_minor_id = '5ff3354193edb53839d44d55'", "+ str(dynamic_idx+1) + '=' + dynamic[dynamic_idx]['tab_name'] + '\\n' header +=", "+ '=' + dynamic[dynamic_idx]['tab_name'] + '\\n' header += '|This=1\\n' header", "a 8 team DE the third winners bracket round is", "sidebar += '|shortname=' + data['name'] + '\\n' sidebar += '|tickername='", "'{{TeamCard columns start|cols=5|height=250}}\\n' for team_num, team in enumerate(teams): if dynamic:", "then future bracket type is defined by match['next'] # Not", "enumerate(stage['standings']): if record['disqualified']: swiss_table += '|bg' + str(rank + 1)", "str(dynamic_idx + 1) + '=' + dynamic[dynamic_idx]['tab_name'] + '\\n' teams_ordered", "create_sidebar(event_data.tournament_data, wiki_name) f.write(sidebar) f.write('==About==\\n') f.write('===Format===\\n') event_format = create_event_format(event_data.tournament_data) f.write(event_format) f.write('===Broadcast", "0 rounds = 0 for match in stage['bracket']['series']: if match['numGames']", "brackets # set up team number trackers team_previous_round = dict()", "previous round # So there are rounds where D vs", "+= '|tournament=' + wiki_name + '\\n' group_header = '' group_table", "+ '}}=====\\n' swiss_match_table += '{{MatchListStart|width=450px|title=Round ' + str(i) + '", "time if match['matchType'] == 'winner': 
round_max_win_match_count[match['roundNumber']] = max(match['matchNumber'], round_max_win_match_count[match['roundNumber']]) elif", "= [1] * (len(stage['bracket']['series']) + 1) round_max_loss_match_count[0] = 0 #", "-2 * round_max_loss_match_count[match['roundNumber'] - 1] \\ + (round_max_win_match_count[match['roundNumber']] - round_max_win_match_count[match['roundNumber']", "= 'W' else: bracket_type = 'D' else: bracket_type = 'D'", "'60019f8ebcc5ed46373408a1' ccs_winter_major_wiki = 'Calrissian_Cup/Winter/Major' ccs_spring_minor_id = '603c00fbfe4fb811b3168f5b' ccs_spring_minor_wiki = 'Calrissian_Cup/Spring/Minor'", "+ str(match['roundNumber']) + bracket_type \\ + str(match['matchNumber'] * 2 +", "{'tab_name': 'Top 32', # 'count': 32}, # {'tab_name': 'Other Notable", "= '' for idx, group in enumerate(stage['groups']): if idx ==", "from the previous round # In DE brackest W means", "= bw_teams.get_team_info(teams[match['bottom']['teamID']]['persistentTeamID'], teams[match['bottom']['teamID']]['name']) elif match['isBye']: team_bot = bw_teams.get_team_info('0', 'BYE') match_line", "address round_team_number, in a 8 team DE the third winners", "teams[match['bottom']['teamID']]['name'])['teamteamplate'] bracket += bracket_indicator + 'team=' + team_name + '", "str(i) + '}}=====\\n' swiss_match_table += '{{MatchListStart|width=450px|title=Round ' + str(i) +", "str(i) + '|hide=false}}\\n' for match in rounds[str(i)]: swiss_match_table += match", "team_previous_round[match['top']['teamID']] = True else: team_previous_round[match['top']['teamID']] = False bracket += '\\n'", "+ '\\n' sidebar += '}}\\n' sidebar += '{{Upcoming matches tournament|'", "= '60019f8ebcc5ed46373408a1' ccs_winter_major_wiki = 'Calrissian_Cup/Winter/Major' ccs_spring_minor_id = '603c00fbfe4fb811b3168f5b' ccs_spring_minor_wiki =", "if idx == 1: tables += '{{box|start|padding=2em}}\\n' else: tables +=", "+= '{{TeamCard columns start|cols=5|height=250}}\\n' else: if team_num == 0: teams_ordered", 
"else: print('Unknown stage style: ' + stage['bracket']['style']) return # todo", "if (i + 1) % 8 == 0: swiss_table +=", "bw_players.get_player_info(player['userID'], player['inGameName']) teams_table += '|' + player_tag + '=' +", "header = '{{TeamCardToggleButton}}\\n' teams_ordered = '' # Use prior rounds", "bracket def create_match_maps(match, teams, bw_teams): match_line = '' if not", "and match['bottom']['winner']: match_line += '|winner=2\\n' else: match_line += '|winner=0\\n' if", "a 16 team DE the 4th winners bracket round is", "as R2D1 vs R2W5 and R2D2 vs R2W6 # Might", "+= '|type=Online' + '\\n' sidebar += '|platform=' + data['platform'] +", "else: match_line += '|games1=' + str(match['top']['score']) match_line += '|games2=' +", "'\\n' try: sidebar += '|edate=' + datetime.strptime(data['lastCompletedMatchAt'], '%Y-%m-%dT%H:%M:%S.%fZ').strftime( '%Y-%m-%d') +", "bracket round is # called the 4th round and in", "if 'winner' in match['bottom'] and match['bottom']['winner']: bracket += bracket_indicator +", "wiki_name = world_cup_wiki participant_tabs = [ # {'tab_name': 'Top 16',", "if 'score' in match['top']: bracket += bracket_indicator + 'score=' +", "pool start}}\\n' prize_pool += '{{prize pool slot |place=1 |usdprize=0 |tbd", "'** ' + str(rounds) + '-round ' \\ + stage['bracket']['seriesStyle']", "in match['bottom'] and match['bottom']['winner']: bracket += bracket_indicator + 'win=2 '", "stage['bracket']['type'] == \"swiss\": event_format += '** ' + str(stage['bracket']['roundsCount']) +", "in enumerate(group['standingIDs']): group_header += '|pbg' + str(pos + 1) +", "sidebar += '|instagram=' + '\\n' sidebar += '|discord=' + '\\n'", "# 'count': 32}, # {'tab_name': 'Other Notable Participants', # 'count':", "display = '{{DISPLAYTITLE:' + event_data.tournament_data['name'] + '}}\\n' f.write(display) sidebar =", "for stage in data['stages']: for place, standing in enumerate(stage['standings']): if", "'|winner=2\\n' else: match_line += '|winner=0\\n' if 
match['isBye']: match_line += '|walkover=1'", "0 team_info = bw_teams.get_team_info(data['teams'][team_id]['persistentTeamID'], data['teams'][team_id]['name']) teams.append((team_id, data['teams'][team_id]['name'], place, data['teams'][team_id]['persistentTeamID'], team_info['name']", "+ data['organization']['name'] + '\\n' sidebar += '|organizer-link=' + '\\n' sidebar", "group_header += '|pbg' + str(pos + 1) + '=down' for", "bracket += bracket_indicator + 'literal=BYE ' if 'score' in match['bottom']:", "datetime.strptime(data['lastCompletedMatchAt'], '%Y-%m-%dT%H:%M:%S.%fZ').strftime( '%Y-%m-%d') + '\\n' except KeyError: sidebar += '|edate=\\n'", "bracket_type = 'D' if match['matchType'] == 'winner': round_match_offset = -2", "team_info['name'] + '\\n' teams_table += '|image=' + team_info['image'] + '\\n'", "round_match_offset) if 'teamID' in match['top']: team_name = bw_teams.get_team_info(teams[match['top']['teamID']]['persistentTeamID'], teams[match['top']['teamID']]['name'])['teamteamplate'] bracket", "player_tag + '=' + player_info['name'] \\ + ' |' +", "1] \\ + (round_max_win_match_count[match['roundNumber']] - round_max_win_match_count[match['roundNumber'] - 1]) * 2", "+= '|qualifier=\\n' teams_table += '}}\\n' teams_ordered += teams_table footer =", "teams[match['bottom']['teamID']]['name']) elif match['isBye']: team_bot = bw_teams.get_team_info('0', 'BYE') match_line += '|team1='", "+= '|date=|finished=true\\n' match_line += '|twitch= |youtube=\\n' match_line += '|vod=\\n' match_line", "'|tbd |lastvs2= |lastscore2= |lastvsscore2=\\n' prize_pool += '}}\\n' prize_pool += '{{prize", "'\\n' sidebar += '|sdate=' + datetime.strptime(data['checkInStartTime'], '%Y-%m-%dT%H:%M:%S.%fZ').strftime( '%Y-%m-%d') + '\\n'", "== 'single': bracket = '{{' + str(stage['bracket']['teamsCount']) + 'SETeamBracket\\n' elif", "bracket_indicator + 'literal=BYE ' if 'score' in match['bottom']: bracket +=", "'/' + data['slug'] + '/' \\ + data['_id'] + '/bracket-list'", "return 
header + teams_ordered + footer def create_swiss_table(stage, bw_teams): dropped_style", "+ '\\n' sidebar += '|format=' + '\\n' sidebar += '|patch='", "in rounds[str(i)]: swiss_match_table += match swiss_match_table += '{{MatchListEnd}}\\n' swiss_match_table +=", "+= '|twitch= |youtube=\\n' match_line += '|vod=\\n' match_line += '}}\\n' match_line", "'flag=' + player_info['flag'] if player_info['link']: teams_table += ' |' +", "teams = list() for team_id in data['teams']: if 'place' in", "'' for idx, group in enumerate(stage['groups']): if idx == 1:", "round_max_loss_match_count = [1] * (len(stage['bracket']['series']) + 1) round_max_loss_match_count[0] = 0", "\\ + ' |' + player_tag + 'flag=' + player_info['flag']", "+= '|previous=' + '\\n' sidebar += '|next=' + '\\n' sidebar", "'\\n' return event_format def rank_teams(data, bw_teams, sort_place=True, break_ties=False): for stage", "build up the D and W that way instead #", "if 'teamID' in match['bottom']: if match['bottom']['teamID'] in team_previous_round: if team_previous_round[match['bottom']['teamID']]:", "place else: if break_ties: data['teams'][standing['team']['_id']]['place'] = \\ standing['place'] + (1", "\"}}\\n\" if include_matches: match_table = '{{MatchListStart|title=Group ' + group['name'] +", "# if standing['disqualified']: # has_drop = True team_info = bw_teams.get_team_info(teams[standing['team']['_id']]['persistentTeamID'],", "+ '\\n' sidebar += '|type=Online' + '\\n' sidebar += '|platform='", "(i + 1) % 8 == 0: swiss_table += '\\n'", "else: print('Unsupported bracket type of: ' + stage['bracket']['type']) if __name__", "battlefy_data import battlefy_wiki_linkings from datetime import datetime from operator import", "if 'teamID' in match['bottom']: team_bot = bw_teams.get_team_info(teams[match['bottom']['teamID']]['persistentTeamID'], teams[match['bottom']['teamID']]['name']) elif match['isBye']:", "'team=' + team_name + ' ' else: bracket += bracket_indicator", "* 
round_max_win_match_count[match['roundNumber'] - 1] else: round_match_offset = -2 * round_max_loss_match_count[match['roundNumber']", "match['bottom']['winner']: bracket += bracket_indicator + 'win=2 ' team_previous_round[match['bottom']['teamID']] = True", "+ group['name'] + ' Matches|width=450px|hide=true}}\\n' for match in group['matches']: match_line", "prize_pool += '{{prize pool slot |place=5-8 |usdprize=0\\n' prize_pool += '|tbd", "'winner' in match['bottom'] and match['bottom']['winner']: bracket += bracket_indicator + 'win=2", "str(rank + 1) + '=' + team_info['teamteamplate'] swiss_table += '|temp_tie'", "sidebar = '{{Infobox league' + '\\n' sidebar += '|liquipediatier=' +", "Standings====\\n') swiss_table = create_swiss_table(stage, bw_teams) f.write(swiss_table) f.write('====Swiss Match Results====\\n') swiss_matches", "+ '\\n' header += '{{TeamCard columns start|cols=5|height=250}}\\n' for team_num, team", "'{{TeamCard columns end}}\\n' if dynamic: footer += '}}\\n' return header", "'** ' + str(stage['bracket']['roundsCount']) + '-round ' + stage['bracket']['type'] +", "'|bg' + str(rank + 1) + '=' + dropped_style +", "'place' in data['teams'][team_id]: place = data['teams'][team_id]['place'] else: place = 0", "str(match['roundNumber']) + bracket_type \\ + str(match['matchNumber'] * 2 + round_match_offset)", "+= '|winner=0\\n' if match['isBye']: match_line += '|walkover=1' match_line += '|games1='", "= [ # {'tab_name': 'Top 16', # 'count': 16}, #", "'{{MatchListStart|width=450px|title=Round ' + str(i) + ' Matches|matchsection=Round ' \\ +", "= False bracket += '\\n' bracket += '}}\\n' return bracket", "= '60dd319012cb9c33c2f63868' ccs_summer_major_wiki = 'Calrissian_Cup/Summer/Major' ccs_fall_minor_id = '60fa26043ba15d73719669bd' ccs_fall_minor_wiki =", "numGames: if rounds: event_format += '** ' + str(rounds) +", "L happen such as R2D1 vs R2W5 and R2D2 vs", "'60b41961d35b1411a7b31d64' ccs_summer_minor_wiki = 'Calrissian_Cup/Summer/Minor' 
ccs_summer_major_id = '60dd319012cb9c33c2f63868' ccs_summer_major_wiki = 'Calrissian_Cup/Summer/Major'", "= '5ff4b388fd124e11b18e185d' gsl_s1_wiki = 'Global_Squadrons_League/2021/Season_1' tournament_id = world_cup_id wiki_name =", "newline='\\n', encoding='utf-8') as f: display = '{{DISPLAYTITLE:' + event_data.tournament_data['name'] +", "if not match['isComplete']: return match_line match_line = '{{MatchMaps\\n' match_line +=", "1) round_max_loss_match_count[0] = 0 # matches = sorted(stage['matches'], key=itemgetter('matchNumber')) matches", "columns start|cols=5|height=250}}\\n' teams_table = '{{TeamCard\\n' team_info = bw_teams.get_team_info(team[3], team[1]) teams_table", "standing in enumerate(stage['standings']): if 'place' in standing: if 'place' not", "Talent===\\n') f.write('===Prize Pool===\\n') prize_pool = create_prize_pool(event_data.tournament_data['prizes']) f.write(prize_pool) f.write('==Participants==\\n') teams =", "teams_table += '|image=' + team_info['image'] + '\\n' for idx, player", "group_header = '' group_table = '' for pos, standing_id in", "'{{box|start|padding=2em}}\\n' else: swiss_match_table += '{{box|break|padding=2em}}\\n' swiss_match_table += '====={{HiddenSort|Round ' +", "elif stage['bracket']['type'] == \"elimination\": numGames = 0 rounds = 0", "teams = sorted(teams, key=itemgetter(2, 4, 0)) else: teams = sorted(teams,", "'|winner=0\\n' if match['isBye']: match_line += '|walkover=1' match_line += '|games1=' if", "bracket type is defined by match['next'] # Not exactly sure", "'|walkover=1' match_line += '|games1=' if match['top']['winner']: match_line += 'W' else:", "if match['numGames'] != numGames: if rounds: event_format += '** '", "'\\n' # teams_table += '|c= |cflag=\\n' # teams_table += '|qualifier=\\n'", "double elimination brackets # set up team number trackers team_previous_round", "+ str(match['top']['score']) + ' ' if 'winner' in match['top'] and", "1) round_max_win_match_count[0] = 0 round_max_loss_match_count = [1] * 
(len(stage['bracket']['series']) +", "if 'teamID' in match['top']: team_name = bw_teams.get_team_info(teams[match['top']['teamID']]['persistentTeamID'], teams[match['top']['teamID']]['name'])['teamteamplate'] bracket +=", "'611dac6ecb6f6260d5f30b6e' world_cup_wiki = 'World_Cup' twin_suns_tourny_id = '60806876938bed74f6edea9e' twin_suns_wiki = 'Twin_Suns_Tournament'", "called the 6th round # https://liquipedia.net/rainbowsix/Template:4DETeamBracket/doc # https://liquipedia.net/rainbowsix/Template:8DETeamBracket/doc # https://liquipedia.net/rainbowsix/Template:16DETeamBracket/doc", "match and match['isTie']: match_line += '|winner=0\\n' elif 'winner' in match['top']", "event_format = '' for stage in data['stages']: event_format += '*", "ccs_fall_major_wiki = 'Calrissian_Cup/Fall/Major' ccs_championship_id = '6150dd2b0dd060282bebb0eb' ccs_championship_wiki = 'Calrissian_Cup/Championship' world_cup_id", "match_line += '|games1=' if match['top']['winner']: match_line += 'W' else: match_line", "str(idx + 1) if player['_id'] in calcup_roster_tracking.eventid_to_missing_userid: player['userID'] = calcup_roster_tracking.eventid_to_missing_userid[player['_id']]", "prize_pool += '|tbd |lastvs4= |lastscore4= |lastvsscore4=\\n' prize_pool += '}}\\n' prize_pool", "ccs_summer_major_id = '60dd319012cb9c33c2f63868' ccs_summer_major_wiki = 'Calrissian_Cup/Summer/Major' ccs_fall_minor_id = '60fa26043ba15d73719669bd' ccs_fall_minor_wiki", "= battlefy_wiki_linkings.BattlefyWikiTeamLinkings() bw_players = battlefy_wiki_linkings.BattlefyWikiPlayerLinkings() event_data = battlefy_data.BattlefyData(tournament_id) event_data.load_tournament_data() #", "= '{{Infobox league' + '\\n' sidebar += '|liquipediatier=' + '\\n'", "group['name'] + '|width=450px|show_p=false|date=|ties=true\\n' tables += '|tournament=' + wiki_name + '\\n'", "bw_teams): if stage['bracket']['style'] == 'single': bracket = '{{' + str(stage['bracket']['teamsCount'])", "round # 
https://liquipedia.net/rainbowsix/Template:4DETeamBracket/doc # https://liquipedia.net/rainbowsix/Template:8DETeamBracket/doc # https://liquipedia.net/rainbowsix/Template:16DETeamBracket/doc # if match['matchType']", "dropped_style + '' else: swiss_table += '|bg' + str(rank +", "if stage['bracket']['type'] == \"swiss\": event_format += '** ' + str(stage['bracket']['roundsCount'])", "elif 'winner' in match['bottom'] and match['bottom']['winner']: match_line += '|winner=2\\n' else:", "f.write('===' + stage['name'] + '===\\n') round_robin_tables = create_round_robin_tables(stage, event_data.tournament_data['teams'], bw_teams,", "+= group_header tables += group_table tables += \"}}\\n\" if include_matches:", "D and W that way instead # Default first round", "match['numGames'] else: rounds += 1 if rounds: event_format += '**", "list() rounds[str(match['roundNumber'])].append(match_line) for i in range(1, len(rounds) + 1): if", "+ '=' + team_info['teamteamplate'] swiss_table += '|temp_tie' + str(rank+1) +", "= '{{' + str(stage['bracket']['teamsCount']) + 'SETeamBracket\\n' elif stage['bracket']['style'] == 'double':", "if match['matchType'] == 'winner': # round_max_win_match_count[match['roundNumber']] = max(match['matchNumber'], # round_max_win_match_count[match['roundNumber']])", "= 'Calrissian_Cup/Winter/Major' ccs_spring_minor_id = '603c00fbfe4fb811b3168f5b' ccs_spring_minor_wiki = 'Calrissian_Cup/Spring/Minor' ccs_spring_major_id =", "sidebar += '|sdate=' + datetime.strptime(data['checkInStartTime'], '%Y-%m-%dT%H:%M:%S.%fZ').strftime( '%Y-%m-%d') + '\\n' try:", "in data['teams'][team_id]: place = data['teams'][team_id]['place'] else: place = 0 team_info", "rank, record in enumerate(stage['standings']): if record['disqualified']: swiss_table += '|bg' +", "+= '|twitch=' + '\\n' sidebar += '|instagram=' + '\\n' sidebar", "group_table = '' for pos, standing_id in enumerate(group['standingIDs']): group_header +=", "prize_pool += '|tbd |lastvs3= |lastscore3= 
|lastvsscore3=\\n' prize_pool += '|tbd |lastvs4=", "+= '** ' + str(stage['bracket']['roundsCount']) + '-round ' + stage['bracket']['type']", "0 # matches = sorted(stage['matches'], key=itemgetter('matchNumber')) matches = stage['matches'] for", "elif match['matchType'] == 'loser': round_max_loss_match_count[match['roundNumber']] = max(match['matchNumber'], round_max_loss_match_count[match['roundNumber']]) bracket_indicator =", "' + str(stage['bracket']['roundsCount']) + '-round ' + stage['bracket']['type'] + '\\n'", "'=' + '\\n' header += '{{TeamCard columns start|cols=5|height=250}}\\n' for team_num,", "to key off match['inConsolationBracket'] # May also just need to", "'{{GroupTableLeague|title=Group ' + group['name'] + '|width=450px|show_p=false|date=|ties=true\\n' tables += '|tournament=' +", "bw_teams.get_team_info(teams[match['top']['teamID']]['persistentTeamID'], teams[match['top']['teamID']]['name']) elif match['isBye']: team_top = bw_teams.get_team_info('0', 'BYE') if 'teamID'", "pool slot |place=2 |usdprize=0 |tbd |lastvs1= |lastscore1= |lastvsscore1=}}\\n' prize_pool +=", "' if 'winner' in match['top'] and match['top']['winner']: bracket += bracket_indicator", "bracket += bracket_indicator + 'win=2 ' team_previous_round[match['bottom']['teamID']] = True elif", "round_max_win_match_count[match['roundNumber'] - 1]) * 2 # Increment for next time", "'|team' + str(rank + 1) + '=' + team_info['teamteamplate'] swiss_table", "place, data['teams'][team_id]['persistentTeamID'], team_info['name'] )) if sort_place: teams = sorted(teams, key=itemgetter(2,", "teams_table += ' |' + player_tag + 'link=' + player_info['link']", "dynamic=[], sort_place=True): header = '{{TeamCardToggleButton}}\\n' teams_ordered = '' # Use", "if match['top']['winner']: match_line += 'W' else: match_line += 'FF' match_line", "ccs_spring_minor_wiki = 'Calrissian_Cup/Spring/Minor' ccs_spring_major_id = '6061b764f68d8733c8455fcf' ccs_spring_major_wiki = 'Calrissian_Cup/Spring/Major' 
ccs_summer_minor_id", "means the team dropped down from the previous round #", "filename = Path.joinpath(event_path, event_data.tournament_data['name'] + '.wiki') with open(filename, 'w+', newline='\\n',", "+ '\\n' sidebar += '|series=' + '\\n' sidebar += '|organizer='", "'\\n' except KeyError: sidebar += '|edate=\\n' sidebar += '|web=' +", "= bw_teams.get_team_info(team[3], team[1]) teams_table += '|team=' + team_info['name'] + '\\n'", "'|hide=false}}\\n' for match in rounds[str(i)]: swiss_match_table += match swiss_match_table +=", "except KeyError: sidebar += '|edate=\\n' sidebar += '|web=' + '\\n'", "end}}\\n' return prize_pool def main(): ccs_winter_minor_id = '5ff3354193edb53839d44d55' ccs_winter_minor_wiki =", "'\\n' sidebar += '|icon=' + '\\n' sidebar += '|series=' +", "team_previous_round: if team_previous_round[match['top']['teamID']]: bracket_type = 'W' else: bracket_type = 'D'", "+= '\\n' bracket += '}}\\n' return bracket def create_match_maps(match, teams,", "= '611dac6ecb6f6260d5f30b6e' world_cup_wiki = 'World_Cup' twin_suns_tourny_id = '60806876938bed74f6edea9e' twin_suns_wiki =", "match['isBye']: team_bot = bw_teams.get_team_info('0', 'BYE') match_line += '|team1=' + team_top['teamteamplate']", "match['isComplete']: return match_line match_line = '{{MatchMaps\\n' match_line += '|date=\\n' if", "= '{{TeamCard columns end}}\\n' if dynamic: footer += '}}\\n' return", "wiki_name + '}}\\n' return sidebar def create_event_format(data): event_format = ''", "else: team_previous_round[match['top']['teamID']] = False bracket += '\\n' if 'teamID' in", "return match_line match_line = '{{MatchMaps\\n' match_line += '|date=\\n' if 'teamID'", "'|name=' + data['name'] + '\\n' sidebar += '|shortname=' + data['name']", "for match in rounds[str(i)]: swiss_match_table += match swiss_match_table += '{{MatchListEnd}}\\n'", "match['isBye']: team_top = bw_teams.get_team_info('0', 'BYE') if 'teamID' in match['bottom']: team_bot", "+= '\\n' if '\\n' not in swiss_table[-1]: 
swiss_table += '\\n'", "+= '|vod=\\n' match_line += '}}\\n' match_line += '}}\\n' return match_line", "+= '|discord=' + '\\n' sidebar += '|map1=' + '\\n' sidebar", "match['numGames'] != numGames: if rounds: event_format += '** ' +", "to keep track of match['next'] and build up the D", "header += '|name' + str(dynamic_idx+1) + '=' + dynamic[dynamic_idx]['tab_name'] +", "+= '{{box|break|padding=2em}}\\n' swiss_match_table += '====={{HiddenSort|Round ' + str(i) + '}}=====\\n'", "# todo handle double elimination brackets # set up team", "up team number trackers team_previous_round = dict() # set up", "'\\n' sidebar += '|team_number=' + str(len(data['teams'])) + '\\n' sidebar +=", "if 'place' in data['teams'][team_id]: place = data['teams'][team_id]['place'] else: place =", "'=down' team_info = bw_teams.get_team_info(record['team']['persistentTeamID'], record['team']['name']) swiss_table += '|team' + str(rank", "'{{box|break|padding=2em}}\\n' swiss_match_table += '====={{HiddenSort|Round ' + str(i) + '}}=====\\n' swiss_match_table", "+ '\\n' for idx, player in enumerate(data['teams'][team[0]]['players']): player_tag = 'p'", "== \"elimination\": numGames = 0 rounds = 0 for match", "|lastvsscore3=\\n' prize_pool += '|tbd |lastvs4= |lastscore4= |lastvsscore4=\\n' prize_pool += '}}\\n'", "team_info['name'] )) if sort_place: teams = sorted(teams, key=itemgetter(2, 4, 0))", "round_max_loss_match_count[match['roundNumber']] = max(match['matchNumber'], round_max_loss_match_count[match['roundNumber']]) bracket_indicator = '|R' + str(match['roundNumber']) +", "match['top']['winner']: match_line += 'W' else: match_line += 'FF' match_line +=", "+= '{{box|start|padding=2em}}\\n' else: swiss_match_table += '{{box|break|padding=2em}}\\n' swiss_match_table += '====={{HiddenSort|Round '", "+= '|temp_tie' + str(rank+1) + '=' + \"{:7.3f}\".format(record['opponentsMatchWinPercentage']) + '\\n'", "teams, bw_teams): match_line = '' if not match['isComplete']: return match_line", 
"'{{box|start|padding=2em}}\\n' else: tables += '{{box|break|padding=2em}}\\n' tables += '===={{HiddenSort|Group ' +", "= 'D' else: bracket_type = 'D' bracket_indicator = '|R' +", "'|next=' + '\\n' sidebar += '}}\\n' sidebar += '{{Upcoming matches", "str(i) + ' Matches|matchsection=Round ' \\ + str(i) + '|hide=false}}\\n'", "0)) else: teams = sorted(teams, key=itemgetter(4, 0)) return teams def", "bracket_indicator = '|R' + str(match['roundNumber']) + bracket_type \\ + str(match['matchNumber']", "# https://liquipedia.net/rainbowsix/Template:8DETeamBracket/doc # https://liquipedia.net/rainbowsix/Template:16DETeamBracket/doc # if match['matchType'] == 'winner': #", "match_line += '|details={{BracketMatchSummary\\n' match_line += '|date=|finished=true\\n' match_line += '|twitch= |youtube=\\n'", "in group['matches']: match_line = create_match_maps(match, teams, bw_teams) match_table += match_line", "+ ' ' if 'winner' in match['top'] and match['top']['winner']: bracket", "|lastscore2= |lastvsscore2=\\n' prize_pool += '|tbd |lastvs3= |lastscore3= |lastvsscore3=\\n' prize_pool +=", "= create_event_format(event_data.tournament_data) f.write(event_format) f.write('===Broadcast Talent===\\n') f.write('===Prize Pool===\\n') prize_pool = create_prize_pool(event_data.tournament_data['prizes'])", "else: swiss_table += '|bg' + str(rank + 1) + '=down'", "= list() for team_id in data['teams']: if 'place' in data['teams'][team_id]:", "+= '|web=' + '\\n' sidebar += '|bracket=https://battlefy.com/' + data['organization']['slug'] +", "+= '{{box|start|padding=2em}}\\n' else: tables += '{{box|break|padding=2em}}\\n' tables += '===={{HiddenSort|Group '", "|lastvsscore1=}}\\n' prize_pool += '{{prize pool slot |place=2 |usdprize=0 |tbd |lastvs1=", "+= '|bg' + str(rank + 1) + '=down' team_info =", "\"swiss\": event_format += '** ' + str(stage['bracket']['roundsCount']) + '-round '", "'DETeamBracket\\n' else: print('Unknown stage style: ' + stage['bracket']['style']) return #", "and 
match['top']['winner']: match_line += '|winner=1\\n' elif 'winner' in match['bottom'] and", "bracket_indicator + 'literal=BYE ' if 'score' in match['top']: bracket +=", "elif match['isBye']: team_top = bw_teams.get_team_info('0', 'BYE') if 'teamID' in match['bottom']:", "[1] * (len(stage['bracket']['series']) + 1) round_max_win_match_count[0] = 0 round_max_loss_match_count =", "match['bottom']['winner']: match_line += 'W' else: match_line += 'FF' else: match_line", "== 'winner': # round_max_win_match_count[match['roundNumber']] = max(match['matchNumber'], # round_max_win_match_count[match['roundNumber']]) # elif", "round and in a 16 team DE the 4th winners", "= '{{TeamCardToggleButton}}\\n' teams_ordered = '' # Use prior rounds as", "tables += '{{box|break|padding=2em}}\\n' tables += '===={{HiddenSort|Group ' + group['name'] +", "'}}\\n' match_line += '}}\\n' return match_line def create_round_robin_tables(stage, teams, bw_teams,", "and match['bottom']['winner']: bracket += bracket_indicator + 'win=2 ' team_previous_round[match['bottom']['teamID']] =", "'\\n' sidebar += '|discord=' + '\\n' sidebar += '|map1=' +", "+ \"{:7.3f}\".format(record['opponentsMatchWinPercentage']) + '\\n' swiss_table += '}}\\n' return swiss_table def", "updated for non SE16 templates # In DE brackets D", "+ ' |' + player_tag + 'flag=' + player_info['flag'] if", "+ player_tag + 'link=' + player_info['link'] teams_table += '\\n' #", "max(match['matchNumber'], # round_max_win_match_count[match['roundNumber']]) # elif match['matchType'] == 'loser': # round_max_loss_match_count[match['roundNumber']]", "rounds as a tiebreaker for when multiple teams have the", "def create_elim_bracket(stage, teams, bw_teams): if stage['bracket']['style'] == 'single': bracket =", "+ str(len(data['teams'])) + '\\n' sidebar += '|previous=' + '\\n' sidebar", "team_top['teamteamplate'] match_line += '|team2=' + team_bot['teamteamplate'] if 'isTie' in match", "'winner' in match['top'] and match['top']['winner']: 
match_line += '|winner=1\\n' elif 'winner'", "'|rulebook=' + '\\n' sidebar += '|twitter=' + '\\n' sidebar +=", "|lastscore1= |lastvsscore1=}}\\n' prize_pool += '{{prize pool slot |place=2 |usdprize=0 |tbd", "league' + '\\n' sidebar += '|liquipediatier=' + '\\n' sidebar +=", "+ '\\n' sidebar += '|bracket=https://battlefy.com/' + data['organization']['slug'] + '/' +", "'|prizepool=' + data['prizes'] + '\\n' sidebar += '|type=Online' + '\\n'", "+ '}}\\n' f.write(display) sidebar = create_sidebar(event_data.tournament_data, wiki_name) f.write(sidebar) f.write('==About==\\n') f.write('===Format===\\n')", "+ 1) if player['_id'] in calcup_roster_tracking.eventid_to_missing_userid: player['userID'] = calcup_roster_tracking.eventid_to_missing_userid[player['_id']] player_info", "\\ + str(i) + '|hide=false}}\\n' for match in rounds[str(i)]: swiss_match_table", "= '603c00fbfe4fb811b3168f5b' ccs_spring_minor_wiki = 'Calrissian_Cup/Spring/Minor' ccs_spring_major_id = '6061b764f68d8733c8455fcf' ccs_spring_major_wiki =", "'=' + dropped_style + '' else: swiss_table += '|bg' +", "bw_teams) f.write(bracket) elif stage['bracket']['type'] == 'roundrobin': f.write('===' + stage['name'] +", "+= '{{MatchListEnd}}\\n' swiss_match_table += '{{box|end}}\\n' return swiss_match_table def create_elim_bracket(stage, teams,", "= standing['place'] else: data['teams'][standing['team']['_id']]['place'] = len(stage['standings']) + place teams =", "+ group['name'] + '|width=450px|show_p=false|date=|ties=true\\n' tables += '|tournament=' + wiki_name +", "+= '|c= |cflag=\\n' # teams_table += '|qualifier=\\n' teams_table += '}}\\n'", "in match['bottom']: bracket += bracket_indicator + 'score=' + str(match['bottom']['score']) +", "str(match['matchNumber'] * 2 + round_match_offset) if 'teamID' in match['bottom']: team_name", "# matches = sorted(stage['matches'], key=itemgetter('matchNumber')) matches = stage['matches'] for match", "dynamic=participant_tabs, sort_place=True) f.write(teams) 
f.write('==Results==\\n') for stage in event_data.tournament_data['stages']: if stage['bracket']['type']", "'|tbd |lastvs4= |lastscore4= |lastvsscore4=\\n' prize_pool += '}}\\n' prize_pool += '{{Prize", "player_info['link']: teams_table += ' |' + player_tag + 'link=' +", "calcup_roster_tracking.eventid_to_missing_userid[player['_id']] player_info = bw_players.get_player_info(player['userID'], player['inGameName']) teams_table += '|' + player_tag", "of match['next'] and build up the D and W that", "team_info = bw_teams.get_team_info(data['teams'][team_id]['persistentTeamID'], data['teams'][team_id]['name']) teams.append((team_id, data['teams'][team_id]['name'], place, data['teams'][team_id]['persistentTeamID'], team_info['name'] ))", "\\ + str(match['matchNumber'] * 2 + round_match_offset) if 'teamID' in", "Participants', # 'count': -1}, ] bw_teams = battlefy_wiki_linkings.BattlefyWikiTeamLinkings() bw_players =", "+= match swiss_match_table += '{{MatchListEnd}}\\n' swiss_match_table += '{{box|end}}\\n' return swiss_match_table", "in match['bottom']: if match['bottom']['teamID'] in team_previous_round: if team_previous_round[match['bottom']['teamID']]: bracket_type =", "i == 1: swiss_match_table += '{{box|start|padding=2em}}\\n' else: swiss_match_table += '{{box|break|padding=2em}}\\n'", "'{{' + str(stage['bracket']['teamsCount']) + 'DETeamBracket\\n' else: print('Unknown stage style: '", "# Default first round to D and then future bracket", "= max(match['matchNumber'], round_max_win_match_count[match['roundNumber']]) elif match['matchType'] == 'loser': round_max_loss_match_count[match['roundNumber']] = max(match['matchNumber'],", "+ 'flag=' + player_info['flag'] if player_info['link']: teams_table += ' |'", "+= '|tbd |lastvs2= |lastscore2= |lastvsscore2=\\n' prize_pool += '}}\\n' prize_pool +=", "world_cup_id wiki_name = world_cup_wiki participant_tabs = [ # {'tab_name': 'Top", "'\\n' sidebar += '|map1=' + '\\n' sidebar += '|map2=' +", "0 round_max_loss_match_count 
= [1] * (len(stage['bracket']['series']) + 1) round_max_loss_match_count[0] =", "' \\ + stage['bracket']['seriesStyle'] + str(numGames) + '\\n' rounds =", "round_match_offset) if 'teamID' in match['bottom']: team_name = bw_teams.get_team_info(teams[match['bottom']['teamID']]['persistentTeamID'], teams[match['bottom']['teamID']]['name'])['teamteamplate'] bracket", "+= '|tbd |lastvs2= |lastscore2= |lastvsscore2=\\n' prize_pool += '|tbd |lastvs3= |lastscore3=", "'|image=' + '\\n' sidebar += '|icon=' + '\\n' sidebar +=", "'|vod=\\n' match_line += '}}\\n' match_line += '}}\\n' return match_line def", "create_participants(data, bw_players, bw_teams, dynamic=[], sort_place=True): header = '{{TeamCardToggleButton}}\\n' teams_ordered =", "= create_participants(event_data.tournament_data, bw_players, bw_teams, dynamic=participant_tabs, sort_place=True) f.write(teams) f.write('==Results==\\n') for stage", "world_cup_id = '611dac6ecb6f6260d5f30b6e' world_cup_wiki = 'World_Cup' twin_suns_tourny_id = '60806876938bed74f6edea9e' twin_suns_wiki", "1] else: round_match_offset = -2 * round_max_loss_match_count[match['roundNumber'] - 1] \\", "+ bracket_type \\ + str(match['matchNumber'] * 2 - 1 +", "teams[standing['team']['_id']]['name']) group_table += '|bg' + str(pos + 1) + '=down|team'", "'Calrissian_Cup/Summer/Major' ccs_fall_minor_id = '60fa26043ba15d73719669bd' ccs_fall_minor_wiki = 'Calrissian_Cup/Fall/Minor' ccs_fall_major_id = '61314505635fe17a14eafe03'", "'{{box|break|padding=2em}}\\n' tables += '===={{HiddenSort|Group ' + group['name'] + '}}====\\n' tables", "sidebar += '|map4=' + '\\n' sidebar += '|map5=' + '\\n'", "= 'Calrissian_Cup/Summer/Minor' ccs_summer_major_id = '60dd319012cb9c33c2f63868' ccs_summer_major_wiki = 'Calrissian_Cup/Summer/Major' ccs_fall_minor_id =", "|youtube=\\n' match_line += '|vod=\\n' match_line += '}}\\n' match_line += '}}\\n'", "* 2 - 1 + round_match_offset) if 'teamID' in match['top']:", "str(rank+1) + '=' + 
\"{:7.3f}\".format(record['opponentsMatchWinPercentage']) + '\\n' swiss_table += '}}\\n'", "pathlib import Path import calcup_roster_tracking def create_sidebar(data, wiki_name): sidebar =", "match['next'] # Not exactly sure how to address round_team_number, in", "'\\n' if 'teamID' in match['bottom']: if match['bottom']['teamID'] in team_previous_round: if", "'|shortname=' + data['name'] + '\\n' sidebar += '|tickername=' + data['name']", "= rank_teams(data, bw_teams, sort_place) dynamic_idx = 0 if dynamic: header", "|lastvsscore1=\\n' prize_pool += '|tbd |lastvs2= |lastscore2= |lastvsscore2=\\n' prize_pool += '}}\\n'", "import Path import calcup_roster_tracking def create_sidebar(data, wiki_name): sidebar = '{{Infobox", "wiki_name, include_matches=True) f.write(round_robin_tables) else: print('Unsupported bracket type of: ' +", "|lastvsscore1=\\n' prize_pool += '|tbd |lastvs2= |lastscore2= |lastvsscore2=\\n' prize_pool += '|tbd", "match in group['matches']: match_line = create_match_maps(match, teams, bw_teams) match_table +=", "+ stage['bracket']['type'] + '\\n' elif stage['bracket']['type'] == \"elimination\": numGames =", "= -2 * round_max_loss_match_count[match['roundNumber'] - 1] \\ + (round_max_win_match_count[match['roundNumber']] -", "+ round_match_offset) if 'teamID' in match['top']: team_name = bw_teams.get_team_info(teams[match['top']['teamID']]['persistentTeamID'], teams[match['top']['teamID']]['name'])['teamteamplate']", "2 + round_match_offset) if 'teamID' in match['bottom']: team_name = bw_teams.get_team_info(teams[match['bottom']['teamID']]['persistentTeamID'],", "% 8 == 0: swiss_table += '\\n' if '\\n' not", "bracket += bracket_indicator + 'literal=BYE ' if 'score' in match['top']:", "data['teams'][standing['team']['_id']]['place'] = \\ standing['place'] + (1 - 1 / data['teams'][standing['team']['_id']]['place'])", "else: if break_ties: data['teams'][standing['team']['_id']]['place'] = \\ standing['place'] + (1 -", "+ str(match['roundNumber']) + 
bracket_type \\ + str(match['matchNumber'] * 2 -", "f.write('===Prize Pool===\\n') prize_pool = create_prize_pool(event_data.tournament_data['prizes']) f.write(prize_pool) f.write('==Participants==\\n') teams = create_participants(event_data.tournament_data,", "+= '|edate=' + datetime.strptime(data['lastCompletedMatchAt'], '%Y-%m-%dT%H:%M:%S.%fZ').strftime( '%Y-%m-%d') + '\\n' except KeyError:", "if dynamic: if team_num == dynamic[dynamic_idx]['count']: teams_ordered += '{{TeamCard columns", "'%Y-%m-%d') + '\\n' except KeyError: sidebar += '|edate=\\n' sidebar +=", "slot |place=1 |usdprize=0 |tbd |lastvs1= |lastscore1= |lastvsscore1=}}\\n' prize_pool += '{{prize", "'|series=' + '\\n' sidebar += '|organizer=' + data['organization']['name'] + '\\n'", "+= '{{prize pool start}}\\n' prize_pool += '{{prize pool slot |place=1", "swiss_table += '|bg' + str(rank + 1) + '=' +", "'|organizer=' + data['organization']['name'] + '\\n' sidebar += '|organizer-link=' + '\\n'", "bw_teams.get_team_info(data['teams'][team_id]['persistentTeamID'], data['teams'][team_id]['name']) teams.append((team_id, data['teams'][team_id]['name'], place, data['teams'][team_id]['persistentTeamID'], team_info['name'] )) if sort_place:", "'Calrissian_Cup/Winter/Minor' ccs_winter_major_id = '60019f8ebcc5ed46373408a1' ccs_winter_major_wiki = 'Calrissian_Cup/Winter/Major' ccs_spring_minor_id = '603c00fbfe4fb811b3168f5b'", "datetime from operator import itemgetter from pathlib import Path import", "event_format += '** ' + str(stage['bracket']['roundsCount']) + '-round ' +", "'=' + dynamic[dynamic_idx]['tab_name'] + '\\n' teams_ordered += '|content' + str(dynamic_idx+1)", "'|c= |cflag=\\n' # teams_table += '|qualifier=\\n' teams_table += '}}\\n' teams_ordered", "'teamID' in match['bottom']: team_bot = bw_teams.get_team_info(teams[match['bottom']['teamID']]['persistentTeamID'], teams[match['bottom']['teamID']]['name']) elif match['isBye']: team_bot", "enumerate(stage['standings']): if 'place' in standing: if 
'place' not in data['teams'][standing['team']['_id']]:", "= create_round_robin_tables(stage, event_data.tournament_data['teams'], bw_teams, wiki_name, include_matches=True) f.write(round_robin_tables) else: print('Unsupported bracket", "match['isTie']: match_line += '|winner=0\\n' elif 'winner' in match['top'] and match['top']['winner']:", "4th round and in a 16 team DE the 4th", "match['top']: bracket += bracket_indicator + 'score=' + str(match['top']['score']) + '", "= '{{MatchMaps\\n' match_line += '|date=\\n' if 'teamID' in match['top']: team_top", "+= '{{prize pool slot |place=5-8 |usdprize=0\\n' prize_pool += '|tbd |lastvs1=", "ccs_spring_major_id = '6061b764f68d8733c8455fcf' ccs_spring_major_wiki = 'Calrissian_Cup/Spring/Major' ccs_summer_minor_id = '60b41961d35b1411a7b31d64' ccs_summer_minor_wiki", "ccs_fall_minor_wiki = 'Calrissian_Cup/Fall/Minor' ccs_fall_major_id = '61314505635fe17a14eafe03' ccs_fall_major_wiki = 'Calrissian_Cup/Fall/Major' ccs_championship_id", "ccs_championship_wiki = 'Calrissian_Cup/Championship' world_cup_id = '611dac6ecb6f6260d5f30b6e' world_cup_wiki = 'World_Cup' twin_suns_tourny_id", "f.write('==About==\\n') f.write('===Format===\\n') event_format = create_event_format(event_data.tournament_data) f.write(event_format) f.write('===Broadcast Talent===\\n') f.write('===Prize Pool===\\n')", "vs R2W5 and R2D2 vs R2W6 # Might want to", "+= 'W' else: match_line += 'FF' else: match_line += '|games1='", "f.write(teams) f.write('==Results==\\n') for stage in event_data.tournament_data['stages']: if stage['bracket']['type'] == 'swiss':", "|cflag=\\n' # teams_table += '|qualifier=\\n' teams_table += '}}\\n' teams_ordered +=", "in match['bottom'] and match['bottom']['winner']: match_line += '|winner=2\\n' else: match_line +=", "+= '{{MatchListStart|width=450px|title=Round ' + str(i) + ' Matches|matchsection=Round ' \\", "# 'count': -1}, ] bw_teams = battlefy_wiki_linkings.BattlefyWikiTeamLinkings() bw_players = 
battlefy_wiki_linkings.BattlefyWikiPlayerLinkings()", "as f: display = '{{DISPLAYTITLE:' + event_data.tournament_data['name'] + '}}\\n' f.write(display)", "in range(1, len(rounds) + 1): if i == 1: swiss_match_table", "defined by match['next'] # Not exactly sure how to address", "create_elim_bracket(stage, event_data.tournament_data['teams'], bw_teams) f.write(bracket) elif stage['bracket']['type'] == 'roundrobin': f.write('===' +", "'literal=BYE ' if 'score' in match['bottom']: bracket += bracket_indicator +", "'\\n' sidebar += '|format=' + '\\n' sidebar += '|patch=' +", "match_line = create_match_maps(match, teams, bw_teams) match_table += match_line tables +=", "+= '|instagram=' + '\\n' sidebar += '|discord=' + '\\n' sidebar", "= sorted(teams, key=itemgetter(2, 4, 0)) else: teams = sorted(teams, key=itemgetter(4,", "match_line: continue try: rounds[str(match['roundNumber'])].append(match_line) except KeyError: rounds[str(match['roundNumber'])] = list() rounds[str(match['roundNumber'])].append(match_line)", "# {'tab_name': 'Top 32', # 'count': 32}, # {'tab_name': 'Other", "event_data.get_tournament_data_path() event_path.mkdir(parents=True, exist_ok=True) filename = Path.joinpath(event_path, event_data.tournament_data['name'] + '.wiki') with", "f.write(sidebar) f.write('==About==\\n') f.write('===Format===\\n') event_format = create_event_format(event_data.tournament_data) f.write(event_format) f.write('===Broadcast Talent===\\n') f.write('===Prize", "is # called the 4th round and in a 16", "+ '\\n' except KeyError: sidebar += '|edate=\\n' sidebar += '|web='", "' ' if 'winner' in match['top'] and match['top']['winner']: bracket +=", "this will need to get updated for non SE16 templates", "'|discord=' + '\\n' sidebar += '|map1=' + '\\n' sidebar +=", "except KeyError: rounds[str(match['roundNumber'])] = list() rounds[str(match['roundNumber'])].append(match_line) for i in range(1,", "sidebar += '|patch=' + '\\n' sidebar += '|sdate=' + 
datetime.strptime(data['checkInStartTime'],", "prize_pool += '{{prize pool slot |place=2 |usdprize=0 |tbd |lastvs1= |lastscore1=", "key=itemgetter(2, 4, 0)) else: teams = sorted(teams, key=itemgetter(4, 0)) return", "+= '|winner=1\\n' elif 'winner' in match['bottom'] and match['bottom']['winner']: match_line +=", "f.write('====Swiss Match Results====\\n') swiss_matches = create_swiss_matches(stage['matches'], event_data.tournament_data['teams'], bw_teams) f.write(swiss_matches) elif", "== 1: tables += '{{box|start|padding=2em}}\\n' else: tables += '{{box|break|padding=2em}}\\n' tables", "'SETeamBracket\\n' elif stage['bracket']['style'] == 'double': bracket = '{{' + str(stage['bracket']['teamsCount'])", "'p' + str(idx + 1) if player['_id'] in calcup_roster_tracking.eventid_to_missing_userid: player['userID']", "rounds: event_format += '** ' + str(rounds) + '-round '", "in enumerate(teams): if dynamic: if team_num == dynamic[dynamic_idx]['count']: teams_ordered +=", "'|games1=' if match['top']['winner']: match_line += 'W' else: match_line += 'FF'", "+ (1 - 1 / data['teams'][standing['team']['_id']]['place']) else: data['teams'][standing['team']['_id']]['place'] = standing['place']", "bracket = '{{' + str(stage['bracket']['teamsCount']) + 'SETeamBracket\\n' elif stage['bracket']['style'] ==", "tables += '{{box|start|padding=2em}}\\n' else: tables += '{{box|break|padding=2em}}\\n' tables += '===={{HiddenSort|Group", "datetime import datetime from operator import itemgetter from pathlib import", "'FF' else: match_line += '|games1=' + str(match['top']['score']) match_line += '|games2='", "swiss_table += '}}\\n' return swiss_table def create_swiss_matches(matches, teams, bw_teams): swiss_match_table", "https://liquipedia.net/rainbowsix/Template:8DETeamBracket/doc # https://liquipedia.net/rainbowsix/Template:16DETeamBracket/doc # if match['matchType'] == 'winner': # round_max_win_match_count[match['roundNumber']]", "= create_match_maps(match, teams, bw_teams) match_table 
+= match_line tables += match_table", "keep track of match['next'] and build up the D and", "\\ standing['place'] + (1 - 1 / data['teams'][standing['team']['_id']]['place']) else: data['teams'][standing['team']['_id']]['place']", "tiebreaker for when multiple teams have the same place at", "match_line += '|walkover=1' match_line += '|games1=' if match['top']['winner']: match_line +=", "swiss_table += '|team' + str(rank + 1) + '=' +", "def create_event_format(data): event_format = '' for stage in data['stages']: event_format", "dynamic: footer += '}}\\n' return header + teams_ordered + footer", "+ str(rank + 1) + '=' + dropped_style + ''", "* round_max_loss_match_count[match['roundNumber'] - 1] \\ + (round_max_win_match_count[match['roundNumber']] - round_max_win_match_count[match['roundNumber'] -", "teams def create_participants(data, bw_players, bw_teams, dynamic=[], sort_place=True): header = '{{TeamCardToggleButton}}\\n'", "SE16 templates # In DE brackets D means the team", "rounds[str(match['roundNumber'])].append(match_line) except KeyError: rounds[str(match['roundNumber'])] = list() rounds[str(match['roundNumber'])].append(match_line) for i in", "str(dynamic_idx+1) + '=' + '\\n' header += '{{TeamCard columns start|cols=5|height=250}}\\n'", "round is called the 6th round # https://liquipedia.net/rainbowsix/Template:4DETeamBracket/doc # https://liquipedia.net/rainbowsix/Template:8DETeamBracket/doc", "wiki_name): sidebar = '{{Infobox league' + '\\n' sidebar += '|liquipediatier='", "str(dynamic_idx+1) + '=' + '\\n' teams_ordered += '{{TeamCard columns start|cols=5|height=250}}\\n'", "data['teams']: if 'place' in data['teams'][team_id]: place = data['teams'][team_id]['place'] else: place", "team[1]) teams_table += '|team=' + team_info['name'] + '\\n' teams_table +=", "+= '{{prize pool slot |place=1 |usdprize=0 |tbd |lastvs1= |lastscore1= |lastvsscore1=}}\\n'", "f: display = '{{DISPLAYTITLE:' + event_data.tournament_data['name'] + '}}\\n' f.write(display) sidebar", 
"+ '\\n' sidebar += '|twitch=' + '\\n' sidebar += '|instagram='", "bracket += '\\n' bracket += '}}\\n' return bracket def create_match_maps(match,", "event_path = event_data.get_tournament_data_path() event_path.mkdir(parents=True, exist_ok=True) filename = Path.joinpath(event_path, event_data.tournament_data['name'] +", "= calcup_roster_tracking.eventid_to_missing_userid[player['_id']] player_info = bw_players.get_player_info(player['userID'], player['inGameName']) teams_table += '|' +", "+= 1 teams_ordered += '|name' + str(dynamic_idx + 1) +", "'BYE') if 'teamID' in match['bottom']: team_bot = bw_teams.get_team_info(teams[match['bottom']['teamID']]['persistentTeamID'], teams[match['bottom']['teamID']]['name']) elif", "bracket type of: ' + stage['bracket']['type']) if __name__ == '__main__':", "May also just need to keep track of match['next'] and", "'count': 16}, # {'tab_name': 'Top 32', # 'count': 32}, #", "'\\n' sidebar += '|instagram=' + '\\n' sidebar += '|discord=' +", "+ event_data.tournament_data['name'] + '}}\\n' f.write(display) sidebar = create_sidebar(event_data.tournament_data, wiki_name) f.write(sidebar)", "if dynamic: footer += '}}\\n' return header + teams_ordered +", "== 'roundrobin': f.write('===' + stage['name'] + '===\\n') round_robin_tables = create_round_robin_tables(stage,", "+= '|organizer-link=' + '\\n' sidebar += '|sponsor=' + '\\n' sidebar", "main(): ccs_winter_minor_id = '5ff3354193edb53839d44d55' ccs_winter_minor_wiki = 'Calrissian_Cup/Winter/Minor' ccs_winter_major_id = '60019f8ebcc5ed46373408a1'", "+ 1) + \"=\" \\ + team_info['teamteamplate'] + '\\n' group_header", "'winner' in match['bottom'] and match['bottom']['winner']: match_line += '|winner=2\\n' else: match_line", "'|pbg' + str(pos + 1) + '=down' for standing in", "'Twin_Suns_Tournament' gsl_s1_id = '5ff4b388fd124e11b18e185d' gsl_s1_wiki = 'Global_Squadrons_League/2021/Season_1' tournament_id = world_cup_id", "'|name' + str(dynamic_idx + 1) + '=' + 
dynamic[dynamic_idx]['tab_name'] +", "1) + \"=\" \\ + team_info['teamteamplate'] + '\\n' group_header +=", "bw_teams) f.write(swiss_table) f.write('====Swiss Match Results====\\n') swiss_matches = create_swiss_matches(stage['matches'], event_data.tournament_data['teams'], bw_teams)", "= 'D' bracket_indicator = '|R' + str(match['roundNumber']) + bracket_type \\", "stage['bracket']['seriesStyle'] + str(numGames) + '\\n' rounds = 1 numGames =", "bracket += bracket_indicator + 'score=' + str(match['bottom']['score']) + ' '", "in data['stages']: event_format += '* ' + stage['name'] + '\\n'", "else: bracket_type = 'D' else: bracket_type = 'D' bracket_indicator =", "+ 'literal=BYE ' if 'score' in match['top']: bracket += bracket_indicator", "bracket_type = 'D' else: bracket_type = 'D' bracket_indicator = '|R'", "|lastvs2= |lastscore2= |lastvsscore2=\\n' prize_pool += '}}\\n' prize_pool += '{{prize pool", "in enumerate(data['teams'][team[0]]['players']): player_tag = 'p' + str(idx + 1) if", "event_format += '* ' + stage['name'] + '\\n' if stage['bracket']['type']", "+ dynamic[dynamic_idx]['tab_name'] + '\\n' header += '|This=1\\n' header += '|content'", "+ '\\n' sidebar += '|twitter=' + '\\n' sidebar += '|twitch='", "'=' + '\\n' teams_ordered += '{{TeamCard columns start|cols=5|height=250}}\\n' else: if", "'|twitch= |youtube=\\n' match_line += '|vod=\\n' match_line += '}}\\n' match_line +=", "ccs_spring_minor_id = '603c00fbfe4fb811b3168f5b' ccs_spring_minor_wiki = 'Calrissian_Cup/Spring/Minor' ccs_spring_major_id = '6061b764f68d8733c8455fcf' ccs_spring_major_wiki", "start|cols=5|height=250}}\\n' teams_table = '{{TeamCard\\n' team_info = bw_teams.get_team_info(team[3], team[1]) teams_table +=", "+= '|games2=' + str(match['bottom']['score']) + '\\n' match_line += '|details={{BracketMatchSummary\\n' match_line", "first round to D and then future bracket type is", "sidebar += '|image=' + '\\n' sidebar += '|icon=' + '\\n'", "pool slot |place=1 |usdprize=0 |tbd |lastvs1= 
|lastscore1= |lastvsscore1=}}\\n' prize_pool +=", "+ '=' + '\\n' teams_ordered += '{{TeamCard columns start|cols=5|height=250}}\\n' else:", "swiss_table += '|temp_tie' + str(rank+1) + '=' + \"{:7.3f}\".format(record['opponentsMatchWinPercentage']) +", "team DE the 4th winners bracket round is called the", "sidebar += '|twitter=' + '\\n' sidebar += '|twitch=' + '\\n'", "' team_previous_round[match['top']['teamID']] = True else: team_previous_round[match['top']['teamID']] = False bracket +=", "|lastvs1= |lastscore1= |lastvsscore1=}}\\n' prize_pool += '{{prize pool slot |place=3-4 |usdprize=0\\n'", "TODO: this will need to get updated for non SE16", "'{{MatchListEnd}}\\n' tables += '{{box|end}}\\n' return tables def create_prize_pool(prize): prize_pool =", "'{{TeamCard columns end}}\\n' dynamic_idx += 1 teams_ordered += '|name' +", "1) + '=' + dropped_style + '' else: swiss_table +=", "+= bracket_indicator + 'score=' + str(match['bottom']['score']) + ' ' if", "+= '|content' + str(dynamic_idx+1) + '=' + '\\n' header +=", "' + group['name'] + '}}====\\n' tables += '{{GroupTableLeague|title=Group ' +", "'-round ' + stage['bracket']['type'] + '\\n' elif stage['bracket']['type'] == \"elimination\":", "sidebar += '|map5=' + '\\n' sidebar += '|team_number=' + str(len(data['teams']))", "pos, standing_id in enumerate(group['standingIDs']): group_header += '|pbg' + str(pos +", "16}, # {'tab_name': 'Top 32', # 'count': 32}, # {'tab_name':", "'w+', newline='\\n', encoding='utf-8') as f: display = '{{DISPLAYTITLE:' + event_data.tournament_data['name']", "0 if dynamic: header += '{{tabs dynamic\\n' header += '|name'", "standing_id in enumerate(group['standingIDs']): group_header += '|pbg' + str(pos + 1)", "+ '\\n' swiss_table += '}}\\n' return swiss_table def create_swiss_matches(matches, teams,", "+= '|bracket=https://battlefy.com/' + data['organization']['slug'] + '/' + data['slug'] + '/'", "+= '{{prize pool slot |place=3-4 |usdprize=0\\n' prize_pool += '|tbd |lastvs1=", "bracket 
round is called the 6th round # https://liquipedia.net/rainbowsix/Template:4DETeamBracket/doc #", "match['top']['winner']: bracket += bracket_indicator + 'win=1 ' team_previous_round[match['top']['teamID']] = True", "vs R2W6 # Might want to key off match['inConsolationBracket'] #", "sure how to address round_team_number, in a 8 team DE", "+ '\\n' prize_pool += '{{prize pool start}}\\n' prize_pool += '{{prize", "not match['isComplete']: return match_line match_line = '{{MatchMaps\\n' match_line += '|date=\\n'", "+= '|next=' + '\\n' sidebar += '}}\\n' sidebar += '{{Upcoming", "include_matches: match_table = '{{MatchListStart|title=Group ' + group['name'] + ' Matches|width=450px|hide=true}}\\n'", "break_ties=False): for stage in data['stages']: for place, standing in enumerate(stage['standings']):", "match swiss_match_table += '{{MatchListEnd}}\\n' swiss_match_table += '{{box|end}}\\n' return swiss_match_table def", "+= '{{box|break|padding=2em}}\\n' tables += '===={{HiddenSort|Group ' + group['name'] + '}}====\\n'", "the team dropped down from the previous round # In", "'' for pos, standing_id in enumerate(group['standingIDs']): group_header += '|pbg' +", "tables += '{{GroupTableLeague|title=Group ' + group['name'] + '|width=450px|show_p=false|date=|ties=true\\n' tables +=", "] bw_teams = battlefy_wiki_linkings.BattlefyWikiTeamLinkings() bw_players = battlefy_wiki_linkings.BattlefyWikiPlayerLinkings() event_data = battlefy_data.BattlefyData(tournament_id)", "= bw_teams.get_team_info(data['teams'][team_id]['persistentTeamID'], data['teams'][team_id]['name']) teams.append((team_id, data['teams'][team_id]['name'], place, data['teams'][team_id]['persistentTeamID'], team_info['name'] )) if", "stage['standings']: if standing_id == standing['_id']: # if standing['disqualified']: # has_drop", "= [1] * (len(stage['bracket']['series']) + 1) round_max_win_match_count[0] = 0 round_max_loss_match_count", "= 'D' if match['matchType'] == 'winner': round_match_offset = -2 *", 
"swiss_match_table += '{{box|end}}\\n' return swiss_match_table def create_elim_bracket(stage, teams, bw_teams): if", "= create_swiss_table(stage, bw_teams) f.write(swiss_table) f.write('====Swiss Match Results====\\n') swiss_matches = create_swiss_matches(stage['matches'],", "round_max_loss_match_count[match['roundNumber']]) if not 'teamID' in match['top']: continue if match['top']['teamID'] in", "+ str(stage['bracket']['roundsCount']) + '|diff=false\\n' for i in range(stage['bracket']['teamsCount']): swiss_table +=", "match['top']: team_top = bw_teams.get_team_info(teams[match['top']['teamID']]['persistentTeamID'], teams[match['top']['teamID']]['name']) elif match['isBye']: team_top = bw_teams.get_team_info('0',", "= create_elim_bracket(stage, event_data.tournament_data['teams'], bw_teams) f.write(bracket) elif stage['bracket']['type'] == 'roundrobin': f.write('==='", "+= bracket_indicator + 'team=' + team_name + ' ' else:", "+= '|content' + str(dynamic_idx+1) + '=' + '\\n' teams_ordered +=", "So there are rounds where D vs L happen such", "' |' + player_tag + 'link=' + player_info['link'] teams_table +=", "= 'D' else: bracket_type = 'D' if match['matchType'] == 'winner':", "match_line += '|games2=' + str(match['bottom']['score']) + '\\n' match_line += '|details={{BracketMatchSummary\\n'", "sidebar += '|web=' + '\\n' sidebar += '|bracket=https://battlefy.com/' + data['organization']['slug']", "+= '|sponsor=' + '\\n' sidebar += '|localcurrency=' + '\\n' sidebar", "'W' else: bracket_type = 'D' else: bracket_type = 'D' else:", "'\\n' group_header += '|tiebreaker1=series\\n' tables += group_header tables += group_table", "bracket_indicator + 'score=' + str(match['top']['score']) + ' ' if 'winner'", "== \"swiss\": event_format += '** ' + str(stage['bracket']['roundsCount']) + '-round", "match_line = create_match_maps(match, teams, bw_teams) if not match_line: continue try:", "' + str(rounds) + '-round ' \\ + stage['bracket']['seriesStyle'] +", "sidebar += '|platform=' 
+ data['platform'] + '\\n' sidebar += '|country='", "else: bracket_type = 'D' else: bracket_type = 'D' else: bracket_type", "+ data['organization']['slug'] + '/' + data['slug'] + '/' \\ +", "' Matches|matchsection=Round ' \\ + str(i) + '|hide=false}}\\n' for match", "stage['bracket']['style']) return # todo handle double elimination brackets # set", "'teamID' in match['top']: continue if match['top']['teamID'] in team_previous_round: if team_previous_round[match['top']['teamID']]:", "rounds = 1 numGames = match['numGames'] else: rounds += 1", "|place=3-4 |usdprize=0\\n' prize_pool += '|tbd |lastvs1= |lastscore1= |lastvsscore1=\\n' prize_pool +=", "bw_teams): swiss_match_table = '' rounds = dict() for match in", "' + str(i) + '}}=====\\n' swiss_match_table += '{{MatchListStart|width=450px|title=Round ' +", "+ '\\n' sidebar += '|name=' + data['name'] + '\\n' sidebar", "elif 'winner' in match['top'] and match['top']['winner']: match_line += '|winner=1\\n' elif", "vs L happen such as R2D1 vs R2W5 and R2D2", "str(stage['bracket']['roundsCount']) + '-round ' + stage['bracket']['type'] + '\\n' elif stage['bracket']['type']", "\"=\" \\ + team_info['teamteamplate'] + '\\n' group_header += '|tiebreaker1=series\\n' tables", "= 0 # matches = sorted(stage['matches'], key=itemgetter('matchNumber')) matches = stage['matches']", "+ str(dynamic_idx + 1) + '=' + dynamic[dynamic_idx]['tab_name'] + '\\n'", "'\\n' teams_ordered += '{{TeamCard columns start|cols=5|height=250}}\\n' else: if team_num ==", "'/bracket-list' + '\\n' sidebar += '|rulebook=' + '\\n' sidebar +=", "str(stage['bracket']['teamsCount']) + 'DETeamBracket\\n' else: print('Unknown stage style: ' + stage['bracket']['style'])", "TEAMS event_data.reduce_teams() event_path = event_data.get_tournament_data_path() event_path.mkdir(parents=True, exist_ok=True) filename = Path.joinpath(event_path,", "+ '\\n' sidebar += '|map3=' + '\\n' sidebar += '|map4='", "return swiss_match_table def create_elim_bracket(stage, teams, 
bw_teams): if stage['bracket']['style'] == 'single':", "+ '}}====\\n' tables += '{{GroupTableLeague|title=Group ' + group['name'] + '|width=450px|show_p=false|date=|ties=true\\n'", "'|winner=0\\n' elif 'winner' in match['top'] and match['top']['winner']: match_line += '|winner=1\\n'", "'\\n' sidebar += '|sponsor=' + '\\n' sidebar += '|localcurrency=' +", "bw_teams.get_team_info(teams[match['bottom']['teamID']]['persistentTeamID'], teams[match['bottom']['teamID']]['name'])['teamteamplate'] bracket += bracket_indicator + 'team=' + team_name +", "= stage['matches'] for match in matches: # TODO: this will", "to D and then future bracket type is defined by", "exist_ok=True) filename = Path.joinpath(event_path, event_data.tournament_data['name'] + '.wiki') with open(filename, 'w+',", "team_info['image'] + '\\n' for idx, player in enumerate(data['teams'][team[0]]['players']): player_tag =", "Matches|width=450px|hide=true}}\\n' for match in group['matches']: match_line = create_match_maps(match, teams, bw_teams)", "in data['teams']: if 'place' in data['teams'][team_id]: place = data['teams'][team_id]['place'] else:", "prize_pool += '{{Prize pool end}}\\n' return prize_pool def main(): ccs_winter_minor_id", "str(rounds) + '-round ' \\ + stage['bracket']['seriesStyle'] + str(numGames) +", "+ 1): if i == 1: swiss_match_table += '{{box|start|padding=2em}}\\n' else:", "match_line += 'W' else: match_line += 'FF' match_line += '|games2='", "data['teams'][standing['team']['_id']]['place'] = len(stage['standings']) + place teams = list() for team_id", "+ ' Matches|width=450px|hide=true}}\\n' for match in group['matches']: match_line = create_match_maps(match,", "= sorted(teams, key=itemgetter(4, 0)) return teams def create_participants(data, bw_players, bw_teams,", "+ str(match['top']['score']) match_line += '|games2=' + str(match['bottom']['score']) + '\\n' match_line", "= battlefy_wiki_linkings.BattlefyWikiPlayerLinkings() event_data = battlefy_data.BattlefyData(tournament_id) 
event_data.load_tournament_data() # FORCE REDUCE TEAMS", "team_top = bw_teams.get_team_info(teams[match['top']['teamID']]['persistentTeamID'], teams[match['top']['teamID']]['name']) elif match['isBye']: team_top = bw_teams.get_team_info('0', 'BYE')", "'\\n' teams_ordered += '|content' + str(dynamic_idx+1) + '=' + '\\n'", "+ player_tag + '=' + player_info['name'] \\ + ' |'", "+ str(match['matchNumber'] * 2 + round_match_offset) if 'teamID' in match['bottom']:", "in a 8 team DE the third winners bracket round", "teams, bw_teams) match_table += match_line tables += match_table tables +=", "sidebar += '|prizepool=' + data['prizes'] + '\\n' sidebar += '|type=Online'", "in matches: match_line = create_match_maps(match, teams, bw_teams) if not match_line:", "'D' else: bracket_type = 'D' else: bracket_type = 'D' bracket_indicator", "the end teams = rank_teams(data, bw_teams, sort_place) dynamic_idx = 0", "gsl_s1_id = '5ff4b388fd124e11b18e185d' gsl_s1_wiki = 'Global_Squadrons_League/2021/Season_1' tournament_id = world_cup_id wiki_name", "max(match['matchNumber'], round_max_win_match_count[match['roundNumber']]) elif match['matchType'] == 'loser': round_max_loss_match_count[match['roundNumber']] = max(match['matchNumber'], round_max_loss_match_count[match['roundNumber']])", "team_bot = bw_teams.get_team_info('0', 'BYE') match_line += '|team1=' + team_top['teamteamplate'] match_line", "= list() rounds[str(match['roundNumber'])].append(match_line) for i in range(1, len(rounds) + 1):", "def create_participants(data, bw_players, bw_teams, dynamic=[], sort_place=True): header = '{{TeamCardToggleButton}}\\n' teams_ordered", "'=' + dynamic[dynamic_idx]['tab_name'] + '\\n' header += '|This=1\\n' header +=", "start}}\\n' prize_pool += '{{prize pool slot |place=1 |usdprize=0 |tbd |lastvs1=", "+ dropped_style + '' else: swiss_table += '|bg' + str(rank", "DE brackest W means the team won the previous round", "prize_pool += '|tbd |lastvs2= |lastscore2= |lastvsscore2=\\n' prize_pool += 
'|tbd |lastvs3=", "match_line += '|date=|finished=true\\n' match_line += '|twitch= |youtube=\\n' match_line += '|vod=\\n'", "event_data.load_tournament_data() # FORCE REDUCE TEAMS event_data.reduce_teams() event_path = event_data.get_tournament_data_path() event_path.mkdir(parents=True,", "prize_pool def main(): ccs_winter_minor_id = '5ff3354193edb53839d44d55' ccs_winter_minor_wiki = 'Calrissian_Cup/Winter/Minor' ccs_winter_major_id", "'win=2 ' team_previous_round[match['bottom']['teamID']] = True elif 'teamID' in match['bottom']: team_previous_round[match['bottom']['teamID']]", "multiple teams have the same place at the end teams", "+ 'DETeamBracket\\n' else: print('Unknown stage style: ' + stage['bracket']['style']) return", "str(pos + 1) + '=down|team' + str(pos + 1) +", "dynamic_idx += 1 teams_ordered += '|name' + str(dynamic_idx + 1)", "match_line += 'W' else: match_line += 'FF' else: match_line +=", "idx == 1: tables += '{{box|start|padding=2em}}\\n' else: tables += '{{box|break|padding=2em}}\\n'", "record['team']['name']) swiss_table += '|team' + str(rank + 1) + '='", "'|tickername=' + data['name'] + '\\n' sidebar += '|image=' + '\\n'", "= 'Calrissian_Cup/Fall/Minor' ccs_fall_major_id = '61314505635fe17a14eafe03' ccs_fall_major_wiki = 'Calrissian_Cup/Fall/Major' ccs_championship_id =", "sidebar += '|organizer=' + data['organization']['name'] + '\\n' sidebar += '|organizer-link='", "'place' in standing: if 'place' not in data['teams'][standing['team']['_id']]: data['teams'][standing['team']['_id']]['place'] =", "Not exactly sure how to address round_team_number, in a 8", "idx, group in enumerate(stage['groups']): if idx == 1: tables +=", "'60dd319012cb9c33c2f63868' ccs_summer_major_wiki = 'Calrissian_Cup/Summer/Major' ccs_fall_minor_id = '60fa26043ba15d73719669bd' ccs_fall_minor_wiki = 'Calrissian_Cup/Fall/Minor'", "+= '|map2=' + '\\n' sidebar += '|map3=' + '\\n' sidebar", "place = 0 team_info = 
bw_teams.get_team_info(data['teams'][team_id]['persistentTeamID'], data['teams'][team_id]['name']) teams.append((team_id, data['teams'][team_id]['name'], place,", "in match['bottom'] and match['bottom']['winner']: match_line += 'W' else: match_line +=", "'=' + player_info['name'] \\ + ' |' + player_tag +", "'%Y-%m-%d') + '\\n' try: sidebar += '|edate=' + datetime.strptime(data['lastCompletedMatchAt'], '%Y-%m-%dT%H:%M:%S.%fZ').strftime(", "teams, bw_teams): if stage['bracket']['style'] == 'single': bracket = '{{' +", "+ footer def create_swiss_table(stage, bw_teams): dropped_style = 'drop' swiss_table =", "stage['bracket']['style'] == 'double': bracket = '{{' + str(stage['bracket']['teamsCount']) + 'DETeamBracket\\n'", "round to D and then future bracket type is defined", "else: match_line += 'FF' else: match_line += '|games1=' + str(match['top']['score'])", "in match['top']: team_name = bw_teams.get_team_info(teams[match['top']['teamID']]['persistentTeamID'], teams[match['top']['teamID']]['name'])['teamteamplate'] bracket += bracket_indicator +", "'score=' + str(match['top']['score']) + ' ' if 'winner' in match['top']", "else: swiss_match_table += '{{box|break|padding=2em}}\\n' swiss_match_table += '====={{HiddenSort|Round ' + str(i)", "= 'Calrissian_Cup/Fall/Major' ccs_championship_id = '6150dd2b0dd060282bebb0eb' ccs_championship_wiki = 'Calrissian_Cup/Championship' world_cup_id =", "future bracket type is defined by match['next'] # Not exactly", "at the end teams = rank_teams(data, bw_teams, sort_place) dynamic_idx =", "{'tab_name': 'Top 16', # 'count': 16}, # {'tab_name': 'Top 32',", "that way instead # Default first round to D and", "create_event_format(event_data.tournament_data) f.write(event_format) f.write('===Broadcast Talent===\\n') f.write('===Prize Pool===\\n') prize_pool = create_prize_pool(event_data.tournament_data['prizes']) f.write(prize_pool)", "Use prior rounds as a tiebreaker for when multiple teams", "1) + '=down|team' + str(pos + 1) + \"=\" 
\\", "stage['bracket']['series']: if match['numGames'] != numGames: if rounds: event_format += '**", "todo handle double elimination brackets # set up team number", "D and then future bracket type is defined by match['next']", "import itemgetter from pathlib import Path import calcup_roster_tracking def create_sidebar(data,", "event_data.reduce_teams() event_path = event_data.get_tournament_data_path() event_path.mkdir(parents=True, exist_ok=True) filename = Path.joinpath(event_path, event_data.tournament_data['name']", "group['name'] + ' Matches|width=450px|hide=true}}\\n' for match in group['matches']: match_line =", "|lastvs2= |lastscore2= |lastvsscore2=\\n' prize_pool += '|tbd |lastvs3= |lastscore3= |lastvsscore3=\\n' prize_pool", "2 - 1 + round_match_offset) if 'teamID' in match['top']: team_name", "\\ + (round_max_win_match_count[match['roundNumber']] - round_max_win_match_count[match['roundNumber'] - 1]) * 2 #", "# round_max_loss_match_count[match['roundNumber']]) if not 'teamID' in match['top']: continue if match['top']['teamID']", "+ '\\n' sidebar += '|shortname=' + data['name'] + '\\n' sidebar", "+ '\\n' try: sidebar += '|edate=' + datetime.strptime(data['lastCompletedMatchAt'], '%Y-%m-%dT%H:%M:%S.%fZ').strftime( '%Y-%m-%d')", "group_header tables += group_table tables += \"}}\\n\" if include_matches: match_table", "+= ' |' + player_tag + 'link=' + player_info['link'] teams_table", "to get updated for non SE16 templates # In DE", "== dynamic[dynamic_idx]['count']: teams_ordered += '{{TeamCard columns end}}\\n' dynamic_idx += 1", "else: teams = sorted(teams, key=itemgetter(4, 0)) return teams def create_participants(data,", "= 0 rounds = 0 for match in stage['bracket']['series']: if", "'\\n' match_line += '|details={{BracketMatchSummary\\n' match_line += '|date=|finished=true\\n' match_line += '|twitch=", "+= '|tiebreaker1=series\\n' tables += group_header tables += group_table tables +=", "+ 1) round_max_win_match_count[0] = 0 round_max_loss_match_count = 
[1] * (len(stage['bracket']['series'])", "+ '\\n' elif stage['bracket']['type'] == \"elimination\": numGames = 0 rounds", "stage['bracket']['style'] == 'single': bracket = '{{' + str(stage['bracket']['teamsCount']) + 'SETeamBracket\\n'", "'{{MatchListStart|title=Group ' + group['name'] + ' Matches|width=450px|hide=true}}\\n' for match in", "+ '\\n' sidebar += '|icon=' + '\\n' sidebar += '|series='", "+= '|name=' + data['name'] + '\\n' sidebar += '|shortname=' +", "+= '\\n' # teams_table += '|c= |cflag=\\n' # teams_table +=", "sidebar += '|organizer-link=' + '\\n' sidebar += '|sponsor=' + '\\n'", "'|tbd |lastvs1= |lastscore1= |lastvsscore1=\\n' prize_pool += '|tbd |lastvs2= |lastscore2= |lastvsscore2=\\n'", "+= '\\n' for rank, record in enumerate(stage['standings']): if record['disqualified']: swiss_table", "data['teams'][standing['team']['_id']]: data['teams'][standing['team']['_id']]['place'] = len(stage['standings']) + place else: if break_ties: data['teams'][standing['team']['_id']]['place']", "+= '|team1=' + team_top['teamteamplate'] match_line += '|team2=' + team_bot['teamteamplate'] if", "+ '\\n' sidebar += '|tickername=' + data['name'] + '\\n' sidebar", "teams[match['top']['teamID']]['name']) elif match['isBye']: team_top = bw_teams.get_team_info('0', 'BYE') if 'teamID' in", "round_match_offset = -2 * round_max_loss_match_count[match['roundNumber'] - 1] \\ + (round_max_win_match_count[match['roundNumber']]", "bw_teams): match_line = '' if not match['isComplete']: return match_line match_line", "sidebar += '|twitch=' + '\\n' sidebar += '|instagram=' + '\\n'", "# https://liquipedia.net/rainbowsix/Template:16DETeamBracket/doc # if match['matchType'] == 'winner': # round_max_win_match_count[match['roundNumber']] =", "+= 'FF' match_line += '|games2=' if 'winner' in match['bottom'] and", "+ ' Matches|matchsection=Round ' \\ + str(i) + '|hide=false}}\\n' for", "return teams def create_participants(data, bw_players, bw_teams, dynamic=[], sort_place=True): header =", 
"pool slot |place=5-8 |usdprize=0\\n' prize_pool += '|tbd |lastvs1= |lastscore1= |lastvsscore1=\\n'", "+ ' ' if 'winner' in match['bottom'] and match['bottom']['winner']: bracket", "from operator import itemgetter from pathlib import Path import calcup_roster_tracking", "+ '\\n' sidebar += '|localcurrency=' + '\\n' sidebar += '|prizepool='", "teams_table footer = '{{TeamCard columns end}}\\n' if dynamic: footer +=", "swiss_table += '\\n' if '\\n' not in swiss_table[-1]: swiss_table +=", "Matches|matchsection=Round ' \\ + str(i) + '|hide=false}}\\n' for match in", "the D and W that way instead # Default first", "+ '\\n' sidebar += '|sponsor=' + '\\n' sidebar += '|localcurrency='", "+ team_bot['teamteamplate'] if 'isTie' in match and match['isTie']: match_line +=", "non SE16 templates # In DE brackets D means the", "try: rounds[str(match['roundNumber'])].append(match_line) except KeyError: rounds[str(match['roundNumber'])] = list() rounds[str(match['roundNumber'])].append(match_line) for i", "len(stage['standings']) + place else: if break_ties: data['teams'][standing['team']['_id']]['place'] = \\ standing['place']", "tables += group_header tables += group_table tables += \"}}\\n\" if", "'===={{HiddenSort|Group ' + group['name'] + '}}====\\n' tables += '{{GroupTableLeague|title=Group '", "if 'winner' in match['bottom'] and match['bottom']['winner']: match_line += 'W' else:", "data['teams'][standing['team']['_id']]['place'] = len(stage['standings']) + place else: if break_ties: data['teams'][standing['team']['_id']]['place'] =", "tables += '{{box|end}}\\n' return tables def create_prize_pool(prize): prize_pool = prize", "break_ties: data['teams'][standing['team']['_id']]['place'] = \\ standing['place'] + (1 - 1 /", "elif match['matchType'] == 'loser': # round_max_loss_match_count[match['roundNumber']] = max(match['matchNumber'], # round_max_loss_match_count[match['roundNumber']])", "= True team_info = 
bw_teams.get_team_info(teams[standing['team']['_id']]['persistentTeamID'], teams[standing['team']['_id']]['name']) group_table += '|bg' +", "swiss_match_table = '' rounds = dict() for match in matches:", "'\\n' sidebar += '|type=Online' + '\\n' sidebar += '|platform=' +", "= '{{MatchListStart|title=Group ' + group['name'] + ' Matches|width=450px|hide=true}}\\n' for match", "+= group_table tables += \"}}\\n\" if include_matches: match_table = '{{MatchListStart|title=Group", "'teamID' in match['top']: team_top = bw_teams.get_team_info(teams[match['top']['teamID']]['persistentTeamID'], teams[match['top']['teamID']]['name']) elif match['isBye']: team_top", "the 6th round # https://liquipedia.net/rainbowsix/Template:4DETeamBracket/doc # https://liquipedia.net/rainbowsix/Template:8DETeamBracket/doc # https://liquipedia.net/rainbowsix/Template:16DETeamBracket/doc #", "record in enumerate(stage['standings']): if record['disqualified']: swiss_table += '|bg' + str(rank", "= '' # Use prior rounds as a tiebreaker for", "+ 'link=' + player_info['link'] teams_table += '\\n' # teams_table +=", "standing['place'] else: data['teams'][standing['team']['_id']]['place'] = len(stage['standings']) + place teams = list()", "team_bot['teamteamplate'] if 'isTie' in match and match['isTie']: match_line += '|winner=0\\n'", "+= '|name' + str(dynamic_idx+1) + '=' + dynamic[dynamic_idx]['tab_name'] + '\\n'", "standing['place'] + (1 - 1 / data['teams'][standing['team']['_id']]['place']) else: data['teams'][standing['team']['_id']]['place'] =", "str(i + 1) + '=down' if (i + 1) %", "else: round_match_offset = -2 * round_max_loss_match_count[match['roundNumber'] - 1] \\ +", "elif stage['bracket']['style'] == 'double': bracket = '{{' + str(stage['bracket']['teamsCount']) +", "match['bottom']: if match['bottom']['teamID'] in team_previous_round: if team_previous_round[match['bottom']['teamID']]: bracket_type = 'W'", "+ '\\n' sidebar += '|map2=' + '\\n' sidebar += '|map3='", "'}}=====\\n' 
swiss_match_table += '{{MatchListStart|width=450px|title=Round ' + str(i) + ' Matches|matchsection=Round", "'\\n' prize_pool += '{{prize pool start}}\\n' prize_pool += '{{prize pool", "+ team_top['teamteamplate'] match_line += '|team2=' + team_bot['teamteamplate'] if 'isTie' in", "'\\n' rounds = 1 numGames = match['numGames'] else: rounds +=", "False bracket += '\\n' bracket += '}}\\n' return bracket def", "- 1 + round_match_offset) if 'teamID' in match['top']: team_name =", "event_data.tournament_data['teams'], bw_teams) f.write(bracket) elif stage['bracket']['type'] == 'roundrobin': f.write('===' + stage['name']", "if 'score' in match['bottom']: bracket += bracket_indicator + 'score=' +", "sidebar += '|tickername=' + data['name'] + '\\n' sidebar += '|image='", "enumerate(stage['groups']): if idx == 1: tables += '{{box|start|padding=2em}}\\n' else: tables", "+ group['name'] + '}}====\\n' tables += '{{GroupTableLeague|title=Group ' + group['name']", "also just need to keep track of match['next'] and build", "- round_max_win_match_count[match['roundNumber'] - 1]) * 2 # Increment for next", "+ '=down' if (i + 1) % 8 == 0:", "+ '\\n' group_header += '|tiebreaker1=series\\n' tables += group_header tables +=", "+= '}}\\n' prize_pool += '{{prize pool slot |place=5-8 |usdprize=0\\n' prize_pool", "in range(stage['bracket']['teamsCount']): swiss_table += '|pbg' + str(i + 1) +", "'{{TeamCard\\n' team_info = bw_teams.get_team_info(team[3], team[1]) teams_table += '|team=' + team_info['name']", "\\ + team_info['teamteamplate'] + '\\n' group_header += '|tiebreaker1=series\\n' tables +=", "+ dynamic[dynamic_idx]['tab_name'] + '\\n' teams_ordered += '|content' + str(dynamic_idx+1) +", "footer = '{{TeamCard columns end}}\\n' if dynamic: footer += '}}\\n'", "+ '===\\n') round_robin_tables = create_round_robin_tables(stage, event_data.tournament_data['teams'], bw_teams, wiki_name, include_matches=True) f.write(round_robin_tables)", "'' else: swiss_table += '|bg' + str(rank + 1) +", 
"+ str(stage['bracket']['teamsCount']) + 'SETeamBracket\\n' elif stage['bracket']['style'] == 'double': bracket =", "'teamID' in match['bottom']: team_name = bw_teams.get_team_info(teams[match['bottom']['teamID']]['persistentTeamID'], teams[match['bottom']['teamID']]['name'])['teamteamplate'] bracket += bracket_indicator", "'score=' + str(match['bottom']['score']) + ' ' if 'winner' in match['bottom']", "else: data['teams'][standing['team']['_id']]['place'] = standing['place'] else: data['teams'][standing['team']['_id']]['place'] = len(stage['standings']) + place", "'{{' + str(stage['bracket']['teamsCount']) + 'SETeamBracket\\n' elif stage['bracket']['style'] == 'double': bracket", "+= '|format=' + '\\n' sidebar += '|patch=' + '\\n' sidebar", "to address round_team_number, in a 8 team DE the third", "if i == 1: swiss_match_table += '{{box|start|padding=2em}}\\n' else: swiss_match_table +=", "|usdprize=0 |tbd |lastvs1= |lastscore1= |lastvsscore1=}}\\n' prize_pool += '{{prize pool slot", "def create_match_maps(match, teams, bw_teams): match_line = '' if not match['isComplete']:", "R2D2 vs R2W6 # Might want to key off match['inConsolationBracket']", "sidebar += '|bracket=https://battlefy.com/' + data['organization']['slug'] + '/' + data['slug'] +", "up round-match count trackers round_max_win_match_count = [1] * (len(stage['bracket']['series']) +", "+= bracket_indicator + 'score=' + str(match['top']['score']) + ' ' if", "swiss_table = create_swiss_table(stage, bw_teams) f.write(swiss_table) f.write('====Swiss Match Results====\\n') swiss_matches =", "player_tag + 'link=' + player_info['link'] teams_table += '\\n' # teams_table", "dict() for match in matches: match_line = create_match_maps(match, teams, bw_teams)", "round_max_loss_match_count[0] = 0 # matches = sorted(stage['matches'], key=itemgetter('matchNumber')) matches =", "- 1]) * 2 # Increment for next time if", "prize_pool += '}}\\n' prize_pool += '{{prize pool slot |place=5-8 |usdprize=0\\n'", "swiss_table += 
'|pbg' + str(i + 1) + '=down' if", "swiss_match_table += '{{box|break|padding=2em}}\\n' swiss_match_table += '====={{HiddenSort|Round ' + str(i) +", "'|web=' + '\\n' sidebar += '|bracket=https://battlefy.com/' + data['organization']['slug'] + '/'", "create_elim_bracket(stage, teams, bw_teams): if stage['bracket']['style'] == 'single': bracket = '{{'", "team_num == dynamic[dynamic_idx]['count']: teams_ordered += '{{TeamCard columns end}}\\n' dynamic_idx +=", "create_match_maps(match, teams, bw_teams): match_line = '' if not match['isComplete']: return", "D means the team dropped down from the previous round", "team_info = bw_teams.get_team_info(teams[standing['team']['_id']]['persistentTeamID'], teams[standing['team']['_id']]['name']) group_table += '|bg' + str(pos +", "'60fa26043ba15d73719669bd' ccs_fall_minor_wiki = 'Calrissian_Cup/Fall/Minor' ccs_fall_major_id = '61314505635fe17a14eafe03' ccs_fall_major_wiki = 'Calrissian_Cup/Fall/Major'", "swiss_table def create_swiss_matches(matches, teams, bw_teams): swiss_match_table = '' rounds =", "len(stage['standings']) + place teams = list() for team_id in data['teams']:", "+= '|games1=' + str(match['top']['score']) match_line += '|games2=' + str(match['bottom']['score']) +", "= bw_teams.get_team_info(teams[match['bottom']['teamID']]['persistentTeamID'], teams[match['bottom']['teamID']]['name'])['teamteamplate'] bracket += bracket_indicator + 'team=' + team_name", "and W that way instead # Default first round to", "matches: match_line = create_match_maps(match, teams, bw_teams) if not match_line: continue", "+ '|diff=false\\n' for i in range(stage['bracket']['teamsCount']): swiss_table += '|pbg' +", "= '5ff3354193edb53839d44d55' ccs_winter_minor_wiki = 'Calrissian_Cup/Winter/Minor' ccs_winter_major_id = '60019f8ebcc5ed46373408a1' ccs_winter_major_wiki =", "'{{TeamCardToggleButton}}\\n' teams_ordered = '' # Use prior rounds as a", "+ data['prizes'] + '\\n' sidebar += '|type=Online' + '\\n' sidebar", "have the same place 
at the end teams = rank_teams(data,", "header += '{{tabs dynamic\\n' header += '|name' + str(dynamic_idx+1) +", "+ str(rounds) + '-round ' \\ + stage['bracket']['seriesStyle'] + str(numGames)", "'\\n' swiss_table += '}}\\n' return swiss_table def create_swiss_matches(matches, teams, bw_teams):", "if 'teamID' in match['bottom']: team_name = bw_teams.get_team_info(teams[match['bottom']['teamID']]['persistentTeamID'], teams[match['bottom']['teamID']]['name'])['teamteamplate'] bracket +=", "round_max_win_match_count[match['roundNumber']] = max(match['matchNumber'], # round_max_win_match_count[match['roundNumber']]) # elif match['matchType'] == 'loser':", "sidebar += '|format=' + '\\n' sidebar += '|patch=' + '\\n'", "str(rank + 1) + '=down' team_info = bw_teams.get_team_info(record['team']['persistentTeamID'], record['team']['name']) swiss_table", "'\\n' sidebar += '|shortname=' + data['name'] + '\\n' sidebar +=", "rank_teams(data, bw_teams, sort_place=True, break_ties=False): for stage in data['stages']: for place,", "tables += group_table tables += \"}}\\n\" if include_matches: match_table =", "# called the 4th round and in a 16 team", "+= '|map1=' + '\\n' sidebar += '|map2=' + '\\n' sidebar", "for team_num, team in enumerate(teams): if dynamic: if team_num ==", "when multiple teams have the same place at the end", "if record['disqualified']: swiss_table += '|bg' + str(rank + 1) +", "+ '}}\\n' return sidebar def create_event_format(data): event_format = '' for", "DE the 4th winners bracket round is called the 6th", "= 'Calrissian_Cup/Spring/Major' ccs_summer_minor_id = '60b41961d35b1411a7b31d64' ccs_summer_minor_wiki = 'Calrissian_Cup/Summer/Minor' ccs_summer_major_id =", "team_previous_round: if team_previous_round[match['bottom']['teamID']]: bracket_type = 'W' else: bracket_type = 'D'", "|lastvsscore4=\\n' prize_pool += '}}\\n' prize_pool += '{{Prize pool end}}\\n' return", "'loser': round_max_loss_match_count[match['roundNumber']] = max(match['matchNumber'], 
round_max_loss_match_count[match['roundNumber']]) bracket_indicator = '|R' + str(match['roundNumber'])", "def rank_teams(data, bw_teams, sort_place=True, break_ties=False): for stage in data['stages']: for", "|usdprize=0\\n' prize_pool += '|tbd |lastvs1= |lastscore1= |lastvsscore1=\\n' prize_pool += '|tbd", "+ '\\n' sidebar += '|previous=' + '\\n' sidebar += '|next='", "'}}\\n' return header + teams_ordered + footer def create_swiss_table(stage, bw_teams):", "+ player_info['link'] teams_table += '\\n' # teams_table += '|c= |cflag=\\n'", "round_max_win_match_count[0] = 0 round_max_loss_match_count = [1] * (len(stage['bracket']['series']) + 1)", "and then future bracket type is defined by match['next'] #", "wiki_name + '\\n' group_header = '' group_table = '' for", "sidebar += '|name=' + data['name'] + '\\n' sidebar += '|shortname='", "+ str(pos + 1) + \"=\" \\ + team_info['teamteamplate'] +", "teams, bw_teams) if not match_line: continue try: rounds[str(match['roundNumber'])].append(match_line) except KeyError:", "+ place else: if break_ties: data['teams'][standing['team']['_id']]['place'] = \\ standing['place'] +", "match_line match_line = '{{MatchMaps\\n' match_line += '|date=\\n' if 'teamID' in", "# round_max_win_match_count[match['roundNumber']]) # elif match['matchType'] == 'loser': # round_max_loss_match_count[match['roundNumber']] =", "= True else: team_previous_round[match['top']['teamID']] = False bracket += '\\n' if", "+ '=' + player_info['name'] \\ + ' |' + player_tag", "wiki_name) f.write(sidebar) f.write('==About==\\n') f.write('===Format===\\n') event_format = create_event_format(event_data.tournament_data) f.write(event_format) f.write('===Broadcast Talent===\\n')", "ccs_championship_id = '6150dd2b0dd060282bebb0eb' ccs_championship_wiki = 'Calrissian_Cup/Championship' world_cup_id = '611dac6ecb6f6260d5f30b6e' world_cup_wiki", "' |' + player_tag + 'flag=' + player_info['flag'] if player_info['link']:", "* (len(stage['bracket']['series']) + 1) 
round_max_win_match_count[0] = 0 round_max_loss_match_count = [1]", "match['bottom']: team_bot = bw_teams.get_team_info(teams[match['bottom']['teamID']]['persistentTeamID'], teams[match['bottom']['teamID']]['name']) elif match['isBye']: team_bot = bw_teams.get_team_info('0',", "and match['top']['winner']: bracket += bracket_indicator + 'win=1 ' team_previous_round[match['top']['teamID']] =", "for stage in event_data.tournament_data['stages']: if stage['bracket']['type'] == 'swiss': f.write('===Swiss Stage===\\n')", "bw_teams) if not match_line: continue try: rounds[str(match['roundNumber'])].append(match_line) except KeyError: rounds[str(match['roundNumber'])]", "+ 'win=2 ' team_previous_round[match['bottom']['teamID']] = True elif 'teamID' in match['bottom']:", "+= '{{prize pool slot |place=2 |usdprize=0 |tbd |lastvs1= |lastscore1= |lastvsscore1=}}\\n'", "'===\\n') round_robin_tables = create_round_robin_tables(stage, event_data.tournament_data['teams'], bw_teams, wiki_name, include_matches=True) f.write(round_robin_tables) else:", "' + group['name'] + ' Matches|width=450px|hide=true}}\\n' for match in group['matches']:", "= dict() for match in matches: match_line = create_match_maps(match, teams,", "stage in data['stages']: event_format += '* ' + stage['name'] +", "if break_ties: data['teams'][standing['team']['_id']]['place'] = \\ standing['place'] + (1 - 1", "'winner': round_match_offset = -2 * round_max_win_match_count[match['roundNumber'] - 1] else: round_match_offset", "|lastvsscore1=}}\\n' prize_pool += '{{prize pool slot |place=3-4 |usdprize=0\\n' prize_pool +=", "teams_ordered + footer def create_swiss_table(stage, bw_teams): dropped_style = 'drop' swiss_table", "return bracket def create_match_maps(match, teams, bw_teams): match_line = '' if", "= max(match['matchNumber'], # round_max_win_match_count[match['roundNumber']]) # elif match['matchType'] == 'loser': #", "sidebar += '|edate=' + datetime.strptime(data['lastCompletedMatchAt'], 
'%Y-%m-%dT%H:%M:%S.%fZ').strftime( '%Y-%m-%d') + '\\n' except", "# Use prior rounds as a tiebreaker for when multiple", "create_swiss_matches(matches, teams, bw_teams): swiss_match_table = '' rounds = dict() for", "'{{prize pool start}}\\n' prize_pool += '{{prize pool slot |place=1 |usdprize=0", "teams_ordered += teams_table footer = '{{TeamCard columns end}}\\n' if dynamic:", "max(match['matchNumber'], round_max_loss_match_count[match['roundNumber']]) bracket_indicator = '|R' + str(match['roundNumber']) + bracket_type \\", "+= '{{tabs dynamic\\n' header += '|name' + str(dynamic_idx+1) + '='", "standing: if 'place' not in data['teams'][standing['team']['_id']]: data['teams'][standing['team']['_id']]['place'] = len(stage['standings']) +", "+= '|bg' + str(pos + 1) + '=down|team' + str(pos", "else: bracket_type = 'D' if match['matchType'] == 'winner': round_match_offset =", "' if 'score' in match['bottom']: bracket += bracket_indicator + 'score='", "return swiss_table def create_swiss_matches(matches, teams, bw_teams): swiss_match_table = '' rounds", "if '\\n' not in swiss_table[-1]: swiss_table += '\\n' for rank,", "player_info['link'] teams_table += '\\n' # teams_table += '|c= |cflag=\\n' #", "+= '\\n' if 'teamID' in match['bottom']: if match['bottom']['teamID'] in team_previous_round:", "'W' else: match_line += 'FF' else: match_line += '|games1=' +", "world_cup_wiki = 'World_Cup' twin_suns_tourny_id = '60806876938bed74f6edea9e' twin_suns_wiki = 'Twin_Suns_Tournament' gsl_s1_id", "elif stage['bracket']['type'] == 'roundrobin': f.write('===' + stage['name'] + '===\\n') round_robin_tables", "how to address round_team_number, in a 8 team DE the", "for non SE16 templates # In DE brackets D means", "|' + player_tag + 'flag=' + player_info['flag'] if player_info['link']: teams_table", "' else: bracket += bracket_indicator + 'literal=BYE ' if 'score'", "dropped down from the previous round # In DE brackest", "bw_teams, wiki_name, include_matches=True) 
f.write(round_robin_tables) else: print('Unsupported bracket type of: '", "+ '.wiki') with open(filename, 'w+', newline='\\n', encoding='utf-8') as f: display", "+ 1) + '=down' if (i + 1) % 8", "'\\n' if stage['bracket']['type'] == \"swiss\": event_format += '** ' +", "data['stages']: event_format += '* ' + stage['name'] + '\\n' if", "match['top']['winner']: match_line += '|winner=1\\n' elif 'winner' in match['bottom'] and match['bottom']['winner']:", "prize_pool += '}}\\n' prize_pool += '{{Prize pool end}}\\n' return prize_pool", "create_prize_pool(event_data.tournament_data['prizes']) f.write(prize_pool) f.write('==Participants==\\n') teams = create_participants(event_data.tournament_data, bw_players, bw_teams, dynamic=participant_tabs, sort_place=True)", "operator import itemgetter from pathlib import Path import calcup_roster_tracking def", "trackers team_previous_round = dict() # set up round-match count trackers", "not in swiss_table[-1]: swiss_table += '\\n' for rank, record in", "'\\n' sidebar += '|image=' + '\\n' sidebar += '|icon=' +", "= 'Calrissian_Cup/Summer/Major' ccs_fall_minor_id = '60fa26043ba15d73719669bd' ccs_fall_minor_wiki = 'Calrissian_Cup/Fall/Minor' ccs_fall_major_id =", "= '6061b764f68d8733c8455fcf' ccs_spring_major_wiki = 'Calrissian_Cup/Spring/Major' ccs_summer_minor_id = '60b41961d35b1411a7b31d64' ccs_summer_minor_wiki =", "teams_table = '{{TeamCard\\n' team_info = bw_teams.get_team_info(team[3], team[1]) teams_table += '|team='", "teams_table += '|c= |cflag=\\n' # teams_table += '|qualifier=\\n' teams_table +=", "rounds[str(i)]: swiss_match_table += match swiss_match_table += '{{MatchListEnd}}\\n' swiss_match_table += '{{box|end}}\\n'", "+ '\\n' sidebar += '|discord=' + '\\n' sidebar += '|map1='", "+ 1) + '=' + team_info['teamteamplate'] swiss_table += '|temp_tie' +", "match['bottom'] and match['bottom']['winner']: match_line += 'W' else: match_line += 'FF'", "'Other Notable Participants', # 'count': -1}, ] bw_teams = 
battlefy_wiki_linkings.BattlefyWikiTeamLinkings()", "+ '\\n' sidebar += '|sdate=' + datetime.strptime(data['checkInStartTime'], '%Y-%m-%dT%H:%M:%S.%fZ').strftime( '%Y-%m-%d') +", "sidebar += '|edate=\\n' sidebar += '|web=' + '\\n' sidebar +=", "+ str(match['bottom']['score']) + '\\n' match_line += '|details={{BracketMatchSummary\\n' match_line += '|date=|finished=true\\n'", "in match['top']: bracket += bracket_indicator + 'score=' + str(match['top']['score']) +", "\\ + data['_id'] + '/bracket-list' + '\\n' sidebar += '|rulebook='", "'|date=|finished=true\\n' match_line += '|twitch= |youtube=\\n' match_line += '|vod=\\n' match_line +=", "'\\n' sidebar += '|map4=' + '\\n' sidebar += '|map5=' +", "match['bottom'] and match['bottom']['winner']: match_line += '|winner=2\\n' else: match_line += '|winner=0\\n'", "continue try: rounds[str(match['roundNumber'])].append(match_line) except KeyError: rounds[str(match['roundNumber'])] = list() rounds[str(match['roundNumber'])].append(match_line) for", "0: teams_ordered += '{{TeamCard columns start|cols=5|height=250}}\\n' teams_table = '{{TeamCard\\n' team_info", "'|team2=' + team_bot['teamteamplate'] if 'isTie' in match and match['isTie']: match_line", "if match['top']['teamID'] in team_previous_round: if team_previous_round[match['top']['teamID']]: bracket_type = 'W' else:", "sorted(teams, key=itemgetter(2, 4, 0)) else: teams = sorted(teams, key=itemgetter(4, 0))", "teams = sorted(teams, key=itemgetter(4, 0)) return teams def create_participants(data, bw_players,", "teams_ordered += '|content' + str(dynamic_idx+1) + '=' + '\\n' teams_ordered", "6th round # https://liquipedia.net/rainbowsix/Template:4DETeamBracket/doc # https://liquipedia.net/rainbowsix/Template:8DETeamBracket/doc # https://liquipedia.net/rainbowsix/Template:16DETeamBracket/doc # if", "teams, bw_teams): swiss_match_table = '' rounds = dict() for match", "Might want to key off match['inConsolationBracket'] # May also just", "+ stage['bracket']['seriesStyle'] + 
str(numGames) + '\\n' return event_format def rank_teams(data,", "+= teams_table footer = '{{TeamCard columns end}}\\n' if dynamic: footer", "'{{MatchListEnd}}\\n' swiss_match_table += '{{box|end}}\\n' return swiss_match_table def create_elim_bracket(stage, teams, bw_teams):", "+ str(match['matchNumber'] * 2 - 1 + round_match_offset) if 'teamID'", "# 'count': 16}, # {'tab_name': 'Top 32', # 'count': 32},", "off match['inConsolationBracket'] # May also just need to keep track", "+ '|hide=false}}\\n' for match in rounds[str(i)]: swiss_match_table += match swiss_match_table", "enumerate(group['standingIDs']): group_header += '|pbg' + str(pos + 1) + '=down'", "+ '\\n' rounds = 1 numGames = match['numGames'] else: rounds", "= Path.joinpath(event_path, event_data.tournament_data['name'] + '.wiki') with open(filename, 'w+', newline='\\n', encoding='utf-8')", "f.write(round_robin_tables) else: print('Unsupported bracket type of: ' + stage['bracket']['type']) if", "and match['isTie']: match_line += '|winner=0\\n' elif 'winner' in match['top'] and", "+ bracket_type \\ + str(match['matchNumber'] * 2 + round_match_offset) if", "'|tbd |lastvs3= |lastscore3= |lastvsscore3=\\n' prize_pool += '|tbd |lastvs4= |lastscore4= |lastvsscore4=\\n'", "team_info = bw_teams.get_team_info(record['team']['persistentTeamID'], record['team']['name']) swiss_table += '|team' + str(rank +", "data['name'] + '\\n' sidebar += '|image=' + '\\n' sidebar +=", "' + stage['bracket']['type'] + '\\n' elif stage['bracket']['type'] == \"elimination\": numGames", "# set up round-match count trackers round_max_win_match_count = [1] *", "match['bottom']['winner']: match_line += '|winner=2\\n' else: match_line += '|winner=0\\n' if match['isBye']:", "+ '\\n' header += '|This=1\\n' header += '|content' + str(dynamic_idx+1)", "+ team_info['image'] + '\\n' for idx, player in enumerate(data['teams'][team[0]]['players']): player_tag", "+ \"=\" \\ + team_info['teamteamplate'] + '\\n' group_header += 
'|tiebreaker1=series\\n'", "stage in event_data.tournament_data['stages']: if stage['bracket']['type'] == 'swiss': f.write('===Swiss Stage===\\n') f.write('====Swiss", "as a tiebreaker for when multiple teams have the same", "'Global_Squadrons_League/2021/Season_1' tournament_id = world_cup_id wiki_name = world_cup_wiki participant_tabs = [", "data['teams'][standing['team']['_id']]['place']) else: data['teams'][standing['team']['_id']]['place'] = standing['place'] else: data['teams'][standing['team']['_id']]['place'] = len(stage['standings']) +", "' if 'winner' in match['bottom'] and match['bottom']['winner']: bracket += bracket_indicator", "exactly sure how to address round_team_number, in a 8 team", "+ str(i + 1) + '=down' if (i + 1)", "+= '}}\\n' sidebar += '{{Upcoming matches tournament|' + wiki_name +", "sidebar += '|type=Online' + '\\n' sidebar += '|platform=' + data['platform']", "= '|R' + str(match['roundNumber']) + bracket_type \\ + str(match['matchNumber'] *", "/ data['teams'][standing['team']['_id']]['place']) else: data['teams'][standing['team']['_id']]['place'] = standing['place'] else: data['teams'][standing['team']['_id']]['place'] = len(stage['standings'])", "+= '|image=' + team_info['image'] + '\\n' for idx, player in", "teams_ordered += '{{TeamCard columns start|cols=5|height=250}}\\n' teams_table = '{{TeamCard\\n' team_info =", "= '60806876938bed74f6edea9e' twin_suns_wiki = 'Twin_Suns_Tournament' gsl_s1_id = '5ff4b388fd124e11b18e185d' gsl_s1_wiki =", "+= '{{Upcoming matches tournament|' + wiki_name + '}}\\n' return sidebar", "Increment for next time if match['matchType'] == 'winner': round_max_win_match_count[match['roundNumber']] =", "where D vs L happen such as R2D1 vs R2W5", "player_info['name'] \\ + ' |' + player_tag + 'flag=' +", "'winner' in match['bottom'] and match['bottom']['winner']: match_line += 'W' else: match_line", "key off match['inConsolationBracket'] # May also just need to keep", "match['top']['teamID'] in team_previous_round: 
if team_previous_round[match['top']['teamID']]: bracket_type = 'W' else: bracket_type", "'Calrissian_Cup/Fall/Major' ccs_championship_id = '6150dd2b0dd060282bebb0eb' ccs_championship_wiki = 'Calrissian_Cup/Championship' world_cup_id = '611dac6ecb6f6260d5f30b6e'", "sidebar += '|map3=' + '\\n' sidebar += '|map4=' + '\\n'", "in data['teams'][standing['team']['_id']]: data['teams'][standing['team']['_id']]['place'] = len(stage['standings']) + place else: if break_ties:", "str(match['bottom']['score']) + ' ' if 'winner' in match['bottom'] and match['bottom']['winner']:", "will need to get updated for non SE16 templates #", "the third winners bracket round is # called the 4th", "f.write('====Swiss Standings====\\n') swiss_table = create_swiss_table(stage, bw_teams) f.write(swiss_table) f.write('====Swiss Match Results====\\n')", "group['matches']: match_line = create_match_maps(match, teams, bw_teams) match_table += match_line tables", "1 if rounds: event_format += '** ' + str(rounds) +", "\"{:7.3f}\".format(record['opponentsMatchWinPercentage']) + '\\n' swiss_table += '}}\\n' return swiss_table def create_swiss_matches(matches,", "+= '|patch=' + '\\n' sidebar += '|sdate=' + datetime.strptime(data['checkInStartTime'], '%Y-%m-%dT%H:%M:%S.%fZ').strftime(", "|' + player_tag + 'link=' + player_info['link'] teams_table += '\\n'", "= sorted(stage['matches'], key=itemgetter('matchNumber')) matches = stage['matches'] for match in matches:", "+= '|twitter=' + '\\n' sidebar += '|twitch=' + '\\n' sidebar", "data['_id'] + '/bracket-list' + '\\n' sidebar += '|rulebook=' + '\\n'", "team_info['teamteamplate'] + '\\n' group_header += '|tiebreaker1=series\\n' tables += group_header tables", "match['bottom']: team_name = bw_teams.get_team_info(teams[match['bottom']['teamID']]['persistentTeamID'], teams[match['bottom']['teamID']]['name'])['teamteamplate'] bracket += bracket_indicator + 'team='", "'|team=' + team_info['name'] + '\\n' teams_table += '|image=' + team_info['image']", "if 
stage['bracket']['type'] == 'swiss': f.write('===Swiss Stage===\\n') f.write('====Swiss Standings====\\n') swiss_table =", "dynamic[dynamic_idx]['tab_name'] + '\\n' header += '|This=1\\n' header += '|content' +", "' + str(i) + ' Matches|matchsection=Round ' \\ + str(i)", "+= '|winner=2\\n' else: match_line += '|winner=0\\n' if match['isBye']: match_line +=", "enumerate(data['teams'][team[0]]['players']): player_tag = 'p' + str(idx + 1) if player['_id']", "dict() # set up round-match count trackers round_max_win_match_count = [1]", "+ data['_id'] + '/bracket-list' + '\\n' sidebar += '|rulebook=' +", "just need to keep track of match['next'] and build up", "4th winners bracket round is called the 6th round #", "teams[match['top']['teamID']]['name'])['teamteamplate'] bracket += bracket_indicator + 'team=' + team_name + '" ]
[ "240, 230), bg = [] ): self.screen = screen fg", "text_rect) pass def getProperties(self): return self.text_rect def redraw(self): self.screen.blit(self.image[0], self.image[1])", "screen.blit(word_surface, (x, y)) x += word_width + space x =", "ren, (x,y) screen.blit(ren, (x, y)) # Cursiva if italic: a_sys_font.set_bold(0)", "self.text_rect = (x, y),ren.get_size() # text = str(self.counter) # label", "x + word_width >= max_width: x = pos[0] # Reset", "# self.image.blit(label, text_rect) pass def getProperties(self): return self.text_rect def redraw(self):", "Si hay fondo de texto ren = a_sys_font.render(text, 1, fg,", "label = self.myfont.render(text, 1, (255,0,0)) # text_rect = label.get_rect() #", "class TextArea(): def __init__(self, screen, text, x, y, fuente='Calibri', text_size", "self.coord = x, y font = pygame.font.SysFont(fuente, text_size) words =", "y, fuente='Calibri', text_size = 20, color=pygame.Color('black')): self.coord = x, y", "word_height pass def getProperties(self): return self.size, self.coord ##################### EJEMPLO DE", "if len(bg) > 1: # Si hay fondo de texto", "# Reset the x. y += word_height # Start on", ">= max_width: x = pos[0] # Reset the x. y", "fondo de texto ren = a_sys_font.render(text, 1, fg, bg) else:", "x, y, fuente='Calibri', text_size = 20, color=pygame.Color('black')): self.coord = x,", "y font = pygame.font.SysFont(fuente, text_size) words = [word.split(' ') for", "self.text_rect) # self.text_rect = (x, y),ren.get_size() # text = str(self.counter)", "(x, y),ren.get_size() # text = str(self.counter) # label = self.myfont.render(text,", "size = font.size(text) # Font a_sys_font = pygame.font.SysFont(fuente, text_size) #", "# Si hay fondo de texto ren = a_sys_font.render(text, 1,", "# Start on new row. 
self.size = word_width, word_height pass", "# Cursiva if italic: a_sys_font.set_bold(0) # Negritas if bold: a_sys_font.set_bold(0)", "text_size = 20, color=pygame.Color('black')): self.coord = x, y font =", "pass def getProperties(self): return self.text_rect def redraw(self): self.screen.blit(self.image[0], self.image[1]) pass", "Texto(screen, 'Hola', 10, 10) class TextArea(): def __init__(self, screen, text,", "def __init__(self, screen, text, x, y, text_size = 20, fuente", "# Construccion del texto if len(bg) > 1: # Si", "space = font.size(' ')[0] # The width of a space.", "self.image = ren, (x,y) screen.blit(ren, (x, y)) # Cursiva if", "10, 10) class TextArea(): def __init__(self, screen, text, x, y,", "space. max_width, max_height = screen.get_size() pos = x,y for line", "False, subrayado= False, color = (250, 240, 230), bg =", "= (50,50) # self.image.blit(label, text_rect) pass def getProperties(self): return self.text_rect", "text_size) # Cursiva if italic: a_sys_font.set_bold(1) # Negritas if bold:", "bg = [] ): self.screen = screen fg = color", "str(self.counter) # label = self.myfont.render(text, 1, (255,0,0)) # text_rect =", "# The width of a space. max_width, max_height = screen.get_size()", "= (x,y) self.image = ren, (x,y) screen.blit(ren, (x, y)) #", "y)) # Cursiva if italic: a_sys_font.set_bold(0) # Negritas if bold:", "= font.size(text) # Font a_sys_font = pygame.font.SysFont(fuente, text_size) # Cursiva", "= x+size[0], y self.text_rect = ren.get_rect() self.text_rect.center = (x,y) self.image", "of words. 
space = font.size(' ')[0] # The width of", "= pygame.font.SysFont(fuente, text_size) words = [word.split(' ') for word in", "in text.splitlines()] # 2D array where each row is a", "self.text_rect = ren.get_rect() self.text_rect.center = (x,y) self.image = ren, (x,y)", "False, bold= False, subrayado= False, color = (250, 240, 230),", "__init__(self, screen, text, x, y, fuente='Calibri', text_size = 20, color=pygame.Color('black')):", "0, color) word_width, word_height = word_surface.get_size() if x + word_width", "del texto if len(bg) > 1: # Si hay fondo", "230), bg = [] ): self.screen = screen fg =", "1, fg) # self.size = x+size[0], y self.text_rect = ren.get_rect()", "= font.render(word, 0, color) word_width, word_height = word_surface.get_size() if x", "pygame.font.SysFont(fuente, text_size) words = [word.split(' ') for word in text.splitlines()]", "') for word in text.splitlines()] # 2D array where each", "x. y += word_height # Start on new row. screen.blit(word_surface,", "= pygame.font.SysFont(fuente, text_size) # Cursiva if italic: a_sys_font.set_bold(1) # Negritas", "DE USO ############################## # texto1 = Texto(screen, 'Hola', 10, 10)", "[] ): self.screen = screen fg = color self.coord =", "1, (255,0,0)) # text_rect = label.get_rect() # text_rect.center = (50,50)", "screen.get_size() pos = x,y for line in words: for word", "max_width, max_height = screen.get_size() pos = x,y for line in", "self.screen.blit(self.image[0], self.image[1]) pass ##################### EJEMPLO DE USO ############################## # texto1", "of a space. max_width, max_height = screen.get_size() pos = x,y", "word_width >= max_width: x = pos[0] # Reset the x.", "text, x, y, text_size = 20, fuente = 'Calibri', italic", "# self.image.blit(ren, self.text_rect) # self.text_rect = (x, y),ren.get_size() # text", "where each row is a list of words. 
space =", "word_width, word_height pass def getProperties(self): return self.size, self.coord ##################### EJEMPLO", "# text_rect.center = (50,50) # self.image.blit(label, text_rect) pass def getProperties(self):", "pos[0] # Reset the x. y += word_height # Start", "= font.size(' ')[0] # The width of a space. max_width,", "bg) else: # Si no, transparente ren = a_sys_font.render(text, 1,", "self.coord ##################### EJEMPLO DE USO ############################## # textarea1 = Textarea(screen,", "new row. self.size = word_width, word_height pass def getProperties(self): return", "= 'Calibri', italic = False, bold= False, subrayado= False, color", "20, fuente = 'Calibri', italic = False, bold= False, subrayado=", "fg) # self.size = x+size[0], y self.text_rect = ren.get_rect() self.text_rect.center", "fuente = 'Calibri', italic = False, bold= False, subrayado= False,", "return self.text_rect def redraw(self): self.screen.blit(self.image[0], self.image[1]) pass ##################### EJEMPLO DE", "y, text_size = 20, fuente = 'Calibri', italic = False,", "Si no, transparente ren = a_sys_font.render(text, 1, fg) # self.size", "(x, y)) x += word_width + space x = pos[0]", "row is a list of words. space = font.size(' ')[0]", "(x,y) self.image = ren, (x,y) screen.blit(ren, (x, y)) # Cursiva", "word_surface = font.render(word, 0, color) word_width, word_height = word_surface.get_size() if", "new row. 
screen.blit(word_surface, (x, y)) x += word_width + space", "pass ##################### EJEMPLO DE USO ############################## # texto1 = Texto(screen,", "class Texto: def __init__(self, screen, text, x, y, text_size =", "TextArea(): def __init__(self, screen, text, x, y, fuente='Calibri', text_size =", "self.image.blit(ren, self.text_rect) # self.text_rect = (x, y),ren.get_size() # text =", "getProperties(self): return self.size, self.coord ##################### EJEMPLO DE USO ############################## #", "if italic: a_sys_font.set_bold(0) # Negritas if bold: a_sys_font.set_bold(0) # Subrayado", "a_sys_font.set_bold(1) # Negritas if bold: a_sys_font.set_bold(1) # Subrayado if subrayado:", "words = [word.split(' ') for word in text.splitlines()] # 2D", "x, y #load font, prepare values font = pygame.font.Font(None, 80)", "80) size = font.size(text) # Font a_sys_font = pygame.font.SysFont(fuente, text_size)", "Construccion del texto if len(bg) > 1: # Si hay", "+= word_height # Start on new row. self.size = word_width,", "##################### EJEMPLO DE USO ############################## # textarea1 = Textarea(screen, 'Hola", "if bold: a_sys_font.set_bold(0) # Subrayado if subrayado: a_sys_font.set_underline(0) # self.image.blit(ren,", "len(bg) > 1: # Si hay fondo de texto ren", "self.size = x+size[0], y self.text_rect = ren.get_rect() self.text_rect.center = (x,y)", "ren.get_rect() self.text_rect.center = (x,y) self.image = ren, (x,y) screen.blit(ren, (x,", "font = pygame.font.SysFont(fuente, text_size) words = [word.split(' ') for word", "space x = pos[0] # Reset the x. y +=", "Negritas if bold: a_sys_font.set_bold(1) # Subrayado if subrayado: a_sys_font.set_underline(1) #", "(50,50) # self.image.blit(label, text_rect) pass def getProperties(self): return self.text_rect def", "x,y for line in words: for word in line: word_surface", "# 2D array where each row is a list of", "on new row. 
self.size = word_width, word_height pass def getProperties(self):", "fuente='Calibri', text_size = 20, color=pygame.Color('black')): self.coord = x, y font", "redraw(self): self.screen.blit(self.image[0], self.image[1]) pass ##################### EJEMPLO DE USO ############################## #", "############################## # texto1 = Texto(screen, 'Hola', 10, 10) class TextArea():", "y #load font, prepare values font = pygame.font.Font(None, 80) size", "font.render(word, 0, color) word_width, word_height = word_surface.get_size() if x +", "self.size, self.coord ##################### EJEMPLO DE USO ############################## # textarea1 =", "array where each row is a list of words. space", "EJEMPLO DE USO ############################## # textarea1 = Textarea(screen, 'Hola mundo", "10) class TextArea(): def __init__(self, screen, text, x, y, fuente='Calibri',", "def redraw(self): self.screen.blit(self.image[0], self.image[1]) pass ##################### EJEMPLO DE USO ##############################", "def getProperties(self): return self.size, self.coord ##################### EJEMPLO DE USO ##############################", "Cursiva if italic: a_sys_font.set_bold(1) # Negritas if bold: a_sys_font.set_bold(1) #", "= word_surface.get_size() if x + word_width >= max_width: x =", "bold= False, subrayado= False, color = (250, 240, 230), bg", "Reset the x. 
y += word_height # Start on new", "= color self.coord = x, y #load font, prepare values", "texto ren = a_sys_font.render(text, 1, fg, bg) else: # Si", "(x,y) screen.blit(ren, (x, y)) # Cursiva if italic: a_sys_font.set_bold(0) #", "text_size) words = [word.split(' ') for word in text.splitlines()] #", "screen, text, x, y, text_size = 20, fuente = 'Calibri',", "subrayado: a_sys_font.set_underline(1) # Construccion del texto if len(bg) > 1:", "[word.split(' ') for word in text.splitlines()] # 2D array where", "x += word_width + space x = pos[0] # Reset", "ren = a_sys_font.render(text, 1, fg, bg) else: # Si no,", "row. self.size = word_width, word_height pass def getProperties(self): return self.size,", "def __init__(self, screen, text, x, y, fuente='Calibri', text_size = 20,", "The width of a space. max_width, max_height = screen.get_size() pos", "no, transparente ren = a_sys_font.render(text, 1, fg) # self.size =", "on new row. screen.blit(word_surface, (x, y)) x += word_width +", "= x,y for line in words: for word in line:", "x, y, text_size = 20, fuente = 'Calibri', italic =", "prepare values font = pygame.font.Font(None, 80) size = font.size(text) #", "color = (250, 240, 230), bg = [] ): self.screen", "italic: a_sys_font.set_bold(1) # Negritas if bold: a_sys_font.set_bold(1) # Subrayado if", "text_rect.center = (50,50) # self.image.blit(label, text_rect) pass def getProperties(self): return", "max_width: x = pos[0] # Reset the x. y +=", "a_sys_font.render(text, 1, fg, bg) else: # Si no, transparente ren", "+= word_height # Start on new row. 
screen.blit(word_surface, (x, y))", "self.myfont.render(text, 1, (255,0,0)) # text_rect = label.get_rect() # text_rect.center =", "# Subrayado if subrayado: a_sys_font.set_underline(0) # self.image.blit(ren, self.text_rect) # self.text_rect", "return self.size, self.coord ##################### EJEMPLO DE USO ############################## # textarea1", "word_width + space x = pos[0] # Reset the x.", "USO ############################## # textarea1 = Textarea(screen, 'Hola mundo que tal", "getProperties(self): return self.text_rect def redraw(self): self.screen.blit(self.image[0], self.image[1]) pass ##################### EJEMPLO", "word_width, word_height = word_surface.get_size() if x + word_width >= max_width:", "a_sys_font.set_bold(1) # Subrayado if subrayado: a_sys_font.set_underline(1) # Construccion del texto", "self.screen = screen fg = color self.coord = x, y", "+= word_width + space x = pos[0] # Reset the", "= self.myfont.render(text, 1, (255,0,0)) # text_rect = label.get_rect() # text_rect.center", "y += word_height # Start on new row. self.size =", "pygame class Texto: def __init__(self, screen, text, x, y, text_size", "in line: word_surface = font.render(word, 0, color) word_width, word_height =", "= False, bold= False, subrayado= False, color = (250, 240,", "pass def getProperties(self): return self.size, self.coord ##################### EJEMPLO DE USO", "for word in line: word_surface = font.render(word, 0, color) word_width,", "# text = str(self.counter) # label = self.myfont.render(text, 1, (255,0,0))", "Subrayado if subrayado: a_sys_font.set_underline(1) # Construccion del texto if len(bg)", "Subrayado if subrayado: a_sys_font.set_underline(0) # self.image.blit(ren, self.text_rect) # self.text_rect =", "a space. 
max_width, max_height = screen.get_size() pos = x,y for", "color) word_width, word_height = word_surface.get_size() if x + word_width >=", "= word_width, word_height pass def getProperties(self): return self.size, self.coord #####################", "Start on new row. screen.blit(word_surface, (x, y)) x += word_width", "pos = x,y for line in words: for word in", "text.splitlines()] # 2D array where each row is a list", "= [] ): self.screen = screen fg = color self.coord", "word_height # Start on new row. screen.blit(word_surface, (x, y)) x", "bold: a_sys_font.set_bold(1) # Subrayado if subrayado: a_sys_font.set_underline(1) # Construccion del", "self.size = word_width, word_height pass def getProperties(self): return self.size, self.coord", "'Calibri', italic = False, bold= False, subrayado= False, color =", "EJEMPLO DE USO ############################## # texto1 = Texto(screen, 'Hola', 10,", "color self.coord = x, y #load font, prepare values font", "color=pygame.Color('black')): self.coord = x, y font = pygame.font.SysFont(fuente, text_size) words", "word_surface.get_size() if x + word_width >= max_width: x = pos[0]", "= ren, (x,y) screen.blit(ren, (x, y)) # Cursiva if italic:", "+ space x = pos[0] # Reset the x. y", "(250, 240, 230), bg = [] ): self.screen = screen", "= Texto(screen, 'Hola', 10, 10) class TextArea(): def __init__(self, screen,", "a_sys_font.set_underline(1) # Construccion del texto if len(bg) > 1: #", "words: for word in line: word_surface = font.render(word, 0, color)", "# texto1 = Texto(screen, 'Hola', 10, 10) class TextArea(): def", "in words: for word in line: word_surface = font.render(word, 0,", "else: # Si no, transparente ren = a_sys_font.render(text, 1, fg)", "the x. 
y += word_height # Start on new row.", "italic: a_sys_font.set_bold(0) # Negritas if bold: a_sys_font.set_bold(0) # Subrayado if", "text, x, y, fuente='Calibri', text_size = 20, color=pygame.Color('black')): self.coord =", "# textarea1 = Textarea(screen, 'Hola mundo que tal estas hoy')", "each row is a list of words. space = font.size('", "y)) x += word_width + space x = pos[0] #", "(255,0,0)) # text_rect = label.get_rect() # text_rect.center = (50,50) #", "= str(self.counter) # label = self.myfont.render(text, 1, (255,0,0)) # text_rect", "a_sys_font.set_bold(0) # Negritas if bold: a_sys_font.set_bold(0) # Subrayado if subrayado:", "= [word.split(' ') for word in text.splitlines()] # 2D array", "hay fondo de texto ren = a_sys_font.render(text, 1, fg, bg)", "self.coord = x, y #load font, prepare values font =", "a_sys_font = pygame.font.SysFont(fuente, text_size) # Cursiva if italic: a_sys_font.set_bold(1) #", "a_sys_font.set_bold(0) # Subrayado if subrayado: a_sys_font.set_underline(0) # self.image.blit(ren, self.text_rect) #", "# Negritas if bold: a_sys_font.set_bold(0) # Subrayado if subrayado: a_sys_font.set_underline(0)", "= 20, fuente = 'Calibri', italic = False, bold= False,", "for line in words: for word in line: word_surface =", "values font = pygame.font.Font(None, 80) size = font.size(text) # Font", "1: # Si hay fondo de texto ren = a_sys_font.render(text,", "x = pos[0] # Reset the x. y += word_height", "is a list of words. space = font.size(' ')[0] #", "# Start on new row. screen.blit(word_surface, (x, y)) x +=", "text = str(self.counter) # label = self.myfont.render(text, 1, (255,0,0)) #", "= (250, 240, 230), bg = [] ): self.screen =", "Texto: def __init__(self, screen, text, x, y, text_size = 20,", "Negritas if bold: a_sys_font.set_bold(0) # Subrayado if subrayado: a_sys_font.set_underline(0) #", "self.text_rect.center = (x,y) self.image = ren, (x,y) screen.blit(ren, (x, y))", "font.size(' ')[0] # The width of a space. 
max_width, max_height", "(x, y)) # Cursiva if italic: a_sys_font.set_bold(0) # Negritas if", "word in text.splitlines()] # 2D array where each row is", "= a_sys_font.render(text, 1, fg) # self.size = x+size[0], y self.text_rect", "> 1: # Si hay fondo de texto ren =", "= label.get_rect() # text_rect.center = (50,50) # self.image.blit(label, text_rect) pass", "DE USO ############################## # textarea1 = Textarea(screen, 'Hola mundo que", "fg, bg) else: # Si no, transparente ren = a_sys_font.render(text,", "self.image[1]) pass ##################### EJEMPLO DE USO ############################## # texto1 =", "if bold: a_sys_font.set_bold(1) # Subrayado if subrayado: a_sys_font.set_underline(1) # Construccion", "if x + word_width >= max_width: x = pos[0] #", "if subrayado: a_sys_font.set_underline(1) # Construccion del texto if len(bg) >", "pygame.font.SysFont(fuente, text_size) # Cursiva if italic: a_sys_font.set_bold(1) # Negritas if", "__init__(self, screen, text, x, y, text_size = 20, fuente =", "= pos[0] # Reset the x. y += word_height #", "if italic: a_sys_font.set_bold(1) # Negritas if bold: a_sys_font.set_bold(1) # Subrayado", "line: word_surface = font.render(word, 0, color) word_width, word_height = word_surface.get_size()", "): self.screen = screen fg = color self.coord = x,", "subrayado: a_sys_font.set_underline(0) # self.image.blit(ren, self.text_rect) # self.text_rect = (x, y),ren.get_size()", "italic = False, bold= False, subrayado= False, color = (250,", "de texto ren = a_sys_font.render(text, 1, fg, bg) else: #", "words. space = font.size(' ')[0] # The width of a", "import pygame class Texto: def __init__(self, screen, text, x, y,", "# Negritas if bold: a_sys_font.set_bold(1) # Subrayado if subrayado: a_sys_font.set_underline(1)", "a_sys_font.render(text, 1, fg) # self.size = x+size[0], y self.text_rect =", "word_height # Start on new row. 
self.size = word_width, word_height", "= x, y #load font, prepare values font = pygame.font.Font(None,", "y += word_height # Start on new row. screen.blit(word_surface, (x,", "# self.size = x+size[0], y self.text_rect = ren.get_rect() self.text_rect.center =", "texto1 = Texto(screen, 'Hola', 10, 10) class TextArea(): def __init__(self,", "# label = self.myfont.render(text, 1, (255,0,0)) # text_rect = label.get_rect()", "def getProperties(self): return self.text_rect def redraw(self): self.screen.blit(self.image[0], self.image[1]) pass #####################", "fg = color self.coord = x, y #load font, prepare", "if subrayado: a_sys_font.set_underline(0) # self.image.blit(ren, self.text_rect) # self.text_rect = (x,", "False, color = (250, 240, 230), bg = [] ):", "ren = a_sys_font.render(text, 1, fg) # self.size = x+size[0], y", "= screen.get_size() pos = x,y for line in words: for", "x. y += word_height # Start on new row. self.size", "############################## # textarea1 = Textarea(screen, 'Hola mundo que tal estas", "')[0] # The width of a space. max_width, max_height =", "list of words. space = font.size(' ')[0] # The width", "#load font, prepare values font = pygame.font.Font(None, 80) size =", "1, fg, bg) else: # Si no, transparente ren =", "width of a space. 
max_width, max_height = screen.get_size() pos =", "for word in text.splitlines()] # 2D array where each row", "# Subrayado if subrayado: a_sys_font.set_underline(1) # Construccion del texto if", "font.size(text) # Font a_sys_font = pygame.font.SysFont(fuente, text_size) # Cursiva if", "+ word_width >= max_width: x = pos[0] # Reset the", "word in line: word_surface = font.render(word, 0, color) word_width, word_height", "# Si no, transparente ren = a_sys_font.render(text, 1, fg) #", "= (x, y),ren.get_size() # text = str(self.counter) # label =", "= 20, color=pygame.Color('black')): self.coord = x, y font = pygame.font.SysFont(fuente,", "# Font a_sys_font = pygame.font.SysFont(fuente, text_size) # Cursiva if italic:", "20, color=pygame.Color('black')): self.coord = x, y font = pygame.font.SysFont(fuente, text_size)", "transparente ren = a_sys_font.render(text, 1, fg) # self.size = x+size[0],", "Font a_sys_font = pygame.font.SysFont(fuente, text_size) # Cursiva if italic: a_sys_font.set_bold(1)", "x+size[0], y self.text_rect = ren.get_rect() self.text_rect.center = (x,y) self.image =", "pygame.font.Font(None, 80) size = font.size(text) # Font a_sys_font = pygame.font.SysFont(fuente,", "font = pygame.font.Font(None, 80) size = font.size(text) # Font a_sys_font", "x, y font = pygame.font.SysFont(fuente, text_size) words = [word.split(' ')", "screen.blit(ren, (x, y)) # Cursiva if italic: a_sys_font.set_bold(0) # Negritas", "self.text_rect def redraw(self): self.screen.blit(self.image[0], self.image[1]) pass ##################### EJEMPLO DE USO", "y),ren.get_size() # text = str(self.counter) # label = self.myfont.render(text, 1,", "# self.text_rect = (x, y),ren.get_size() # text = str(self.counter) #", "text_size = 20, fuente = 'Calibri', italic = False, bold=", "screen fg = color self.coord = x, y #load font,", "= ren.get_rect() self.text_rect.center = (x,y) self.image = ren, (x,y) screen.blit(ren,", "max_height = screen.get_size() pos = x,y for line in words:", "line in 
words: for word in line: word_surface = font.render(word,", "##################### EJEMPLO DE USO ############################## # texto1 = Texto(screen, 'Hola',", "self.image.blit(label, text_rect) pass def getProperties(self): return self.text_rect def redraw(self): self.screen.blit(self.image[0],", "row. screen.blit(word_surface, (x, y)) x += word_width + space x", "screen, text, x, y, fuente='Calibri', text_size = 20, color=pygame.Color('black')): self.coord", "= pygame.font.Font(None, 80) size = font.size(text) # Font a_sys_font =", "USO ############################## # texto1 = Texto(screen, 'Hola', 10, 10) class", "word_height = word_surface.get_size() if x + word_width >= max_width: x", "= a_sys_font.render(text, 1, fg, bg) else: # Si no, transparente", "bold: a_sys_font.set_bold(0) # Subrayado if subrayado: a_sys_font.set_underline(0) # self.image.blit(ren, self.text_rect)", "font, prepare values font = pygame.font.Font(None, 80) size = font.size(text)", "label.get_rect() # text_rect.center = (50,50) # self.image.blit(label, text_rect) pass def", "a_sys_font.set_underline(0) # self.image.blit(ren, self.text_rect) # self.text_rect = (x, y),ren.get_size() #", "text_rect = label.get_rect() # text_rect.center = (50,50) # self.image.blit(label, text_rect)", "y self.text_rect = ren.get_rect() self.text_rect.center = (x,y) self.image = ren,", "subrayado= False, color = (250, 240, 230), bg = []", "texto if len(bg) > 1: # Si hay fondo de", "Cursiva if italic: a_sys_font.set_bold(0) # Negritas if bold: a_sys_font.set_bold(0) #", "'Hola', 10, 10) class TextArea(): def __init__(self, screen, text, x,", "2D array where each row is a list of words.", "# text_rect = label.get_rect() # text_rect.center = (50,50) # self.image.blit(label,", "= x, y font = pygame.font.SysFont(fuente, text_size) words = [word.split('", "# Cursiva if italic: a_sys_font.set_bold(1) # Negritas if bold: a_sys_font.set_bold(1)", "Start on new row. 
self.size = word_width, word_height pass def", "a list of words. space = font.size(' ')[0] # The", "= screen fg = color self.coord = x, y #load" ]
[ "model using the whole orignial dataset, and save the model", "\"_SMOGN\" if args.SMOGN else log_folder_name log_folder_name = log_folder_name + \"_SW\"", "def R2(predt: np.ndarray, dtrain: DMatrix) -> Tuple[str, float]: truth_value =", "Four rooms have low quality data and we delete them", "data[data.Location == room].drop(['Location'], axis=1).reset_index(drop=True) if args.SMOTE: # Label all the", "not\") parser.add_argument(\"--SampleWeight\", choices=[0, 1], type=int, required=False, default=0, help=\"Whether use the", "# We extract the data of particular room and run", "y room_data['SMOTE_split'] = (room_data['AC'] > 0.75).astype('int') X = room_data.drop(['SMOTE_split'], axis=1)", "the result into X and y for training. room_data_smote =", "prediction value into the dataframe X_train, X_test, y_train, y_test =", "use SMOTE and SMOGN at the same time!\" # Load", "= Trials() best_hyperparams = fmin(fn=fobjective, space=space, algo=tpe.suggest, max_evals=400, trials=trials, rstate=RandomState(seed))", "weight=weight) if args.SampleWeight else DMatrix(data=X, label=y) # Cross_validation with hyper-parameter", "warnings from typing import Tuple import numpy as np import", "= (room_data['AC'] > 0.75).astype('int') X = room_data.drop(['SMOTE_split'], axis=1) y =", "into the dataframe X_train, X_test, y_train, y_test = train_test_split(X, y,", "import tqdm from xgboost import DMatrix, cv # Set up", "all the AC data by 0.75, all AC above 0.75", "validation function to work. 
data_matrix = DMatrix(data=X, label=y, weight=weight) if", "return \"RMSE\", root_squard_error def R2(predt: np.ndarray, dtrain: DMatrix) -> Tuple[str,", "room_data_smote = pd.concat([room_data_smote, smote_split], axis=1) y = room_data_smote['AC'] X =", "0.1, 3), 'colsample_bytree': hp.uniform(\"colsample_bytree\", 0.5, 1), 'min_child_weight': hp.quniform(\"min_child_weight\", 1, 20,", "import pickle import warnings from typing import Tuple import numpy", "309 or room == 312 or room == 826 or", "as model checkpoint best_param_dict = {'objective': 'reg:squarederror', 'max_depth': int(best_hyperparams['max_depth']), 'reg_alpha':", "watchlist = [(d_test, 'eval'), (d_train, 'train')] xgb_model_train_test = xgb.train(params=best_param_dict, dtrain=d_train,", "function parser = argparse.ArgumentParser() parser.add_argument(\"--metric\", choices=['R2', 'RMSE'], type=str, required=False, default='R2',", "prediction_csv.to_csv('./{}/prediction.csv'.format(log_folder_name), index=False) # Develop a model using the whole orignial", "r2_value = r2_score(truth_value, predt) return \"R2\", r2_value def fobjective(space): param_dict_tunning", "SMOGN at the same time!\" # Load the data with", "algorithm and retrieve the result. model_smote = SMOTE(random_state=621, k_neighbors=3) room_data_smote,", "Define our evaluation functions def RMSE(predt: np.ndarray, dtrain: DMatrix) ->", "data matrix for the built-in cross validation function to work.", "to do the cross validation, still with ten folds, this", "2030 + room # Four rooms have low quality data", "room_list = data['Location'].unique() # ranging through all the rooms and", "'subsample': space['subsample'], 'min_split_loss': space['min_split_loss'], 'objective': 'reg:squarederror'} xgb_cv_result = xgb.cv(dtrain=data_matrix, params=param_dict_tunning,", "during the training and after the training. 
error_csv = pd.DataFrame(", "some directory to store the models and future analysis figures.", "and split the result into X and y for training.", "Models\") # Create two dataframes to store the result during", "= log_folder_name + \"_SMOGN\" if args.SMOGN else log_folder_name log_folder_name =", "= np.array(xgb_model_train_test.predict(d_test)).tolist() real = np.array(y_test).tolist() prediction_csv.loc[len(prediction_csv)] = {'room': room, 'observation':", "pickle.dump(xgb_model_train_test, open('./{}/trntst_models/{}.pickle.bat'.format(log_folder_name, room), 'wb')) pickle.dump(xgb_model_full, open('./{}/models/{}.pickle.bat'.format(log_folder_name, room), 'wb')) print(\"Training finished!\")", "= data['Location'].unique() # ranging through all the rooms and do", "else: y = pd.DataFrame(room_data['AC'].fillna(method='pad')) X = room_data.drop(['AC'], axis=1).fillna(method='pad') if args.SampleWeight:", "100, 1), 'reg_lambda': hp.uniform(\"reg_lambda\", 0, 2), 'subsample': hp.uniform(\"subsample\", 0.5, 1),", "dataset, and save the model xgb_model_full = xgb.train(params=best_param_dict, dtrain=data_matrix, num_boost_round=200,", "low quality data and we delete them manually if room", "Ignore all the warnings and set pandas to display every", "result into X and y for training. room_data_smote = pd.concat([room_data_smote,", "result during the training and after the training. error_csv =", "log_folder_name = \"Test_{}_{}\".format(args.metric, datetime.now().strftime(\"%Y_%m_%d_%H_%M_%S\")) log_folder_name = \"Test_R2_HYPEROPT\" log_folder_name = log_folder_name", "tuning space = {'max_depth': hp.quniform(\"max_depth\", 3, 10, 1), 'learning_rate': hp.uniform(\"learning_rate\",", "# Use the built-in cv function to do the cross", "xgb.train(params=best_param_dict, dtrain=d_train, num_boost_round=200, evals=watchlist, verbose_eval=args.log, xgb_model=None, feval=eval_dict[args.metric], maximize=True) prediction =", "and future analysis figures. 
# log_folder_name = \"Test_{}_{}\".format(args.metric, datetime.now().strftime(\"%Y_%m_%d_%H_%M_%S\")) log_folder_name", "xgb.cv(dtrain=data_matrix, params=param_dict_tunning, nfold=5, early_stopping_rounds=30, as_pandas=True, num_boost_round=200, seed=seed, metrics='rmse', maximize=False, shuffle=True)", "the model xgb_model_full = xgb.train(params=best_param_dict, dtrain=data_matrix, num_boost_round=200, evals=watchlist, verbose_eval=args.log, xgb_model=None,", "# Save all the models we trained for future use", "the models and future analysis figures. # log_folder_name = \"Test_{}_{}\".format(args.metric,", "or not\") parser.add_argument(\"--SMOGN\", choices=[0, 1], type=int, required=False, default=0, help=\"Whether use", "cv function to do the cross validation, still with ten", "ground truth and prediction value into the dataframe X_train, X_test,", "os.mkdir('./{}/trntst_models/'.format(log_folder_name)) # Define our evaluation functions def RMSE(predt: np.ndarray, dtrain:", "'learning_rate': space['learning_rate'], 'colsample_bytree': space['colsample_bytree'], 'min_child_weight': int(space['min_child_weight']), 'reg_alpha': int(space['reg_alpha']), 'reg_lambda': space['reg_lambda'],", "parser.parse_args() # Ignore all the warnings and set pandas to", "data with a positive AC electricity consumption value, and drop", "y, test_size=0.2, random_state=seed) d_train = DMatrix(X_train, label=y_train) d_test = DMatrix(X_test,", "required=False, default=1, help=\"Whether use the SMOTE or not\") parser.add_argument(\"--SMOGN\", choices=[0,", "the data with a positive AC electricity consumption value, and", "and save the model xgb_model_full = xgb.train(params=best_param_dict, dtrain=data_matrix, num_boost_round=200, evals=watchlist,", "+ \"_SW\" if args.SampleWeight else log_folder_name previous_parameter_folder = \"Test_R2_HYPEROPT\" assert", "dtrain: DMatrix) -> Tuple[str, float]: truth_value = dtrain.get_label() root_squard_error =", "need them data = 
pd.read_csv(\"summer_data_compiled.csv\", index_col=0) data = data[data.AC >", "functions def RMSE(predt: np.ndarray, dtrain: DMatrix) -> Tuple[str, float]: truth_value", "== 917 or room == 1001: continue # We extract", "# Create two dataframes to store the result during the", "room_data_smote['AC'] X = room_data_smote.drop(['AC', 'SMOTE_split'], axis=1) else: room_data = pd.read_csv('./SMOGN_processed/{}.csv'.format(room),", "917 or room == 1001: continue # We extract the", "= 2030 + room # Four rooms have low quality", "'reg:squarederror', 'max_depth': int(best_hyperparams['max_depth']), 'reg_alpha': best_hyperparams['reg_alpha'], 'reg_lambda': best_hyperparams['reg_lambda'], 'min_child_weight': best_hyperparams['min_child_weight'], 'colsample_bytree':", "dtrain=data_matrix, num_boost_round=200, evals=watchlist, verbose_eval=args.log, xgb_model=None, feval=eval_dict[args.metric], maximize=True) # Save all", "up an argument parser to decide the metric function parser", "and drop the time data as we don't need them", "and after the training. 
error_csv = pd.DataFrame( columns=['room', 'train-{}-mean'.format(args.metric), 'train-{}-std'.format(args.metric),", "parser = argparse.ArgumentParser() parser.add_argument(\"--metric\", choices=['R2', 'RMSE'], type=str, required=False, default='R2', help=\"The", "all the warnings and set pandas to display every column", "same time!\" # Load the data with a positive AC", "feval=eval_dict[args.metric], maximize=True) prediction = np.array(xgb_model_train_test.predict(d_test)).tolist() real = np.array(y_test).tolist() prediction_csv.loc[len(prediction_csv)] =", "folder name exists\" if not os.path.exists('./{}/'.format(log_folder_name)): os.mkdir('./{}'.format(log_folder_name)) os.mkdir('./{}/models/'.format(log_folder_name)) os.mkdir('./{}/trntst_models/'.format(log_folder_name)) #", "r2_score(truth_value, predt) return \"R2\", r2_value def fobjective(space): param_dict_tunning = {'max_depth':", "maximize=False, shuffle=True) return {\"loss\": (xgb_cv_result[\"test-rmse-mean\"]).tail(1).iloc[0], \"status\": STATUS_OK} eval_dict = {'RMSE':", "hp.quniform(\"min_child_weight\", 1, 20, 1), 'reg_alpha': hp.quniform(\"reg_alpha\", 0, 100, 1), 'reg_lambda':", "import os import pickle import warnings from typing import Tuple", "= fmin(fn=fobjective, space=space, algo=tpe.suggest, max_evals=400, trials=trials, rstate=RandomState(seed)) # setup our", "maximize=True) prediction = np.array(xgb_model_train_test.predict(d_test)).tolist() real = np.array(y_test).tolist() prediction_csv.loc[len(prediction_csv)] = {'room':", "= {'room': room, 'observation': json.dumps(real), 'prediction': json.dumps(prediction)} # Dump the", "= argparse.ArgumentParser() parser.add_argument(\"--metric\", choices=['R2', 'RMSE'], type=str, required=False, default='R2', help=\"The evaluation", "validation, still with ten folds, this will return us the", "train the xgboost model with cross-validation for each unique room", "import pandas as pd import xgboost as xgb from hyperopt", "and set pandas to 
display every column and row everytime", "room. for room in tqdm(room_list): seed = 2030 + room", "axis=1) elif args.SMOGN: if len(room_data) < 500: room_data['SMOTE_split'] = (room_data['AC']", "y=class_sample) X = X.to_numpy() # Build another full data matrix", "one training_testing for ploting, and save both ground truth and", "0.75 will be marked as 1, otherwise 0. Split into", "consumption value, and drop the time data as we don't", "index_col=0) y = room_data['AC'] X = room_data.drop(['AC'], axis=1) else: y", "we print a dataframe warnings.filterwarnings('ignore') pd.set_option('display.max_columns', None) pd.set_option('display.max_rows', None) assert", "Dump the error dataframes into csv files. error_csv.to_csv('./{}/error.csv'.format(log_folder_name), index=False) prediction_csv.to_csv('./{}/prediction.csv'.format(log_folder_name),", "args.SMOTE else log_folder_name log_folder_name = log_folder_name + \"_SMOGN\" if args.SMOGN", "math import os import pickle import warnings from typing import", "= data[data.AC > 0].drop(['Time', 'Date', 'Hour'], axis=1).reset_index(drop=True) # Create some", "'min_child_weight': hp.quniform(\"min_child_weight\", 1, 20, 1), 'reg_alpha': hp.quniform(\"reg_alpha\", 0, 100, 1),", "= cv(dtrain=data_matrix, params=best_param_dict, nfold=5, early_stopping_rounds=30, as_pandas=True, num_boost_round=200, seed=seed, shuffle=True, feval=eval_dict[args.metric],", "trained for future use pickle.dump(xgb_model_train_test, open('./{}/trntst_models/{}.pickle.bat'.format(log_folder_name, room), 'wb')) pickle.dump(xgb_model_full, open('./{}/models/{}.pickle.bat'.format(log_folder_name,", "model_smote = SMOTE(random_state=621, k_neighbors=3) room_data_smote, smote_split = model_smote.fit_resample(X, y) #", "everytime we print a dataframe warnings.filterwarnings('ignore') pd.set_option('display.max_columns', None) pd.set_option('display.max_rows', None)", "and run the SMOTE algorithm on it. 
room_data = data[data.Location", "1), 'reg_lambda': hp.uniform(\"reg_lambda\", 0, 2), 'subsample': hp.uniform(\"subsample\", 0.5, 1), 'min_split_loss':", "above 0.75 will be marked as 1, otherwise 0. Split", "= model_smote.fit_resample(X, y) # concat the result from SMOTE and", "= room_data.drop(['AC'], axis=1).fillna(method='pad') if args.SampleWeight: class_sample = pd.cut(y, bins=15) weight", "log_folder_name previous_parameter_folder = \"Test_R2_HYPEROPT\" assert log_folder_name != previous_parameter_folder, \"Previous folder", "eval_dict = {'RMSE': RMSE, 'R2': R2} print(\"Start Training The Models\")", "elif args.SMOGN: if len(room_data) < 500: room_data['SMOTE_split'] = (room_data['AC'] >", "marked as 1, otherwise 0. Split into X and y", "room_data_smote, smote_split = model_smote.fit_resample(X, y) # concat the result from", "verbose_eval=args.log, xgb_model=None, feval=eval_dict[args.metric], maximize=True) # Save all the models we", "import DMatrix, cv # Set up an argument parser to", "AC electricity consumption value, and drop the time data as", "= pd.DataFrame(columns=['room', 'observation', 'prediction']) room_list = data['Location'].unique() # ranging through", "STATUS_OK, Trials from imblearn.over_sampling import SMOTE from numpy.random import RandomState", "function to do the cross validation, still with ten folds,", "the models we trained for future use pickle.dump(xgb_model_train_test, open('./{}/trntst_models/{}.pickle.bat'.format(log_folder_name, room),", "!= previous_parameter_folder, \"Previous folder name exists\" if not os.path.exists('./{}/'.format(log_folder_name)): os.mkdir('./{}'.format(log_folder_name))", "RandomState from sklearn.metrics import r2_score, mean_squared_error from sklearn.model_selection import train_test_split", "SMOTE algorithm and retrieve the result. 
model_smote = SMOTE(random_state=621, k_neighbors=3)", "log_folder_name log_folder_name = log_folder_name + \"_SW\" if args.SampleWeight else log_folder_name", "axis=1).reset_index(drop=True) if args.SMOTE: # Label all the AC data by", "or room == 312 or room == 826 or room", "rstate=RandomState(seed)) # setup our training parameters and a model variable", "label=y, weight=weight) if args.SampleWeight else DMatrix(data=X, label=y) # Cross_validation with", "parser.add_argument(\"--SMOTE\", choices=[0, 1], type=int, required=False, default=1, help=\"Whether use the SMOTE", "every column and row everytime we print a dataframe warnings.filterwarnings('ignore')", "data = data[data.AC > 0].drop(['Time', 'Date', 'Hour'], axis=1).reset_index(drop=True) # Create", "out the training progress\") parser.add_argument(\"--SMOTE\", choices=[0, 1], type=int, required=False, default=1,", "each room. for room in tqdm(room_list): seed = 2030 +", "axis=1) else: y = pd.DataFrame(room_data['AC'].fillna(method='pad')) X = room_data.drop(['AC'], axis=1).fillna(method='pad') if", "Build another full data matrix for the built-in cross validation", "args.SampleWeight else DMatrix(data=X, label=y) # Cross_validation with hyper-parameter tuning space", "= {'RMSE': RMSE, 'R2': R2} print(\"Start Training The Models\") #", "with hyper-parameter tuning space = {'max_depth': hp.quniform(\"max_depth\", 3, 10, 1),", "are dumped into two csv files in the current work", "# ranging through all the rooms and do the training", "cross-validation for each room. for room in tqdm(room_list): seed =", "to train the XGBoost model\") parser.add_argument(\"--log\", choices=[0, 1, 100], type=int,", "result. 
model_smote = SMOTE(random_state=621, k_neighbors=3) room_data_smote, smote_split = model_smote.fit_resample(X, y)", "continue # We extract the data of particular room and", "of particular room and run the SMOTE algorithm on it.", "json.dumps(real), 'prediction': json.dumps(prediction)} # Dump the error dataframes into csv", "X and y room_data['SMOTE_split'] = (room_data['AC'] > 0.75).astype('int') X =", "xgboost model with cross-validation for each unique room in the", "Trials() best_hyperparams = fmin(fn=fobjective, space=space, algo=tpe.suggest, max_evals=400, trials=trials, rstate=RandomState(seed)) #", "import RandomState from sklearn.metrics import r2_score, mean_squared_error from sklearn.model_selection import", "RMSE(predt: np.ndarray, dtrain: DMatrix) -> Tuple[str, float]: truth_value = dtrain.get_label()", "if args.SMOTE: # Label all the AC data by 0.75,", "np import pandas as pd import xgboost as xgb from", "import argparse import json import math import os import pickle", "room_data = data[data.Location == room].drop(['Location'], axis=1).reset_index(drop=True) if args.SMOTE: # Label", "cross validation, still with ten folds, this will return us", "'train-{}-std'.format(args.metric), 'train-rmse-mean', 'train-rmse-std', 'test-{}-mean'.format(args.metric), 'test-{}-std'.format(args.metric), 'test-rmse-mean', 'test-rmse-std']) prediction_csv = pd.DataFrame(columns=['room',", "as_pandas=True, num_boost_round=200, seed=seed, shuffle=True, feval=eval_dict[args.metric], maximize=True) xgb_cv_result['room'] = room error_csv.loc[len(error_csv)]", "through all the rooms and do the training and cross-validation", "function to work. data_matrix = DMatrix(data=X, label=y, weight=weight) if args.SampleWeight", "20, 1), 'reg_alpha': hp.quniform(\"reg_alpha\", 0, 100, 1), 'reg_lambda': hp.uniform(\"reg_lambda\", 0,", "# Set up an argument parser to decide the metric", "files in the current work directory. 
import argparse import json", "Use one training_testing for ploting, and save both ground truth", "SMOTE and SMOGN at the same time!\" # Load the", "0.5, 1), 'min_child_weight': hp.quniform(\"min_child_weight\", 1, 20, 1), 'reg_alpha': hp.quniform(\"reg_alpha\", 0,", "by 0.75, all AC above 0.75 will be marked as", "'objective': 'reg:squarederror'} xgb_cv_result = xgb.cv(dtrain=data_matrix, params=param_dict_tunning, nfold=5, early_stopping_rounds=30, as_pandas=True, num_boost_round=200,", "= xgb.train(params=best_param_dict, dtrain=d_train, num_boost_round=200, evals=watchlist, verbose_eval=args.log, xgb_model=None, feval=eval_dict[args.metric], maximize=True) prediction", "extract the data of particular room and run the SMOTE", "pd.DataFrame(room_data['AC'].fillna(method='pad')) X = room_data.drop(['AC'], axis=1).fillna(method='pad') if args.SampleWeight: class_sample = pd.cut(y,", "retrieve the result. model_smote = SMOTE(random_state=621, k_neighbors=3) room_data_smote, smote_split =", "smote_split], axis=1) y = room_data_smote['AC'] X = room_data_smote.drop(['AC', 'SMOTE_split'], axis=1)", "X = room_data.drop(['AC'], axis=1) else: y = pd.DataFrame(room_data['AC'].fillna(method='pad')) X =", "sklearn.metrics import r2_score, mean_squared_error from sklearn.model_selection import train_test_split from sklearn.utils", "to print out the training progress\") parser.add_argument(\"--SMOTE\", choices=[0, 1], type=int,", "pd.cut(y, bins=15) weight = compute_sample_weight(class_weight=\"balanced\", y=class_sample) X = X.to_numpy() #", "SMOTE from numpy.random import RandomState from sklearn.metrics import r2_score, mean_squared_error", "row everytime we print a dataframe warnings.filterwarnings('ignore') pd.set_option('display.max_columns', None) pd.set_option('display.max_rows',", "train_test_split from sklearn.utils import compute_sample_weight from tqdm import tqdm from", "'R2': R2} print(\"Start Training The Models\") # Create two dataframes", 
"pd.read_csv('./SMOGN_processed/{}.csv'.format(room), index_col=0) y = room_data['AC'] X = room_data.drop(['AC'], axis=1) else:", "DMatrix(data=X, label=y) # Cross_validation with hyper-parameter tuning space = {'max_depth':", "into two csv files in the current work directory. import", "we don't need them data = pd.read_csv(\"summer_data_compiled.csv\", index_col=0) data =", "fobjective(space): param_dict_tunning = {'max_depth': int(space['max_depth']), 'learning_rate': space['learning_rate'], 'colsample_bytree': space['colsample_bytree'], 'min_child_weight':", "best_hyperparams['min_split_loss']} np.save('./{}/models/{}_parameter.npy'.format(log_folder_name, room), best_param_dict) # Use the built-in cv function", "use the SMOTE or not\") parser.add_argument(\"--SMOGN\", choices=[0, 1], type=int, required=False,", "xgb.train(params=best_param_dict, dtrain=data_matrix, num_boost_round=200, evals=watchlist, verbose_eval=args.log, xgb_model=None, feval=eval_dict[args.metric], maximize=True) # Save", "pd.concat([room_data_smote, smote_split], axis=1) y = room_data_smote['AC'] X = room_data_smote.drop(['AC', 'SMOTE_split'],", "'train-rmse-mean', 'train-rmse-std', 'test-{}-mean'.format(args.metric), 'test-{}-std'.format(args.metric), 'test-rmse-mean', 'test-rmse-std']) prediction_csv = pd.DataFrame(columns=['room', 'observation',", "have low quality data and we delete them manually if", "for the built-in cross validation function to work. 
data_matrix =", "or not\") parser.add_argument(\"--SampleWeight\", choices=[0, 1], type=int, required=False, default=0, help=\"Whether use", "nfold=5, early_stopping_rounds=30, as_pandas=True, num_boost_round=200, seed=seed, shuffle=True, feval=eval_dict[args.metric], maximize=True) xgb_cv_result['room'] =", "{\"loss\": (xgb_cv_result[\"test-rmse-mean\"]).tail(1).iloc[0], \"status\": STATUS_OK} eval_dict = {'RMSE': RMSE, 'R2': R2}", "The Models\") # Create two dataframes to store the result", "train the XGBoost model\") parser.add_argument(\"--log\", choices=[0, 1, 100], type=int, required=False,", "y for training. room_data_smote = pd.concat([room_data_smote, smote_split], axis=1) y =", "'observation': json.dumps(real), 'prediction': json.dumps(prediction)} # Dump the error dataframes into", "tqdm import tqdm from xgboost import DMatrix, cv # Set", "the SMOGN or not\") parser.add_argument(\"--SampleWeight\", choices=[0, 1], type=int, required=False, default=0,", "model\") parser.add_argument(\"--log\", choices=[0, 1, 100], type=int, required=False, default=0, help=\"Whether to", "DMatrix) -> Tuple[str, float]: truth_value = dtrain.get_label() r2_value = r2_score(truth_value,", "int(space['max_depth']), 'learning_rate': space['learning_rate'], 'colsample_bytree': space['colsample_bytree'], 'min_child_weight': int(space['min_child_weight']), 'reg_alpha': int(space['reg_alpha']), 'reg_lambda':", "== 826 or room == 917 or room == 1001:", "float]: truth_value = dtrain.get_label() r2_value = r2_score(truth_value, predt) return \"R2\",", "'train')] xgb_model_train_test = xgb.train(params=best_param_dict, dtrain=d_train, num_boost_round=200, evals=watchlist, verbose_eval=args.log, xgb_model=None, feval=eval_dict[args.metric],", "after the training. 
error_csv = pd.DataFrame( columns=['room', 'train-{}-mean'.format(args.metric), 'train-{}-std'.format(args.metric), 'train-rmse-mean',", "verbose_eval=args.log, xgb_model=None, feval=eval_dict[args.metric], maximize=True) prediction = np.array(xgb_model_train_test.predict(d_test)).tolist() real = np.array(y_test).tolist()", "fmin, tpe, hp, STATUS_OK, Trials from imblearn.over_sampling import SMOTE from", "is the code to train the xgboost model with cross-validation", "choices=[0, 1, 100], type=int, required=False, default=0, help=\"Whether to print out", "bins=15) weight = compute_sample_weight(class_weight=\"balanced\", y=class_sample) X = X.to_numpy() # Build", "all the models we trained for future use pickle.dump(xgb_model_train_test, open('./{}/trntst_models/{}.pickle.bat'.format(log_folder_name,", "log_folder_name + \"_SW\" if args.SampleWeight else log_folder_name previous_parameter_folder = \"Test_R2_HYPEROPT\"", "feval=eval_dict[args.metric], maximize=True) xgb_cv_result['room'] = room error_csv.loc[len(error_csv)] = xgb_cv_result.loc[len(xgb_cv_result) - 1]", "split the result into X and y for training. room_data_smote", "axis=1) y = room_data_smote['AC'] X = room_data_smote.drop(['AC', 'SMOTE_split'], axis=1) elif", "particular room and run the SMOTE algorithm on it. 
room_data", "xgb_cv_result = xgb.cv(dtrain=data_matrix, params=param_dict_tunning, nfold=5, early_stopping_rounds=30, as_pandas=True, num_boost_round=200, seed=seed, metrics='rmse',", "else: room_data = pd.read_csv('./SMOGN_processed/{}.csv'.format(room), index_col=0) y = room_data['AC'] X =", "compute_sample_weight from tqdm import tqdm from xgboost import DMatrix, cv", "SMOGN or not\") parser.add_argument(\"--SampleWeight\", choices=[0, 1], type=int, required=False, default=0, help=\"Whether", "delete them manually if room == 309 or room ==", "1, 20, 1), 'reg_alpha': hp.quniform(\"reg_alpha\", 0, 100, 1), 'reg_lambda': hp.uniform(\"reg_lambda\",", "'reg_alpha': best_hyperparams['reg_alpha'], 'reg_lambda': best_hyperparams['reg_lambda'], 'min_child_weight': best_hyperparams['min_child_weight'], 'colsample_bytree': best_hyperparams['colsample_bytree'], 'learning_rate': best_hyperparams['learning_rate'],", "R2(predt: np.ndarray, dtrain: DMatrix) -> Tuple[str, float]: truth_value = dtrain.get_label()", "'min_child_weight': best_hyperparams['min_child_weight'], 'colsample_bytree': best_hyperparams['colsample_bytree'], 'learning_rate': best_hyperparams['learning_rate'], 'subsample': best_hyperparams['subsample'], 'min_split_loss': best_hyperparams['min_split_loss']}", "early_stopping_rounds=30, as_pandas=True, num_boost_round=200, seed=seed, shuffle=True, feval=eval_dict[args.metric], maximize=True) xgb_cv_result['room'] = room", "model xgb_model_full = xgb.train(params=best_param_dict, dtrain=data_matrix, num_boost_round=200, evals=watchlist, verbose_eval=args.log, xgb_model=None, feval=eval_dict[args.metric],", "= data[data.Location == room].drop(['Location'], axis=1).reset_index(drop=True) if args.SMOTE: # Label all", "result from SMOTE and split the result into X and", "'test-rmse-std']) prediction_csv = pd.DataFrame(columns=['room', 'observation', 'prediction']) room_list = data['Location'].unique() #", "the result during the training and after the training. 
error_csv", "os.mkdir('./{}/models/'.format(log_folder_name)) os.mkdir('./{}/trntst_models/'.format(log_folder_name)) # Define our evaluation functions def RMSE(predt: np.ndarray,", "None) pd.set_option('display.max_rows', None) assert args.SMOTE != args.SMOGN, \"Can't use SMOTE", "= DMatrix(data=X, label=y, weight=weight) if args.SampleWeight else DMatrix(data=X, label=y) #", "ten folds, this will return us the results. xgb_cv_result =", "training and after the training. error_csv = pd.DataFrame( columns=['room', 'train-{}-mean'.format(args.metric),", "to decide the metric function parser = argparse.ArgumentParser() parser.add_argument(\"--metric\", choices=['R2',", "index=False) prediction_csv.to_csv('./{}/prediction.csv'.format(log_folder_name), index=False) # Develop a model using the whole", "axis=1).fillna(method='pad') if args.SampleWeight: class_sample = pd.cut(y, bins=15) weight = compute_sample_weight(class_weight=\"balanced\",", "= {'max_depth': int(space['max_depth']), 'learning_rate': space['learning_rate'], 'colsample_bytree': space['colsample_bytree'], 'min_child_weight': int(space['min_child_weight']), 'reg_alpha':", "required=False, default=0, help=\"Whether use the sample weight\") args = parser.parse_args()", "the whole orignial dataset, and save the model xgb_model_full =", "room in the dataset. 
# Models are dumped into ./models", "the training progress\") parser.add_argument(\"--SMOTE\", choices=[0, 1], type=int, required=False, default=1, help=\"Whether", "num_boost_round=200, seed=seed, shuffle=True, feval=eval_dict[args.metric], maximize=True) xgb_cv_result['room'] = room error_csv.loc[len(error_csv)] =", "= dtrain.get_label() root_squard_error = math.sqrt(mean_squared_error(truth_value, predt)) return \"RMSE\", root_squard_error def", "xgb_model=None, feval=eval_dict[args.metric], maximize=True) prediction = np.array(xgb_model_train_test.predict(d_test)).tolist() real = np.array(y_test).tolist() prediction_csv.loc[len(prediction_csv)]", "trials=trials, rstate=RandomState(seed)) # setup our training parameters and a model", "'train-rmse-std', 'test-{}-mean'.format(args.metric), 'test-{}-std'.format(args.metric), 'test-rmse-mean', 'test-rmse-std']) prediction_csv = pd.DataFrame(columns=['room', 'observation', 'prediction'])", "help=\"Whether use the SMOTE or not\") parser.add_argument(\"--SMOGN\", choices=[0, 1], type=int,", "312 or room == 826 or room == 917 or", "csv files in the current work directory. import argparse import", "print out the training progress\") parser.add_argument(\"--SMOTE\", choices=[0, 1], type=int, required=False,", "run the SMOTE algorithm on it. 
room_data = data[data.Location ==", "Cross_validation with hyper-parameter tuning space = {'max_depth': hp.quniform(\"max_depth\", 3, 10,", "type=int, required=False, default=0, help=\"Whether use the sample weight\") args =", "rooms and do the training and cross-validation for each room.", "100], type=int, required=False, default=0, help=\"Whether to print out the training", "os.path.exists('./{}/models/{}_parameter.npy'.format(previous_parameter_folder, room)): best_param_dict = np.load('./{}/models/{}_parameter.npy'.format(previous_parameter_folder, room), allow_pickle=True).item() np.save('./{}/models/{}_parameter.npy'.format(log_folder_name, room), best_param_dict)", "parser.add_argument(\"--SampleWeight\", choices=[0, 1], type=int, required=False, default=0, help=\"Whether use the sample", "model variable as model checkpoint best_param_dict = {'objective': 'reg:squarederror', 'max_depth':", "\"Previous folder name exists\" if not os.path.exists('./{}/'.format(log_folder_name)): os.mkdir('./{}'.format(log_folder_name)) os.mkdir('./{}/models/'.format(log_folder_name)) os.mkdir('./{}/trntst_models/'.format(log_folder_name))", "import compute_sample_weight from tqdm import tqdm from xgboost import DMatrix,", "from imblearn.over_sampling import SMOTE from numpy.random import RandomState from sklearn.metrics", "index_col=0) data = data[data.AC > 0].drop(['Time', 'Date', 'Hour'], axis=1).reset_index(drop=True) #", "future analysis figures. # log_folder_name = \"Test_{}_{}\".format(args.metric, datetime.now().strftime(\"%Y_%m_%d_%H_%M_%S\")) log_folder_name =", "if args.SMOTE else log_folder_name log_folder_name = log_folder_name + \"_SMOGN\" if", "return us the results. 
xgb_cv_result = cv(dtrain=data_matrix, params=best_param_dict, nfold=5, early_stopping_rounds=30,", "np.array(y_test).tolist() prediction_csv.loc[len(prediction_csv)] = {'room': room, 'observation': json.dumps(real), 'prediction': json.dumps(prediction)} #", "Tuple import numpy as np import pandas as pd import", "# Develop a model using the whole orignial dataset, and", "(room_data['AC'] > 0.75).astype('int') X = room_data.drop(['SMOTE_split'], axis=1) y = room_data['SMOTE_split']", "work directory. import argparse import json import math import os", "return {\"loss\": (xgb_cv_result[\"test-rmse-mean\"]).tail(1).iloc[0], \"status\": STATUS_OK} eval_dict = {'RMSE': RMSE, 'R2':", "args.SMOGN: if len(room_data) < 500: room_data['SMOTE_split'] = (room_data['AC'] > 0.75).astype('int')", "- 1] # Use one training_testing for ploting, and save", "to train the xgboost model with cross-validation for each unique", "1], type=int, required=False, default=1, help=\"Whether use the SMOTE or not\")", "= room_data.drop(['AC'], axis=1) else: y = pd.DataFrame(room_data['AC'].fillna(method='pad')) X = room_data.drop(['AC'],", "room_data.drop(['AC'], axis=1) else: y = pd.DataFrame(room_data['AC'].fillna(method='pad')) X = room_data.drop(['AC'], axis=1).fillna(method='pad')", "the same time!\" # Load the data with a positive", "predt)) return \"RMSE\", root_squard_error def R2(predt: np.ndarray, dtrain: DMatrix) ->", "use to train the XGBoost model\") parser.add_argument(\"--log\", choices=[0, 1, 100],", "else log_folder_name previous_parameter_folder = \"Test_R2_HYPEROPT\" assert log_folder_name != previous_parameter_folder, \"Previous", "axis=1) y = room_data_smote['AC'] X = room_data_smote.drop(['AC', 'SMOTE_split'], axis=1) else:", "the warnings and set pandas to display every column and", "for training. 
room_data_smote = pd.concat([room_data_smote, smote_split], axis=1) y = room_data_smote['AC']", "0.75, all AC above 0.75 will be marked as 1,", "best_hyperparams['learning_rate'], 'subsample': best_hyperparams['subsample'], 'min_split_loss': best_hyperparams['min_split_loss']} np.save('./{}/models/{}_parameter.npy'.format(log_folder_name, room), best_param_dict) # Use", "early_stopping_rounds=30, as_pandas=True, num_boost_round=200, seed=seed, metrics='rmse', maximize=False, shuffle=True) return {\"loss\": (xgb_cv_result[\"test-rmse-mean\"]).tail(1).iloc[0],", "warnings and set pandas to display every column and row", "as we don't need them data = pd.read_csv(\"summer_data_compiled.csv\", index_col=0) data", "'train-{}-mean'.format(args.metric), 'train-{}-std'.format(args.metric), 'train-rmse-mean', 'train-rmse-std', 'test-{}-mean'.format(args.metric), 'test-{}-std'.format(args.metric), 'test-rmse-mean', 'test-rmse-std']) prediction_csv =", "shuffle=True) return {\"loss\": (xgb_cv_result[\"test-rmse-mean\"]).tail(1).iloc[0], \"status\": STATUS_OK} eval_dict = {'RMSE': RMSE,", "from sklearn.metrics import r2_score, mean_squared_error from sklearn.model_selection import train_test_split from", "imblearn.over_sampling import SMOTE from numpy.random import RandomState from sklearn.metrics import", "room == 1001: continue # We extract the data of", "y_test = train_test_split(X, y, test_size=0.2, random_state=seed) d_train = DMatrix(X_train, label=y_train)", "Split into X and y room_data['SMOTE_split'] = (room_data['AC'] > 0.75).astype('int')", "= \"Test_{}_{}\".format(args.metric, datetime.now().strftime(\"%Y_%m_%d_%H_%M_%S\")) log_folder_name = \"Test_R2_HYPEROPT\" log_folder_name = log_folder_name +", "import numpy as np import pandas as pd import xgboost", "an argument parser to decide the metric function parser =", "\"_SW\" if args.SampleWeight else log_folder_name previous_parameter_folder = \"Test_R2_HYPEROPT\" assert log_folder_name", "0, 100, 1), 'reg_lambda': 
hp.uniform(\"reg_lambda\", 0, 2), 'subsample': hp.uniform(\"subsample\", 0.5,", "= dtrain.get_label() r2_value = r2_score(truth_value, predt) return \"R2\", r2_value def", "for each unique room in the dataset. # Models are", "pd.DataFrame( columns=['room', 'train-{}-mean'.format(args.metric), 'train-{}-std'.format(args.metric), 'train-rmse-mean', 'train-rmse-std', 'test-{}-mean'.format(args.metric), 'test-{}-std'.format(args.metric), 'test-rmse-mean', 'test-rmse-std'])", "'test-{}-mean'.format(args.metric), 'test-{}-std'.format(args.metric), 'test-rmse-mean', 'test-rmse-std']) prediction_csv = pd.DataFrame(columns=['room', 'observation', 'prediction']) room_list", "0.75).astype('int') X = room_data.drop(['SMOTE_split'], axis=1) y = room_data['SMOTE_split'] # Run", "'min_split_loss': hp.uniform(\"min_split_loss\", 0, 9)} if os.path.exists('./{}/models/{}_parameter.npy'.format(previous_parameter_folder, room)): best_param_dict = np.load('./{}/models/{}_parameter.npy'.format(previous_parameter_folder,", "hyperopt import fmin, tpe, hp, STATUS_OK, Trials from imblearn.over_sampling import", "and SMOGN at the same time!\" # Load the data", "room), best_param_dict) else: trials = Trials() best_hyperparams = fmin(fn=fobjective, space=space,", "two csv files in the current work directory. 
import argparse", "variable as model checkpoint best_param_dict = {'objective': 'reg:squarederror', 'max_depth': int(best_hyperparams['max_depth']),", "if args.SMOGN else log_folder_name log_folder_name = log_folder_name + \"_SW\" if", "y = room_data_smote['AC'] X = room_data_smote.drop(['AC', 'SMOTE_split'], axis=1) elif args.SMOGN:", "best_hyperparams['min_child_weight'], 'colsample_bytree': best_hyperparams['colsample_bytree'], 'learning_rate': best_hyperparams['learning_rate'], 'subsample': best_hyperparams['subsample'], 'min_split_loss': best_hyperparams['min_split_loss']} np.save('./{}/models/{}_parameter.npy'.format(log_folder_name,", "from sklearn.utils import compute_sample_weight from tqdm import tqdm from xgboost", "error dataframes into csv files. error_csv.to_csv('./{}/error.csv'.format(log_folder_name), index=False) prediction_csv.to_csv('./{}/prediction.csv'.format(log_folder_name), index=False) #", "error_csv.to_csv('./{}/error.csv'.format(log_folder_name), index=False) prediction_csv.to_csv('./{}/prediction.csv'.format(log_folder_name), index=False) # Develop a model using the", "# setup our training parameters and a model variable as", "electricity consumption value, and drop the time data as we", "parser.add_argument(\"--SMOGN\", choices=[0, 1], type=int, required=False, default=0, help=\"Whether use the SMOGN", "\"Test_{}_{}\".format(args.metric, datetime.now().strftime(\"%Y_%m_%d_%H_%M_%S\")) log_folder_name = \"Test_R2_HYPEROPT\" log_folder_name = log_folder_name + \"_SMOTE\"", "mean_squared_error from sklearn.model_selection import train_test_split from sklearn.utils import compute_sample_weight from", "args.SMOTE != args.SMOGN, \"Can't use SMOTE and SMOGN at the", "pd.read_csv(\"summer_data_compiled.csv\", index_col=0) data = data[data.AC > 0].drop(['Time', 'Date', 'Hour'], axis=1).reset_index(drop=True)", "= math.sqrt(mean_squared_error(truth_value, predt)) return \"RMSE\", root_squard_error def R2(predt: np.ndarray, dtrain:", "all AC above 
0.75 will be marked as 1, otherwise", "trials = Trials() best_hyperparams = fmin(fn=fobjective, space=space, algo=tpe.suggest, max_evals=400, trials=trials,", "help=\"Whether use the SMOGN or not\") parser.add_argument(\"--SampleWeight\", choices=[0, 1], type=int,", "Use the built-in cv function to do the cross validation,", "(d_train, 'train')] xgb_model_train_test = xgb.train(params=best_param_dict, dtrain=d_train, num_boost_round=200, evals=watchlist, verbose_eval=args.log, xgb_model=None,", "a model using the whole orignial dataset, and save the", "hp.uniform(\"learning_rate\", 0.1, 3), 'colsample_bytree': hp.uniform(\"colsample_bytree\", 0.5, 1), 'min_child_weight': hp.quniform(\"min_child_weight\", 1,", "both ground truth and prediction value into the dataframe X_train,", "into X and y room_data['SMOTE_split'] = (room_data['AC'] > 0.75).astype('int') X", "dtrain=d_train, num_boost_round=200, evals=watchlist, verbose_eval=args.log, xgb_model=None, feval=eval_dict[args.metric], maximize=True) prediction = np.array(xgb_model_train_test.predict(d_test)).tolist()", "use the sample weight\") args = parser.parse_args() # Ignore all", "= room_data_smote['AC'] X = room_data_smote.drop(['AC', 'SMOTE_split'], axis=1) else: room_data =", "= X.to_numpy() # Build another full data matrix for the", "drop the time data as we don't need them data", "from sklearn.model_selection import train_test_split from sklearn.utils import compute_sample_weight from tqdm", "seed = 2030 + room # Four rooms have low", "room == 917 or room == 1001: continue # We", "DMatrix(X_test, label=y_test) watchlist = [(d_test, 'eval'), (d_train, 'train')] xgb_model_train_test =", "= room_data_smote.drop(['AC', 'SMOTE_split'], axis=1) elif args.SMOGN: if len(room_data) < 500:", "# Dump the error dataframes into csv files. 
error_csv.to_csv('./{}/error.csv'.format(log_folder_name), index=False)", "space['colsample_bytree'], 'min_child_weight': int(space['min_child_weight']), 'reg_alpha': int(space['reg_alpha']), 'reg_lambda': space['reg_lambda'], 'subsample': space['subsample'], 'min_split_loss':", "on it. room_data = data[data.Location == room].drop(['Location'], axis=1).reset_index(drop=True) if args.SMOTE:", "as np import pandas as pd import xgboost as xgb", "X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=seed) d_train =", "if len(room_data) < 500: room_data['SMOTE_split'] = (room_data['AC'] > 0.75).astype('int') X", "'colsample_bytree': hp.uniform(\"colsample_bytree\", 0.5, 1), 'min_child_weight': hp.quniform(\"min_child_weight\", 1, 20, 1), 'reg_alpha':", "y = room_data_smote['AC'] X = room_data_smote.drop(['AC', 'SMOTE_split'], axis=1) else: room_data", "== 1001: continue # We extract the data of particular", "= room_data['AC'] X = room_data.drop(['AC'], axis=1) else: y = pd.DataFrame(room_data['AC'].fillna(method='pad'))", "dataframes to store the result during the training and after", "json import math import os import pickle import warnings from", "dtrain: DMatrix) -> Tuple[str, float]: truth_value = dtrain.get_label() r2_value =", "truth and prediction value into the dataframe X_train, X_test, y_train,", "'observation', 'prediction']) room_list = data['Location'].unique() # ranging through all the", "hp.uniform(\"colsample_bytree\", 0.5, 1), 'min_child_weight': hp.quniform(\"min_child_weight\", 1, 20, 1), 'reg_alpha': hp.quniform(\"reg_alpha\",", "space['reg_lambda'], 'subsample': space['subsample'], 'min_split_loss': space['min_split_loss'], 'objective': 'reg:squarederror'} xgb_cv_result = xgb.cv(dtrain=data_matrix,", "= DMatrix(X_test, label=y_test) watchlist = [(d_test, 'eval'), (d_train, 'train')] xgb_model_train_test", "numpy as np import pandas as pd import xgboost as", "room_data.drop(['AC'], axis=1).fillna(method='pad') if 
args.SampleWeight: class_sample = pd.cut(y, bins=15) weight =", "{'RMSE': RMSE, 'R2': R2} print(\"Start Training The Models\") # Create", "= \"Test_R2_HYPEROPT\" assert log_folder_name != previous_parameter_folder, \"Previous folder name exists\"", "maximize=True) xgb_cv_result['room'] = room error_csv.loc[len(error_csv)] = xgb_cv_result.loc[len(xgb_cv_result) - 1] #", "seed=seed, shuffle=True, feval=eval_dict[args.metric], maximize=True) xgb_cv_result['room'] = room error_csv.loc[len(error_csv)] = xgb_cv_result.loc[len(xgb_cv_result)", "import fmin, tpe, hp, STATUS_OK, Trials from imblearn.over_sampling import SMOTE", "Tuple[str, float]: truth_value = dtrain.get_label() r2_value = r2_score(truth_value, predt) return", "pd.set_option('display.max_rows', None) assert args.SMOTE != args.SMOGN, \"Can't use SMOTE and", "default=0, help=\"Whether use the sample weight\") args = parser.parse_args() #", "the metric function parser = argparse.ArgumentParser() parser.add_argument(\"--metric\", choices=['R2', 'RMSE'], type=str,", "truth_value = dtrain.get_label() root_squard_error = math.sqrt(mean_squared_error(truth_value, predt)) return \"RMSE\", root_squard_error", "the rooms and do the training and cross-validation for each", "np.load('./{}/models/{}_parameter.npy'.format(previous_parameter_folder, room), allow_pickle=True).item() np.save('./{}/models/{}_parameter.npy'.format(log_folder_name, room), best_param_dict) else: trials = Trials()", "tqdm from xgboost import DMatrix, cv # Set up an", "!= args.SMOGN, \"Can't use SMOTE and SMOGN at the same", "hp.uniform(\"reg_lambda\", 0, 2), 'subsample': hp.uniform(\"subsample\", 0.5, 1), 'min_split_loss': hp.uniform(\"min_split_loss\", 0,", "and do the training and cross-validation for each room. 
for", "prediction_csv = pd.DataFrame(columns=['room', 'observation', 'prediction']) room_list = data['Location'].unique() # ranging", "len(room_data) < 500: room_data['SMOTE_split'] = (room_data['AC'] > 0.75).astype('int') X =", "X = room_data.drop(['AC'], axis=1).fillna(method='pad') if args.SampleWeight: class_sample = pd.cut(y, bins=15)", "y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=seed) d_train = DMatrix(X_train,", "pandas as pd import xgboost as xgb from hyperopt import", "help=\"The evaluation metric you want to use to train the", "\"RMSE\", root_squard_error def R2(predt: np.ndarray, dtrain: DMatrix) -> Tuple[str, float]:", "import r2_score, mean_squared_error from sklearn.model_selection import train_test_split from sklearn.utils import", "assert args.SMOTE != args.SMOGN, \"Can't use SMOTE and SMOGN at", "the error dataframes into csv files. error_csv.to_csv('./{}/error.csv'.format(log_folder_name), index=False) prediction_csv.to_csv('./{}/prediction.csv'.format(log_folder_name), index=False)", "os.path.exists('./{}/'.format(log_folder_name)): os.mkdir('./{}'.format(log_folder_name)) os.mkdir('./{}/models/'.format(log_folder_name)) os.mkdir('./{}/trntst_models/'.format(log_folder_name)) # Define our evaluation functions def", "checkpoint best_param_dict = {'objective': 'reg:squarederror', 'max_depth': int(best_hyperparams['max_depth']), 'reg_alpha': best_hyperparams['reg_alpha'], 'reg_lambda':", "= parser.parse_args() # Ignore all the warnings and set pandas", "= pd.concat([room_data_smote, smote_split], axis=1) y = room_data_smote['AC'] X = room_data_smote.drop(['AC',", "\"Test_R2_HYPEROPT\" assert log_folder_name != previous_parameter_folder, \"Previous folder name exists\" if", "param_dict_tunning = {'max_depth': int(space['max_depth']), 'learning_rate': space['learning_rate'], 'colsample_bytree': space['colsample_bytree'], 'min_child_weight': int(space['min_child_weight']),", "'prediction']) room_list = 
data['Location'].unique() # ranging through all the rooms", "algo=tpe.suggest, max_evals=400, trials=trials, rstate=RandomState(seed)) # setup our training parameters and", "num_boost_round=200, evals=watchlist, verbose_eval=args.log, xgb_model=None, feval=eval_dict[args.metric], maximize=True) # Save all the", "our evaluation functions def RMSE(predt: np.ndarray, dtrain: DMatrix) -> Tuple[str,", "cv # Set up an argument parser to decide the", "don't need them data = pd.read_csv(\"summer_data_compiled.csv\", index_col=0) data = data[data.AC", "the time data as we don't need them data =", "SMOTE or not\") parser.add_argument(\"--SMOGN\", choices=[0, 1], type=int, required=False, default=0, help=\"Whether", "'reg_lambda': space['reg_lambda'], 'subsample': space['subsample'], 'min_split_loss': space['min_split_loss'], 'objective': 'reg:squarederror'} xgb_cv_result =", "np.save('./{}/models/{}_parameter.npy'.format(log_folder_name, room), best_param_dict) # Use the built-in cv function to", "# Build another full data matrix for the built-in cross", "built-in cross validation function to work. 
data_matrix = DMatrix(data=X, label=y,", "choices=[0, 1], type=int, required=False, default=1, help=\"Whether use the SMOTE or", "best_hyperparams['reg_alpha'], 'reg_lambda': best_hyperparams['reg_lambda'], 'min_child_weight': best_hyperparams['min_child_weight'], 'colsample_bytree': best_hyperparams['colsample_bytree'], 'learning_rate': best_hyperparams['learning_rate'], 'subsample':", "as_pandas=True, num_boost_round=200, seed=seed, metrics='rmse', maximize=False, shuffle=True) return {\"loss\": (xgb_cv_result[\"test-rmse-mean\"]).tail(1).iloc[0], \"status\":", "axis=1) y = room_data['SMOTE_split'] # Run the SMOTE algorithm and", "training progress\") parser.add_argument(\"--SMOTE\", choices=[0, 1], type=int, required=False, default=1, help=\"Whether use", "them manually if room == 309 or room == 312", "'subsample': best_hyperparams['subsample'], 'min_split_loss': best_hyperparams['min_split_loss']} np.save('./{}/models/{}_parameter.npy'.format(log_folder_name, room), best_param_dict) # Use the", "this will return us the results. xgb_cv_result = cv(dtrain=data_matrix, params=best_param_dict,", "y = pd.DataFrame(room_data['AC'].fillna(method='pad')) X = room_data.drop(['AC'], axis=1).fillna(method='pad') if args.SampleWeight: class_sample", "training. error_csv = pd.DataFrame( columns=['room', 'train-{}-mean'.format(args.metric), 'train-{}-std'.format(args.metric), 'train-rmse-mean', 'train-rmse-std', 'test-{}-mean'.format(args.metric),", "or room == 1001: continue # We extract the data", "np.array(xgb_model_train_test.predict(d_test)).tolist() real = np.array(y_test).tolist() prediction_csv.loc[len(prediction_csv)] = {'room': room, 'observation': json.dumps(real),", "for each room. 
for room in tqdm(room_list): seed = 2030", "9)} if os.path.exists('./{}/models/{}_parameter.npy'.format(previous_parameter_folder, room)): best_param_dict = np.load('./{}/models/{}_parameter.npy'.format(previous_parameter_folder, room), allow_pickle=True).item() np.save('./{}/models/{}_parameter.npy'.format(log_folder_name,", "XGBoost model\") parser.add_argument(\"--log\", choices=[0, 1, 100], type=int, required=False, default=0, help=\"Whether", "room].drop(['Location'], axis=1).reset_index(drop=True) if args.SMOTE: # Label all the AC data", "csv files. error_csv.to_csv('./{}/error.csv'.format(log_folder_name), index=False) prediction_csv.to_csv('./{}/prediction.csv'.format(log_folder_name), index=False) # Develop a model", "Develop a model using the whole orignial dataset, and save", "if args.SampleWeight: class_sample = pd.cut(y, bins=15) weight = compute_sample_weight(class_weight=\"balanced\", y=class_sample)", "dataframes into csv files. error_csv.to_csv('./{}/error.csv'.format(log_folder_name), index=False) prediction_csv.to_csv('./{}/prediction.csv'.format(log_folder_name), index=False) # Develop", "int(space['reg_alpha']), 'reg_lambda': space['reg_lambda'], 'subsample': space['subsample'], 'min_split_loss': space['min_split_loss'], 'objective': 'reg:squarederror'} xgb_cv_result", "log_folder_name + \"_SMOGN\" if args.SMOGN else log_folder_name log_folder_name = log_folder_name", "dumped into two csv files in the current work directory.", "with a positive AC electricity consumption value, and drop the", "Run the SMOTE algorithm and retrieve the result. 
model_smote =", "fmin(fn=fobjective, space=space, algo=tpe.suggest, max_evals=400, trials=trials, rstate=RandomState(seed)) # setup our training", "maximize=True) # Save all the models we trained for future", "use pickle.dump(xgb_model_train_test, open('./{}/trntst_models/{}.pickle.bat'.format(log_folder_name, room), 'wb')) pickle.dump(xgb_model_full, open('./{}/models/{}.pickle.bat'.format(log_folder_name, room), 'wb')) print(\"Training", "xgb_model_full = xgb.train(params=best_param_dict, dtrain=data_matrix, num_boost_round=200, evals=watchlist, verbose_eval=args.log, xgb_model=None, feval=eval_dict[args.metric], maximize=True)", "and prediction value into the dataframe X_train, X_test, y_train, y_test", "'reg_lambda': best_hyperparams['reg_lambda'], 'min_child_weight': best_hyperparams['min_child_weight'], 'colsample_bytree': best_hyperparams['colsample_bytree'], 'learning_rate': best_hyperparams['learning_rate'], 'subsample': best_hyperparams['subsample'],", "from tqdm import tqdm from xgboost import DMatrix, cv #", "evaluation metric you want to use to train the XGBoost", "to work. 
data_matrix = DMatrix(data=X, label=y, weight=weight) if args.SampleWeight else", "0, 9)} if os.path.exists('./{}/models/{}_parameter.npy'.format(previous_parameter_folder, room)): best_param_dict = np.load('./{}/models/{}_parameter.npy'.format(previous_parameter_folder, room), allow_pickle=True).item()", "STATUS_OK} eval_dict = {'RMSE': RMSE, 'R2': R2} print(\"Start Training The", "room_data_smote.drop(['AC', 'SMOTE_split'], axis=1) else: room_data = pd.read_csv('./SMOGN_processed/{}.csv'.format(room), index_col=0) y =", "Create two dataframes to store the result during the training", "= compute_sample_weight(class_weight=\"balanced\", y=class_sample) X = X.to_numpy() # Build another full", "time!\" # Load the data with a positive AC electricity", "decide the metric function parser = argparse.ArgumentParser() parser.add_argument(\"--metric\", choices=['R2', 'RMSE'],", "Create some directory to store the models and future analysis", "the training. error_csv = pd.DataFrame( columns=['room', 'train-{}-mean'.format(args.metric), 'train-{}-std'.format(args.metric), 'train-rmse-mean', 'train-rmse-std',", "and save both ground truth and prediction value into the", "DMatrix(X_train, label=y_train) d_test = DMatrix(X_test, label=y_test) watchlist = [(d_test, 'eval'),", "log_folder_name log_folder_name = log_folder_name + \"_SMOGN\" if args.SMOGN else log_folder_name", "evaluation functions def RMSE(predt: np.ndarray, dtrain: DMatrix) -> Tuple[str, float]:", "cross-validation for each unique room in the dataset. # Models", "1), 'min_split_loss': hp.uniform(\"min_split_loss\", 0, 9)} if os.path.exists('./{}/models/{}_parameter.npy'.format(previous_parameter_folder, room)): best_param_dict =", "SMOTE algorithm on it. 
room_data = data[data.Location == room].drop(['Location'], axis=1).reset_index(drop=True)", "ploting, and save both ground truth and prediction value into", "# Create some directory to store the models and future", "if args.SampleWeight else DMatrix(data=X, label=y) # Cross_validation with hyper-parameter tuning", "to store the result during the training and after the", "want to use to train the XGBoost model\") parser.add_argument(\"--log\", choices=[0,", "best_hyperparams = fmin(fn=fobjective, space=space, algo=tpe.suggest, max_evals=400, trials=trials, rstate=RandomState(seed)) # setup", "= pd.cut(y, bins=15) weight = compute_sample_weight(class_weight=\"balanced\", y=class_sample) X = X.to_numpy()", "use the SMOGN or not\") parser.add_argument(\"--SampleWeight\", choices=[0, 1], type=int, required=False,", "for future use pickle.dump(xgb_model_train_test, open('./{}/trntst_models/{}.pickle.bat'.format(log_folder_name, room), 'wb')) pickle.dump(xgb_model_full, open('./{}/models/{}.pickle.bat'.format(log_folder_name, room),", "hp.uniform(\"min_split_loss\", 0, 9)} if os.path.exists('./{}/models/{}_parameter.npy'.format(previous_parameter_folder, room)): best_param_dict = np.load('./{}/models/{}_parameter.npy'.format(previous_parameter_folder, room),", "hp, STATUS_OK, Trials from imblearn.over_sampling import SMOTE from numpy.random import", "prediction = np.array(xgb_model_train_test.predict(d_test)).tolist() real = np.array(y_test).tolist() prediction_csv.loc[len(prediction_csv)] = {'room': room,", "== 309 or room == 312 or room == 826", "pickle import warnings from typing import Tuple import numpy as", "room == 826 or room == 917 or room ==", "room, 'observation': json.dumps(real), 'prediction': json.dumps(prediction)} # Dump the error dataframes", "log_folder_name + \"_SMOTE\" if args.SMOTE else log_folder_name log_folder_name = log_folder_name", "RMSE, 'R2': R2} print(\"Start Training The Models\") # Create two", "Save all the models we trained for future use 
pickle.dump(xgb_model_train_test,", "will return us the results. xgb_cv_result = cv(dtrain=data_matrix, params=best_param_dict, nfold=5,", "the XGBoost model\") parser.add_argument(\"--log\", choices=[0, 1, 100], type=int, required=False, default=0,", "model checkpoint best_param_dict = {'objective': 'reg:squarederror', 'max_depth': int(best_hyperparams['max_depth']), 'reg_alpha': best_hyperparams['reg_alpha'],", "feval=eval_dict[args.metric], maximize=True) # Save all the models we trained for", "the result. model_smote = SMOTE(random_state=621, k_neighbors=3) room_data_smote, smote_split = model_smote.fit_resample(X,", "it. room_data = data[data.Location == room].drop(['Location'], axis=1).reset_index(drop=True) if args.SMOTE: #", "Set up an argument parser to decide the metric function", "2), 'subsample': hp.uniform(\"subsample\", 0.5, 1), 'min_split_loss': hp.uniform(\"min_split_loss\", 0, 9)} if", "3, 10, 1), 'learning_rate': hp.uniform(\"learning_rate\", 0.1, 3), 'colsample_bytree': hp.uniform(\"colsample_bytree\", 0.5,", "'Date', 'Hour'], axis=1).reset_index(drop=True) # Create some directory to store the", "dumped into ./models and results are dumped into two csv", "argparse import json import math import os import pickle import", "log_folder_name = \"Test_R2_HYPEROPT\" log_folder_name = log_folder_name + \"_SMOTE\" if args.SMOTE", "training and cross-validation for each room. 
for room in tqdm(room_list):", "{'max_depth': int(space['max_depth']), 'learning_rate': space['learning_rate'], 'colsample_bytree': space['colsample_bytree'], 'min_child_weight': int(space['min_child_weight']), 'reg_alpha': int(space['reg_alpha']),", "two dataframes to store the result during the training and", "best_param_dict) else: trials = Trials() best_hyperparams = fmin(fn=fobjective, space=space, algo=tpe.suggest,", "'eval'), (d_train, 'train')] xgb_model_train_test = xgb.train(params=best_param_dict, dtrain=d_train, num_boost_round=200, evals=watchlist, verbose_eval=args.log,", "k_neighbors=3) room_data_smote, smote_split = model_smote.fit_resample(X, y) # concat the result", "the built-in cross validation function to work. data_matrix = DMatrix(data=X,", "work. data_matrix = DMatrix(data=X, label=y, weight=weight) if args.SampleWeight else DMatrix(data=X,", "value into the dataframe X_train, X_test, y_train, y_test = train_test_split(X,", "SMOTE(random_state=621, k_neighbors=3) room_data_smote, smote_split = model_smote.fit_resample(X, y) # concat the", "best_param_dict) # Use the built-in cv function to do the", "folds, this will return us the results. xgb_cv_result = cv(dtrain=data_matrix,", "results. xgb_cv_result = cv(dtrain=data_matrix, params=best_param_dict, nfold=5, early_stopping_rounds=30, as_pandas=True, num_boost_round=200, seed=seed,", "from xgboost import DMatrix, cv # Set up an argument", "the data of particular room and run the SMOTE algorithm", "us the results. 
xgb_cv_result = cv(dtrain=data_matrix, params=best_param_dict, nfold=5, early_stopping_rounds=30, as_pandas=True,", "X.to_numpy() # Build another full data matrix for the built-in", "int(best_hyperparams['max_depth']), 'reg_alpha': best_hyperparams['reg_alpha'], 'reg_lambda': best_hyperparams['reg_lambda'], 'min_child_weight': best_hyperparams['min_child_weight'], 'colsample_bytree': best_hyperparams['colsample_bytree'], 'learning_rate':", "results are dumped into two csv files in the current", "analysis figures. # log_folder_name = \"Test_{}_{}\".format(args.metric, datetime.now().strftime(\"%Y_%m_%d_%H_%M_%S\")) log_folder_name = \"Test_R2_HYPEROPT\"", "else: trials = Trials() best_hyperparams = fmin(fn=fobjective, space=space, algo=tpe.suggest, max_evals=400,", "\"R2\", r2_value def fobjective(space): param_dict_tunning = {'max_depth': int(space['max_depth']), 'learning_rate': space['learning_rate'],", "= np.array(y_test).tolist() prediction_csv.loc[len(prediction_csv)] = {'room': room, 'observation': json.dumps(real), 'prediction': json.dumps(prediction)}", "label=y_train) d_test = DMatrix(X_test, label=y_test) watchlist = [(d_test, 'eval'), (d_train,", "into X and y for training. room_data_smote = pd.concat([room_data_smote, smote_split],", "'learning_rate': hp.uniform(\"learning_rate\", 0.1, 3), 'colsample_bytree': hp.uniform(\"colsample_bytree\", 0.5, 1), 'min_child_weight': hp.quniform(\"min_child_weight\",", "previous_parameter_folder = \"Test_R2_HYPEROPT\" assert log_folder_name != previous_parameter_folder, \"Previous folder name", "'SMOTE_split'], axis=1) else: room_data = pd.read_csv('./SMOGN_processed/{}.csv'.format(room), index_col=0) y = room_data['AC']", "the code to train the xgboost model with cross-validation for", "and y room_data['SMOTE_split'] = (room_data['AC'] > 0.75).astype('int') X = room_data.drop(['SMOTE_split'],", "# Models are dumped into ./models and results are dumped", "and retrieve the result. 
model_smote = SMOTE(random_state=621, k_neighbors=3) room_data_smote, smote_split", "-> Tuple[str, float]: truth_value = dtrain.get_label() root_squard_error = math.sqrt(mean_squared_error(truth_value, predt))", "weight\") args = parser.parse_args() # Ignore all the warnings and", "pd.set_option('display.max_columns', None) pd.set_option('display.max_rows', None) assert args.SMOTE != args.SMOGN, \"Can't use", "1], type=int, required=False, default=0, help=\"Whether use the sample weight\") args", "1, otherwise 0. Split into X and y room_data['SMOTE_split'] =", "into ./models and results are dumped into two csv files", "args = parser.parse_args() # Ignore all the warnings and set", "default=0, help=\"Whether use the SMOGN or not\") parser.add_argument(\"--SampleWeight\", choices=[0, 1],", "error_csv.loc[len(error_csv)] = xgb_cv_result.loc[len(xgb_cv_result) - 1] # Use one training_testing for", "them data = pd.read_csv(\"summer_data_compiled.csv\", index_col=0) data = data[data.AC > 0].drop(['Time',", "'Hour'], axis=1).reset_index(drop=True) # Create some directory to store the models", "sample weight\") args = parser.parse_args() # Ignore all the warnings", "'min_split_loss': space['min_split_loss'], 'objective': 'reg:squarederror'} xgb_cv_result = xgb.cv(dtrain=data_matrix, params=param_dict_tunning, nfold=5, early_stopping_rounds=30,", "room # Four rooms have low quality data and we", "display every column and row everytime we print a dataframe", "choices=[0, 1], type=int, required=False, default=0, help=\"Whether use the sample weight\")", "float]: truth_value = dtrain.get_label() root_squard_error = math.sqrt(mean_squared_error(truth_value, predt)) return \"RMSE\",", "predt) return \"R2\", r2_value def fobjective(space): param_dict_tunning = {'max_depth': int(space['max_depth']),", "# log_folder_name = \"Test_{}_{}\".format(args.metric, datetime.now().strftime(\"%Y_%m_%d_%H_%M_%S\")) log_folder_name = \"Test_R2_HYPEROPT\" log_folder_name =", 
"data['Location'].unique() # ranging through all the rooms and do the", "== room].drop(['Location'], axis=1).reset_index(drop=True) if args.SMOTE: # Label all the AC", "be marked as 1, otherwise 0. Split into X and", "# This is the code to train the xgboost model", "save both ground truth and prediction value into the dataframe", "Models are dumped into ./models and results are dumped into", "root_squard_error = math.sqrt(mean_squared_error(truth_value, predt)) return \"RMSE\", root_squard_error def R2(predt: np.ndarray,", "warnings.filterwarnings('ignore') pd.set_option('display.max_columns', None) pd.set_option('display.max_rows', None) assert args.SMOTE != args.SMOGN, \"Can't", "-> Tuple[str, float]: truth_value = dtrain.get_label() r2_value = r2_score(truth_value, predt)", "the SMOTE algorithm on it. room_data = data[data.Location == room].drop(['Location'],", "'reg_lambda': hp.uniform(\"reg_lambda\", 0, 2), 'subsample': hp.uniform(\"subsample\", 0.5, 1), 'min_split_loss': hp.uniform(\"min_split_loss\",", "argparse.ArgumentParser() parser.add_argument(\"--metric\", choices=['R2', 'RMSE'], type=str, required=False, default='R2', help=\"The evaluation metric", "the SMOTE algorithm and retrieve the result. 
model_smote = SMOTE(random_state=621,", "DMatrix(data=X, label=y, weight=weight) if args.SampleWeight else DMatrix(data=X, label=y) # Cross_validation", "if room == 309 or room == 312 or room", "= log_folder_name + \"_SMOTE\" if args.SMOTE else log_folder_name log_folder_name =", "= room_data_smote['AC'] X = room_data_smote.drop(['AC', 'SMOTE_split'], axis=1) elif args.SMOGN: if", "quality data and we delete them manually if room ==", "xgb from hyperopt import fmin, tpe, hp, STATUS_OK, Trials from", "store the result during the training and after the training.", "+ \"_SMOGN\" if args.SMOGN else log_folder_name log_folder_name = log_folder_name +", "# Label all the AC data by 0.75, all AC", "we delete them manually if room == 309 or room", "and row everytime we print a dataframe warnings.filterwarnings('ignore') pd.set_option('display.max_columns', None)", "room_data.drop(['SMOTE_split'], axis=1) y = room_data['SMOTE_split'] # Run the SMOTE algorithm", "space['learning_rate'], 'colsample_bytree': space['colsample_bytree'], 'min_child_weight': int(space['min_child_weight']), 'reg_alpha': int(space['reg_alpha']), 'reg_lambda': space['reg_lambda'], 'subsample':", "room == 312 or room == 826 or room ==", "import math import os import pickle import warnings from typing", "axis=1) else: room_data = pd.read_csv('./SMOGN_processed/{}.csv'.format(room), index_col=0) y = room_data['AC'] X", "'learning_rate': best_hyperparams['learning_rate'], 'subsample': best_hyperparams['subsample'], 'min_split_loss': best_hyperparams['min_split_loss']} np.save('./{}/models/{}_parameter.npy'.format(log_folder_name, room), best_param_dict) #", "and y for training. 
room_data_smote = pd.concat([room_data_smote, smote_split], axis=1) y", "import json import math import os import pickle import warnings", "args.SampleWeight: class_sample = pd.cut(y, bins=15) weight = compute_sample_weight(class_weight=\"balanced\", y=class_sample) X", "X = room_data_smote.drop(['AC', 'SMOTE_split'], axis=1) else: room_data = pd.read_csv('./SMOGN_processed/{}.csv'.format(room), index_col=0)", "you want to use to train the XGBoost model\") parser.add_argument(\"--log\",", "Training The Models\") # Create two dataframes to store the", "full data matrix for the built-in cross validation function to", "the AC data by 0.75, all AC above 0.75 will", "import xgboost as xgb from hyperopt import fmin, tpe, hp,", "else log_folder_name log_folder_name = log_folder_name + \"_SMOGN\" if args.SMOGN else", "tqdm(room_list): seed = 2030 + room # Four rooms have", "'subsample': hp.uniform(\"subsample\", 0.5, 1), 'min_split_loss': hp.uniform(\"min_split_loss\", 0, 9)} if os.path.exists('./{}/models/{}_parameter.npy'.format(previous_parameter_folder,", "# Cross_validation with hyper-parameter tuning space = {'max_depth': hp.quniform(\"max_depth\", 3,", "into csv files. 
error_csv.to_csv('./{}/error.csv'.format(log_folder_name), index=False) prediction_csv.to_csv('./{}/prediction.csv'.format(log_folder_name), index=False) # Develop a", "'min_split_loss': best_hyperparams['min_split_loss']} np.save('./{}/models/{}_parameter.npy'.format(log_folder_name, room), best_param_dict) # Use the built-in cv", "500: room_data['SMOTE_split'] = (room_data['AC'] > 0.75).astype('int') X = room_data.drop(['SMOTE_split'], axis=1)", "space=space, algo=tpe.suggest, max_evals=400, trials=trials, rstate=RandomState(seed)) # setup our training parameters", "None) assert args.SMOTE != args.SMOGN, \"Can't use SMOTE and SMOGN", "'colsample_bytree': space['colsample_bytree'], 'min_child_weight': int(space['min_child_weight']), 'reg_alpha': int(space['reg_alpha']), 'reg_lambda': space['reg_lambda'], 'subsample': space['subsample'],", "space = {'max_depth': hp.quniform(\"max_depth\", 3, 10, 1), 'learning_rate': hp.uniform(\"learning_rate\", 0.1,", "= {'objective': 'reg:squarederror', 'max_depth': int(best_hyperparams['max_depth']), 'reg_alpha': best_hyperparams['reg_alpha'], 'reg_lambda': best_hyperparams['reg_lambda'], 'min_child_weight':", "pd import xgboost as xgb from hyperopt import fmin, tpe,", "os.mkdir('./{}'.format(log_folder_name)) os.mkdir('./{}/models/'.format(log_folder_name)) os.mkdir('./{}/trntst_models/'.format(log_folder_name)) # Define our evaluation functions def RMSE(predt:", "= r2_score(truth_value, predt) return \"R2\", r2_value def fobjective(space): param_dict_tunning =", "required=False, default='R2', help=\"The evaluation metric you want to use to", "the results. 
xgb_cv_result = cv(dtrain=data_matrix, params=best_param_dict, nfold=5, early_stopping_rounds=30, as_pandas=True, num_boost_round=200,", "save the model xgb_model_full = xgb.train(params=best_param_dict, dtrain=data_matrix, num_boost_round=200, evals=watchlist, verbose_eval=args.log,", "default=1, help=\"Whether use the SMOTE or not\") parser.add_argument(\"--SMOGN\", choices=[0, 1],", "parser to decide the metric function parser = argparse.ArgumentParser() parser.add_argument(\"--metric\",", "xgboost as xgb from hyperopt import fmin, tpe, hp, STATUS_OK,", "'prediction': json.dumps(prediction)} # Dump the error dataframes into csv files.", "num_boost_round=200, seed=seed, metrics='rmse', maximize=False, shuffle=True) return {\"loss\": (xgb_cv_result[\"test-rmse-mean\"]).tail(1).iloc[0], \"status\": STATUS_OK}", "hyper-parameter tuning space = {'max_depth': hp.quniform(\"max_depth\", 3, 10, 1), 'learning_rate':", "as xgb from hyperopt import fmin, tpe, hp, STATUS_OK, Trials", "print(\"Start Training The Models\") # Create two dataframes to store", "the xgboost model with cross-validation for each unique room in", "from numpy.random import RandomState from sklearn.metrics import r2_score, mean_squared_error from", "the training and cross-validation for each room. 
for room in", "y) # concat the result from SMOTE and split the", "1001: continue # We extract the data of particular room", "'reg_alpha': int(space['reg_alpha']), 'reg_lambda': space['reg_lambda'], 'subsample': space['subsample'], 'min_split_loss': space['min_split_loss'], 'objective': 'reg:squarederror'}", "= log_folder_name + \"_SW\" if args.SampleWeight else log_folder_name previous_parameter_folder =", "smote_split = model_smote.fit_resample(X, y) # concat the result from SMOTE", "assert log_folder_name != previous_parameter_folder, \"Previous folder name exists\" if not", "1, 100], type=int, required=False, default=0, help=\"Whether to print out the", "This is the code to train the xgboost model with", "# Run the SMOTE algorithm and retrieve the result. model_smote", "label=y) # Cross_validation with hyper-parameter tuning space = {'max_depth': hp.quniform(\"max_depth\",", "models we trained for future use pickle.dump(xgb_model_train_test, open('./{}/trntst_models/{}.pickle.bat'.format(log_folder_name, room), 'wb'))", "Trials from imblearn.over_sampling import SMOTE from numpy.random import RandomState from", "data and we delete them manually if room == 309", "xgb_cv_result['room'] = room error_csv.loc[len(error_csv)] = xgb_cv_result.loc[len(xgb_cv_result) - 1] # Use", "max_evals=400, trials=trials, rstate=RandomState(seed)) # setup our training parameters and a", "hp.quniform(\"max_depth\", 3, 10, 1), 'learning_rate': hp.uniform(\"learning_rate\", 0.1, 3), 'colsample_bytree': hp.uniform(\"colsample_bytree\",", "will be marked as 1, otherwise 0. 
Split into X", "room_data['SMOTE_split'] = (room_data['AC'] > 0.75).astype('int') X = room_data.drop(['SMOTE_split'], axis=1) y", "a model variable as model checkpoint best_param_dict = {'objective': 'reg:squarederror',", "print a dataframe warnings.filterwarnings('ignore') pd.set_option('display.max_columns', None) pd.set_option('display.max_rows', None) assert args.SMOTE", "dtrain.get_label() root_squard_error = math.sqrt(mean_squared_error(truth_value, predt)) return \"RMSE\", root_squard_error def R2(predt:", "room), best_param_dict) # Use the built-in cv function to do", "np.save('./{}/models/{}_parameter.npy'.format(log_folder_name, room), best_param_dict) else: trials = Trials() best_hyperparams = fmin(fn=fobjective,", "def fobjective(space): param_dict_tunning = {'max_depth': int(space['max_depth']), 'learning_rate': space['learning_rate'], 'colsample_bytree': space['colsample_bytree'],", "from hyperopt import fmin, tpe, hp, STATUS_OK, Trials from imblearn.over_sampling", "set pandas to display every column and row everytime we", "room in tqdm(room_list): seed = 2030 + room # Four", "in tqdm(room_list): seed = 2030 + room # Four rooms", "args.SMOTE: # Label all the AC data by 0.75, all", "R2} print(\"Start Training The Models\") # Create two dataframes to", "hp.quniform(\"reg_alpha\", 0, 100, 1), 'reg_lambda': hp.uniform(\"reg_lambda\", 0, 2), 'subsample': hp.uniform(\"subsample\",", "training parameters and a model variable as model checkpoint best_param_dict", "room)): best_param_dict = np.load('./{}/models/{}_parameter.npy'.format(previous_parameter_folder, room), allow_pickle=True).item() np.save('./{}/models/{}_parameter.npy'.format(log_folder_name, room), best_param_dict) else:", "AC above 0.75 will be marked as 1, otherwise 0.", "the dataset. 
# Models are dumped into ./models and results", "r2_value def fobjective(space): param_dict_tunning = {'max_depth': int(space['max_depth']), 'learning_rate': space['learning_rate'], 'colsample_bytree':", "model_smote.fit_resample(X, y) # concat the result from SMOTE and split", "return \"R2\", r2_value def fobjective(space): param_dict_tunning = {'max_depth': int(space['max_depth']), 'learning_rate':", "= DMatrix(X_train, label=y_train) d_test = DMatrix(X_test, label=y_test) watchlist = [(d_test,", "not os.path.exists('./{}/'.format(log_folder_name)): os.mkdir('./{}'.format(log_folder_name)) os.mkdir('./{}/models/'.format(log_folder_name)) os.mkdir('./{}/trntst_models/'.format(log_folder_name)) # Define our evaluation functions", "data of particular room and run the SMOTE algorithm on", "store the models and future analysis figures. # log_folder_name =", "+ \"_SMOTE\" if args.SMOTE else log_folder_name log_folder_name = log_folder_name +", "y = room_data['AC'] X = room_data.drop(['AC'], axis=1) else: y =", "data_matrix = DMatrix(data=X, label=y, weight=weight) if args.SampleWeight else DMatrix(data=X, label=y)", "= xgb.train(params=best_param_dict, dtrain=data_matrix, num_boost_round=200, evals=watchlist, verbose_eval=args.log, xgb_model=None, feval=eval_dict[args.metric], maximize=True) #", "otherwise 0. 
Split into X and y room_data['SMOTE_split'] = (room_data['AC']", "import Tuple import numpy as np import pandas as pd", "= [(d_test, 'eval'), (d_train, 'train')] xgb_model_train_test = xgb.train(params=best_param_dict, dtrain=d_train, num_boost_round=200,", "required=False, default=0, help=\"Whether use the SMOGN or not\") parser.add_argument(\"--SampleWeight\", choices=[0,", "pandas to display every column and row everytime we print", "= room_data_smote.drop(['AC', 'SMOTE_split'], axis=1) else: room_data = pd.read_csv('./SMOGN_processed/{}.csv'.format(room), index_col=0) y", "numpy.random import RandomState from sklearn.metrics import r2_score, mean_squared_error from sklearn.model_selection", "else DMatrix(data=X, label=y) # Cross_validation with hyper-parameter tuning space =", "xgb_model_train_test = xgb.train(params=best_param_dict, dtrain=d_train, num_boost_round=200, evals=watchlist, verbose_eval=args.log, xgb_model=None, feval=eval_dict[args.metric], maximize=True)", "== 312 or room == 826 or room == 917", "data[data.AC > 0].drop(['Time', 'Date', 'Hour'], axis=1).reset_index(drop=True) # Create some directory", "columns=['room', 'train-{}-mean'.format(args.metric), 'train-{}-std'.format(args.metric), 'train-rmse-mean', 'train-rmse-std', 'test-{}-mean'.format(args.metric), 'test-{}-std'.format(args.metric), 'test-rmse-mean', 'test-rmse-std']) prediction_csv", "log_folder_name = log_folder_name + \"_SW\" if args.SampleWeight else log_folder_name previous_parameter_folder", "DMatrix, cv # Set up an argument parser to decide", "value, and drop the time data as we don't need", "> 0].drop(['Time', 'Date', 'Hour'], axis=1).reset_index(drop=True) # Create some directory to", "np.ndarray, dtrain: DMatrix) -> Tuple[str, float]: truth_value = dtrain.get_label() root_squard_error", "params=best_param_dict, nfold=5, early_stopping_rounds=30, as_pandas=True, num_boost_round=200, seed=seed, shuffle=True, feval=eval_dict[args.metric], maximize=True) xgb_cv_result['room']", 
"d_train = DMatrix(X_train, label=y_train) d_test = DMatrix(X_test, label=y_test) watchlist =", "\"Test_R2_HYPEROPT\" log_folder_name = log_folder_name + \"_SMOTE\" if args.SMOTE else log_folder_name", "the sample weight\") args = parser.parse_args() # Ignore all the", "X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=seed) d_train", "dataframe X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=seed)", "pd.DataFrame(columns=['room', 'observation', 'prediction']) room_list = data['Location'].unique() # ranging through all", "do the training and cross-validation for each room. for room", "3), 'colsample_bytree': hp.uniform(\"colsample_bytree\", 0.5, 1), 'min_child_weight': hp.quniform(\"min_child_weight\", 1, 20, 1),", "X = room_data.drop(['SMOTE_split'], axis=1) y = room_data['SMOTE_split'] # Run the", "sklearn.utils import compute_sample_weight from tqdm import tqdm from xgboost import", "args.SMOGN else log_folder_name log_folder_name = log_folder_name + \"_SW\" if args.SampleWeight", "[(d_test, 'eval'), (d_train, 'train')] xgb_model_train_test = xgb.train(params=best_param_dict, dtrain=d_train, num_boost_round=200, evals=watchlist,", "1] # Use one training_testing for ploting, and save both", "test_size=0.2, random_state=seed) d_train = DMatrix(X_train, label=y_train) d_test = DMatrix(X_test, label=y_test)", "= xgb_cv_result.loc[len(xgb_cv_result) - 1] # Use one training_testing for ploting,", "args.SampleWeight else log_folder_name previous_parameter_folder = \"Test_R2_HYPEROPT\" assert log_folder_name != previous_parameter_folder,", "training_testing for ploting, and save both ground truth and prediction", "Label all the AC data by 0.75, all AC above", "{'max_depth': hp.quniform(\"max_depth\", 3, 10, 1), 'learning_rate': hp.uniform(\"learning_rate\", 0.1, 3), 'colsample_bytree':", "room_data['SMOTE_split'] # Run the SMOTE algorithm and retrieve the result.", "seed=seed, metrics='rmse', 
maximize=False, shuffle=True) return {\"loss\": (xgb_cv_result[\"test-rmse-mean\"]).tail(1).iloc[0], \"status\": STATUS_OK} eval_dict", "for room in tqdm(room_list): seed = 2030 + room #", "# Four rooms have low quality data and we delete", "0.5, 1), 'min_split_loss': hp.uniform(\"min_split_loss\", 0, 9)} if os.path.exists('./{}/models/{}_parameter.npy'.format(previous_parameter_folder, room)): best_param_dict", "argument parser to decide the metric function parser = argparse.ArgumentParser()", "< 500: room_data['SMOTE_split'] = (room_data['AC'] > 0.75).astype('int') X = room_data.drop(['SMOTE_split'],", "parameters and a model variable as model checkpoint best_param_dict =", "best_hyperparams['subsample'], 'min_split_loss': best_hyperparams['min_split_loss']} np.save('./{}/models/{}_parameter.npy'.format(log_folder_name, room), best_param_dict) # Use the built-in", "prediction_csv.loc[len(prediction_csv)] = {'room': room, 'observation': json.dumps(real), 'prediction': json.dumps(prediction)} # Dump", "type=int, required=False, default=1, help=\"Whether use the SMOTE or not\") parser.add_argument(\"--SMOGN\",", "import SMOTE from numpy.random import RandomState from sklearn.metrics import r2_score,", "the built-in cv function to do the cross validation, still", "math.sqrt(mean_squared_error(truth_value, predt)) return \"RMSE\", root_squard_error def R2(predt: np.ndarray, dtrain: DMatrix)", "room_data_smote.drop(['AC', 'SMOTE_split'], axis=1) elif args.SMOGN: if len(room_data) < 500: room_data['SMOTE_split']", "DMatrix) -> Tuple[str, float]: truth_value = dtrain.get_label() root_squard_error = math.sqrt(mean_squared_error(truth_value,", "do the cross validation, still with ten folds, this will", "= room_data.drop(['SMOTE_split'], axis=1) y = room_data['SMOTE_split'] # Run the SMOTE", "ranging through all the rooms and do the training and", "# concat the result from SMOTE and split the result", "# Ignore all the warnings and set pandas to display", "figures. 
# log_folder_name = \"Test_{}_{}\".format(args.metric, datetime.now().strftime(\"%Y_%m_%d_%H_%M_%S\")) log_folder_name = \"Test_R2_HYPEROPT\" log_folder_name", "room and run the SMOTE algorithm on it. room_data =", "dtrain.get_label() r2_value = r2_score(truth_value, predt) return \"R2\", r2_value def fobjective(space):", "with ten folds, this will return us the results. xgb_cv_result", "import train_test_split from sklearn.utils import compute_sample_weight from tqdm import tqdm", "each unique room in the dataset. # Models are dumped", "axis=1).reset_index(drop=True) # Create some directory to store the models and", "default='R2', help=\"The evaluation metric you want to use to train", "= room_data['SMOTE_split'] # Run the SMOTE algorithm and retrieve the", "model with cross-validation for each unique room in the dataset.", "as pd import xgboost as xgb from hyperopt import fmin,", "if args.SampleWeight else log_folder_name previous_parameter_folder = \"Test_R2_HYPEROPT\" assert log_folder_name !=", "and a model variable as model checkpoint best_param_dict = {'objective':", "import warnings from typing import Tuple import numpy as np", "unique room in the dataset. 
# Models are dumped into", "826 or room == 917 or room == 1001: continue", "best_param_dict = {'objective': 'reg:squarederror', 'max_depth': int(best_hyperparams['max_depth']), 'reg_alpha': best_hyperparams['reg_alpha'], 'reg_lambda': best_hyperparams['reg_lambda'],", "best_hyperparams['colsample_bytree'], 'learning_rate': best_hyperparams['learning_rate'], 'subsample': best_hyperparams['subsample'], 'min_split_loss': best_hyperparams['min_split_loss']} np.save('./{}/models/{}_parameter.npy'.format(log_folder_name, room), best_param_dict)", "datetime.now().strftime(\"%Y_%m_%d_%H_%M_%S\")) log_folder_name = \"Test_R2_HYPEROPT\" log_folder_name = log_folder_name + \"_SMOTE\" if", "another full data matrix for the built-in cross validation function", "the SMOTE or not\") parser.add_argument(\"--SMOGN\", choices=[0, 1], type=int, required=False, default=0,", "directory to store the models and future analysis figures. #", "still with ten folds, this will return us the results.", "= train_test_split(X, y, test_size=0.2, random_state=seed) d_train = DMatrix(X_train, label=y_train) d_test", "train_test_split(X, y, test_size=0.2, random_state=seed) d_train = DMatrix(X_train, label=y_train) d_test =", "random_state=seed) d_train = DMatrix(X_train, label=y_train) d_test = DMatrix(X_test, label=y_test) watchlist", "or room == 826 or room == 917 or room", "SMOTE and split the result into X and y for", "X = X.to_numpy() # Build another full data matrix for", "shuffle=True, feval=eval_dict[args.metric], maximize=True) xgb_cv_result['room'] = room error_csv.loc[len(error_csv)] = xgb_cv_result.loc[len(xgb_cv_result) -", "10, 1), 'learning_rate': hp.uniform(\"learning_rate\", 0.1, 3), 'colsample_bytree': hp.uniform(\"colsample_bytree\", 0.5, 1),", "space['min_split_loss'], 'objective': 'reg:squarederror'} xgb_cv_result = xgb.cv(dtrain=data_matrix, params=param_dict_tunning, nfold=5, early_stopping_rounds=30, as_pandas=True,", "required=False, default=0, help=\"Whether to print out 
the training progress\") parser.add_argument(\"--SMOTE\",", "= np.load('./{}/models/{}_parameter.npy'.format(previous_parameter_folder, room), allow_pickle=True).item() np.save('./{}/models/{}_parameter.npy'.format(log_folder_name, room), best_param_dict) else: trials =", "choices=[0, 1], type=int, required=False, default=0, help=\"Whether use the SMOGN or", "'reg:squarederror'} xgb_cv_result = xgb.cv(dtrain=data_matrix, params=param_dict_tunning, nfold=5, early_stopping_rounds=30, as_pandas=True, num_boost_round=200, seed=seed,", "data as we don't need them data = pd.read_csv(\"summer_data_compiled.csv\", index_col=0)", "\"status\": STATUS_OK} eval_dict = {'RMSE': RMSE, 'R2': R2} print(\"Start Training", "room == 309 or room == 312 or room ==", "compute_sample_weight(class_weight=\"balanced\", y=class_sample) X = X.to_numpy() # Build another full data", "+ room # Four rooms have low quality data and", "choices=['R2', 'RMSE'], type=str, required=False, default='R2', help=\"The evaluation metric you want", "the cross validation, still with ten folds, this will return", "and we delete them manually if room == 309 or", "args.SMOGN, \"Can't use SMOTE and SMOGN at the same time!\"", "with cross-validation for each unique room in the dataset. #", "training. 
room_data_smote = pd.concat([room_data_smote, smote_split], axis=1) y = room_data_smote['AC'] X", "positive AC electricity consumption value, and drop the time data", "metric you want to use to train the XGBoost model\")", "time data as we don't need them data = pd.read_csv(\"summer_data_compiled.csv\",", "metric function parser = argparse.ArgumentParser() parser.add_argument(\"--metric\", choices=['R2', 'RMSE'], type=str, required=False,", "'test-rmse-mean', 'test-rmse-std']) prediction_csv = pd.DataFrame(columns=['room', 'observation', 'prediction']) room_list = data['Location'].unique()", "index=False) # Develop a model using the whole orignial dataset,", "X = room_data_smote.drop(['AC', 'SMOTE_split'], axis=1) elif args.SMOGN: if len(room_data) <", "to use to train the XGBoost model\") parser.add_argument(\"--log\", choices=[0, 1,", "# Load the data with a positive AC electricity consumption", "= pd.read_csv(\"summer_data_compiled.csv\", index_col=0) data = data[data.AC > 0].drop(['Time', 'Date', 'Hour'],", "all the rooms and do the training and cross-validation for", "real = np.array(y_test).tolist() prediction_csv.loc[len(prediction_csv)] = {'room': room, 'observation': json.dumps(real), 'prediction':", "= pd.DataFrame(room_data['AC'].fillna(method='pad')) X = room_data.drop(['AC'], axis=1).fillna(method='pad') if args.SampleWeight: class_sample =", "evals=watchlist, verbose_eval=args.log, xgb_model=None, feval=eval_dict[args.metric], maximize=True) prediction = np.array(xgb_model_train_test.predict(d_test)).tolist() real =", "orignial dataset, and save the model xgb_model_full = xgb.train(params=best_param_dict, dtrain=data_matrix,", "evals=watchlist, verbose_eval=args.log, xgb_model=None, feval=eval_dict[args.metric], maximize=True) # Save all the models", "log_folder_name != previous_parameter_folder, \"Previous folder name exists\" if not os.path.exists('./{}/'.format(log_folder_name)):", "or room == 917 or room == 1001: continue #", "manually if room == 
309 or room == 312 or", "xgb_cv_result.loc[len(xgb_cv_result) - 1] # Use one training_testing for ploting, and", "files. error_csv.to_csv('./{}/error.csv'.format(log_folder_name), index=False) prediction_csv.to_csv('./{}/prediction.csv'.format(log_folder_name), index=False) # Develop a model using", "(xgb_cv_result[\"test-rmse-mean\"]).tail(1).iloc[0], \"status\": STATUS_OK} eval_dict = {'RMSE': RMSE, 'R2': R2} print(\"Start", "else log_folder_name log_folder_name = log_folder_name + \"_SW\" if args.SampleWeight else", "room_data = pd.read_csv('./SMOGN_processed/{}.csv'.format(room), index_col=0) y = room_data['AC'] X = room_data.drop(['AC'],", "> 0.75).astype('int') X = room_data.drop(['SMOTE_split'], axis=1) y = room_data['SMOTE_split'] #", "root_squard_error def R2(predt: np.ndarray, dtrain: DMatrix) -> Tuple[str, float]: truth_value", "def RMSE(predt: np.ndarray, dtrain: DMatrix) -> Tuple[str, float]: truth_value =", "\"_SMOTE\" if args.SMOTE else log_folder_name log_folder_name = log_folder_name + \"_SMOGN\"", "class_sample = pd.cut(y, bins=15) weight = compute_sample_weight(class_weight=\"balanced\", y=class_sample) X =", "data = pd.read_csv(\"summer_data_compiled.csv\", index_col=0) data = data[data.AC > 0].drop(['Time', 'Date',", "y = room_data['SMOTE_split'] # Run the SMOTE algorithm and retrieve", "1], type=int, required=False, default=0, help=\"Whether use the SMOGN or not\")", "'test-{}-std'.format(args.metric), 'test-rmse-mean', 'test-rmse-std']) prediction_csv = pd.DataFrame(columns=['room', 'observation', 'prediction']) room_list =", "= {'max_depth': hp.quniform(\"max_depth\", 3, 10, 1), 'learning_rate': hp.uniform(\"learning_rate\", 0.1, 3),", "matrix for the built-in cross validation function to work. 
data_matrix", "if os.path.exists('./{}/models/{}_parameter.npy'.format(previous_parameter_folder, room)): best_param_dict = np.load('./{}/models/{}_parameter.npy'.format(previous_parameter_folder, room), allow_pickle=True).item() np.save('./{}/models/{}_parameter.npy'.format(log_folder_name, room),", "Load the data with a positive AC electricity consumption value,", "os import pickle import warnings from typing import Tuple import", "code to train the xgboost model with cross-validation for each", "type=int, required=False, default=0, help=\"Whether use the SMOGN or not\") parser.add_argument(\"--SampleWeight\",", "xgb_model=None, feval=eval_dict[args.metric], maximize=True) # Save all the models we trained", "room_data_smote['AC'] X = room_data_smote.drop(['AC', 'SMOTE_split'], axis=1) elif args.SMOGN: if len(room_data)", "= xgb.cv(dtrain=data_matrix, params=param_dict_tunning, nfold=5, early_stopping_rounds=30, as_pandas=True, num_boost_round=200, seed=seed, metrics='rmse', maximize=False,", "a dataframe warnings.filterwarnings('ignore') pd.set_option('display.max_columns', None) pd.set_option('display.max_rows', None) assert args.SMOTE !=", "'colsample_bytree': best_hyperparams['colsample_bytree'], 'learning_rate': best_hyperparams['learning_rate'], 'subsample': best_hyperparams['subsample'], 'min_split_loss': best_hyperparams['min_split_loss']} np.save('./{}/models/{}_parameter.npy'.format(log_folder_name, room),", "rooms have low quality data and we delete them manually", "hp.uniform(\"subsample\", 0.5, 1), 'min_split_loss': hp.uniform(\"min_split_loss\", 0, 9)} if os.path.exists('./{}/models/{}_parameter.npy'.format(previous_parameter_folder, room)):", "= SMOTE(random_state=621, k_neighbors=3) room_data_smote, smote_split = model_smote.fit_resample(X, y) # concat", "# Define our evaluation functions def RMSE(predt: np.ndarray, dtrain: DMatrix)", "= pd.DataFrame( columns=['room', 'train-{}-mean'.format(args.metric), 'train-{}-std'.format(args.metric), 
'train-rmse-mean', 'train-rmse-std', 'test-{}-mean'.format(args.metric), 'test-{}-std'.format(args.metric), 'test-rmse-mean',", "1), 'reg_alpha': hp.quniform(\"reg_alpha\", 0, 100, 1), 'reg_lambda': hp.uniform(\"reg_lambda\", 0, 2),", "'max_depth': int(best_hyperparams['max_depth']), 'reg_alpha': best_hyperparams['reg_alpha'], 'reg_lambda': best_hyperparams['reg_lambda'], 'min_child_weight': best_hyperparams['min_child_weight'], 'colsample_bytree': best_hyperparams['colsample_bytree'],", "sklearn.model_selection import train_test_split from sklearn.utils import compute_sample_weight from tqdm import", "0. Split into X and y room_data['SMOTE_split'] = (room_data['AC'] >", "= \"Test_R2_HYPEROPT\" log_folder_name = log_folder_name + \"_SMOTE\" if args.SMOTE else", "'reg_alpha': hp.quniform(\"reg_alpha\", 0, 100, 1), 'reg_lambda': hp.uniform(\"reg_lambda\", 0, 2), 'subsample':", "room), allow_pickle=True).item() np.save('./{}/models/{}_parameter.npy'.format(log_folder_name, room), best_param_dict) else: trials = Trials() best_hyperparams", "cross validation function to work. data_matrix = DMatrix(data=X, label=y, weight=weight)", "# Use one training_testing for ploting, and save both ground", "our training parameters and a model variable as model checkpoint", "algorithm on it. room_data = data[data.Location == room].drop(['Location'], axis=1).reset_index(drop=True) if", "from SMOTE and split the result into X and y", "the training and after the training. 
error_csv = pd.DataFrame( columns=['room',", "parser.add_argument(\"--log\", choices=[0, 1, 100], type=int, required=False, default=0, help=\"Whether to print", "r2_score, mean_squared_error from sklearn.model_selection import train_test_split from sklearn.utils import compute_sample_weight", "1), 'learning_rate': hp.uniform(\"learning_rate\", 0.1, 3), 'colsample_bytree': hp.uniform(\"colsample_bytree\", 0.5, 1), 'min_child_weight':", "metrics='rmse', maximize=False, shuffle=True) return {\"loss\": (xgb_cv_result[\"test-rmse-mean\"]).tail(1).iloc[0], \"status\": STATUS_OK} eval_dict =", "label=y_test) watchlist = [(d_test, 'eval'), (d_train, 'train')] xgb_model_train_test = xgb.train(params=best_param_dict,", "column and row everytime we print a dataframe warnings.filterwarnings('ignore') pd.set_option('display.max_columns',", "to display every column and row everytime we print a", "future use pickle.dump(xgb_model_train_test, open('./{}/trntst_models/{}.pickle.bat'.format(log_folder_name, room), 'wb')) pickle.dump(xgb_model_full, open('./{}/models/{}.pickle.bat'.format(log_folder_name, room), 'wb'))", "0].drop(['Time', 'Date', 'Hour'], axis=1).reset_index(drop=True) # Create some directory to store", "exists\" if not os.path.exists('./{}/'.format(log_folder_name)): os.mkdir('./{}'.format(log_folder_name)) os.mkdir('./{}/models/'.format(log_folder_name)) os.mkdir('./{}/trntst_models/'.format(log_folder_name)) # Define our", "d_test = DMatrix(X_test, label=y_test) watchlist = [(d_test, 'eval'), (d_train, 'train')]", "'RMSE'], type=str, required=False, default='R2', help=\"The evaluation metric you want to", "We extract the data of particular room and run the", "name exists\" if not os.path.exists('./{}/'.format(log_folder_name)): os.mkdir('./{}'.format(log_folder_name)) os.mkdir('./{}/models/'.format(log_folder_name)) os.mkdir('./{}/trntst_models/'.format(log_folder_name)) # Define", "as 1, otherwise 0. 
Split into X and y room_data['SMOTE_split']", "room error_csv.loc[len(error_csv)] = xgb_cv_result.loc[len(xgb_cv_result) - 1] # Use one training_testing", "in the dataset. # Models are dumped into ./models and", "current work directory. import argparse import json import math import", "= room error_csv.loc[len(error_csv)] = xgb_cv_result.loc[len(xgb_cv_result) - 1] # Use one", "type=int, required=False, default=0, help=\"Whether to print out the training progress\")", "log_folder_name = log_folder_name + \"_SMOTE\" if args.SMOTE else log_folder_name log_folder_name", "X and y for training. room_data_smote = pd.concat([room_data_smote, smote_split], axis=1)", "setup our training parameters and a model variable as model", "are dumped into ./models and results are dumped into two", "built-in cv function to do the cross validation, still with", "\"Can't use SMOTE and SMOGN at the same time!\" #", "num_boost_round=200, evals=watchlist, verbose_eval=args.log, xgb_model=None, feval=eval_dict[args.metric], maximize=True) prediction = np.array(xgb_model_train_test.predict(d_test)).tolist() real", "params=param_dict_tunning, nfold=5, early_stopping_rounds=30, as_pandas=True, num_boost_round=200, seed=seed, metrics='rmse', maximize=False, shuffle=True) return", "best_hyperparams['reg_lambda'], 'min_child_weight': best_hyperparams['min_child_weight'], 'colsample_bytree': best_hyperparams['colsample_bytree'], 'learning_rate': best_hyperparams['learning_rate'], 'subsample': best_hyperparams['subsample'], 'min_split_loss':", "tpe, hp, STATUS_OK, Trials from imblearn.over_sampling import SMOTE from numpy.random", "'SMOTE_split'], axis=1) elif args.SMOGN: if len(room_data) < 500: room_data['SMOTE_split'] =", "best_param_dict = np.load('./{}/models/{}_parameter.npy'.format(previous_parameter_folder, room), allow_pickle=True).item() np.save('./{}/models/{}_parameter.npy'.format(log_folder_name, room), best_param_dict) else: trials", "if not 
os.path.exists('./{}/'.format(log_folder_name)): os.mkdir('./{}'.format(log_folder_name)) os.mkdir('./{}/models/'.format(log_folder_name)) os.mkdir('./{}/trntst_models/'.format(log_folder_name)) # Define our evaluation", "help=\"Whether to print out the training progress\") parser.add_argument(\"--SMOTE\", choices=[0, 1],", "xgb_cv_result = cv(dtrain=data_matrix, params=best_param_dict, nfold=5, early_stopping_rounds=30, as_pandas=True, num_boost_round=200, seed=seed, shuffle=True,", "whole orignial dataset, and save the model xgb_model_full = xgb.train(params=best_param_dict,", "0, 2), 'subsample': hp.uniform(\"subsample\", 0.5, 1), 'min_split_loss': hp.uniform(\"min_split_loss\", 0, 9)}", "truth_value = dtrain.get_label() r2_value = r2_score(truth_value, predt) return \"R2\", r2_value", "dataframe warnings.filterwarnings('ignore') pd.set_option('display.max_columns', None) pd.set_option('display.max_rows', None) assert args.SMOTE != args.SMOGN,", "previous_parameter_folder, \"Previous folder name exists\" if not os.path.exists('./{}/'.format(log_folder_name)): os.mkdir('./{}'.format(log_folder_name)) os.mkdir('./{}/models/'.format(log_folder_name))", "AC data by 0.75, all AC above 0.75 will be", "1), 'min_child_weight': hp.quniform(\"min_child_weight\", 1, 20, 1), 'reg_alpha': hp.quniform(\"reg_alpha\", 0, 100,", "dataset. # Models are dumped into ./models and results are", "log_folder_name = log_folder_name + \"_SMOGN\" if args.SMOGN else log_folder_name log_folder_name", "int(space['min_child_weight']), 'reg_alpha': int(space['reg_alpha']), 'reg_lambda': space['reg_lambda'], 'subsample': space['subsample'], 'min_split_loss': space['min_split_loss'], 'objective':", "the dataframe X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2,", "at the same time!\" # Load the data with a", "{'room': room, 'observation': json.dumps(real), 'prediction': json.dumps(prediction)} # Dump the error", "and cross-validation for each room. 
for room in tqdm(room_list): seed", "the current work directory. import argparse import json import math", "xgboost import DMatrix, cv # Set up an argument parser", "we trained for future use pickle.dump(xgb_model_train_test, open('./{}/trntst_models/{}.pickle.bat'.format(log_folder_name, room), 'wb')) pickle.dump(xgb_model_full,", "in the current work directory. import argparse import json import", "from typing import Tuple import numpy as np import pandas", "to store the models and future analysis figures. # log_folder_name", "directory. import argparse import json import math import os import", "a positive AC electricity consumption value, and drop the time", "cv(dtrain=data_matrix, params=best_param_dict, nfold=5, early_stopping_rounds=30, as_pandas=True, num_boost_round=200, seed=seed, shuffle=True, feval=eval_dict[args.metric], maximize=True)", "concat the result from SMOTE and split the result into", "progress\") parser.add_argument(\"--SMOTE\", choices=[0, 1], type=int, required=False, default=1, help=\"Whether use the", "Tuple[str, float]: truth_value = dtrain.get_label() root_squard_error = math.sqrt(mean_squared_error(truth_value, predt)) return", "'min_child_weight': int(space['min_child_weight']), 'reg_alpha': int(space['reg_alpha']), 'reg_lambda': space['reg_lambda'], 'subsample': space['subsample'], 'min_split_loss': space['min_split_loss'],", "nfold=5, early_stopping_rounds=30, as_pandas=True, num_boost_round=200, seed=seed, metrics='rmse', maximize=False, shuffle=True) return {\"loss\":", "using the whole orignial dataset, and save the model xgb_model_full", "typing import Tuple import numpy as np import pandas as", "models and future analysis figures. 
# log_folder_name = \"Test_{}_{}\".format(args.metric, datetime.now().strftime(\"%Y_%m_%d_%H_%M_%S\"))", "{'objective': 'reg:squarederror', 'max_depth': int(best_hyperparams['max_depth']), 'reg_alpha': best_hyperparams['reg_alpha'], 'reg_lambda': best_hyperparams['reg_lambda'], 'min_child_weight': best_hyperparams['min_child_weight'],", "data by 0.75, all AC above 0.75 will be marked", "help=\"Whether use the sample weight\") args = parser.parse_args() # Ignore", "error_csv = pd.DataFrame( columns=['room', 'train-{}-mean'.format(args.metric), 'train-{}-std'.format(args.metric), 'train-rmse-mean', 'train-rmse-std', 'test-{}-mean'.format(args.metric), 'test-{}-std'.format(args.metric),", "default=0, help=\"Whether to print out the training progress\") parser.add_argument(\"--SMOTE\", choices=[0,", "parser.add_argument(\"--metric\", choices=['R2', 'RMSE'], type=str, required=False, default='R2', help=\"The evaluation metric you", "= pd.read_csv('./SMOGN_processed/{}.csv'.format(room), index_col=0) y = room_data['AC'] X = room_data.drop(['AC'], axis=1)", "json.dumps(prediction)} # Dump the error dataframes into csv files. 
error_csv.to_csv('./{}/error.csv'.format(log_folder_name),", "type=str, required=False, default='R2', help=\"The evaluation metric you want to use", "the result from SMOTE and split the result into X", "allow_pickle=True).item() np.save('./{}/models/{}_parameter.npy'.format(log_folder_name, room), best_param_dict) else: trials = Trials() best_hyperparams =", "./models and results are dumped into two csv files in", "for ploting, and save both ground truth and prediction value", "and results are dumped into two csv files in the", "np.ndarray, dtrain: DMatrix) -> Tuple[str, float]: truth_value = dtrain.get_label() r2_value", "space['subsample'], 'min_split_loss': space['min_split_loss'], 'objective': 'reg:squarederror'} xgb_cv_result = xgb.cv(dtrain=data_matrix, params=param_dict_tunning, nfold=5,", "not\") parser.add_argument(\"--SMOGN\", choices=[0, 1], type=int, required=False, default=0, help=\"Whether use the", "room_data['AC'] X = room_data.drop(['AC'], axis=1) else: y = pd.DataFrame(room_data['AC'].fillna(method='pad')) X", "weight = compute_sample_weight(class_weight=\"balanced\", y=class_sample) X = X.to_numpy() # Build another" ]
[ "packages=['editorconfig'], url='http://editorconfig.org/', license='python', description='EditorConfig File Locator and Interpreter for Python',", "description='EditorConfig File Locator and Interpreter for Python', long_description=open('README.rst').read(), entry_points =", "'Programming Language :: Python', 'Programming Language :: Python :: 2.7',", "= { 'console_scripts': [ 'editorconfig = editorconfig.__main__:main', ] }, classifiers=[", ":: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language", "Language :: Python :: 3.9', 'Programming Language :: Python ::", "Independent', 'Programming Language :: Python', 'Programming Language :: Python ::", "Python :: 3', 'Programming Language :: Python :: 3.5', 'Programming", ":: Python :: 3.6', 'Programming Language :: Python :: 3.7',", "3', 'Programming Language :: Python :: 3.5', 'Programming Language ::", "Read the version g = {} with open(os.path.join(\"editorconfig\", \"version.py\"), \"rt\")", "license='python', description='EditorConfig File Locator and Interpreter for Python', long_description=open('README.rst').read(), entry_points", "Team', packages=['editorconfig'], url='http://editorconfig.org/', license='python', description='EditorConfig File Locator and Interpreter for", "exec(fp.read(), g) v = g['VERSION'] version = \".\".join(str(x) for x", ":: OSI Approved :: Python Software Foundation License', 'Operating System", "for Python', long_description=open('README.rst').read(), entry_points = { 'console_scripts': [ 'editorconfig =", "Foundation License', 'Operating System :: OS Independent', 'Programming Language ::", "\"rt\") as fp: exec(fp.read(), g) v = g['VERSION'] version =", "= \".\".join(str(x) for x in v[:3]) if v[3] != \"final\":", "setup # Read the version g = {} with open(os.path.join(\"editorconfig\",", "Locator and Interpreter for Python', long_description=open('README.rst').read(), entry_points = { 'console_scripts':", ":: Python', 'Programming Language :: Python :: 2.7', 
'Programming Language", "3.5', 'Programming Language :: Python :: 3.6', 'Programming Language ::", "'editorconfig = editorconfig.__main__:main', ] }, classifiers=[ 'License :: OSI Approved", "Language :: Python :: 3', 'Programming Language :: Python ::", "os from setuptools import setup # Read the version g", "url='http://editorconfig.org/', license='python', description='EditorConfig File Locator and Interpreter for Python', long_description=open('README.rst').read(),", ":: Python :: 2.7', 'Programming Language :: Python :: 3',", ":: Python :: 3.5', 'Programming Language :: Python :: 3.6',", "g['VERSION'] version = \".\".join(str(x) for x in v[:3]) if v[3]", "3.9', 'Programming Language :: Python :: Implementation :: PyPy', ],", "version = \".\".join(str(x) for x in v[:3]) if v[3] !=", "Interpreter for Python', long_description=open('README.rst').read(), entry_points = { 'console_scripts': [ 'editorconfig", "'Programming Language :: Python :: 3.8', 'Programming Language :: Python", "Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming", "fp: exec(fp.read(), g) v = g['VERSION'] version = \".\".join(str(x) for", "version=version, author='EditorConfig Team', packages=['editorconfig'], url='http://editorconfig.org/', license='python', description='EditorConfig File Locator and", "Language :: Python :: 3.8', 'Programming Language :: Python ::", "Python :: 3.9', 'Programming Language :: Python :: Implementation ::", "if v[3] != \"final\": version += \"-\" + v[3] setup(", "in v[:3]) if v[3] != \"final\": version += \"-\" +", "!= \"final\": version += \"-\" + v[3] setup( name='EditorConfig', version=version,", ":: OS Independent', 'Programming Language :: Python', 'Programming Language ::", "+ v[3] setup( name='EditorConfig', version=version, author='EditorConfig Team', packages=['editorconfig'], url='http://editorconfig.org/', license='python',", "\"-\" + v[3] setup( name='EditorConfig', version=version, author='EditorConfig Team', 
packages=['editorconfig'], url='http://editorconfig.org/',", "Language :: Python :: 3.6', 'Programming Language :: Python ::", "= {} with open(os.path.join(\"editorconfig\", \"version.py\"), \"rt\") as fp: exec(fp.read(), g)", ":: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language", ":: 2.7', 'Programming Language :: Python :: 3', 'Programming Language", "author='EditorConfig Team', packages=['editorconfig'], url='http://editorconfig.org/', license='python', description='EditorConfig File Locator and Interpreter", "\".\".join(str(x) for x in v[:3]) if v[3] != \"final\": version", "# Read the version g = {} with open(os.path.join(\"editorconfig\", \"version.py\"),", "Language :: Python :: 2.7', 'Programming Language :: Python ::", "Python', 'Programming Language :: Python :: 2.7', 'Programming Language ::", "'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming", "import os from setuptools import setup # Read the version", ":: Python :: 3.7', 'Programming Language :: Python :: 3.8',", "2.7', 'Programming Language :: Python :: 3', 'Programming Language ::", "Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming", "open(os.path.join(\"editorconfig\", \"version.py\"), \"rt\") as fp: exec(fp.read(), g) v = g['VERSION']", "version += \"-\" + v[3] setup( name='EditorConfig', version=version, author='EditorConfig Team',", ":: 3.9', 'Programming Language :: Python :: Implementation :: PyPy',", "Language :: Python :: 3.5', 'Programming Language :: Python ::", "g = {} with open(os.path.join(\"editorconfig\", \"version.py\"), \"rt\") as fp: exec(fp.read(),", "'Programming Language :: Python :: 3.6', 'Programming Language :: Python", "] }, classifiers=[ 'License :: OSI Approved :: Python Software", ":: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language", ":: 3', 'Programming Language :: Python :: 3.5', 'Programming Language", ":: Python :: 3.9', 'Programming Language :: Python :: Implementation", 
"'Programming Language :: Python :: 2.7', 'Programming Language :: Python", "License', 'Operating System :: OS Independent', 'Programming Language :: Python',", "'Programming Language :: Python :: 3', 'Programming Language :: Python", "3.8', 'Programming Language :: Python :: 3.9', 'Programming Language ::", "File Locator and Interpreter for Python', long_description=open('README.rst').read(), entry_points = {", "setuptools import setup # Read the version g = {}", "for x in v[:3]) if v[3] != \"final\": version +=", "setup( name='EditorConfig', version=version, author='EditorConfig Team', packages=['editorconfig'], url='http://editorconfig.org/', license='python', description='EditorConfig File", "{} with open(os.path.join(\"editorconfig\", \"version.py\"), \"rt\") as fp: exec(fp.read(), g) v", "Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming", "3.7', 'Programming Language :: Python :: 3.8', 'Programming Language ::", "OS Independent', 'Programming Language :: Python', 'Programming Language :: Python", "Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming", "Approved :: Python Software Foundation License', 'Operating System :: OS", "v[3] setup( name='EditorConfig', version=version, author='EditorConfig Team', packages=['editorconfig'], url='http://editorconfig.org/', license='python', description='EditorConfig", ":: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language", "3.6', 'Programming Language :: Python :: 3.7', 'Programming Language ::", "}, classifiers=[ 'License :: OSI Approved :: Python Software Foundation", "Software Foundation License', 'Operating System :: OS Independent', 'Programming Language", "v[3] != \"final\": version += \"-\" + v[3] setup( name='EditorConfig',", "from setuptools import setup # Read the version g =", "and Interpreter for Python', long_description=open('README.rst').read(), entry_points = { 'console_scripts': [", "as fp: exec(fp.read(), g) v = g['VERSION'] version = 
\".\".join(str(x)", "+= \"-\" + v[3] setup( name='EditorConfig', version=version, author='EditorConfig Team', packages=['editorconfig'],", "'console_scripts': [ 'editorconfig = editorconfig.__main__:main', ] }, classifiers=[ 'License ::", "classifiers=[ 'License :: OSI Approved :: Python Software Foundation License',", "'Programming Language :: Python :: Implementation :: PyPy', ], )", "'Programming Language :: Python :: 3.7', 'Programming Language :: Python", "version g = {} with open(os.path.join(\"editorconfig\", \"version.py\"), \"rt\") as fp:", "Language :: Python :: 3.7', 'Programming Language :: Python ::", "with open(os.path.join(\"editorconfig\", \"version.py\"), \"rt\") as fp: exec(fp.read(), g) v =", ":: Python :: 3.8', 'Programming Language :: Python :: 3.9',", "'Programming Language :: Python :: 3.5', 'Programming Language :: Python", "g) v = g['VERSION'] version = \".\".join(str(x) for x in", "[ 'editorconfig = editorconfig.__main__:main', ] }, classifiers=[ 'License :: OSI", "the version g = {} with open(os.path.join(\"editorconfig\", \"version.py\"), \"rt\") as", "name='EditorConfig', version=version, author='EditorConfig Team', packages=['editorconfig'], url='http://editorconfig.org/', license='python', description='EditorConfig File Locator", "import setup # Read the version g = {} with", "v[:3]) if v[3] != \"final\": version += \"-\" + v[3]", "Python Software Foundation License', 'Operating System :: OS Independent', 'Programming", "entry_points = { 'console_scripts': [ 'editorconfig = editorconfig.__main__:main', ] },", "'Programming Language :: Python :: 3.9', 'Programming Language :: Python", "x in v[:3]) if v[3] != \"final\": version += \"-\"", "= editorconfig.__main__:main', ] }, classifiers=[ 'License :: OSI Approved ::", "Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming", "v = g['VERSION'] version = \".\".join(str(x) for x in v[:3])", "OSI Approved :: Python Software Foundation License', 'Operating System ::", "Python 
:: 3.5', 'Programming Language :: Python :: 3.6', 'Programming", "= g['VERSION'] version = \".\".join(str(x) for x in v[:3]) if", "{ 'console_scripts': [ 'editorconfig = editorconfig.__main__:main', ] }, classifiers=[ 'License", "'License :: OSI Approved :: Python Software Foundation License', 'Operating", "long_description=open('README.rst').read(), entry_points = { 'console_scripts': [ 'editorconfig = editorconfig.__main__:main', ]", ":: Python :: 3', 'Programming Language :: Python :: 3.5',", ":: Python Software Foundation License', 'Operating System :: OS Independent',", "Python', long_description=open('README.rst').read(), entry_points = { 'console_scripts': [ 'editorconfig = editorconfig.__main__:main',", "System :: OS Independent', 'Programming Language :: Python', 'Programming Language", "\"final\": version += \"-\" + v[3] setup( name='EditorConfig', version=version, author='EditorConfig", "\"version.py\"), \"rt\") as fp: exec(fp.read(), g) v = g['VERSION'] version", "editorconfig.__main__:main', ] }, classifiers=[ 'License :: OSI Approved :: Python" ]
[ "None, \"pidfile\": \"vaping.pid\", \"plugin_path\": [],}, }, \"config_dir\": \"~/.vaping\", \"codec\": \"yaml\",", "500ms 90 = 1m30s **Arguments** - val (`str`) \"\"\" re_intv", "<filename>vaping/config.py<gh_stars>0 import re import munge def parse_interval(val): \"\"\" converts a", "== \"h\": total += count * 3600 elif unit ==", "{\"home_dir\": None, \"pidfile\": \"vaping.pid\", \"plugin_path\": [],}, }, \"config_dir\": \"~/.vaping\", \"codec\":", "= float(match[0]) if unit == \"s\": total += count elif", "from interval string '%s'\" % val) return total class Config(munge.Config):", "\"vaping\": {\"home_dir\": None, \"pidfile\": \"vaping.pid\", \"plugin_path\": [],}, }, \"config_dir\": \"~/.vaping\",", "elif unit == \"ms\": total += count / 1000 elif", "total += count * 3600 elif unit == \"d\": total", "\"config\": { \"vaping\": {\"home_dir\": None, \"pidfile\": \"vaping.pid\", \"plugin_path\": [],}, },", "total += count * 86400 else: raise ValueError(\"unknown unit from", "\"s\": total += count elif unit == \"m\": total +=", "**Arguments** - val (`str`) \"\"\" re_intv = re.compile(r\"([\\d\\.]+)([a-zA-Z]+)\") val =", "unit == \"h\": total += count * 3600 elif unit", "== \"d\": total += count * 86400 else: raise ValueError(\"unknown", "= match[1] count = float(match[0]) if unit == \"s\": total", "total += count elif unit == \"m\": total += count", "= re.compile(r\"([\\d\\.]+)([a-zA-Z]+)\") val = val.strip() total = 0.0 for match", "unit == \"s\": total += count elif unit == \"m\":", "+= count elif unit == \"m\": total += count *", "* 86400 else: raise ValueError(\"unknown unit from interval string '%s'\"", "float(match[0]) if unit == \"s\": total += count elif unit", "== \"m\": total += count * 60 elif unit ==", "seconds .5 = 500ms 90 = 1m30s **Arguments** - val", "= 0.0 for match in re_intv.findall(val): unit = match[1] count", "= val.strip() total = 0.0 for match in re_intv.findall(val): unit", "* 60 elif unit == \"ms\": total += count /", "\"\"\" converts a 
string to float of seconds .5 =", "config manager \"\"\" defaults = { \"config\": { \"vaping\": {\"home_dir\":", "= 1m30s **Arguments** - val (`str`) \"\"\" re_intv = re.compile(r\"([\\d\\.]+)([a-zA-Z]+)\")", "/ 1000 elif unit == \"h\": total += count *", "val = val.strip() total = 0.0 for match in re_intv.findall(val):", "total class Config(munge.Config): \"\"\" Vaping config manager \"\"\" defaults =", "converts a string to float of seconds .5 = 500ms", "\"pidfile\": \"vaping.pid\", \"plugin_path\": [],}, }, \"config_dir\": \"~/.vaping\", \"codec\": \"yaml\", }", "\"\"\" defaults = { \"config\": { \"vaping\": {\"home_dir\": None, \"pidfile\":", "string '%s'\" % val) return total class Config(munge.Config): \"\"\" Vaping", "re.compile(r\"([\\d\\.]+)([a-zA-Z]+)\") val = val.strip() total = 0.0 for match in", "+= count * 86400 else: raise ValueError(\"unknown unit from interval", "count = float(match[0]) if unit == \"s\": total += count", "of seconds .5 = 500ms 90 = 1m30s **Arguments** -", "import re import munge def parse_interval(val): \"\"\" converts a string", "== \"ms\": total += count / 1000 elif unit ==", "Config(munge.Config): \"\"\" Vaping config manager \"\"\" defaults = { \"config\":", "string to float of seconds .5 = 500ms 90 =", "class Config(munge.Config): \"\"\" Vaping config manager \"\"\" defaults = {", "= 500ms 90 = 1m30s **Arguments** - val (`str`) \"\"\"", "\"\"\" re_intv = re.compile(r\"([\\d\\.]+)([a-zA-Z]+)\") val = val.strip() total = 0.0", "{ \"config\": { \"vaping\": {\"home_dir\": None, \"pidfile\": \"vaping.pid\", \"plugin_path\": [],},", "val) return total class Config(munge.Config): \"\"\" Vaping config manager \"\"\"", "re_intv.findall(val): unit = match[1] count = float(match[0]) if unit ==", "val.strip() total = 0.0 for match in re_intv.findall(val): unit =", "else: raise ValueError(\"unknown unit from interval string '%s'\" % val)", "total += count / 1000 elif unit == \"h\": total", "raise ValueError(\"unknown unit from interval 
string '%s'\" % val) return", "+= count * 60 elif unit == \"ms\": total +=", "elif unit == \"h\": total += count * 3600 elif", "3600 elif unit == \"d\": total += count * 86400", "unit from interval string '%s'\" % val) return total class", "interval string '%s'\" % val) return total class Config(munge.Config): \"\"\"", "float of seconds .5 = 500ms 90 = 1m30s **Arguments**", "= { \"config\": { \"vaping\": {\"home_dir\": None, \"pidfile\": \"vaping.pid\", \"plugin_path\":", "1m30s **Arguments** - val (`str`) \"\"\" re_intv = re.compile(r\"([\\d\\.]+)([a-zA-Z]+)\") val", "total += count * 60 elif unit == \"ms\": total", "manager \"\"\" defaults = { \"config\": { \"vaping\": {\"home_dir\": None,", "match in re_intv.findall(val): unit = match[1] count = float(match[0]) if", "to float of seconds .5 = 500ms 90 = 1m30s", "count * 86400 else: raise ValueError(\"unknown unit from interval string", "re_intv = re.compile(r\"([\\d\\.]+)([a-zA-Z]+)\") val = val.strip() total = 0.0 for", "count elif unit == \"m\": total += count * 60", "\"ms\": total += count / 1000 elif unit == \"h\":", "60 elif unit == \"ms\": total += count / 1000", "\"h\": total += count * 3600 elif unit == \"d\":", "\"m\": total += count * 60 elif unit == \"ms\":", "* 3600 elif unit == \"d\": total += count *", "elif unit == \"m\": total += count * 60 elif", "'%s'\" % val) return total class Config(munge.Config): \"\"\" Vaping config", "\"\"\" Vaping config manager \"\"\" defaults = { \"config\": {", "total = 0.0 for match in re_intv.findall(val): unit = match[1]", "val (`str`) \"\"\" re_intv = re.compile(r\"([\\d\\.]+)([a-zA-Z]+)\") val = val.strip() total", "count / 1000 elif unit == \"h\": total += count", "{ \"vaping\": {\"home_dir\": None, \"pidfile\": \"vaping.pid\", \"plugin_path\": [],}, }, \"config_dir\":", "in re_intv.findall(val): unit = match[1] count = float(match[0]) if unit", "+= count / 1000 elif unit == \"h\": total +=", "count * 60 elif unit == \"ms\": total += count", "\"d\": total += 
count * 86400 else: raise ValueError(\"unknown unit", "== \"s\": total += count elif unit == \"m\": total", "for match in re_intv.findall(val): unit = match[1] count = float(match[0])", "import munge def parse_interval(val): \"\"\" converts a string to float", "90 = 1m30s **Arguments** - val (`str`) \"\"\" re_intv =", "return total class Config(munge.Config): \"\"\" Vaping config manager \"\"\" defaults", "unit == \"m\": total += count * 60 elif unit", "86400 else: raise ValueError(\"unknown unit from interval string '%s'\" %", "unit == \"ms\": total += count / 1000 elif unit", "defaults = { \"config\": { \"vaping\": {\"home_dir\": None, \"pidfile\": \"vaping.pid\",", "- val (`str`) \"\"\" re_intv = re.compile(r\"([\\d\\.]+)([a-zA-Z]+)\") val = val.strip()", "re import munge def parse_interval(val): \"\"\" converts a string to", "+= count * 3600 elif unit == \"d\": total +=", "ValueError(\"unknown unit from interval string '%s'\" % val) return total", ".5 = 500ms 90 = 1m30s **Arguments** - val (`str`)", "a string to float of seconds .5 = 500ms 90", "munge def parse_interval(val): \"\"\" converts a string to float of", "% val) return total class Config(munge.Config): \"\"\" Vaping config manager", "parse_interval(val): \"\"\" converts a string to float of seconds .5", "Vaping config manager \"\"\" defaults = { \"config\": { \"vaping\":", "if unit == \"s\": total += count elif unit ==", "1000 elif unit == \"h\": total += count * 3600", "def parse_interval(val): \"\"\" converts a string to float of seconds", "elif unit == \"d\": total += count * 86400 else:", "unit == \"d\": total += count * 86400 else: raise", "0.0 for match in re_intv.findall(val): unit = match[1] count =", "unit = match[1] count = float(match[0]) if unit == \"s\":", "count * 3600 elif unit == \"d\": total += count", "(`str`) \"\"\" re_intv = re.compile(r\"([\\d\\.]+)([a-zA-Z]+)\") val = val.strip() total =", "match[1] count = float(match[0]) if unit == \"s\": total +=" ]
[ "for sktime annotators.\"\"\" import pandas as pd import pytest from", "coding: utf-8 -*- \"\"\"Tests for sktime annotators.\"\"\" import pandas as", "pd import pytest from sktime.registry import all_estimators from sktime.utils._testing.estimator_checks import", "from sktime.utils._testing.estimator_checks import _make_args ALL_ANNOTATORS = all_estimators(estimator_types=\"series-annotator\", return_names=False) @pytest.mark.parametrize(\"Estimator\", ALL_ANNOTATORS)", "import _make_args ALL_ANNOTATORS = all_estimators(estimator_types=\"series-annotator\", return_names=False) @pytest.mark.parametrize(\"Estimator\", ALL_ANNOTATORS) def test_output_type(Estimator):", "Estimator.create_test_instance() args = _make_args(estimator, \"fit\") estimator.fit(*args) args = _make_args(estimator, \"predict\")", "all_estimators(estimator_types=\"series-annotator\", return_names=False) @pytest.mark.parametrize(\"Estimator\", ALL_ANNOTATORS) def test_output_type(Estimator): \"\"\"Test annotator output type.\"\"\"", "\"fit\") estimator.fit(*args) args = _make_args(estimator, \"predict\") y_pred = estimator.predict(*args) assert", "all_estimators from sktime.utils._testing.estimator_checks import _make_args ALL_ANNOTATORS = all_estimators(estimator_types=\"series-annotator\", return_names=False) @pytest.mark.parametrize(\"Estimator\",", "sktime.utils._testing.estimator_checks import _make_args ALL_ANNOTATORS = all_estimators(estimator_types=\"series-annotator\", return_names=False) @pytest.mark.parametrize(\"Estimator\", ALL_ANNOTATORS) def", "args = _make_args(estimator, \"predict\") y_pred = estimator.predict(*args) assert isinstance(y_pred, pd.Series)", "import pytest from sktime.registry import all_estimators from sktime.utils._testing.estimator_checks import _make_args", "sktime annotators.\"\"\" import pandas as pd import pytest from sktime.registry", "\"\"\"Tests for sktime annotators.\"\"\" import pandas as pd import pytest", "annotators.\"\"\" import pandas as pd import 
pytest from sktime.registry import", "estimator = Estimator.create_test_instance() args = _make_args(estimator, \"fit\") estimator.fit(*args) args =", "-*- \"\"\"Tests for sktime annotators.\"\"\" import pandas as pd import", "pytest from sktime.registry import all_estimators from sktime.utils._testing.estimator_checks import _make_args ALL_ANNOTATORS", "as pd import pytest from sktime.registry import all_estimators from sktime.utils._testing.estimator_checks", "ALL_ANNOTATORS) def test_output_type(Estimator): \"\"\"Test annotator output type.\"\"\" estimator = Estimator.create_test_instance()", "= _make_args(estimator, \"fit\") estimator.fit(*args) args = _make_args(estimator, \"predict\") y_pred =", "_make_args ALL_ANNOTATORS = all_estimators(estimator_types=\"series-annotator\", return_names=False) @pytest.mark.parametrize(\"Estimator\", ALL_ANNOTATORS) def test_output_type(Estimator): \"\"\"Test", "estimator.fit(*args) args = _make_args(estimator, \"predict\") y_pred = estimator.predict(*args) assert isinstance(y_pred,", "# -*- coding: utf-8 -*- \"\"\"Tests for sktime annotators.\"\"\" import", "def test_output_type(Estimator): \"\"\"Test annotator output type.\"\"\" estimator = Estimator.create_test_instance() args", "args = _make_args(estimator, \"fit\") estimator.fit(*args) args = _make_args(estimator, \"predict\") y_pred", "= all_estimators(estimator_types=\"series-annotator\", return_names=False) @pytest.mark.parametrize(\"Estimator\", ALL_ANNOTATORS) def test_output_type(Estimator): \"\"\"Test annotator output", "output type.\"\"\" estimator = Estimator.create_test_instance() args = _make_args(estimator, \"fit\") estimator.fit(*args)", "pandas as pd import pytest from sktime.registry import all_estimators from", "test_output_type(Estimator): \"\"\"Test annotator output type.\"\"\" estimator = Estimator.create_test_instance() args =", "import all_estimators from sktime.utils._testing.estimator_checks import _make_args ALL_ANNOTATORS = 
all_estimators(estimator_types=\"series-annotator\", return_names=False)", "type.\"\"\" estimator = Estimator.create_test_instance() args = _make_args(estimator, \"fit\") estimator.fit(*args) args", "return_names=False) @pytest.mark.parametrize(\"Estimator\", ALL_ANNOTATORS) def test_output_type(Estimator): \"\"\"Test annotator output type.\"\"\" estimator", "\"\"\"Test annotator output type.\"\"\" estimator = Estimator.create_test_instance() args = _make_args(estimator,", "sktime.registry import all_estimators from sktime.utils._testing.estimator_checks import _make_args ALL_ANNOTATORS = all_estimators(estimator_types=\"series-annotator\",", "_make_args(estimator, \"fit\") estimator.fit(*args) args = _make_args(estimator, \"predict\") y_pred = estimator.predict(*args)", "@pytest.mark.parametrize(\"Estimator\", ALL_ANNOTATORS) def test_output_type(Estimator): \"\"\"Test annotator output type.\"\"\" estimator =", "ALL_ANNOTATORS = all_estimators(estimator_types=\"series-annotator\", return_names=False) @pytest.mark.parametrize(\"Estimator\", ALL_ANNOTATORS) def test_output_type(Estimator): \"\"\"Test annotator", "utf-8 -*- \"\"\"Tests for sktime annotators.\"\"\" import pandas as pd", "-*- coding: utf-8 -*- \"\"\"Tests for sktime annotators.\"\"\" import pandas", "= Estimator.create_test_instance() args = _make_args(estimator, \"fit\") estimator.fit(*args) args = _make_args(estimator,", "from sktime.registry import all_estimators from sktime.utils._testing.estimator_checks import _make_args ALL_ANNOTATORS =", "import pandas as pd import pytest from sktime.registry import all_estimators", "annotator output type.\"\"\" estimator = Estimator.create_test_instance() args = _make_args(estimator, \"fit\")" ]
[ "'.h264') sleep(2) camera.stop_recording() camera.stop_preview() subprocess.Popen(\"MP4Box -add \" + str(x) +", "sleep import boto3 import os.path import subprocess s3 = boto3.client('s3')", "(x == 6): x = 1 else: x = x", "s3 = boto3.client('s3') bucket = 'cambucket21' camera = PiCamera() #camera.resolution(1920,1080)", "+ str(x) +\".mp4\", shell=True) sleep(1) s3.upload_file('/home/pi/' + str(x) + '.mp4',bucket,'/home/pi/'", "= boto3.client('s3') bucket = 'cambucket21' camera = PiCamera() #camera.resolution(1920,1080) x", "PiCamera() #camera.resolution(1920,1080) x = 0 camerafile = x while True:", "picamera import PiCamera from time import sleep import boto3 import", "import os.path import subprocess s3 = boto3.client('s3') bucket = 'cambucket21'", "+ str(x) + \".h264 \" + str(x) +\".mp4\", shell=True) sleep(1)", "x = 0 camerafile = x while True: if (x", "+\".mp4\", shell=True) sleep(1) s3.upload_file('/home/pi/' + str(x) + '.mp4',bucket,'/home/pi/' + str(x)", "time import sleep import boto3 import os.path import subprocess s3", "0 camerafile = x while True: if (x == 6):", "import subprocess s3 = boto3.client('s3') bucket = 'cambucket21' camera =", "x while True: if (x == 6): x = 1", "= PiCamera() #camera.resolution(1920,1080) x = 0 camerafile = x while", "6): x = 1 else: x = x + 1", "import boto3 import os.path import subprocess s3 = boto3.client('s3') bucket", "True: if (x == 6): x = 1 else: x", "#camera.resolution(1920,1080) x = 0 camerafile = x while True: if", "subprocess.Popen(\"MP4Box -add \" + str(x) + \".h264 \" + str(x)", "camera.start_recording('/home/pi/' + str(x) + '.h264') sleep(2) camera.stop_recording() camera.stop_preview() subprocess.Popen(\"MP4Box -add", "+ \".h264 \" + str(x) +\".mp4\", shell=True) sleep(1) s3.upload_file('/home/pi/' +", "\".h264 \" + str(x) +\".mp4\", shell=True) sleep(1) s3.upload_file('/home/pi/' + str(x)", "\" + str(x) +\".mp4\", shell=True) sleep(1) s3.upload_file('/home/pi/' + str(x) +", "subprocess s3 = 
boto3.client('s3') bucket = 'cambucket21' camera = PiCamera()", "'cambucket21' camera = PiCamera() #camera.resolution(1920,1080) x = 0 camerafile =", "sleep(1) s3.upload_file('/home/pi/' + str(x) + '.mp4',bucket,'/home/pi/' + str(x) + '.mp4')", "os.path import subprocess s3 = boto3.client('s3') bucket = 'cambucket21' camera", "= x while True: if (x == 6): x =", "import PiCamera from time import sleep import boto3 import os.path", "str(x) + '.h264') sleep(2) camera.stop_recording() camera.stop_preview() subprocess.Popen(\"MP4Box -add \" +", "camera.stop_recording() camera.stop_preview() subprocess.Popen(\"MP4Box -add \" + str(x) + \".h264 \"", "str(x) + \".h264 \" + str(x) +\".mp4\", shell=True) sleep(1) s3.upload_file('/home/pi/'", "+ '.h264') sleep(2) camera.stop_recording() camera.stop_preview() subprocess.Popen(\"MP4Box -add \" + str(x)", "-add \" + str(x) + \".h264 \" + str(x) +\".mp4\",", "= 'cambucket21' camera = PiCamera() #camera.resolution(1920,1080) x = 0 camerafile", "if (x == 6): x = 1 else: x =", "camera = PiCamera() #camera.resolution(1920,1080) x = 0 camerafile = x", "+ str(x) + '.h264') sleep(2) camera.stop_recording() camera.stop_preview() subprocess.Popen(\"MP4Box -add \"", "shell=True) sleep(1) s3.upload_file('/home/pi/' + str(x) + '.mp4',bucket,'/home/pi/' + str(x) +", "= 0 camerafile = x while True: if (x ==", "camerafile = x while True: if (x == 6): x", "= 1 else: x = x + 1 camera.start_preview() camera.start_recording('/home/pi/'", "bucket = 'cambucket21' camera = PiCamera() #camera.resolution(1920,1080) x = 0", "from time import sleep import boto3 import os.path import subprocess", "boto3 import os.path import subprocess s3 = boto3.client('s3') bucket =", "while True: if (x == 6): x = 1 else:", "x = x + 1 camera.start_preview() camera.start_recording('/home/pi/' + str(x) +", "camera.stop_preview() subprocess.Popen(\"MP4Box -add \" + str(x) + \".h264 \" +", "sleep(2) camera.stop_recording() camera.stop_preview() subprocess.Popen(\"MP4Box -add 
\" + str(x) + \".h264", "<gh_stars>1-10 picamera import PiCamera from time import sleep import boto3", "\" + str(x) + \".h264 \" + str(x) +\".mp4\", shell=True)", "+ 1 camera.start_preview() camera.start_recording('/home/pi/' + str(x) + '.h264') sleep(2) camera.stop_recording()", "1 else: x = x + 1 camera.start_preview() camera.start_recording('/home/pi/' +", "x + 1 camera.start_preview() camera.start_recording('/home/pi/' + str(x) + '.h264') sleep(2)", "boto3.client('s3') bucket = 'cambucket21' camera = PiCamera() #camera.resolution(1920,1080) x =", "str(x) +\".mp4\", shell=True) sleep(1) s3.upload_file('/home/pi/' + str(x) + '.mp4',bucket,'/home/pi/' +", "import sleep import boto3 import os.path import subprocess s3 =", "1 camera.start_preview() camera.start_recording('/home/pi/' + str(x) + '.h264') sleep(2) camera.stop_recording() camera.stop_preview()", "= x + 1 camera.start_preview() camera.start_recording('/home/pi/' + str(x) + '.h264')", "== 6): x = 1 else: x = x +", "else: x = x + 1 camera.start_preview() camera.start_recording('/home/pi/' + str(x)", "camera.start_preview() camera.start_recording('/home/pi/' + str(x) + '.h264') sleep(2) camera.stop_recording() camera.stop_preview() subprocess.Popen(\"MP4Box", "x = 1 else: x = x + 1 camera.start_preview()", "PiCamera from time import sleep import boto3 import os.path import" ]
[ "import datetime def run_example(): moment_in_time = datetime.fromordinal(256) print(moment_in_time) print(moment_in_time.toordinal()) print(moment_in_time.weekday())", "datetime import datetime def run_example(): moment_in_time = datetime.fromordinal(256) print(moment_in_time) print(moment_in_time.toordinal())", "run_example(): moment_in_time = datetime.fromordinal(256) print(moment_in_time) print(moment_in_time.toordinal()) print(moment_in_time.weekday()) print(moment_in_time.isoweekday()) other_moment =", "def run_example(): moment_in_time = datetime.fromordinal(256) print(moment_in_time) print(moment_in_time.toordinal()) print(moment_in_time.weekday()) print(moment_in_time.isoweekday()) other_moment", "moment_in_time = datetime.fromordinal(256) print(moment_in_time) print(moment_in_time.toordinal()) print(moment_in_time.weekday()) print(moment_in_time.isoweekday()) other_moment = datetime.fromtimestamp(16_000_000)", "datetime.fromordinal(256) print(moment_in_time) print(moment_in_time.toordinal()) print(moment_in_time.weekday()) print(moment_in_time.isoweekday()) other_moment = datetime.fromtimestamp(16_000_000) print(other_moment) print(other_moment.timestamp())", "print(moment_in_time.weekday()) print(moment_in_time.isoweekday()) other_moment = datetime.fromtimestamp(16_000_000) print(other_moment) print(other_moment.timestamp()) print(other_moment.isocalendar()) if __name__", "other_moment = datetime.fromtimestamp(16_000_000) print(other_moment) print(other_moment.timestamp()) print(other_moment.isocalendar()) if __name__ == \"__main__\":", "= datetime.fromtimestamp(16_000_000) print(other_moment) print(other_moment.timestamp()) print(other_moment.isocalendar()) if __name__ == \"__main__\": run_example()", "print(moment_in_time) print(moment_in_time.toordinal()) print(moment_in_time.weekday()) print(moment_in_time.isoweekday()) other_moment = datetime.fromtimestamp(16_000_000) print(other_moment) print(other_moment.timestamp()) 
print(other_moment.isocalendar())", "print(moment_in_time.toordinal()) print(moment_in_time.weekday()) print(moment_in_time.isoweekday()) other_moment = datetime.fromtimestamp(16_000_000) print(other_moment) print(other_moment.timestamp()) print(other_moment.isocalendar()) if", "<reponame>Mikma03/InfoShareacademy_Python_Courses<filename>Part_3_advanced/m04_datetime_and_timedelta/datetime_formats/example_1.py<gh_stars>0 from datetime import datetime def run_example(): moment_in_time = datetime.fromordinal(256)", "datetime def run_example(): moment_in_time = datetime.fromordinal(256) print(moment_in_time) print(moment_in_time.toordinal()) print(moment_in_time.weekday()) print(moment_in_time.isoweekday())", "= datetime.fromordinal(256) print(moment_in_time) print(moment_in_time.toordinal()) print(moment_in_time.weekday()) print(moment_in_time.isoweekday()) other_moment = datetime.fromtimestamp(16_000_000) print(other_moment)", "print(moment_in_time.isoweekday()) other_moment = datetime.fromtimestamp(16_000_000) print(other_moment) print(other_moment.timestamp()) print(other_moment.isocalendar()) if __name__ ==", "from datetime import datetime def run_example(): moment_in_time = datetime.fromordinal(256) print(moment_in_time)" ]
[ "kaldi.segmentation import NnetSAD, SegmentationProcessor from kaldi.nnet3 import NnetSimpleComputationOptions from kaldi.util.table", "feats_rspec = \"ark:compute-mfcc-feats --config=mfcc.conf scp:wav.scp ark:- |\" # Segment with", "= SegmentationProcessor(target_labels=[2]) # Define feature pipeline as a Kaldi rspecifier", "= \"ark:compute-mfcc-feats --config=mfcc.conf scp:wav.scp ark:- |\" # Segment with SequentialMatrixReader(feats_rspec)", "= NnetSimpleComputationOptions() decodable_opts.extra_left_context = 79 decodable_opts.extra_right_context = 21 decodable_opts.extra_left_context_initial =", "\"w\") as s: for key, feats in f: out =", "|\" # Segment with SequentialMatrixReader(feats_rspec) as f, open (\"segments\", \"w\")", "= NnetSAD.read_average_posteriors(\"post_output.vec\") transform = NnetSAD.make_sad_transform(post) graph = NnetSAD.make_sad_graph() decodable_opts =", "rspecifier feats_rspec = \"ark:compute-mfcc-feats --config=mfcc.conf scp:wav.scp ark:- |\" # Segment", "NnetSAD(model, transform, graph, decodable_opts=decodable_opts) seg = SegmentationProcessor(target_labels=[2]) # Define feature", "seg.write(key, segments, s) print(\"segments:\", segments, flush=True) print(\"stats:\", stats, flush=True) print(\"global", "transform, graph, decodable_opts=decodable_opts) seg = SegmentationProcessor(target_labels=[2]) # Define feature pipeline", "21 decodable_opts.extra_left_context_initial = 0 decodable_opts.extra_right_context_final = 0 decodable_opts.frames_per_chunk = 150", "with SequentialMatrixReader(feats_rspec) as f, open (\"segments\", \"w\") as s: for", "ark:- |\" # Segment with SequentialMatrixReader(feats_rspec) as f, open (\"segments\",", "Segment with SequentialMatrixReader(feats_rspec) as f, open (\"segments\", \"w\") as s:", "= NnetSAD.make_sad_transform(post) graph = NnetSAD.make_sad_graph() decodable_opts = NnetSimpleComputationOptions() decodable_opts.extra_left_context =", "segments, stats = seg.process(out[\"alignment\"]) 
seg.write(key, segments, s) print(\"segments:\", segments, flush=True)", "NnetSAD.make_sad_graph() decodable_opts = NnetSimpleComputationOptions() decodable_opts.extra_left_context = 79 decodable_opts.extra_right_context = 21", "= 0.3 sad = NnetSAD(model, transform, graph, decodable_opts=decodable_opts) seg =", "= sad.segment(feats) segments, stats = seg.process(out[\"alignment\"]) seg.write(key, segments, s) print(\"segments:\",", "import NnetSimpleComputationOptions from kaldi.util.table import SequentialMatrixReader # Construct SAD model", "Define feature pipeline as a Kaldi rspecifier feats_rspec = \"ark:compute-mfcc-feats", "0.3 sad = NnetSAD(model, transform, graph, decodable_opts=decodable_opts) seg = SegmentationProcessor(target_labels=[2])", "sad = NnetSAD(model, transform, graph, decodable_opts=decodable_opts) seg = SegmentationProcessor(target_labels=[2]) #", "\"ark:compute-mfcc-feats --config=mfcc.conf scp:wav.scp ark:- |\" # Segment with SequentialMatrixReader(feats_rspec) as", "= NnetSAD.read_model(\"final.raw\") post = NnetSAD.read_average_posteriors(\"post_output.vec\") transform = NnetSAD.make_sad_transform(post) graph =", "decodable_opts.frames_per_chunk = 150 decodable_opts.acoustic_scale = 0.3 sad = NnetSAD(model, transform,", "key, feats in f: out = sad.segment(feats) segments, stats =", "from __future__ import print_function from kaldi.segmentation import NnetSAD, SegmentationProcessor from", "from kaldi.nnet3 import NnetSimpleComputationOptions from kaldi.util.table import SequentialMatrixReader # Construct", "NnetSimpleComputationOptions from kaldi.util.table import SequentialMatrixReader # Construct SAD model =", "= 21 decodable_opts.extra_left_context_initial = 0 decodable_opts.extra_right_context_final = 0 decodable_opts.frames_per_chunk =", "# Construct SAD model = NnetSAD.read_model(\"final.raw\") post = NnetSAD.read_average_posteriors(\"post_output.vec\") transform", "SequentialMatrixReader # Construct SAD model = 
NnetSAD.read_model(\"final.raw\") post = NnetSAD.read_average_posteriors(\"post_output.vec\")", "= seg.process(out[\"alignment\"]) seg.write(key, segments, s) print(\"segments:\", segments, flush=True) print(\"stats:\", stats,", "as a Kaldi rspecifier feats_rspec = \"ark:compute-mfcc-feats --config=mfcc.conf scp:wav.scp ark:-", "150 decodable_opts.acoustic_scale = 0.3 sad = NnetSAD(model, transform, graph, decodable_opts=decodable_opts)", "open (\"segments\", \"w\") as s: for key, feats in f:", "feats in f: out = sad.segment(feats) segments, stats = seg.process(out[\"alignment\"])", "kaldi.util.table import SequentialMatrixReader # Construct SAD model = NnetSAD.read_model(\"final.raw\") post", "= 0 decodable_opts.extra_right_context_final = 0 decodable_opts.frames_per_chunk = 150 decodable_opts.acoustic_scale =", "stats = seg.process(out[\"alignment\"]) seg.write(key, segments, s) print(\"segments:\", segments, flush=True) print(\"stats:\",", "(\"segments\", \"w\") as s: for key, feats in f: out", "= NnetSAD(model, transform, graph, decodable_opts=decodable_opts) seg = SegmentationProcessor(target_labels=[2]) # Define", "SAD model = NnetSAD.read_model(\"final.raw\") post = NnetSAD.read_average_posteriors(\"post_output.vec\") transform = NnetSAD.make_sad_transform(post)", "kaldi.nnet3 import NnetSimpleComputationOptions from kaldi.util.table import SequentialMatrixReader # Construct SAD", "f: out = sad.segment(feats) segments, stats = seg.process(out[\"alignment\"]) seg.write(key, segments,", "decodable_opts.extra_right_context = 21 decodable_opts.extra_left_context_initial = 0 decodable_opts.extra_right_context_final = 0 decodable_opts.frames_per_chunk", "decodable_opts.extra_left_context_initial = 0 decodable_opts.extra_right_context_final = 0 decodable_opts.frames_per_chunk = 150 decodable_opts.acoustic_scale", "Construct SAD model = NnetSAD.read_model(\"final.raw\") post = NnetSAD.read_average_posteriors(\"post_output.vec\") transform =", "from 
kaldi.segmentation import NnetSAD, SegmentationProcessor from kaldi.nnet3 import NnetSimpleComputationOptions from", "import NnetSAD, SegmentationProcessor from kaldi.nnet3 import NnetSimpleComputationOptions from kaldi.util.table import", "# Define feature pipeline as a Kaldi rspecifier feats_rspec =", "s) print(\"segments:\", segments, flush=True) print(\"stats:\", stats, flush=True) print(\"global stats:\", seg.stats,", "__future__ import print_function from kaldi.segmentation import NnetSAD, SegmentationProcessor from kaldi.nnet3", "as s: for key, feats in f: out = sad.segment(feats)", "# Segment with SequentialMatrixReader(feats_rspec) as f, open (\"segments\", \"w\") as", "0 decodable_opts.extra_right_context_final = 0 decodable_opts.frames_per_chunk = 150 decodable_opts.acoustic_scale = 0.3", "print(\"segments:\", segments, flush=True) print(\"stats:\", stats, flush=True) print(\"global stats:\", seg.stats, flush=True)", "= NnetSAD.make_sad_graph() decodable_opts = NnetSimpleComputationOptions() decodable_opts.extra_left_context = 79 decodable_opts.extra_right_context =", "a Kaldi rspecifier feats_rspec = \"ark:compute-mfcc-feats --config=mfcc.conf scp:wav.scp ark:- |\"", "scp:wav.scp ark:- |\" # Segment with SequentialMatrixReader(feats_rspec) as f, open", "print_function from kaldi.segmentation import NnetSAD, SegmentationProcessor from kaldi.nnet3 import NnetSimpleComputationOptions", "NnetSimpleComputationOptions() decodable_opts.extra_left_context = 79 decodable_opts.extra_right_context = 21 decodable_opts.extra_left_context_initial = 0", "import print_function from kaldi.segmentation import NnetSAD, SegmentationProcessor from kaldi.nnet3 import", "SequentialMatrixReader(feats_rspec) as f, open (\"segments\", \"w\") as s: for key,", "0 decodable_opts.frames_per_chunk = 150 decodable_opts.acoustic_scale = 0.3 sad = NnetSAD(model,", "as f, open (\"segments\", \"w\") as s: for key, feats", "feature pipeline as a Kaldi rspecifier feats_rspec = 
\"ark:compute-mfcc-feats --config=mfcc.conf", "graph = NnetSAD.make_sad_graph() decodable_opts = NnetSimpleComputationOptions() decodable_opts.extra_left_context = 79 decodable_opts.extra_right_context", "python from __future__ import print_function from kaldi.segmentation import NnetSAD, SegmentationProcessor", "= 79 decodable_opts.extra_right_context = 21 decodable_opts.extra_left_context_initial = 0 decodable_opts.extra_right_context_final =", "decodable_opts.extra_right_context_final = 0 decodable_opts.frames_per_chunk = 150 decodable_opts.acoustic_scale = 0.3 sad", "pipeline as a Kaldi rspecifier feats_rspec = \"ark:compute-mfcc-feats --config=mfcc.conf scp:wav.scp", "SegmentationProcessor from kaldi.nnet3 import NnetSimpleComputationOptions from kaldi.util.table import SequentialMatrixReader #", "import SequentialMatrixReader # Construct SAD model = NnetSAD.read_model(\"final.raw\") post =", "transform = NnetSAD.make_sad_transform(post) graph = NnetSAD.make_sad_graph() decodable_opts = NnetSimpleComputationOptions() decodable_opts.extra_left_context", "Kaldi rspecifier feats_rspec = \"ark:compute-mfcc-feats --config=mfcc.conf scp:wav.scp ark:- |\" #", "segments, s) print(\"segments:\", segments, flush=True) print(\"stats:\", stats, flush=True) print(\"global stats:\",", "79 decodable_opts.extra_right_context = 21 decodable_opts.extra_left_context_initial = 0 decodable_opts.extra_right_context_final = 0", "post = NnetSAD.read_average_posteriors(\"post_output.vec\") transform = NnetSAD.make_sad_transform(post) graph = NnetSAD.make_sad_graph() decodable_opts", "f, open (\"segments\", \"w\") as s: for key, feats in", "in f: out = sad.segment(feats) segments, stats = seg.process(out[\"alignment\"]) seg.write(key,", "decodable_opts.extra_left_context = 79 decodable_opts.extra_right_context = 21 decodable_opts.extra_left_context_initial = 0 decodable_opts.extra_right_context_final", "s: for key, feats in f: out = sad.segment(feats) segments,", "NnetSAD, 
SegmentationProcessor from kaldi.nnet3 import NnetSimpleComputationOptions from kaldi.util.table import SequentialMatrixReader", "NnetSAD.read_model(\"final.raw\") post = NnetSAD.read_average_posteriors(\"post_output.vec\") transform = NnetSAD.make_sad_transform(post) graph = NnetSAD.make_sad_graph()", "from kaldi.util.table import SequentialMatrixReader # Construct SAD model = NnetSAD.read_model(\"final.raw\")", "decodable_opts=decodable_opts) seg = SegmentationProcessor(target_labels=[2]) # Define feature pipeline as a", "seg = SegmentationProcessor(target_labels=[2]) # Define feature pipeline as a Kaldi", "--config=mfcc.conf scp:wav.scp ark:- |\" # Segment with SequentialMatrixReader(feats_rspec) as f,", "NnetSAD.make_sad_transform(post) graph = NnetSAD.make_sad_graph() decodable_opts = NnetSimpleComputationOptions() decodable_opts.extra_left_context = 79", "NnetSAD.read_average_posteriors(\"post_output.vec\") transform = NnetSAD.make_sad_transform(post) graph = NnetSAD.make_sad_graph() decodable_opts = NnetSimpleComputationOptions()", "decodable_opts = NnetSimpleComputationOptions() decodable_opts.extra_left_context = 79 decodable_opts.extra_right_context = 21 decodable_opts.extra_left_context_initial", "seg.process(out[\"alignment\"]) seg.write(key, segments, s) print(\"segments:\", segments, flush=True) print(\"stats:\", stats, flush=True)", "= 150 decodable_opts.acoustic_scale = 0.3 sad = NnetSAD(model, transform, graph,", "#!/usr/bin/env python from __future__ import print_function from kaldi.segmentation import NnetSAD,", "out = sad.segment(feats) segments, stats = seg.process(out[\"alignment\"]) seg.write(key, segments, s)", "model = NnetSAD.read_model(\"final.raw\") post = NnetSAD.read_average_posteriors(\"post_output.vec\") transform = NnetSAD.make_sad_transform(post) graph", "= 0 decodable_opts.frames_per_chunk = 150 decodable_opts.acoustic_scale = 0.3 sad =", "SegmentationProcessor(target_labels=[2]) # Define feature pipeline as a Kaldi rspecifier 
feats_rspec", "sad.segment(feats) segments, stats = seg.process(out[\"alignment\"]) seg.write(key, segments, s) print(\"segments:\", segments,", "graph, decodable_opts=decodable_opts) seg = SegmentationProcessor(target_labels=[2]) # Define feature pipeline as", "for key, feats in f: out = sad.segment(feats) segments, stats", "decodable_opts.acoustic_scale = 0.3 sad = NnetSAD(model, transform, graph, decodable_opts=decodable_opts) seg" ]
[ "labels = dataset[split]['class'] self.labels = torch.tensor(labels, dtype=torch.float32) def __len__(self): return", "DataFrameDataset(Dataset): def __init__(self, tokenizer: Tokenizer, df: pd.DataFrame, text_column: str, label_column:", "pd.DataFrame, text_column: str, label_column: str, max_length: int = 256, padding:", "load_dataset(\"jason9693/APEACH\") texts = dataset[split]['text'] inputs = tokenizer(texts, padding=padding, max_length=max_length, truncation=True,", "padding=padding, max_length=max_length, truncation=True, return_tensors=\"pt\") self.input_ids = inputs[\"input_ids\"] self.attention_masks = inputs[\"attention_mask\"]", "str, teacher_tokenizer: Tokenizer, student_tokenizer: Tokenizer, max_length: int = 256, padding:", "torch.stack(soft_labels) def __getitem__(self, index: Any) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]:", "def dataloader(self, **kwargs) -> DataLoader: return DataLoader(self, **kwargs) class DataFrameStudentDataset(DataFrameDataset):", "torch.nn.Module, teacher_tokenizer: Tokenizer, student_tokenizer: Tokenizer, df: pd.DataFrame, text_column: str, label_column:", "str=\"cuda\") -> None: super().__init__(split, student_tokenizer, max_length, padding) teacher_ds = ApeachDataset(split,", "str = \"max_length\") -> None: super().__init__() inputs = tokenizer(df[text_column].to_list(), padding=padding,", "= 'cuda') -> None: super().__init__(student_tokenizer, df, text_column, label_column, max_length, padding)", "dataloader(self, **kwargs) -> DataLoader: return DataLoader(self, **kwargs) class DataFrameStudentDataset(DataFrameDataset): def", "return DataLoader(self, **kwargs) class DataFrameStudentDataset(DataFrameDataset): def __init__(self, teacher_model: torch.nn.Module, teacher_tokenizer:", "padding: str = \"max_length\") -> None: super().__init__() dataset = load_dataset(\"jason9693/APEACH\")", "<reponame>HeegyuKim/CurseFilter from cProfile import label from matplotlib.pyplot import text import", "padding 
) teacher_model = teacher_model.to(device) with torch.no_grad(): soft_labels = [self._get_soft_label(teacher_model,", "len(df[label_column].unique()) > 2 else np.float32 self.labels = torch.from_numpy(df[label_column].values.astype(dtype)) def __len__(self):", "tokenizer(texts, padding=padding, max_length=max_length, truncation=True, return_tensors=\"pt\") self.input_ids = inputs[\"input_ids\"] self.attention_masks =", "from cProfile import label from matplotlib.pyplot import text import pandas", "int = 256, padding: str = \"max_length\", device: str=\"cuda\") ->", "Dataset, DataLoader from typing import Dict, Any, Tuple from datasets", "Any, Tuple from datasets import load_dataset class DataFrameDataset(Dataset): def __init__(self,", "import load_dataset class DataFrameDataset(Dataset): def __init__(self, tokenizer: Tokenizer, df: pd.DataFrame,", "padding) teacher_ds = DataFrameDataset( teacher_tokenizer, df, text_column, label_column, max_length, padding", "dtype = np.int64 if len(df[label_column].unique()) > 2 else np.float32 self.labels", "teacher_model.to(device) with torch.no_grad(): soft_labels = [self._get_soft_label(teacher_model, teacher_ds, i, device) for", "[self._get_soft_label(teacher_model, teacher_ds, i, device) for i in range(len(self))] self.soft_labels =", "None: super().__init__() inputs = tokenizer(df[text_column].to_list(), padding=padding, max_length=max_length, truncation=True, return_tensors=\"pt\") self.input_ids", "__len__(self): return self.input_ids.shape[0] def __getitem__(self, index: Any) -> Dict: return", "\"max_length\", device: str=\"cuda\") -> None: super().__init__(split, student_tokenizer, max_length, padding) teacher_ds", "= torch.from_numpy(df[label_column].values.astype(dtype)) def __len__(self): return self.input_ids.shape[0] def __getitem__(self, index: Any)", "torch.utils.data import Dataset, DataLoader from typing import Dict, Any, Tuple", "= np.int64 if len(df[label_column].unique()) > 2 else np.float32 self.labels =", 
"student_tokenizer: Tokenizer, df: pd.DataFrame, text_column: str, label_column: str, max_length: int", "str, max_length: int = 256, padding: str = \"max_length\") ->", "2 else np.float32 self.labels = torch.from_numpy(df[label_column].values.astype(dtype)) def __len__(self): return self.input_ids.shape[0]", "with torch.no_grad(): soft_labels = [self._get_soft_label(teacher_model, teacher_ds, i, device) for i", "= 256, padding: str = \"max_length\", device: str = 'cuda')", "split: str, teacher_tokenizer: Tokenizer, student_tokenizer: Tokenizer, max_length: int = 256,", "padding) teacher_model = teacher_model.to(device) with torch.no_grad(): soft_labels = [self._get_soft_label(teacher_model, teacher_ds,", "256, padding: str = \"max_length\") -> None: super().__init__() dataset =", "max_length, padding) teacher_ds = ApeachDataset(split, teacher_tokenizer, max_length, padding) teacher_model =", "torch.Tensor, torch.Tensor, torch.Tensor]: return *super().__getitem__(index), self.soft_labels[index] def _get_soft_label(self, model, teacher_ds,", "ids, mask, _ = teacher_ds[index] ids = ids.unsqueeze(0).to(device) mask =", "dataset[split]['class'] self.labels = torch.tensor(labels, dtype=torch.float32) def __len__(self): return self.input_ids.shape[0] def", "Tokenizer, df: pd.DataFrame, text_column: str, label_column: str, max_length: int =", "= 256, padding: str = \"max_length\") -> None: super().__init__() inputs", "= mask.unsqueeze(0).to(device) return model(ids, mask).cpu().squeeze(0) class ApeachDataset(Dataset): def __init__(self, split:", "max_length: int = 256, padding: str = \"max_length\", device: str=\"cuda\")", "label from matplotlib.pyplot import text import pandas as pd import", "from torch.utils.data import Dataset, DataLoader from typing import Dict, Any,", "= tokenizer(df[text_column].to_list(), padding=padding, max_length=max_length, truncation=True, return_tensors=\"pt\") self.input_ids = inputs[\"input_ids\"] self.attention_masks", 
"mask.unsqueeze(0).to(device) return model(ids, mask).cpu().squeeze(0) class ApeachDataset(Dataset): def __init__(self, split: str,", "typing import Dict, Any, Tuple from datasets import load_dataset class", "__init__(self, tokenizer: Tokenizer, df: pd.DataFrame, text_column: str, label_column: str, max_length:", "range(len(self))] self.soft_labels = torch.stack(soft_labels) def __getitem__(self, index: Any) -> Tuple[torch.Tensor,", "inputs[\"input_ids\"] self.attention_masks = inputs[\"attention_mask\"] dtype = np.int64 if len(df[label_column].unique()) >", "tokenizer: Tokenizer, max_length: int = 256, padding: str = \"max_length\")", "__init__(self, teacher_model: torch.nn.Module, split: str, teacher_tokenizer: Tokenizer, student_tokenizer: Tokenizer, max_length:", "self.input_ids.shape[0] def __getitem__(self, index: Any) -> Dict: return self.input_ids[index], self.attention_masks[index],", "str = \"max_length\") -> None: super().__init__() dataset = load_dataset(\"jason9693/APEACH\") texts", "teacher_ds = DataFrameDataset( teacher_tokenizer, df, text_column, label_column, max_length, padding )", "= \"max_length\") -> None: super().__init__() inputs = tokenizer(df[text_column].to_list(), padding=padding, max_length=max_length,", "import Dataset, DataLoader from typing import Dict, Any, Tuple from", "index, device): ids, mask, _ = teacher_ds[index] ids = ids.unsqueeze(0).to(device)", "import Tokenizer import torch from torch.utils.data import Dataset, DataLoader from", "class DataFrameStudentDataset(DataFrameDataset): def __init__(self, teacher_model: torch.nn.Module, teacher_tokenizer: Tokenizer, student_tokenizer: Tokenizer,", "else np.float32 self.labels = torch.from_numpy(df[label_column].values.astype(dtype)) def __len__(self): return self.input_ids.shape[0] def", "def __len__(self): return self.input_ids.shape[0] def __getitem__(self, index: Any) -> Dict:", "return DataLoader(self, **kwargs) class ApeachStudentDataset(ApeachDataset): def __init__(self, 
teacher_model: torch.nn.Module, split:", "**kwargs) class DataFrameStudentDataset(DataFrameDataset): def __init__(self, teacher_model: torch.nn.Module, teacher_tokenizer: Tokenizer, student_tokenizer:", "from datasets import load_dataset class DataFrameDataset(Dataset): def __init__(self, tokenizer: Tokenizer,", "'cuda') -> None: super().__init__(student_tokenizer, df, text_column, label_column, max_length, padding) teacher_ds", "return_tensors=\"pt\") self.input_ids = inputs[\"input_ids\"] self.attention_masks = inputs[\"attention_mask\"] dtype = np.int64", "> 2 else np.float32 self.labels = torch.from_numpy(df[label_column].values.astype(dtype)) def __len__(self): return", "max_length: int = 256, padding: str = \"max_length\") -> None:", "model, teacher_ds, index, device): ids, mask, _ = teacher_ds[index] ids", "pd import numpy as np from tokenizers import Tokenizer import", "self.soft_labels = torch.stack(soft_labels) def __getitem__(self, index: Any) -> Tuple[torch.Tensor, torch.Tensor,", "self.input_ids = inputs[\"input_ids\"] self.attention_masks = inputs[\"attention_mask\"] labels = dataset[split]['class'] self.labels", "i in range(len(self))] self.soft_labels = torch.stack(soft_labels) def __getitem__(self, index: Any)", "= inputs[\"input_ids\"] self.attention_masks = inputs[\"attention_mask\"] labels = dataset[split]['class'] self.labels =", "= inputs[\"attention_mask\"] dtype = np.int64 if len(df[label_column].unique()) > 2 else", "_get_soft_label(self, model, teacher_ds, index, device): ids, mask, _ = teacher_ds[index]", "= tokenizer(texts, padding=padding, max_length=max_length, truncation=True, return_tensors=\"pt\") self.input_ids = inputs[\"input_ids\"] self.attention_masks", "teacher_tokenizer: Tokenizer, student_tokenizer: Tokenizer, max_length: int = 256, padding: str", "padding) teacher_ds = ApeachDataset(split, teacher_tokenizer, max_length, padding) teacher_model = teacher_model.to(device)", "load_dataset class DataFrameDataset(Dataset): def 
__init__(self, tokenizer: Tokenizer, df: pd.DataFrame, text_column:", "soft_labels = [self._get_soft_label(teacher_model, teacher_ds, i, device) for i in range(len(self))]", "**kwargs) class ApeachStudentDataset(ApeachDataset): def __init__(self, teacher_model: torch.nn.Module, split: str, teacher_tokenizer:", "str, max_length: int = 256, padding: str = \"max_length\", device:", "ApeachDataset(split, teacher_tokenizer, max_length, padding) teacher_model = teacher_model.to(device) with torch.no_grad(): soft_labels", "= \"max_length\", device: str = 'cuda') -> None: super().__init__(student_tokenizer, df,", "Tuple from datasets import load_dataset class DataFrameDataset(Dataset): def __init__(self, tokenizer:", "__getitem__(self, index: Any) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]: return *super().__getitem__(index),", "teacher_model: torch.nn.Module, teacher_tokenizer: Tokenizer, student_tokenizer: Tokenizer, df: pd.DataFrame, text_column: str,", "label_column, max_length, padding) teacher_ds = DataFrameDataset( teacher_tokenizer, df, text_column, label_column,", "import label from matplotlib.pyplot import text import pandas as pd", "teacher_ds, i, device) for i in range(len(self))] self.soft_labels = torch.stack(soft_labels)", "self.attention_masks[index], self.labels[index] def dataloader(self, **kwargs) -> DataLoader: return DataLoader(self, **kwargs)", "= teacher_model.to(device) with torch.no_grad(): soft_labels = [self._get_soft_label(teacher_model, teacher_ds, i, device)", "import Dict, Any, Tuple from datasets import load_dataset class DataFrameDataset(Dataset):", "student_tokenizer: Tokenizer, max_length: int = 256, padding: str = \"max_length\",", "super().__init__() dataset = load_dataset(\"jason9693/APEACH\") texts = dataset[split]['text'] inputs = tokenizer(texts,", "self.labels[index] def dataloader(self, **kwargs) -> DataLoader: return DataLoader(self, **kwargs) class", "for i in range(len(self))] self.soft_labels = 
torch.stack(soft_labels) def __getitem__(self, index:", "return *super().__getitem__(index), self.soft_labels[index] def _get_soft_label(self, model, teacher_ds, index, device): ids,", "DataLoader(self, **kwargs) class DataFrameStudentDataset(DataFrameDataset): def __init__(self, teacher_model: torch.nn.Module, teacher_tokenizer: Tokenizer,", "def __init__(self, teacher_model: torch.nn.Module, split: str, teacher_tokenizer: Tokenizer, student_tokenizer: Tokenizer,", "return model(ids, mask).cpu().squeeze(0) class ApeachDataset(Dataset): def __init__(self, split: str, tokenizer:", "dataset = load_dataset(\"jason9693/APEACH\") texts = dataset[split]['text'] inputs = tokenizer(texts, padding=padding,", "-> None: super().__init__() dataset = load_dataset(\"jason9693/APEACH\") texts = dataset[split]['text'] inputs", "self.attention_masks = inputs[\"attention_mask\"] dtype = np.int64 if len(df[label_column].unique()) > 2", "torch.from_numpy(df[label_column].values.astype(dtype)) def __len__(self): return self.input_ids.shape[0] def __getitem__(self, index: Any) ->", "inputs = tokenizer(texts, padding=padding, max_length=max_length, truncation=True, return_tensors=\"pt\") self.input_ids = inputs[\"input_ids\"]", "256, padding: str = \"max_length\", device: str=\"cuda\") -> None: super().__init__(split,", "torch.no_grad(): soft_labels = [self._get_soft_label(teacher_model, teacher_ds, i, device) for i in", "= ids.unsqueeze(0).to(device) mask = mask.unsqueeze(0).to(device) return model(ids, mask).cpu().squeeze(0) class ApeachDataset(Dataset):", "__init__(self, split: str, tokenizer: Tokenizer, max_length: int = 256, padding:", "= \"max_length\") -> None: super().__init__() dataset = load_dataset(\"jason9693/APEACH\") texts =", "pandas as pd import numpy as np from tokenizers import", "int = 256, padding: str = \"max_length\") -> None: super().__init__()", "Tokenizer, student_tokenizer: Tokenizer, max_length: int = 256, padding: str =", "device) for i in range(len(self))] 
self.soft_labels = torch.stack(soft_labels) def __getitem__(self,", "padding: str = \"max_length\", device: str=\"cuda\") -> None: super().__init__(split, student_tokenizer,", "split: str, tokenizer: Tokenizer, max_length: int = 256, padding: str", "-> Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]: return *super().__getitem__(index), self.soft_labels[index] def _get_soft_label(self,", "max_length: int = 256, padding: str = \"max_length\", device: str", "self.input_ids = inputs[\"input_ids\"] self.attention_masks = inputs[\"attention_mask\"] dtype = np.int64 if", "str, tokenizer: Tokenizer, max_length: int = 256, padding: str =", "str = 'cuda') -> None: super().__init__(student_tokenizer, df, text_column, label_column, max_length,", "df, text_column, label_column, max_length, padding) teacher_ds = DataFrameDataset( teacher_tokenizer, df,", "from typing import Dict, Any, Tuple from datasets import load_dataset", "-> DataLoader: return DataLoader(self, **kwargs) class DataFrameStudentDataset(DataFrameDataset): def __init__(self, teacher_model:", "-> None: super().__init__() inputs = tokenizer(df[text_column].to_list(), padding=padding, max_length=max_length, truncation=True, return_tensors=\"pt\")", "def __init__(self, teacher_model: torch.nn.Module, teacher_tokenizer: Tokenizer, student_tokenizer: Tokenizer, df: pd.DataFrame,", "= 256, padding: str = \"max_length\", device: str=\"cuda\") -> None:", "256, padding: str = \"max_length\") -> None: super().__init__() inputs =", "max_length, padding ) teacher_model = teacher_model.to(device) with torch.no_grad(): soft_labels =", "**kwargs) -> DataLoader: return DataLoader(self, **kwargs) class DataFrameStudentDataset(DataFrameDataset): def __init__(self,", "self.soft_labels[index] def _get_soft_label(self, model, teacher_ds, index, device): ids, mask, _", "model(ids, mask).cpu().squeeze(0) class ApeachDataset(Dataset): def __init__(self, split: str, tokenizer: Tokenizer,", "label_column: str, max_length: 
int = 256, padding: str = \"max_length\")", "teacher_model: torch.nn.Module, split: str, teacher_tokenizer: Tokenizer, student_tokenizer: Tokenizer, max_length: int", "Any) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]: return *super().__getitem__(index), self.soft_labels[index] def", "__getitem__(self, index: Any) -> Dict: return self.input_ids[index], self.attention_masks[index], self.labels[index] def", "as pd import numpy as np from tokenizers import Tokenizer", "numpy as np from tokenizers import Tokenizer import torch from", "padding: str = \"max_length\") -> None: super().__init__() inputs = tokenizer(df[text_column].to_list(),", "def __init__(self, tokenizer: Tokenizer, df: pd.DataFrame, text_column: str, label_column: str,", "texts = dataset[split]['text'] inputs = tokenizer(texts, padding=padding, max_length=max_length, truncation=True, return_tensors=\"pt\")", "DataLoader(self, **kwargs) class ApeachStudentDataset(ApeachDataset): def __init__(self, teacher_model: torch.nn.Module, split: str,", "Tokenizer import torch from torch.utils.data import Dataset, DataLoader from typing", "ApeachStudentDataset(ApeachDataset): def __init__(self, teacher_model: torch.nn.Module, split: str, teacher_tokenizer: Tokenizer, student_tokenizer:", "= 256, padding: str = \"max_length\") -> None: super().__init__() dataset", "**kwargs) -> DataLoader: return DataLoader(self, **kwargs) class ApeachStudentDataset(ApeachDataset): def __init__(self,", "dtype=torch.float32) def __len__(self): return self.input_ids.shape[0] def __getitem__(self, index: Any) ->", "None: super().__init__(split, student_tokenizer, max_length, padding) teacher_ds = ApeachDataset(split, teacher_tokenizer, max_length,", "text import pandas as pd import numpy as np from", "dataset[split]['text'] inputs = tokenizer(texts, padding=padding, max_length=max_length, truncation=True, return_tensors=\"pt\") self.input_ids =", ") teacher_model = teacher_model.to(device) with torch.no_grad(): 
soft_labels = [self._get_soft_label(teacher_model, teacher_ds,", "truncation=True, return_tensors=\"pt\") self.input_ids = inputs[\"input_ids\"] self.attention_masks = inputs[\"attention_mask\"] dtype =", "super().__init__(student_tokenizer, df, text_column, label_column, max_length, padding) teacher_ds = DataFrameDataset( teacher_tokenizer,", "datasets import load_dataset class DataFrameDataset(Dataset): def __init__(self, tokenizer: Tokenizer, df:", "__init__(self, teacher_model: torch.nn.Module, teacher_tokenizer: Tokenizer, student_tokenizer: Tokenizer, df: pd.DataFrame, text_column:", "from tokenizers import Tokenizer import torch from torch.utils.data import Dataset,", "label_column: str, max_length: int = 256, padding: str = \"max_length\",", "def __getitem__(self, index: Any) -> Dict: return self.input_ids[index], self.attention_masks[index], self.labels[index]", "= ApeachDataset(split, teacher_tokenizer, max_length, padding) teacher_model = teacher_model.to(device) with torch.no_grad():", "i, device) for i in range(len(self))] self.soft_labels = torch.stack(soft_labels) def", "self.input_ids[index], self.attention_masks[index], self.labels[index] def dataloader(self, **kwargs) -> DataLoader: return DataLoader(self,", "ids.unsqueeze(0).to(device) mask = mask.unsqueeze(0).to(device) return model(ids, mask).cpu().squeeze(0) class ApeachDataset(Dataset): def", "import numpy as np from tokenizers import Tokenizer import torch", "Tokenizer, max_length: int = 256, padding: str = \"max_length\", device:", "if len(df[label_column].unique()) > 2 else np.float32 self.labels = torch.from_numpy(df[label_column].values.astype(dtype)) def", "self.labels = torch.from_numpy(df[label_column].values.astype(dtype)) def __len__(self): return self.input_ids.shape[0] def __getitem__(self, index:", "inputs = tokenizer(df[text_column].to_list(), padding=padding, max_length=max_length, truncation=True, return_tensors=\"pt\") self.input_ids = inputs[\"input_ids\"]", "cProfile import 
label from matplotlib.pyplot import text import pandas as", "DataLoader from typing import Dict, Any, Tuple from datasets import", "class ApeachDataset(Dataset): def __init__(self, split: str, tokenizer: Tokenizer, max_length: int", "256, padding: str = \"max_length\", device: str = 'cuda') ->", "text_column, label_column, max_length, padding ) teacher_model = teacher_model.to(device) with torch.no_grad():", "np.float32 self.labels = torch.from_numpy(df[label_column].values.astype(dtype)) def __len__(self): return self.input_ids.shape[0] def __getitem__(self,", "DataLoader: return DataLoader(self, **kwargs) class DataFrameStudentDataset(DataFrameDataset): def __init__(self, teacher_model: torch.nn.Module,", "None: super().__init__() dataset = load_dataset(\"jason9693/APEACH\") texts = dataset[split]['text'] inputs =", "torch.Tensor]: return *super().__getitem__(index), self.soft_labels[index] def _get_soft_label(self, model, teacher_ds, index, device):", "as np from tokenizers import Tokenizer import torch from torch.utils.data", "None: super().__init__(student_tokenizer, df, text_column, label_column, max_length, padding) teacher_ds = DataFrameDataset(", "return self.input_ids.shape[0] def __getitem__(self, index: Any) -> Dict: return self.input_ids[index],", "max_length=max_length, truncation=True, return_tensors=\"pt\") self.input_ids = inputs[\"input_ids\"] self.attention_masks = inputs[\"attention_mask\"] labels", "str = \"max_length\", device: str=\"cuda\") -> None: super().__init__(split, student_tokenizer, max_length,", "torch.Tensor, torch.Tensor]: return *super().__getitem__(index), self.soft_labels[index] def _get_soft_label(self, model, teacher_ds, index,", "inputs[\"attention_mask\"] labels = dataset[split]['class'] self.labels = torch.tensor(labels, dtype=torch.float32) def __len__(self):", "truncation=True, return_tensors=\"pt\") self.input_ids = inputs[\"input_ids\"] self.attention_masks = inputs[\"attention_mask\"] labels =", "index: Any) -> Dict: 
return self.input_ids[index], self.attention_masks[index], self.labels[index] def dataloader(self,", "def __getitem__(self, index: Any) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]: return", "tokenizer: Tokenizer, df: pd.DataFrame, text_column: str, label_column: str, max_length: int", "= [self._get_soft_label(teacher_model, teacher_ds, i, device) for i in range(len(self))] self.soft_labels", "import torch from torch.utils.data import Dataset, DataLoader from typing import", "mask, _ = teacher_ds[index] ids = ids.unsqueeze(0).to(device) mask = mask.unsqueeze(0).to(device)", "Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]: return *super().__getitem__(index), self.soft_labels[index] def _get_soft_label(self, model,", "mask).cpu().squeeze(0) class ApeachDataset(Dataset): def __init__(self, split: str, tokenizer: Tokenizer, max_length:", "= torch.tensor(labels, dtype=torch.float32) def __len__(self): return self.input_ids.shape[0] def __getitem__(self, index:", "teacher_tokenizer, max_length, padding) teacher_model = teacher_model.to(device) with torch.no_grad(): soft_labels =", "tokenizers import Tokenizer import torch from torch.utils.data import Dataset, DataLoader", "inputs[\"input_ids\"] self.attention_masks = inputs[\"attention_mask\"] labels = dataset[split]['class'] self.labels = torch.tensor(labels,", "text_column: str, label_column: str, max_length: int = 256, padding: str", "teacher_tokenizer, df, text_column, label_column, max_length, padding ) teacher_model = teacher_model.to(device)", "from matplotlib.pyplot import text import pandas as pd import numpy", "Any) -> Dict: return self.input_ids[index], self.attention_masks[index], self.labels[index] def dataloader(self, **kwargs)", "super().__init__() inputs = tokenizer(df[text_column].to_list(), padding=padding, max_length=max_length, truncation=True, return_tensors=\"pt\") self.input_ids =", "*super().__getitem__(index), self.soft_labels[index] def _get_soft_label(self, 
model, teacher_ds, index, device): ids, mask,", "ids = ids.unsqueeze(0).to(device) mask = mask.unsqueeze(0).to(device) return model(ids, mask).cpu().squeeze(0) class", "inputs[\"attention_mask\"] dtype = np.int64 if len(df[label_column].unique()) > 2 else np.float32", "-> None: super().__init__(student_tokenizer, df, text_column, label_column, max_length, padding) teacher_ds =", "teacher_ds[index] ids = ids.unsqueeze(0).to(device) mask = mask.unsqueeze(0).to(device) return model(ids, mask).cpu().squeeze(0)", "Dict, Any, Tuple from datasets import load_dataset class DataFrameDataset(Dataset): def", "import pandas as pd import numpy as np from tokenizers", "student_tokenizer, max_length, padding) teacher_ds = ApeachDataset(split, teacher_tokenizer, max_length, padding) teacher_model", "str, label_column: str, max_length: int = 256, padding: str =", "def __init__(self, split: str, tokenizer: Tokenizer, max_length: int = 256,", "class DataFrameDataset(Dataset): def __init__(self, tokenizer: Tokenizer, df: pd.DataFrame, text_column: str,", "self.attention_masks = inputs[\"attention_mask\"] labels = dataset[split]['class'] self.labels = torch.tensor(labels, dtype=torch.float32)", "np from tokenizers import Tokenizer import torch from torch.utils.data import", "teacher_ds, index, device): ids, mask, _ = teacher_ds[index] ids =", "matplotlib.pyplot import text import pandas as pd import numpy as", "max_length, padding) teacher_ds = DataFrameDataset( teacher_tokenizer, df, text_column, label_column, max_length,", "max_length, padding) teacher_model = teacher_model.to(device) with torch.no_grad(): soft_labels = [self._get_soft_label(teacher_model,", "_ = teacher_ds[index] ids = ids.unsqueeze(0).to(device) mask = mask.unsqueeze(0).to(device) return", "text_column, label_column, max_length, padding) teacher_ds = DataFrameDataset( teacher_tokenizer, df, text_column,", "int = 256, padding: str = \"max_length\", device: str =", "tokenizer(df[text_column].to_list(), 
padding=padding, max_length=max_length, truncation=True, return_tensors=\"pt\") self.input_ids = inputs[\"input_ids\"] self.attention_masks =", "df, text_column, label_column, max_length, padding ) teacher_model = teacher_model.to(device) with", "torch from torch.utils.data import Dataset, DataLoader from typing import Dict,", "\"max_length\") -> None: super().__init__() dataset = load_dataset(\"jason9693/APEACH\") texts = dataset[split]['text']", "= dataset[split]['class'] self.labels = torch.tensor(labels, dtype=torch.float32) def __len__(self): return self.input_ids.shape[0]", "-> DataLoader: return DataLoader(self, **kwargs) class ApeachStudentDataset(ApeachDataset): def __init__(self, teacher_model:", "DataLoader: return DataLoader(self, **kwargs) class ApeachStudentDataset(ApeachDataset): def __init__(self, teacher_model: torch.nn.Module,", "-> Dict: return self.input_ids[index], self.attention_masks[index], self.labels[index] def dataloader(self, **kwargs) ->", "padding: str = \"max_length\", device: str = 'cuda') -> None:", "max_length=max_length, truncation=True, return_tensors=\"pt\") self.input_ids = inputs[\"input_ids\"] self.attention_masks = inputs[\"attention_mask\"] dtype", "str = \"max_length\", device: str = 'cuda') -> None: super().__init__(student_tokenizer,", "DataFrameDataset( teacher_tokenizer, df, text_column, label_column, max_length, padding ) teacher_model =", "mask = mask.unsqueeze(0).to(device) return model(ids, mask).cpu().squeeze(0) class ApeachDataset(Dataset): def __init__(self,", "ApeachDataset(Dataset): def __init__(self, split: str, tokenizer: Tokenizer, max_length: int =", "self.labels = torch.tensor(labels, dtype=torch.float32) def __len__(self): return self.input_ids.shape[0] def __getitem__(self,", "class ApeachStudentDataset(ApeachDataset): def __init__(self, teacher_model: torch.nn.Module, split: str, teacher_tokenizer: Tokenizer,", "in range(len(self))] self.soft_labels = torch.stack(soft_labels) def __getitem__(self, 
index: Any) ->", "device: str = 'cuda') -> None: super().__init__(student_tokenizer, df, text_column, label_column,", "dataloader(self, **kwargs) -> DataLoader: return DataLoader(self, **kwargs) class ApeachStudentDataset(ApeachDataset): def", "= \"max_length\", device: str=\"cuda\") -> None: super().__init__(split, student_tokenizer, max_length, padding)", "df: pd.DataFrame, text_column: str, label_column: str, max_length: int = 256,", "= teacher_ds[index] ids = ids.unsqueeze(0).to(device) mask = mask.unsqueeze(0).to(device) return model(ids,", "Tokenizer, student_tokenizer: Tokenizer, df: pd.DataFrame, text_column: str, label_column: str, max_length:", "np.int64 if len(df[label_column].unique()) > 2 else np.float32 self.labels = torch.from_numpy(df[label_column].values.astype(dtype))", "\"max_length\") -> None: super().__init__() inputs = tokenizer(df[text_column].to_list(), padding=padding, max_length=max_length, truncation=True,", "return self.input_ids[index], self.attention_masks[index], self.labels[index] def dataloader(self, **kwargs) -> DataLoader: return", "= inputs[\"input_ids\"] self.attention_masks = inputs[\"attention_mask\"] dtype = np.int64 if len(df[label_column].unique())", "return_tensors=\"pt\") self.input_ids = inputs[\"input_ids\"] self.attention_masks = inputs[\"attention_mask\"] labels = dataset[split]['class']", "def dataloader(self, **kwargs) -> DataLoader: return DataLoader(self, **kwargs) class ApeachStudentDataset(ApeachDataset):", "torch.nn.Module, split: str, teacher_tokenizer: Tokenizer, student_tokenizer: Tokenizer, max_length: int =", "= torch.stack(soft_labels) def __getitem__(self, index: Any) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor,", "= inputs[\"attention_mask\"] labels = dataset[split]['class'] self.labels = torch.tensor(labels, dtype=torch.float32) def", "-> None: super().__init__(split, student_tokenizer, max_length, padding) teacher_ds = ApeachDataset(split, teacher_tokenizer,", "= DataFrameDataset( 
teacher_tokenizer, df, text_column, label_column, max_length, padding ) teacher_model", "import text import pandas as pd import numpy as np", "def _get_soft_label(self, model, teacher_ds, index, device): ids, mask, _ =", "device: str=\"cuda\") -> None: super().__init__(split, student_tokenizer, max_length, padding) teacher_ds =", "super().__init__(split, student_tokenizer, max_length, padding) teacher_ds = ApeachDataset(split, teacher_tokenizer, max_length, padding)", "\"max_length\", device: str = 'cuda') -> None: super().__init__(student_tokenizer, df, text_column,", "index: Any) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]: return *super().__getitem__(index), self.soft_labels[index]", "Tokenizer, max_length: int = 256, padding: str = \"max_length\") ->", "torch.tensor(labels, dtype=torch.float32) def __len__(self): return self.input_ids.shape[0] def __getitem__(self, index: Any)", "DataFrameStudentDataset(DataFrameDataset): def __init__(self, teacher_model: torch.nn.Module, teacher_tokenizer: Tokenizer, student_tokenizer: Tokenizer, df:", "= load_dataset(\"jason9693/APEACH\") texts = dataset[split]['text'] inputs = tokenizer(texts, padding=padding, max_length=max_length,", "= dataset[split]['text'] inputs = tokenizer(texts, padding=padding, max_length=max_length, truncation=True, return_tensors=\"pt\") self.input_ids", "Dict: return self.input_ids[index], self.attention_masks[index], self.labels[index] def dataloader(self, **kwargs) -> DataLoader:", "teacher_tokenizer: Tokenizer, student_tokenizer: Tokenizer, df: pd.DataFrame, text_column: str, label_column: str,", "label_column, max_length, padding ) teacher_model = teacher_model.to(device) with torch.no_grad(): soft_labels", "teacher_model = teacher_model.to(device) with torch.no_grad(): soft_labels = [self._get_soft_label(teacher_model, teacher_ds, i,", "teacher_ds = ApeachDataset(split, teacher_tokenizer, max_length, padding) teacher_model = teacher_model.to(device) with", "device): ids, 
mask, _ = teacher_ds[index] ids = ids.unsqueeze(0).to(device) mask" ]
[ "not remove my logo screen player = Popen(['omxplayer', '--adev', 'both',", "RPi.GPIO as GPIO FNULL = open(os.devnull, \"w\") # setup GPIO", "<NAME> 2019 import os import sys import glob from subprocess", "= 234) GPIO.add_event_detect(13, GPIO.FALLING, callback = buttonNext, bouncetime = 1234)", "remove my logo screen player = Popen(['omxplayer', '--adev', 'both', '/home/pi/mp4museum.mp4'],stdin=PIPE,stdout=FNULL)", "glob from subprocess import Popen, PIPE import RPi.GPIO as GPIO", "def buttonPause(channel): player.stdin.write(\"p\") def buttonNext(channel): player.stdin.write(\"q\") # add event listener", "'both', '/home/pi/mp4museum.mp4'],stdin=PIPE,stdout=FNULL) player.wait() # the loop while(1): for files in", "called by event listener def buttonPause(channel): player.stdin.write(\"p\") def buttonNext(channel): player.stdin.write(\"q\")", "234) GPIO.add_event_detect(13, GPIO.FALLING, callback = buttonNext, bouncetime = 1234) #", "buttonNext(channel): player.stdin.write(\"q\") # add event listener GPIO.add_event_detect(11, GPIO.FALLING, callback =", "= GPIO.PUD_DOWN) GPIO.setup(13, GPIO.IN, pull_up_down = GPIO.PUD_DOWN) # functions to", "'/home/pi/mp4museum.mp4'],stdin=PIPE,stdout=FNULL) player.wait() # the loop while(1): for files in sorted(glob.glob(r'/media/*/*.mp4')):", "to be called by event listener def buttonPause(channel): player.stdin.write(\"p\") def", "player.stdin.write(\"p\") def buttonNext(channel): player.stdin.write(\"q\") # add event listener GPIO.add_event_detect(11, GPIO.FALLING,", "functions to be called by event listener def buttonPause(channel): player.stdin.write(\"p\")", "1234) # please do not remove my logo screen player", "'--adev', 'both', '/home/pi/mp4museum.mp4'],stdin=PIPE,stdout=FNULL) player.wait() # the loop while(1): for files", "GPIO.add_event_detect(13, GPIO.FALLING, callback = buttonNext, bouncetime = 1234) # please", "import os import sys import glob from subprocess import Popen,", "be called by event listener def 
buttonPause(channel): player.stdin.write(\"p\") def buttonNext(channel):", "subprocess import Popen, PIPE import RPi.GPIO as GPIO FNULL =", "# setup GPIO pin GPIO.setmode(GPIO.BOARD) GPIO.setup(11, GPIO.IN, pull_up_down = GPIO.PUD_DOWN)", "def buttonNext(channel): player.stdin.write(\"q\") # add event listener GPIO.add_event_detect(11, GPIO.FALLING, callback", "loop while(1): for files in sorted(glob.glob(r'/media/*/*.mp4')): player = Popen(['omxplayer','--adev', 'both',files],stdin=PIPE,stdout=FNULL)", "mp4museum.org by <NAME> 2019 import os import sys import glob", "player.wait() # the loop while(1): for files in sorted(glob.glob(r'/media/*/*.mp4')): player", "open(os.devnull, \"w\") # setup GPIO pin GPIO.setmode(GPIO.BOARD) GPIO.setup(11, GPIO.IN, pull_up_down", "# add event listener GPIO.add_event_detect(11, GPIO.FALLING, callback = buttonPause, bouncetime", "GPIO.add_event_detect(11, GPIO.FALLING, callback = buttonPause, bouncetime = 234) GPIO.add_event_detect(13, GPIO.FALLING,", "import sys import glob from subprocess import Popen, PIPE import", "sys import glob from subprocess import Popen, PIPE import RPi.GPIO", "\"w\") # setup GPIO pin GPIO.setmode(GPIO.BOARD) GPIO.setup(11, GPIO.IN, pull_up_down =", "please do not remove my logo screen player = Popen(['omxplayer',", "pin GPIO.setmode(GPIO.BOARD) GPIO.setup(11, GPIO.IN, pull_up_down = GPIO.PUD_DOWN) GPIO.setup(13, GPIO.IN, pull_up_down", "by event listener def buttonPause(channel): player.stdin.write(\"p\") def buttonNext(channel): player.stdin.write(\"q\") #", "as GPIO FNULL = open(os.devnull, \"w\") # setup GPIO pin", "player = Popen(['omxplayer', '--adev', 'both', '/home/pi/mp4museum.mp4'],stdin=PIPE,stdout=FNULL) player.wait() # the loop", "PIPE import RPi.GPIO as GPIO FNULL = open(os.devnull, \"w\") #", "bouncetime = 234) GPIO.add_event_detect(13, GPIO.FALLING, callback = buttonNext, bouncetime =", "# the loop while(1): for files in sorted(glob.glob(r'/media/*/*.mp4')): player =", "pull_up_down = 
GPIO.PUD_DOWN) GPIO.setup(13, GPIO.IN, pull_up_down = GPIO.PUD_DOWN) # functions", "setup GPIO pin GPIO.setmode(GPIO.BOARD) GPIO.setup(11, GPIO.IN, pull_up_down = GPIO.PUD_DOWN) GPIO.setup(13,", "while(1): for files in sorted(glob.glob(r'/media/*/*.mp4')): player = Popen(['omxplayer','--adev', 'both',files],stdin=PIPE,stdout=FNULL) player.wait()", "GPIO.IN, pull_up_down = GPIO.PUD_DOWN) GPIO.setup(13, GPIO.IN, pull_up_down = GPIO.PUD_DOWN) #", "= GPIO.PUD_DOWN) # functions to be called by event listener", "Popen, PIPE import RPi.GPIO as GPIO FNULL = open(os.devnull, \"w\")", "my logo screen player = Popen(['omxplayer', '--adev', 'both', '/home/pi/mp4museum.mp4'],stdin=PIPE,stdout=FNULL) player.wait()", "buttonNext, bouncetime = 1234) # please do not remove my", "event listener def buttonPause(channel): player.stdin.write(\"p\") def buttonNext(channel): player.stdin.write(\"q\") # add", "event listener GPIO.add_event_detect(11, GPIO.FALLING, callback = buttonPause, bouncetime = 234)", "Popen(['omxplayer', '--adev', 'both', '/home/pi/mp4museum.mp4'],stdin=PIPE,stdout=FNULL) player.wait() # the loop while(1): for", "GPIO.setup(13, GPIO.IN, pull_up_down = GPIO.PUD_DOWN) # functions to be called", "the loop while(1): for files in sorted(glob.glob(r'/media/*/*.mp4')): player = Popen(['omxplayer','--adev',", "2019 import os import sys import glob from subprocess import", "FNULL = open(os.devnull, \"w\") # setup GPIO pin GPIO.setmode(GPIO.BOARD) GPIO.setup(11,", "= 1234) # please do not remove my logo screen", "listener def buttonPause(channel): player.stdin.write(\"p\") def buttonNext(channel): player.stdin.write(\"q\") # add event", "# functions to be called by event listener def buttonPause(channel):", "= buttonPause, bouncetime = 234) GPIO.add_event_detect(13, GPIO.FALLING, callback = buttonNext,", "import RPi.GPIO as GPIO FNULL = open(os.devnull, \"w\") # setup", "GPIO.FALLING, callback = buttonNext, bouncetime = 1234) # please do", "os import sys import glob from 
subprocess import Popen, PIPE", "GPIO.PUD_DOWN) GPIO.setup(13, GPIO.IN, pull_up_down = GPIO.PUD_DOWN) # functions to be", "player.stdin.write(\"q\") # add event listener GPIO.add_event_detect(11, GPIO.FALLING, callback = buttonPause,", "import Popen, PIPE import RPi.GPIO as GPIO FNULL = open(os.devnull,", "= Popen(['omxplayer', '--adev', 'both', '/home/pi/mp4museum.mp4'],stdin=PIPE,stdout=FNULL) player.wait() # the loop while(1):", "# mp4museum.org by <NAME> 2019 import os import sys import", "GPIO.setup(11, GPIO.IN, pull_up_down = GPIO.PUD_DOWN) GPIO.setup(13, GPIO.IN, pull_up_down = GPIO.PUD_DOWN)", "GPIO.setmode(GPIO.BOARD) GPIO.setup(11, GPIO.IN, pull_up_down = GPIO.PUD_DOWN) GPIO.setup(13, GPIO.IN, pull_up_down =", "from subprocess import Popen, PIPE import RPi.GPIO as GPIO FNULL", "GPIO FNULL = open(os.devnull, \"w\") # setup GPIO pin GPIO.setmode(GPIO.BOARD)", "add event listener GPIO.add_event_detect(11, GPIO.FALLING, callback = buttonPause, bouncetime =", "GPIO pin GPIO.setmode(GPIO.BOARD) GPIO.setup(11, GPIO.IN, pull_up_down = GPIO.PUD_DOWN) GPIO.setup(13, GPIO.IN,", "GPIO.FALLING, callback = buttonPause, bouncetime = 234) GPIO.add_event_detect(13, GPIO.FALLING, callback", "#!/usr/bin/python # mp4museum.org by <NAME> 2019 import os import sys", "= open(os.devnull, \"w\") # setup GPIO pin GPIO.setmode(GPIO.BOARD) GPIO.setup(11, GPIO.IN,", "# please do not remove my logo screen player =", "callback = buttonPause, bouncetime = 234) GPIO.add_event_detect(13, GPIO.FALLING, callback =", "logo screen player = Popen(['omxplayer', '--adev', 'both', '/home/pi/mp4museum.mp4'],stdin=PIPE,stdout=FNULL) player.wait() #", "<gh_stars>0 #!/usr/bin/python # mp4museum.org by <NAME> 2019 import os import", "GPIO.PUD_DOWN) # functions to be called by event listener def", "bouncetime = 1234) # please do not remove my logo", "listener GPIO.add_event_detect(11, GPIO.FALLING, callback = buttonPause, bouncetime = 234) GPIO.add_event_detect(13,", "buttonPause, bouncetime = 234) 
GPIO.add_event_detect(13, GPIO.FALLING, callback = buttonNext, bouncetime", "GPIO.IN, pull_up_down = GPIO.PUD_DOWN) # functions to be called by", "do not remove my logo screen player = Popen(['omxplayer', '--adev',", "callback = buttonNext, bouncetime = 1234) # please do not", "screen player = Popen(['omxplayer', '--adev', 'both', '/home/pi/mp4museum.mp4'],stdin=PIPE,stdout=FNULL) player.wait() # the", "= buttonNext, bouncetime = 1234) # please do not remove", "by <NAME> 2019 import os import sys import glob from", "pull_up_down = GPIO.PUD_DOWN) # functions to be called by event", "import glob from subprocess import Popen, PIPE import RPi.GPIO as", "buttonPause(channel): player.stdin.write(\"p\") def buttonNext(channel): player.stdin.write(\"q\") # add event listener GPIO.add_event_detect(11," ]
[ "Algorithm\") plt.xlabel(\"n\") plt.ylabel(\"Runtime\") plt.title(\"Substring Search Algorithm Efficiency\") plt.legend() plt.show() def", "the pattern k (int): The number of test cases Returns:", "PATTERN test_cases = generate_test_cases(PATTERN, n, TEST_CASES) else: # generate test", "def main(): done = False print(\"m = Length of pattern\\nn", "the naive substring search algorithm. The runtimes for both algorithms", "print(\"That is not a valid number.\") max_m = input(\"Upper limit", "test cases Returns: A list of test cases, i.e. strings", "= [] for n in x: print('n =', n) bm_result", "plt.plot(x, y_naive, label=\"Naive Algorithm\") plt.plot(x, y_bm, label=\"Boyer-Moore Algorithm\") plt.xlabel(\"m\") plt.ylabel(\"Runtime\")", "in x: print('n =', n) bm_result = [] naive_result =", "plt.title(\"Substring Search Algorithm Efficiency\") plt.legend() plt.show() def vary_m(max_m): x =", "and int(max_m) > 1): print(\"That is not a valid number.\")", "numpy as np import string import time import random from", "import random from bm_alg import boyer_moore_match, naive_match # number of", "random.choice(string.ascii_lowercase) + text # 1 --> Right else: text =", "input(\"Your choice: \") if choice == '1': max_n = input(\"Upper", "pattern k (int): The number of test cases Returns: A", "y_naive, label=\"Naive Algorithm\") plt.plot(x, y_bm, label=\"Boyer-Moore Algorithm\") plt.xlabel(\"m\") plt.ylabel(\"Runtime\") plt.title(\"Substring", "a valid number.\") max_m = input(\"Upper limit of m: \")", "TEST_CASES) for test_case in test_cases: start = time.time() naive_match(test_case, PATTERN)", "= [] y_naive = [] for n in x: print('n", "this text (vary_m) TEXT = PATTERN * 50 def generate_test_cases(pattern,", "module!' 
# test cases generated based on this text (vary_m)", "Algorithm Efficiency\") plt.legend() plt.show() def main(): done = False print(\"m", "x = [m for m in range(1, max_m + 1)]", "length n, which do not (and can not possibly) contain", "* 50 def generate_test_cases(pattern, length, k): \"\"\" Generates <k> test", "cases generated based on this pattern (vary_n) PATTERN = 'ICT1002", "y_bm = [] y_naive = [] for m in x:", "of m: \") while not (max_m.isnumeric() and int(max_m) > 1):", "generate_test_cases('', m, TEST_CASES) for test_case in test_cases: start = time.time()", "generate_test_cases('', n, TEST_CASES) for test_case in test_cases: start = time.time()", "= pattern while len(text) < length: direction = random.choice((0, 1))", "of n: \") while not (max_n.isnumeric() and int(max_n) > 1):", "for m in x: print('m =', m) bm_result = []", "plt.show() def vary_m(max_m): x = [m for m in range(1,", "= input(\"Your choice: \") if choice == '1': max_n =", "text. length (int): The length of the pattern k (int):", "= [] naive_result = [] if n >= len(PATTERN): #", "test_cases: start = time.time() naive_match(TEXT, test_case) naive_result.append(time.time() - start) start", "vary_n(max_n): x = [n for n in range(1, max_n +", "test_cases = generate_test_cases('', m, TEST_CASES) for test_case in test_cases: start", "[] y_naive = [] for n in x: print('n =',", "_ in range(k): text = pattern while len(text) < length:", "[] naive_result = [] if n >= len(PATTERN): # generate", "text (vary_m) TEXT = PATTERN * 50 def generate_test_cases(pattern, length,", "k): \"\"\" Generates <k> test cases with text of length", "test cases with text of length <length> containing <pattern> Args:", "range(1, max_n + 1)] y_bm = [] y_naive = []", "as np import string import time import random from bm_alg", "max_n + 1)] y_bm = [] y_naive = [] for", "m: \") while not (max_m.isnumeric() and int(max_m) > 1): print(\"That", "n, which contain PATTERN test_cases = generate_test_cases(PATTERN, n, TEST_CASES) 
else:", "import boyer_moore_match, naive_match # number of test cases for each", "length <length> containing <pattern> Args: pattern (str): A pattern within", "start = time.time() naive_match(TEXT, test_case) naive_result.append(time.time() - start) start =", "length of the pattern k (int): The number of test", "n in x: print('n =', n) bm_result = [] naive_result", "= Length of text\\n\") print(\"1. Constant m, vary n\") print(\"2.", "<k> test cases with text of length <length> containing <pattern>", "is affected by outliers) y_naive.append(sorted(naive_result)[TEST_CASES // 2]) y_bm.append(sorted(bm_result)[TEST_CASES // 2])", "n >= len(PATTERN): # generate test cases of length n,", "n) bm_result = [] naive_result = [] if n >=", "= random.choice((0, 1)) # 0 --> Left if direction ==", "+ 1)] y_bm = [] y_naive = [] for n", "from bm_alg import boyer_moore_match, naive_match # number of test cases", "\") vary_n(int(max_n)) elif choice == '2': max_m = input(\"Upper limit", "A list of test cases, i.e. strings that contain <pattern>", "=', n) bm_result = [] naive_result = [] if n", "the text. length (int): The length of the pattern k", "time.time() boyer_moore_match(test_case, PATTERN) bm_result.append(time.time() - start) # obtain median runtime", "# test cases generated based on this text (vary_m) TEXT", "plt.xlabel(\"n\") plt.ylabel(\"Runtime\") plt.title(\"Substring Search Algorithm Efficiency\") plt.legend() plt.show() def vary_m(max_m):", "bm_result.append(time.time() - start) # obtain median runtime (mean is affected", "is a really great module!' 
# test cases generated based", "len(text) < length: direction = random.choice((0, 1)) # 0 -->", "not a valid number.\") max_m = input(\"Upper limit of m:", "cases with text of length <length> containing <pattern> Args: pattern", "boyer_moore_match, naive_match # number of test cases for each iteration", "on this pattern (vary_n) PATTERN = 'ICT1002 is a really", "# generate test cases of length n, which do not", "test cases generated based on this pattern (vary_n) PATTERN =", "of test cases Returns: A list of test cases, i.e.", "max_m = input(\"Upper limit of m: \") while not (max_m.isnumeric()", "do not (and can not possibly) contain PATTERN test_cases =", "not a valid number.\") max_n = input(\"Upper limit of n:", "PATTERN test_cases = generate_test_cases('', n, TEST_CASES) for test_case in test_cases:", "y_naive, label=\"Naive Algorithm\") plt.plot(x, y_bm, label=\"Boyer-Moore Algorithm\") plt.xlabel(\"n\") plt.ylabel(\"Runtime\") plt.title(\"Substring", "plt.ylabel(\"Runtime\") plt.title(\"Substring Search Algorithm Efficiency\") plt.legend() plt.show() def main(): done", "choice == '3': done = True else: print(\"That is not", "'1': max_n = input(\"Upper limit of n: \") while not", "this pattern (vary_n) PATTERN = 'ICT1002 is a really great", "print('m =', m) bm_result = [] naive_result = [] #", "test_cases: start = time.time() naive_match(test_case, PATTERN) naive_result.append(time.time() - start) start", "generate test cases of length n, which do not (and", "label=\"Naive Algorithm\") plt.plot(x, y_bm, label=\"Boyer-Moore Algorithm\") plt.xlabel(\"n\") plt.ylabel(\"Runtime\") plt.title(\"Substring Search", "(str): A pattern within the text. length (int): The length", "// 2]) plt.plot(x, y_naive, label=\"Naive Algorithm\") plt.plot(x, y_bm, label=\"Boyer-Moore Algorithm\")", "print(\"3. 
Quit\\n\") while not done: choice = input(\"Your choice: \")", "The number of test cases Returns: A list of test", "generate test cases of length n test_cases = generate_test_cases('', m,", "naive_result = [] # generate test cases of length n", "= [m for m in range(1, max_m + 1)] y_bm", "Returns: A list of test cases, i.e. strings that contain", "Quit\\n\") while not done: choice = input(\"Your choice: \") if", "for each iteration TEST_CASES = 100 # test cases generated", "based on this text (vary_m) TEXT = PATTERN * 50", "between the efficiency of the Boyer-Moore algorithm and the naive", "in test_cases: start = time.time() naive_match(TEXT, test_case) naive_result.append(time.time() - start)", "direction == 0: text = random.choice(string.ascii_lowercase) + text # 1", "2]) plt.plot(x, y_naive, label=\"Naive Algorithm\") plt.plot(x, y_bm, label=\"Boyer-Moore Algorithm\") plt.xlabel(\"n\")", "naive_match # number of test cases for each iteration TEST_CASES", "= [] naive_result = [] # generate test cases of", "'3': done = True else: print(\"That is not a valid", "def vary_m(max_m): x = [m for m in range(1, max_m", "with text of length <length> containing <pattern> Args: pattern (str):", "by outliers) y_naive.append(sorted(naive_result)[TEST_CASES // 2]) y_bm.append(sorted(bm_result)[TEST_CASES // 2]) plt.plot(x, y_naive,", "algorithm. 
The runtimes for both algorithms are plotted on the", "else: text = text + random.choice(string.ascii_lowercase) result.append(text) return result def", "random.choice((0, 1)) # 0 --> Left if direction == 0:", "import matplotlib.pyplot as plt import numpy as np import string", "bm_result = [] naive_result = [] if n >= len(PATTERN):", "input(\"Upper limit of m: \") vary_m(int(max_m)) elif choice == '3':", "the efficiency of the Boyer-Moore algorithm and the naive substring", "main(): done = False print(\"m = Length of pattern\\nn =", "length n, which contain PATTERN test_cases = generate_test_cases(PATTERN, n, TEST_CASES)", "elif choice == '3': done = True else: print(\"That is", "number.\") max_m = input(\"Upper limit of m: \") vary_m(int(max_m)) elif", "else: # generate test cases of length n, which do", "m) bm_result = [] naive_result = [] # generate test", "Algorithm\") plt.plot(x, y_bm, label=\"Boyer-Moore Algorithm\") plt.xlabel(\"n\") plt.ylabel(\"Runtime\") plt.title(\"Substring Search Algorithm", "\"\"\" Generates <k> test cases with text of length <length>", "else: print(\"That is not a valid option.\") if __name__ ==", "test cases of length n, which do not (and can", "naive_result.append(time.time() - start) start = time.time() boyer_moore_match(test_case, PATTERN) bm_result.append(time.time() -", "[] for _ in range(k): text = pattern while len(text)", "test_case in test_cases: start = time.time() naive_match(test_case, PATTERN) naive_result.append(time.time() -", "of the Boyer-Moore algorithm and the naive substring search algorithm.", "(int): The length of the pattern k (int): The number", "# obtain median runtime (mean is affected by outliers) y_naive.append(sorted(naive_result)[TEST_CASES", "cases of length n, which do not (and can not", "m, vary n\") print(\"2. Constant n, vary m\") print(\"3. 
Quit\\n\")", "on this text (vary_m) TEXT = PATTERN * 50 def", "x: print('m =', m) bm_result = [] naive_result = []", "Args: pattern (str): A pattern within the text. length (int):", "median runtime (mean is affected by outliers) y_naive.append(sorted(naive_result)[TEST_CASES // 2])", "A pattern within the text. length (int): The length of", "within the text. length (int): The length of the pattern", "of length n, which contain PATTERN test_cases = generate_test_cases(PATTERN, n,", "[] # generate test cases of length n test_cases =", "= time.time() boyer_moore_match(test_case, PATTERN) bm_result.append(time.time() - start) # obtain median", "max_n = input(\"Upper limit of n: \") while not (max_n.isnumeric()", "are plotted on the same axes. \"\"\" import matplotlib.pyplot as", "# test cases generated based on this pattern (vary_n) PATTERN", "m, TEST_CASES) for test_case in test_cases: start = time.time() naive_match(TEXT,", "choice == '2': max_m = input(\"Upper limit of m: \")", "1 --> Right else: text = text + random.choice(string.ascii_lowercase) result.append(text)", "y_naive = [] for n in x: print('n =', n)", "a really great module!' # test cases generated based on", "for test_case in test_cases: start = time.time() naive_match(TEXT, test_case) naive_result.append(time.time()", "generate_test_cases(pattern, length, k): \"\"\" Generates <k> test cases with text", "for _ in range(k): text = pattern while len(text) <", "in test_cases: start = time.time() naive_match(test_case, PATTERN) naive_result.append(time.time() - start)", "= [] # generate test cases of length n test_cases", "print(\"1. Constant m, vary n\") print(\"2. 
Constant n, vary m\")", "Comparison between the efficiency of the Boyer-Moore algorithm and the", "in range(1, max_m + 1)] y_bm = [] y_naive =", "n, TEST_CASES) for test_case in test_cases: start = time.time() naive_match(test_case,", "range(1, max_m + 1)] y_bm = [] y_naive = []", "length, k): \"\"\" Generates <k> test cases with text of", "in range(1, max_n + 1)] y_bm = [] y_naive =", "pattern (vary_n) PATTERN = 'ICT1002 is a really great module!'", "label=\"Boyer-Moore Algorithm\") plt.xlabel(\"m\") plt.ylabel(\"Runtime\") plt.title(\"Substring Search Algorithm Efficiency\") plt.legend() plt.show()", "[] for m in x: print('m =', m) bm_result =", "iteration TEST_CASES = 100 # test cases generated based on", "(int): The number of test cases Returns: A list of", "(and can not possibly) contain PATTERN test_cases = generate_test_cases('', n,", "max_m = input(\"Upper limit of m: \") vary_m(int(max_m)) elif choice", "done = False print(\"m = Length of pattern\\nn = Length", "choice = input(\"Your choice: \") if choice == '1': max_n", "1)) # 0 --> Left if direction == 0: text", "test cases, i.e. strings that contain <pattern> \"\"\" result =", "start) start = time.time() boyer_moore_match(test_case, PATTERN) bm_result.append(time.time() - start) #", "text # 1 --> Right else: text = text +", "plt.legend() plt.show() def main(): done = False print(\"m = Length", "- start) start = time.time() boyer_moore_match(TEXT, test_case) bm_result.append(time.time() - start)", "n\") print(\"2. Constant n, vary m\") print(\"3. Quit\\n\") while not", "PATTERN = 'ICT1002 is a really great module!' 
# test", "cases of length n test_cases = generate_test_cases('', m, TEST_CASES) for", "text = random.choice(string.ascii_lowercase) + text # 1 --> Right else:", "y_bm, label=\"Boyer-Moore Algorithm\") plt.xlabel(\"n\") plt.ylabel(\"Runtime\") plt.title(\"Substring Search Algorithm Efficiency\") plt.legend()", "plt.xlabel(\"m\") plt.ylabel(\"Runtime\") plt.title(\"Substring Search Algorithm Efficiency\") plt.legend() plt.show() def main():", "can not possibly) contain PATTERN test_cases = generate_test_cases('', n, TEST_CASES)", "print(\"2. Constant n, vary m\") print(\"3. Quit\\n\") while not done:", "Left if direction == 0: text = random.choice(string.ascii_lowercase) + text", "m in range(1, max_m + 1)] y_bm = [] y_naive", "number of test cases for each iteration TEST_CASES = 100", "same axes. \"\"\" import matplotlib.pyplot as plt import numpy as", "result.append(text) return result def vary_n(max_n): x = [n for n", "y_naive = [] for m in x: print('m =', m)", "valid number.\") max_n = input(\"Upper limit of n: \") vary_n(int(max_n))", "Length of text\\n\") print(\"1. Constant m, vary n\") print(\"2. Constant", "'2': max_m = input(\"Upper limit of m: \") while not", "== 0: text = random.choice(string.ascii_lowercase) + text # 1 -->", "<pattern> Args: pattern (str): A pattern within the text. 
length", "start = time.time() naive_match(test_case, PATTERN) naive_result.append(time.time() - start) start =", "plt.show() def main(): done = False print(\"m = Length of", "[] for n in x: print('n =', n) bm_result =", "[] naive_result = [] # generate test cases of length", "= generate_test_cases(PATTERN, n, TEST_CASES) else: # generate test cases of", "strings that contain <pattern> \"\"\" result = [] for _", "\") while not (max_n.isnumeric() and int(max_n) > 1): print(\"That is", "contain PATTERN test_cases = generate_test_cases(PATTERN, n, TEST_CASES) else: # generate", "if choice == '1': max_n = input(\"Upper limit of n:", "plt import numpy as np import string import time import", "number.\") max_n = input(\"Upper limit of n: \") vary_n(int(max_n)) elif", "# generate test cases of length n, which contain PATTERN", "test_case in test_cases: start = time.time() naive_match(TEXT, test_case) naive_result.append(time.time() -", "a valid number.\") max_n = input(\"Upper limit of n: \")", "[] y_naive = [] for m in x: print('m =',", "elif choice == '2': max_m = input(\"Upper limit of m:", "return result def vary_n(max_n): x = [n for n in", "=', m) bm_result = [] naive_result = [] # generate", "2]) y_bm.append(sorted(bm_result)[TEST_CASES // 2]) plt.plot(x, y_naive, label=\"Naive Algorithm\") plt.plot(x, y_bm,", "of m: \") vary_m(int(max_m)) elif choice == '3': done =", "= random.choice(string.ascii_lowercase) + text # 1 --> Right else: text", "= True else: print(\"That is not a valid option.\") if", "[m for m in range(1, max_m + 1)] y_bm =", "boyer_moore_match(TEXT, test_case) bm_result.append(time.time() - start) # obtain median runtime (mean", "length n test_cases = generate_test_cases('', m, TEST_CASES) for test_case in", "is not a valid number.\") max_n = input(\"Upper limit of", "and int(max_n) > 1): print(\"That is not a valid number.\")", "Boyer-Moore algorithm and the naive substring search algorithm. 
The runtimes", "random from bm_alg import boyer_moore_match, naive_match # number of test", "n: \") vary_n(int(max_n)) elif choice == '2': max_m = input(\"Upper", "bm_alg import boyer_moore_match, naive_match # number of test cases for", "import numpy as np import string import time import random", "m in x: print('m =', m) bm_result = [] naive_result", "vary n\") print(\"2. Constant n, vary m\") print(\"3. Quit\\n\") while", "of length n test_cases = generate_test_cases('', m, TEST_CASES) for test_case", "Constant n, vary m\") print(\"3. Quit\\n\") while not done: choice", "x = [n for n in range(1, max_n + 1)]", "test cases of length n test_cases = generate_test_cases('', m, TEST_CASES)", "The length of the pattern k (int): The number of", "boyer_moore_match(test_case, PATTERN) bm_result.append(time.time() - start) # obtain median runtime (mean", "not (max_m.isnumeric() and int(max_m) > 1): print(\"That is not a", "print(\"m = Length of pattern\\nn = Length of text\\n\") print(\"1.", "\"\"\" result = [] for _ in range(k): text =", "TEXT = PATTERN * 50 def generate_test_cases(pattern, length, k): \"\"\"", "TEST_CASES) for test_case in test_cases: start = time.time() naive_match(TEXT, test_case)", "# 0 --> Left if direction == 0: text =", "plt.plot(x, y_bm, label=\"Boyer-Moore Algorithm\") plt.xlabel(\"m\") plt.ylabel(\"Runtime\") plt.title(\"Substring Search Algorithm Efficiency\")", "<pattern> \"\"\" result = [] for _ in range(k): text", "direction = random.choice((0, 1)) # 0 --> Left if direction", "string import time import random from bm_alg import boyer_moore_match, naive_match", "x: print('n =', n) bm_result = [] naive_result = []", "each iteration TEST_CASES = 100 # test cases generated based", "matplotlib.pyplot as plt import numpy as np import string import", "in x: print('m =', m) bm_result = [] naive_result =", "contain <pattern> \"\"\" result = [] for _ in range(k):", "(max_m.isnumeric() and int(max_m) > 1): print(\"That is not a valid", 
"y_bm.append(sorted(bm_result)[TEST_CASES // 2]) plt.plot(x, y_naive, label=\"Naive Algorithm\") plt.plot(x, y_bm, label=\"Boyer-Moore", "(vary_m) TEXT = PATTERN * 50 def generate_test_cases(pattern, length, k):", "algorithms are plotted on the same axes. \"\"\" import matplotlib.pyplot", "n, TEST_CASES) else: # generate test cases of length n,", "\") if choice == '1': max_n = input(\"Upper limit of", "generated based on this text (vary_m) TEXT = PATTERN *", "pattern within the text. length (int): The length of the", "start = time.time() boyer_moore_match(test_case, PATTERN) bm_result.append(time.time() - start) # obtain", "= input(\"Upper limit of m: \") vary_m(int(max_m)) elif choice ==", "for n in x: print('n =', n) bm_result = []", "- start) start = time.time() boyer_moore_match(test_case, PATTERN) bm_result.append(time.time() - start)", "while not done: choice = input(\"Your choice: \") if choice", "choice == '1': max_n = input(\"Upper limit of n: \")", "length (int): The length of the pattern k (int): The", "great module!' 
# test cases generated based on this text", "--> Left if direction == 0: text = random.choice(string.ascii_lowercase) +", "for test_case in test_cases: start = time.time() naive_match(test_case, PATTERN) naive_result.append(time.time()", "plt.ylabel(\"Runtime\") plt.title(\"Substring Search Algorithm Efficiency\") plt.legend() plt.show() def vary_m(max_m): x", "of test cases for each iteration TEST_CASES = 100 #", "The runtimes for both algorithms are plotted on the same", "vary_m(max_m): x = [m for m in range(1, max_m +", "Search Algorithm Efficiency\") plt.legend() plt.show() def main(): done = False", "n, which do not (and can not possibly) contain PATTERN", "done: choice = input(\"Your choice: \") if choice == '1':", "while not (max_n.isnumeric() and int(max_n) > 1): print(\"That is not", "text = text + random.choice(string.ascii_lowercase) result.append(text) return result def vary_n(max_n):", "PATTERN) naive_result.append(time.time() - start) start = time.time() boyer_moore_match(test_case, PATTERN) bm_result.append(time.time()", "= input(\"Upper limit of n: \") while not (max_n.isnumeric() and", "not done: choice = input(\"Your choice: \") if choice ==", "time import random from bm_alg import boyer_moore_match, naive_match # number", "efficiency of the Boyer-Moore algorithm and the naive substring search", "print('n =', n) bm_result = [] naive_result = [] if", "+ text # 1 --> Right else: text = text", "1): print(\"That is not a valid number.\") max_n = input(\"Upper", "containing <pattern> Args: pattern (str): A pattern within the text.", "\"\"\" Comparison between the efficiency of the Boyer-Moore algorithm and", "False print(\"m = Length of pattern\\nn = Length of text\\n\")", "= input(\"Upper limit of n: \") vary_n(int(max_n)) elif choice ==", "pattern (str): A pattern within the text. 
length (int): The", "= [] if n >= len(PATTERN): # generate test cases", "time.time() boyer_moore_match(TEXT, test_case) bm_result.append(time.time() - start) # obtain median runtime", "naive_match(test_case, PATTERN) naive_result.append(time.time() - start) start = time.time() boyer_moore_match(test_case, PATTERN)", "and the naive substring search algorithm. The runtimes for both", "--> Right else: text = text + random.choice(string.ascii_lowercase) result.append(text) return", "> 1): print(\"That is not a valid number.\") max_n =", "limit of n: \") vary_n(int(max_n)) elif choice == '2': max_m", "search algorithm. The runtimes for both algorithms are plotted on", "[n for n in range(1, max_n + 1)] y_bm =", "of text\\n\") print(\"1. Constant m, vary n\") print(\"2. Constant n,", "test_cases = generate_test_cases('', n, TEST_CASES) for test_case in test_cases: start", "int(max_m) > 1): print(\"That is not a valid number.\") max_m", ">= len(PATTERN): # generate test cases of length n, which", "the same axes. \"\"\" import matplotlib.pyplot as plt import numpy", "cases Returns: A list of test cases, i.e. strings that", "+ random.choice(string.ascii_lowercase) result.append(text) return result def vary_n(max_n): x = [n", "= 100 # test cases generated based on this pattern", "plt.legend() plt.show() def vary_m(max_m): x = [m for m in", "(max_n.isnumeric() and int(max_n) > 1): print(\"That is not a valid", "Search Algorithm Efficiency\") plt.legend() plt.show() def vary_m(max_m): x = [m", "text = pattern while len(text) < length: direction = random.choice((0,", "Algorithm Efficiency\") plt.legend() plt.show() def vary_m(max_m): x = [m for", "= 'ICT1002 is a really great module!' 
# test cases", "of length <length> containing <pattern> Args: pattern (str): A pattern", "that contain <pattern> \"\"\" result = [] for _ in", "+ 1)] y_bm = [] y_naive = [] for m", "= time.time() naive_match(test_case, PATTERN) naive_result.append(time.time() - start) start = time.time()", "k (int): The number of test cases Returns: A list", "plt.plot(x, y_naive, label=\"Naive Algorithm\") plt.plot(x, y_bm, label=\"Boyer-Moore Algorithm\") plt.xlabel(\"n\") plt.ylabel(\"Runtime\")", "input(\"Upper limit of n: \") while not (max_n.isnumeric() and int(max_n)", "axes. \"\"\" import matplotlib.pyplot as plt import numpy as np", "for n in range(1, max_n + 1)] y_bm = []", "pattern while len(text) < length: direction = random.choice((0, 1)) #", "test_cases = generate_test_cases(PATTERN, n, TEST_CASES) else: # generate test cases", "substring search algorithm. The runtimes for both algorithms are plotted", "n: \") while not (max_n.isnumeric() and int(max_n) > 1): print(\"That", "not (max_n.isnumeric() and int(max_n) > 1): print(\"That is not a", "max_n = input(\"Upper limit of n: \") vary_n(int(max_n)) elif choice", "generated based on this pattern (vary_n) PATTERN = 'ICT1002 is", "while not (max_m.isnumeric() and int(max_m) > 1): print(\"That is not", "result def vary_n(max_n): x = [n for n in range(1,", "vary_n(int(max_n)) elif choice == '2': max_m = input(\"Upper limit of", "pattern\\nn = Length of text\\n\") print(\"1. Constant m, vary n\")", "not (and can not possibly) contain PATTERN test_cases = generate_test_cases('',", "test cases generated based on this text (vary_m) TEXT =", "generate_test_cases(PATTERN, n, TEST_CASES) else: # generate test cases of length", "max_m + 1)] y_bm = [] y_naive = [] for", "(vary_n) PATTERN = 'ICT1002 is a really great module!' 
#", "print(\"That is not a valid number.\") max_n = input(\"Upper limit", "y_bm = [] y_naive = [] for n in x:", "def vary_n(max_n): x = [n for n in range(1, max_n", "test_case) bm_result.append(time.time() - start) # obtain median runtime (mean is", "is not a valid option.\") if __name__ == '__main__': main()", "cases generated based on this text (vary_m) TEXT = PATTERN", "m\") print(\"3. Quit\\n\") while not done: choice = input(\"Your choice:", "Constant m, vary n\") print(\"2. Constant n, vary m\") print(\"3.", "of n: \") vary_n(int(max_n)) elif choice == '2': max_m =", "= False print(\"m = Length of pattern\\nn = Length of", "length: direction = random.choice((0, 1)) # 0 --> Left if", "for both algorithms are plotted on the same axes. \"\"\"", "PATTERN * 50 def generate_test_cases(pattern, length, k): \"\"\" Generates <k>", "TEST_CASES = 100 # test cases generated based on this", "> 1): print(\"That is not a valid number.\") max_m =", "naive substring search algorithm. The runtimes for both algorithms are", "for m in range(1, max_m + 1)] y_bm = []", "the Boyer-Moore algorithm and the naive substring search algorithm. The", "runtime (mean is affected by outliers) y_naive.append(sorted(naive_result)[TEST_CASES // 2]) y_bm.append(sorted(bm_result)[TEST_CASES", "algorithm and the naive substring search algorithm. The runtimes for", "TEST_CASES) else: # generate test cases of length n, which", "plt.title(\"Substring Search Algorithm Efficiency\") plt.legend() plt.show() def main(): done =", "based on this pattern (vary_n) PATTERN = 'ICT1002 is a", "in range(k): text = pattern while len(text) < length: direction", "= generate_test_cases('', m, TEST_CASES) for test_case in test_cases: start =", "choice: \") if choice == '1': max_n = input(\"Upper limit", "start) # obtain median runtime (mean is affected by outliers)", "cases, i.e. 
strings that contain <pattern> \"\"\" result = []", "runtimes for both algorithms are plotted on the same axes.", "input(\"Upper limit of n: \") vary_n(int(max_n)) elif choice == '2':", "<length> containing <pattern> Args: pattern (str): A pattern within the", "0 --> Left if direction == 0: text = random.choice(string.ascii_lowercase)", "2]) plt.plot(x, y_naive, label=\"Naive Algorithm\") plt.plot(x, y_bm, label=\"Boyer-Moore Algorithm\") plt.xlabel(\"m\")", "n in range(1, max_n + 1)] y_bm = [] y_naive", "= [] for m in x: print('m =', m) bm_result", "# 1 --> Right else: text = text + random.choice(string.ascii_lowercase)", "- start) # obtain median runtime (mean is affected by", "= generate_test_cases('', n, TEST_CASES) for test_case in test_cases: start =", "\") vary_m(int(max_m)) elif choice == '3': done = True else:", "= time.time() boyer_moore_match(TEXT, test_case) bm_result.append(time.time() - start) # obtain median", "== '1': max_n = input(\"Upper limit of n: \") while", "print(\"That is not a valid option.\") if __name__ == '__main__':", "limit of n: \") while not (max_n.isnumeric() and int(max_n) >", "text of length <length> containing <pattern> Args: pattern (str): A", "1)] y_bm = [] y_naive = [] for m in", "\"\"\" import matplotlib.pyplot as plt import numpy as np import", "Generates <k> test cases with text of length <length> containing", "i.e. 
strings that contain <pattern> \"\"\" result = [] for", "while len(text) < length: direction = random.choice((0, 1)) # 0", "== '3': done = True else: print(\"That is not a", "# number of test cases for each iteration TEST_CASES =", "n test_cases = generate_test_cases('', m, TEST_CASES) for test_case in test_cases:", "limit of m: \") vary_m(int(max_m)) elif choice == '3': done", "start) start = time.time() boyer_moore_match(TEXT, test_case) bm_result.append(time.time() - start) #", "= [n for n in range(1, max_n + 1)] y_bm", "np import string import time import random from bm_alg import", "int(max_n) > 1): print(\"That is not a valid number.\") max_n", "of the pattern k (int): The number of test cases", "test cases of length n, which contain PATTERN test_cases =", "[] if n >= len(PATTERN): # generate test cases of", "start = time.time() boyer_moore_match(TEXT, test_case) bm_result.append(time.time() - start) # obtain", "contain PATTERN test_cases = generate_test_cases('', n, TEST_CASES) for test_case in", "50 def generate_test_cases(pattern, length, k): \"\"\" Generates <k> test cases", "PATTERN) bm_result.append(time.time() - start) # obtain median runtime (mean is", "= text + random.choice(string.ascii_lowercase) result.append(text) return result def vary_n(max_n): x", "which contain PATTERN test_cases = generate_test_cases(PATTERN, n, TEST_CASES) else: #", "1)] y_bm = [] y_naive = [] for n in", "time.time() naive_match(TEXT, test_case) naive_result.append(time.time() - start) start = time.time() boyer_moore_match(TEXT,", "= PATTERN * 50 def generate_test_cases(pattern, length, k): \"\"\" Generates", "label=\"Naive Algorithm\") plt.plot(x, y_bm, label=\"Boyer-Moore Algorithm\") plt.xlabel(\"m\") plt.ylabel(\"Runtime\") plt.title(\"Substring Search", "vary_m(int(max_m)) elif choice == '3': done = True else: print(\"That", "m: \") vary_m(int(max_m)) elif choice == '3': done = True", "vary m\") print(\"3. 
Quit\\n\") while not done: choice = input(\"Your", "cases of length n, which contain PATTERN test_cases = generate_test_cases(PATTERN,", "# generate test cases of length n test_cases = generate_test_cases('',", "if n >= len(PATTERN): # generate test cases of length", "Algorithm\") plt.xlabel(\"m\") plt.ylabel(\"Runtime\") plt.title(\"Substring Search Algorithm Efficiency\") plt.legend() plt.show() def", "of length n, which do not (and can not possibly)", "done = True else: print(\"That is not a valid option.\")", "'ICT1002 is a really great module!' # test cases generated", "Efficiency\") plt.legend() plt.show() def main(): done = False print(\"m =", "y_bm, label=\"Boyer-Moore Algorithm\") plt.xlabel(\"m\") plt.ylabel(\"Runtime\") plt.title(\"Substring Search Algorithm Efficiency\") plt.legend()", "n, vary m\") print(\"3. Quit\\n\") while not done: choice =", "1): print(\"That is not a valid number.\") max_m = input(\"Upper", "affected by outliers) y_naive.append(sorted(naive_result)[TEST_CASES // 2]) y_bm.append(sorted(bm_result)[TEST_CASES // 2]) plt.plot(x,", "number of test cases Returns: A list of test cases,", "time.time() naive_match(test_case, PATTERN) naive_result.append(time.time() - start) start = time.time() boyer_moore_match(test_case,", "import string import time import random from bm_alg import boyer_moore_match,", "range(k): text = pattern while len(text) < length: direction =", "y_naive.append(sorted(naive_result)[TEST_CASES // 2]) y_bm.append(sorted(bm_result)[TEST_CASES // 2]) plt.plot(x, y_naive, label=\"Naive Algorithm\")", "Algorithm\") plt.plot(x, y_bm, label=\"Boyer-Moore Algorithm\") plt.xlabel(\"m\") plt.ylabel(\"Runtime\") plt.title(\"Substring Search Algorithm", "outliers) y_naive.append(sorted(naive_result)[TEST_CASES // 2]) y_bm.append(sorted(bm_result)[TEST_CASES // 2]) plt.plot(x, y_naive, label=\"Naive", "of test cases, i.e. 
strings that contain <pattern> \"\"\" result", "0: text = random.choice(string.ascii_lowercase) + text # 1 --> Right", "\") while not (max_m.isnumeric() and int(max_m) > 1): print(\"That is", "naive_result.append(time.time() - start) start = time.time() boyer_moore_match(TEXT, test_case) bm_result.append(time.time() -", "obtain median runtime (mean is affected by outliers) y_naive.append(sorted(naive_result)[TEST_CASES //", "Length of pattern\\nn = Length of text\\n\") print(\"1. Constant m,", "list of test cases, i.e. strings that contain <pattern> \"\"\"", "which do not (and can not possibly) contain PATTERN test_cases", "naive_result = [] if n >= len(PATTERN): # generate test", "as plt import numpy as np import string import time", "= [] y_naive = [] for m in x: print('m", "(mean is affected by outliers) y_naive.append(sorted(naive_result)[TEST_CASES // 2]) y_bm.append(sorted(bm_result)[TEST_CASES //", "valid number.\") max_m = input(\"Upper limit of m: \") vary_m(int(max_m))", "is not a valid number.\") max_m = input(\"Upper limit of", "text + random.choice(string.ascii_lowercase) result.append(text) return result def vary_n(max_n): x =", "input(\"Upper limit of m: \") while not (max_m.isnumeric() and int(max_m)", "label=\"Boyer-Moore Algorithm\") plt.xlabel(\"n\") plt.ylabel(\"Runtime\") plt.title(\"Substring Search Algorithm Efficiency\") plt.legend() plt.show()", "possibly) contain PATTERN test_cases = generate_test_cases('', n, TEST_CASES) for test_case", "not possibly) contain PATTERN test_cases = generate_test_cases('', n, TEST_CASES) for", "< length: direction = random.choice((0, 1)) # 0 --> Left", "on the same axes. \"\"\" import matplotlib.pyplot as plt import", "cases for each iteration TEST_CASES = 100 # test cases", "= time.time() naive_match(TEXT, test_case) naive_result.append(time.time() - start) start = time.time()", "result = [] for _ in range(k): text = pattern", "really great module!' 
# test cases generated based on this", "= Length of pattern\\nn = Length of text\\n\") print(\"1. Constant", "== '2': max_m = input(\"Upper limit of m: \") while", "def generate_test_cases(pattern, length, k): \"\"\" Generates <k> test cases with", "import time import random from bm_alg import boyer_moore_match, naive_match #", "limit of m: \") while not (max_m.isnumeric() and int(max_m) >", "text\\n\") print(\"1. Constant m, vary n\") print(\"2. Constant n, vary", "100 # test cases generated based on this pattern (vary_n)", "naive_match(TEXT, test_case) naive_result.append(time.time() - start) start = time.time() boyer_moore_match(TEXT, test_case)", "= input(\"Upper limit of m: \") while not (max_m.isnumeric() and", "random.choice(string.ascii_lowercase) result.append(text) return result def vary_n(max_n): x = [n for", "generate test cases of length n, which contain PATTERN test_cases", "plt.plot(x, y_bm, label=\"Boyer-Moore Algorithm\") plt.xlabel(\"n\") plt.ylabel(\"Runtime\") plt.title(\"Substring Search Algorithm Efficiency\")", "both algorithms are plotted on the same axes. \"\"\" import", "if direction == 0: text = random.choice(string.ascii_lowercase) + text #", "len(PATTERN): # generate test cases of length n, which contain", "Right else: text = text + random.choice(string.ascii_lowercase) result.append(text) return result", "bm_result = [] naive_result = [] # generate test cases", "= [] for _ in range(k): text = pattern while", "test_case) naive_result.append(time.time() - start) start = time.time() boyer_moore_match(TEXT, test_case) bm_result.append(time.time()", "True else: print(\"That is not a valid option.\") if __name__", "Efficiency\") plt.legend() plt.show() def vary_m(max_m): x = [m for m", "// 2]) y_bm.append(sorted(bm_result)[TEST_CASES // 2]) plt.plot(x, y_naive, label=\"Naive Algorithm\") plt.plot(x,", "of pattern\\nn = Length of text\\n\") print(\"1. 
Constant m, vary", "test cases for each iteration TEST_CASES = 100 # test", "plotted on the same axes. \"\"\" import matplotlib.pyplot as plt" ]
[ "+= create_tree(ch) + 1 ans[p] = res return res except:", "int(input()) for i in range(0, n-1): child, parent = input().split()", "[child] if n > 0: for k in pairs: create_tree(k)", "pairs: pairs[parent].append(child) else: pairs[parent] = [child] if n > 0:", "pairs = dict() def create_tree(p): if p in ans: return", "n-1): child, parent = input().split() if parent in pairs: pairs[parent].append(child)", "i in range(0, n-1): child, parent = input().split() if parent", "else: try: res = 0 if p in pairs: for", "1 ans[p] = res return res except: pass n =", "= [child] if n > 0: for k in pairs:", "return ans[p] else: try: res = 0 if p in", "p in pairs: for ch in pairs[p]: res += create_tree(ch)", "dict() def create_tree(p): if p in ans: return ans[p] else:", "parent in pairs: pairs[parent].append(child) else: pairs[parent] = [child] if n", "else: pairs[parent] = [child] if n > 0: for k", "res except: pass n = int(input()) for i in range(0,", "def create_tree(p): if p in ans: return ans[p] else: try:", "pairs[parent].append(child) else: pairs[parent] = [child] if n > 0: for", "for ch in pairs[p]: res += create_tree(ch) + 1 ans[p]", "if p in pairs: for ch in pairs[p]: res +=", "ans[p] = res return res except: pass n = int(input())", "except: pass n = int(input()) for i in range(0, n-1):", "for k in pairs: create_tree(k) for key in sorted(ans.keys()): print(key,", "= dict() def create_tree(p): if p in ans: return ans[p]", "in pairs[p]: res += create_tree(ch) + 1 ans[p] = res", "res += create_tree(ch) + 1 ans[p] = res return res", "= int(input()) for i in range(0, n-1): child, parent =", "for i in range(0, n-1): child, parent = input().split() if", "ans: return ans[p] else: try: res = 0 if p", "ans = dict() pairs = dict() def create_tree(p): if p", "= input().split() if parent in pairs: pairs[parent].append(child) else: pairs[parent] =", "ans[p] else: try: res = 0 if p in pairs:", "pairs: for ch in pairs[p]: res += create_tree(ch) + 1", "if parent in pairs: 
pairs[parent].append(child) else: pairs[parent] = [child] if", "= 0 if p in pairs: for ch in pairs[p]:", "create_tree(p): if p in ans: return ans[p] else: try: res", "if p in ans: return ans[p] else: try: res =", "= res return res except: pass n = int(input()) for", "in pairs: pairs[parent].append(child) else: pairs[parent] = [child] if n >", "pass n = int(input()) for i in range(0, n-1): child,", "return res except: pass n = int(input()) for i in", "child, parent = input().split() if parent in pairs: pairs[parent].append(child) else:", "res = 0 if p in pairs: for ch in", "range(0, n-1): child, parent = input().split() if parent in pairs:", "parent = input().split() if parent in pairs: pairs[parent].append(child) else: pairs[parent]", "if n > 0: for k in pairs: create_tree(k) for", "= dict() pairs = dict() def create_tree(p): if p in", "res return res except: pass n = int(input()) for i", "input().split() if parent in pairs: pairs[parent].append(child) else: pairs[parent] = [child]", "pairs[parent] = [child] if n > 0: for k in", "0: for k in pairs: create_tree(k) for key in sorted(ans.keys()):", "in ans: return ans[p] else: try: res = 0 if", "dict() pairs = dict() def create_tree(p): if p in ans:", "+ 1 ans[p] = res return res except: pass n", "k in pairs: create_tree(k) for key in sorted(ans.keys()): print(key, ans[key])", "n > 0: for k in pairs: create_tree(k) for key", "0 if p in pairs: for ch in pairs[p]: res", "> 0: for k in pairs: create_tree(k) for key in", "in pairs: for ch in pairs[p]: res += create_tree(ch) +", "pairs[p]: res += create_tree(ch) + 1 ans[p] = res return", "ch in pairs[p]: res += create_tree(ch) + 1 ans[p] =", "n = int(input()) for i in range(0, n-1): child, parent", "in range(0, n-1): child, parent = input().split() if parent in", "create_tree(ch) + 1 ans[p] = res return res except: pass", "try: res = 0 if p in pairs: for ch", "p in ans: return ans[p] else: try: res = 0" ]
[ "if b: res += '{\\\\i}' + e else: res +=", "False res = '' for e in s: if e", "'{i}' b=not b else: res += e return res def", "res += '{\\\\i}' + e else: res += e +", "F=open('test_in.txt','r') X=F.read() F.close() print(italicize(X)) return if __name__ == \"__main__\": main()", "e == '\"': if b: res += '{\\\\i}' + e", "b = False res = '' for e in s:", "in s: if e == '\"': if b: res +=", "= '' for e in s: if e == '\"':", "+ '{i}' b=not b else: res += e return res", "else: res += e return res def main(): F=open('test_in.txt','r') X=F.read()", "if e == '\"': if b: res += '{\\\\i}' +", "def main(): F=open('test_in.txt','r') X=F.read() F.close() print(italicize(X)) return if __name__ ==", "+= e return res def main(): F=open('test_in.txt','r') X=F.read() F.close() print(italicize(X))", "'\"': if b: res += '{\\\\i}' + e else: res", "italicize(s): b = False res = '' for e in", "res def main(): F=open('test_in.txt','r') X=F.read() F.close() print(italicize(X)) return if __name__", "res = '' for e in s: if e ==", "+ e else: res += e + '{i}' b=not b", "res += e + '{i}' b=not b else: res +=", "main(): F=open('test_in.txt','r') X=F.read() F.close() print(italicize(X)) return if __name__ == \"__main__\":", "= False res = '' for e in s: if", "def italicize(s): b = False res = '' for e", "== '\"': if b: res += '{\\\\i}' + e else:", "for e in s: if e == '\"': if b:", "e in s: if e == '\"': if b: res", "s: if e == '\"': if b: res += '{\\\\i}'", "+= '{\\\\i}' + e else: res += e + '{i}'", "return res def main(): F=open('test_in.txt','r') X=F.read() F.close() print(italicize(X)) return if", "b: res += '{\\\\i}' + e else: res += e", "'{\\\\i}' + e else: res += e + '{i}' b=not", "e + '{i}' b=not b else: res += e return", "e return res def main(): F=open('test_in.txt','r') X=F.read() F.close() print(italicize(X)) return", "b=not b else: res += e return res def main():", "else: res += e + '{i}' b=not b else: res", "+= e + '{i}' b=not b else: res += e", "'' for e in s: if e == '\"': if", 
"res += e return res def main(): F=open('test_in.txt','r') X=F.read() F.close()", "e else: res += e + '{i}' b=not b else:", "b else: res += e return res def main(): F=open('test_in.txt','r')" ]
[ "DataLayer from maps.serializers import DataLayerSerializer, DataLayerDetailsSerializer from profiles.settings import VIEW_CACHE_TTL", "from django.utils.decorators import method_decorator from django.views.decorators.cache import cache_page from rest_framework", "media_type = 'application/geo+json' format = 'geojson' def render(self, data, media_type=None,", "format = 'geojson' def render(self, data, media_type=None, renderer_context=None): return json.dumps(data)", "from rest_framework import viewsets, filters from rest_framework.exceptions import NotFound from", "filter_backends = [filters.SearchFilter, ] def get_serializer_class(self): if self.action == 'list':", "import json from typing import Type, TYPE_CHECKING from django.core.exceptions import", "GeoJSONRenderer() return renderer, renderer.media_type class GeoJSONDataLayerView(APIView): permission_classes = [AllowAny, ]", "headers = { 'Content-Disposition': f'attachment; filename=\"{map_slug}.geojson\"' } return Response(geojson, headers=headers,", "permission_classes = [IsAuthenticatedOrReadOnly, ] filter_backends = [filters.SearchFilter, ] def get_serializer_class(self):", "select_parser(self, request, parsers): return super(GeoJSONContentNegotiation, self).select_parser(request, parsers) def select_renderer(self, request:", "on available geo types raise NotFound except ObjectDoesNotExist as e:", "DataLayer.objects.all() serializer_class = DataLayerSerializer permission_classes = [IsAuthenticatedOrReadOnly, ] filter_backends =", "media_type=None, renderer_context=None): return json.dumps(data) class GeoJSONContentNegotiation(BaseContentNegotiation): \"\"\" Custom content negotiation", "with info on available geo types raise NotFound except ObjectDoesNotExist", "import APIView from indicators.models import Variable, DataViz from indicators.utils import", "Response from rest_framework.views import APIView from indicators.models import Variable, DataViz", "for downloading geojson files `JSONRenderer` is 
used for ajax calls.", "return DataLayerDetailsSerializer media_type = 'application/geo+json' format = 'geojson' def render(self,", "as e: # when the geog is wrong todo: make", "\"\"\" def select_parser(self, request, parsers): return super(GeoJSONContentNegotiation, self).select_parser(request, parsers) def", "import ObjectDoesNotExist from django.utils.decorators import method_decorator from django.views.decorators.cache import cache_page", "renderers, format_suffix=None): renderer = renderers[0] if request.query_params.get('download', False): renderer =", "files `JSONRenderer` is used for ajax calls. \"\"\" def select_parser(self,", "super(GeoJSONContentNegotiation, self).select_parser(request, parsers) def select_renderer(self, request: Request, renderers, format_suffix=None): renderer", "get_geog_model from indicators.views import GeoJSONRenderer from maps.models import DataLayer from", "DataViz from indicators.utils import get_geog_model from indicators.views import GeoJSONRenderer from", "from rest_framework.permissions import IsAuthenticatedOrReadOnly, AllowAny from rest_framework.request import Request from", "NotFound if request.query_params.get('download', False): headers = { 'Content-Disposition': f'attachment; filename=\"{map_slug}.geojson\"'", "AllowAny from rest_framework.request import Request from rest_framework.response import Response from", "] filter_backends = [filters.SearchFilter, ] def get_serializer_class(self): if self.action ==", "'geojson' def render(self, data, media_type=None, renderer_context=None): return json.dumps(data) class GeoJSONContentNegotiation(BaseContentNegotiation):", "is wrong todo: make 400 malformed with info on available", "NotFound except ObjectDoesNotExist as e: raise NotFound if request.query_params.get('download', False):", "import method_decorator from django.views.decorators.cache import cache_page from rest_framework import viewsets,", "maps.models import DataLayer from maps.serializers import 
DataLayerSerializer, DataLayerDetailsSerializer from profiles.settings", "content_negotiation_class = GeoJSONContentNegotiation @method_decorator(cache_page(VIEW_CACHE_TTL)) def get(self, request: Request, map_slug=None): try:", "import BaseContentNegotiation from rest_framework.permissions import IsAuthenticatedOrReadOnly, AllowAny from rest_framework.request import", "wrong todo: make 400 malformed with info on available geo", "'Content-Disposition': f'attachment; filename=\"{map_slug}.geojson\"' } return Response(geojson, headers=headers, content_type='application/geo+json') return Response(geojson)", "import viewsets, filters from rest_framework.exceptions import NotFound from rest_framework.negotiation import", "if request.query_params.get('download', False): renderer = GeoJSONRenderer() return renderer, renderer.media_type class", "import VIEW_CACHE_TTL if TYPE_CHECKING: from geo.models import AdminRegion from indicators.models.viz", "as e: raise NotFound if request.query_params.get('download', False): headers = {", "from indicators.views import GeoJSONRenderer from maps.models import DataLayer from maps.serializers", "== 'list': return DataLayerSerializer return DataLayerDetailsSerializer media_type = 'application/geo+json' format", "False): renderer = GeoJSONRenderer() return renderer, renderer.media_type class GeoJSONDataLayerView(APIView): permission_classes", "GeoJSONRenderer from maps.models import DataLayer from maps.serializers import DataLayerSerializer, DataLayerDetailsSerializer", "= data_layer.as_geojson() except KeyError as e: # when the geog", "cache_page from rest_framework import viewsets, filters from rest_framework.exceptions import NotFound", "used for downloading geojson files `JSONRenderer` is used for ajax", "self.action == 'list': return DataLayerSerializer return DataLayerDetailsSerializer media_type = 'application/geo+json'", "= renderers[0] if request.query_params.get('download', False): renderer = GeoJSONRenderer() return 
renderer,", "rest_framework.permissions import IsAuthenticatedOrReadOnly, AllowAny from rest_framework.request import Request from rest_framework.response", "request.query_params.get('download', False): renderer = GeoJSONRenderer() return renderer, renderer.media_type class GeoJSONDataLayerView(APIView):", "rest_framework.response import Response from rest_framework.views import APIView from indicators.models import", "KeyError as e: # when the geog is wrong todo:", "Variable, DataViz from indicators.utils import get_geog_model from indicators.views import GeoJSONRenderer", "geojson = data_layer.as_geojson() except KeyError as e: # when the", "Request, map_slug=None): try: data_layer: DataLayer = DataLayer.objects.get(slug=map_slug) geojson = data_layer.as_geojson()", "the geog is wrong todo: make 400 malformed with info", "indicators.models import Variable, DataViz from indicators.utils import get_geog_model from indicators.views", "geo types raise NotFound except ObjectDoesNotExist as e: raise NotFound", "available geo types raise NotFound except ObjectDoesNotExist as e: raise", "return json.dumps(data) class GeoJSONContentNegotiation(BaseContentNegotiation): \"\"\" Custom content negotiation scheme for", "django.views.decorators.cache import cache_page from rest_framework import viewsets, filters from rest_framework.exceptions", "import Response from rest_framework.views import APIView from indicators.models import Variable,", "= GeoJSONContentNegotiation @method_decorator(cache_page(VIEW_CACHE_TTL)) def get(self, request: Request, map_slug=None): try: data_layer:", "scheme for GeoJSON files. 
`GeoJSONRenderer` is used for downloading geojson", "False): headers = { 'Content-Disposition': f'attachment; filename=\"{map_slug}.geojson\"' } return Response(geojson,", "renderer_context=None): return json.dumps(data) class GeoJSONContentNegotiation(BaseContentNegotiation): \"\"\" Custom content negotiation scheme", "try: data_layer: DataLayer = DataLayer.objects.get(slug=map_slug) geojson = data_layer.as_geojson() except KeyError", "files. `GeoJSONRenderer` is used for downloading geojson files `JSONRenderer` is", "map_slug=None): try: data_layer: DataLayer = DataLayer.objects.get(slug=map_slug) geojson = data_layer.as_geojson() except", "indicators.utils import get_geog_model from indicators.views import GeoJSONRenderer from maps.models import", "from profiles.settings import VIEW_CACHE_TTL if TYPE_CHECKING: from geo.models import AdminRegion", "renderer, renderer.media_type class GeoJSONDataLayerView(APIView): permission_classes = [AllowAny, ] content_negotiation_class =", "malformed with info on available geo types raise NotFound except", "render(self, data, media_type=None, renderer_context=None): return json.dumps(data) class GeoJSONContentNegotiation(BaseContentNegotiation): \"\"\" Custom", "= [IsAuthenticatedOrReadOnly, ] filter_backends = [filters.SearchFilter, ] def get_serializer_class(self): if", "select_renderer(self, request: Request, renderers, format_suffix=None): renderer = renderers[0] if request.query_params.get('download',", "= 'application/geo+json' format = 'geojson' def render(self, data, media_type=None, renderer_context=None):", "Request, renderers, format_suffix=None): renderer = renderers[0] if request.query_params.get('download', False): renderer", "`JSONRenderer` is used for ajax calls. 
\"\"\" def select_parser(self, request,", "def select_parser(self, request, parsers): return super(GeoJSONContentNegotiation, self).select_parser(request, parsers) def select_renderer(self,", "from indicators.models.viz import MiniMap class DataLayerViewSet(viewsets.ModelViewSet): queryset = DataLayer.objects.all() serializer_class", "\"\"\" Custom content negotiation scheme for GeoJSON files. `GeoJSONRenderer` is", "= DataLayer.objects.get(slug=map_slug) geojson = data_layer.as_geojson() except KeyError as e: #", "make 400 malformed with info on available geo types raise", "from maps.models import DataLayer from maps.serializers import DataLayerSerializer, DataLayerDetailsSerializer from", "def select_renderer(self, request: Request, renderers, format_suffix=None): renderer = renderers[0] if", "viewsets, filters from rest_framework.exceptions import NotFound from rest_framework.negotiation import BaseContentNegotiation", "renderers[0] if request.query_params.get('download', False): renderer = GeoJSONRenderer() return renderer, renderer.media_type", "is used for downloading geojson files `JSONRenderer` is used for", "except ObjectDoesNotExist as e: raise NotFound if request.query_params.get('download', False): headers", "get_serializer_class(self): if self.action == 'list': return DataLayerSerializer return DataLayerDetailsSerializer media_type", "import IsAuthenticatedOrReadOnly, AllowAny from rest_framework.request import Request from rest_framework.response import", "from rest_framework.request import Request from rest_framework.response import Response from rest_framework.views", "] def get_serializer_class(self): if self.action == 'list': return DataLayerSerializer return", "request: Request, renderers, format_suffix=None): renderer = renderers[0] if request.query_params.get('download', False):", "get(self, request: Request, map_slug=None): try: data_layer: DataLayer = DataLayer.objects.get(slug=map_slug) geojson", "from indicators.models import Variable, 
DataViz from indicators.utils import get_geog_model from", "if TYPE_CHECKING: from geo.models import AdminRegion from indicators.models.viz import MiniMap", "geo.models import AdminRegion from indicators.models.viz import MiniMap class DataLayerViewSet(viewsets.ModelViewSet): queryset", "from geo.models import AdminRegion from indicators.models.viz import MiniMap class DataLayerViewSet(viewsets.ModelViewSet):", "filters from rest_framework.exceptions import NotFound from rest_framework.negotiation import BaseContentNegotiation from", "GeoJSONDataLayerView(APIView): permission_classes = [AllowAny, ] content_negotiation_class = GeoJSONContentNegotiation @method_decorator(cache_page(VIEW_CACHE_TTL)) def", "'list': return DataLayerSerializer return DataLayerDetailsSerializer media_type = 'application/geo+json' format =", "rest_framework.exceptions import NotFound from rest_framework.negotiation import BaseContentNegotiation from rest_framework.permissions import", "raise NotFound except ObjectDoesNotExist as e: raise NotFound if request.query_params.get('download',", "is used for ajax calls. 
\"\"\" def select_parser(self, request, parsers):", "from django.core.exceptions import ObjectDoesNotExist from django.utils.decorators import method_decorator from django.views.decorators.cache", "[IsAuthenticatedOrReadOnly, ] filter_backends = [filters.SearchFilter, ] def get_serializer_class(self): if self.action", "= DataLayerSerializer permission_classes = [IsAuthenticatedOrReadOnly, ] filter_backends = [filters.SearchFilter, ]", "DataLayerSerializer permission_classes = [IsAuthenticatedOrReadOnly, ] filter_backends = [filters.SearchFilter, ] def", "{ 'Content-Disposition': f'attachment; filename=\"{map_slug}.geojson\"' } return Response(geojson, headers=headers, content_type='application/geo+json') return", "except KeyError as e: # when the geog is wrong", "todo: make 400 malformed with info on available geo types", "format_suffix=None): renderer = renderers[0] if request.query_params.get('download', False): renderer = GeoJSONRenderer()", "from typing import Type, TYPE_CHECKING from django.core.exceptions import ObjectDoesNotExist from", "import Request from rest_framework.response import Response from rest_framework.views import APIView", "DataLayerSerializer, DataLayerDetailsSerializer from profiles.settings import VIEW_CACHE_TTL if TYPE_CHECKING: from geo.models", "import DataLayerSerializer, DataLayerDetailsSerializer from profiles.settings import VIEW_CACHE_TTL if TYPE_CHECKING: from", "for GeoJSON files. `GeoJSONRenderer` is used for downloading geojson files", "ajax calls. 
\"\"\" def select_parser(self, request, parsers): return super(GeoJSONContentNegotiation, self).select_parser(request,", "] content_negotiation_class = GeoJSONContentNegotiation @method_decorator(cache_page(VIEW_CACHE_TTL)) def get(self, request: Request, map_slug=None):", "raise NotFound if request.query_params.get('download', False): headers = { 'Content-Disposition': f'attachment;", "json from typing import Type, TYPE_CHECKING from django.core.exceptions import ObjectDoesNotExist", "GeoJSONContentNegotiation @method_decorator(cache_page(VIEW_CACHE_TTL)) def get(self, request: Request, map_slug=None): try: data_layer: DataLayer", "return renderer, renderer.media_type class GeoJSONDataLayerView(APIView): permission_classes = [AllowAny, ] content_negotiation_class", "class GeoJSONDataLayerView(APIView): permission_classes = [AllowAny, ] content_negotiation_class = GeoJSONContentNegotiation @method_decorator(cache_page(VIEW_CACHE_TTL))", "geog is wrong todo: make 400 malformed with info on", "DataLayerDetailsSerializer from profiles.settings import VIEW_CACHE_TTL if TYPE_CHECKING: from geo.models import", "DataLayer = DataLayer.objects.get(slug=map_slug) geojson = data_layer.as_geojson() except KeyError as e:", "renderer = GeoJSONRenderer() return renderer, renderer.media_type class GeoJSONDataLayerView(APIView): permission_classes =", "geojson files `JSONRenderer` is used for ajax calls. 
\"\"\" def", "= [filters.SearchFilter, ] def get_serializer_class(self): if self.action == 'list': return", "return super(GeoJSONContentNegotiation, self).select_parser(request, parsers) def select_renderer(self, request: Request, renderers, format_suffix=None):", "django.utils.decorators import method_decorator from django.views.decorators.cache import cache_page from rest_framework import", "request.query_params.get('download', False): headers = { 'Content-Disposition': f'attachment; filename=\"{map_slug}.geojson\"' } return", "# when the geog is wrong todo: make 400 malformed", "DataLayerViewSet(viewsets.ModelViewSet): queryset = DataLayer.objects.all() serializer_class = DataLayerSerializer permission_classes = [IsAuthenticatedOrReadOnly,", "GeoJSONContentNegotiation(BaseContentNegotiation): \"\"\" Custom content negotiation scheme for GeoJSON files. `GeoJSONRenderer`", "= DataLayer.objects.all() serializer_class = DataLayerSerializer permission_classes = [IsAuthenticatedOrReadOnly, ] filter_backends", "from rest_framework.negotiation import BaseContentNegotiation from rest_framework.permissions import IsAuthenticatedOrReadOnly, AllowAny from", "TYPE_CHECKING: from geo.models import AdminRegion from indicators.models.viz import MiniMap class", "import AdminRegion from indicators.models.viz import MiniMap class DataLayerViewSet(viewsets.ModelViewSet): queryset =", "data, media_type=None, renderer_context=None): return json.dumps(data) class GeoJSONContentNegotiation(BaseContentNegotiation): \"\"\" Custom content", "parsers): return super(GeoJSONContentNegotiation, self).select_parser(request, parsers) def select_renderer(self, request: Request, renderers,", "Custom content negotiation scheme for GeoJSON files. 
`GeoJSONRenderer` is used", "e: raise NotFound if request.query_params.get('download', False): headers = { 'Content-Disposition':", "NotFound from rest_framework.negotiation import BaseContentNegotiation from rest_framework.permissions import IsAuthenticatedOrReadOnly, AllowAny", "from indicators.utils import get_geog_model from indicators.views import GeoJSONRenderer from maps.models", "= [AllowAny, ] content_negotiation_class = GeoJSONContentNegotiation @method_decorator(cache_page(VIEW_CACHE_TTL)) def get(self, request:", "= GeoJSONRenderer() return renderer, renderer.media_type class GeoJSONDataLayerView(APIView): permission_classes = [AllowAny,", "info on available geo types raise NotFound except ObjectDoesNotExist as", "import Type, TYPE_CHECKING from django.core.exceptions import ObjectDoesNotExist from django.utils.decorators import", "TYPE_CHECKING from django.core.exceptions import ObjectDoesNotExist from django.utils.decorators import method_decorator from", "IsAuthenticatedOrReadOnly, AllowAny from rest_framework.request import Request from rest_framework.response import Response", "django.core.exceptions import ObjectDoesNotExist from django.utils.decorators import method_decorator from django.views.decorators.cache import", "profiles.settings import VIEW_CACHE_TTL if TYPE_CHECKING: from geo.models import AdminRegion from", "import GeoJSONRenderer from maps.models import DataLayer from maps.serializers import DataLayerSerializer,", "MiniMap class DataLayerViewSet(viewsets.ModelViewSet): queryset = DataLayer.objects.all() serializer_class = DataLayerSerializer permission_classes", "used for ajax calls. \"\"\" def select_parser(self, request, parsers): return", "negotiation scheme for GeoJSON files. 
`GeoJSONRenderer` is used for downloading", "self).select_parser(request, parsers) def select_renderer(self, request: Request, renderers, format_suffix=None): renderer =", "class GeoJSONContentNegotiation(BaseContentNegotiation): \"\"\" Custom content negotiation scheme for GeoJSON files.", "permission_classes = [AllowAny, ] content_negotiation_class = GeoJSONContentNegotiation @method_decorator(cache_page(VIEW_CACHE_TTL)) def get(self,", "data_layer.as_geojson() except KeyError as e: # when the geog is", "400 malformed with info on available geo types raise NotFound", "from maps.serializers import DataLayerSerializer, DataLayerDetailsSerializer from profiles.settings import VIEW_CACHE_TTL if", "DataLayerDetailsSerializer media_type = 'application/geo+json' format = 'geojson' def render(self, data,", "from rest_framework.views import APIView from indicators.models import Variable, DataViz from", "[filters.SearchFilter, ] def get_serializer_class(self): if self.action == 'list': return DataLayerSerializer", "def render(self, data, media_type=None, renderer_context=None): return json.dumps(data) class GeoJSONContentNegotiation(BaseContentNegotiation): \"\"\"", "json.dumps(data) class GeoJSONContentNegotiation(BaseContentNegotiation): \"\"\" Custom content negotiation scheme for GeoJSON", "if request.query_params.get('download', False): headers = { 'Content-Disposition': f'attachment; filename=\"{map_slug}.geojson\"' }", "calls. \"\"\" def select_parser(self, request, parsers): return super(GeoJSONContentNegotiation, self).select_parser(request, parsers)", "GeoJSON files. 
`GeoJSONRenderer` is used for downloading geojson files `JSONRenderer`", "indicators.views import GeoJSONRenderer from maps.models import DataLayer from maps.serializers import", "from rest_framework.exceptions import NotFound from rest_framework.negotiation import BaseContentNegotiation from rest_framework.permissions", "DataLayerSerializer return DataLayerDetailsSerializer media_type = 'application/geo+json' format = 'geojson' def", "import cache_page from rest_framework import viewsets, filters from rest_framework.exceptions import", "`GeoJSONRenderer` is used for downloading geojson files `JSONRenderer` is used", "Type, TYPE_CHECKING from django.core.exceptions import ObjectDoesNotExist from django.utils.decorators import method_decorator", "data_layer: DataLayer = DataLayer.objects.get(slug=map_slug) geojson = data_layer.as_geojson() except KeyError as", "APIView from indicators.models import Variable, DataViz from indicators.utils import get_geog_model", "rest_framework import viewsets, filters from rest_framework.exceptions import NotFound from rest_framework.negotiation", "queryset = DataLayer.objects.all() serializer_class = DataLayerSerializer permission_classes = [IsAuthenticatedOrReadOnly, ]", "BaseContentNegotiation from rest_framework.permissions import IsAuthenticatedOrReadOnly, AllowAny from rest_framework.request import Request", "when the geog is wrong todo: make 400 malformed with", "return DataLayerSerializer return DataLayerDetailsSerializer media_type = 'application/geo+json' format = 'geojson'", "request: Request, map_slug=None): try: data_layer: DataLayer = DataLayer.objects.get(slug=map_slug) geojson =", "VIEW_CACHE_TTL if TYPE_CHECKING: from geo.models import AdminRegion from indicators.models.viz import", "typing import Type, TYPE_CHECKING from django.core.exceptions import ObjectDoesNotExist from django.utils.decorators", "serializer_class = DataLayerSerializer permission_classes = [IsAuthenticatedOrReadOnly, ] filter_backends = 
[filters.SearchFilter,", "if self.action == 'list': return DataLayerSerializer return DataLayerDetailsSerializer media_type =", "for ajax calls. \"\"\" def select_parser(self, request, parsers): return super(GeoJSONContentNegotiation,", "AdminRegion from indicators.models.viz import MiniMap class DataLayerViewSet(viewsets.ModelViewSet): queryset = DataLayer.objects.all()", "= 'geojson' def render(self, data, media_type=None, renderer_context=None): return json.dumps(data) class", "= { 'Content-Disposition': f'attachment; filename=\"{map_slug}.geojson\"' } return Response(geojson, headers=headers, content_type='application/geo+json')", "Request from rest_framework.response import Response from rest_framework.views import APIView from", "def get_serializer_class(self): if self.action == 'list': return DataLayerSerializer return DataLayerDetailsSerializer", "renderer = renderers[0] if request.query_params.get('download', False): renderer = GeoJSONRenderer() return", "rest_framework.views import APIView from indicators.models import Variable, DataViz from indicators.utils", "renderer.media_type class GeoJSONDataLayerView(APIView): permission_classes = [AllowAny, ] content_negotiation_class = GeoJSONContentNegotiation", "ObjectDoesNotExist from django.utils.decorators import method_decorator from django.views.decorators.cache import cache_page from", "import MiniMap class DataLayerViewSet(viewsets.ModelViewSet): queryset = DataLayer.objects.all() serializer_class = DataLayerSerializer", "e: # when the geog is wrong todo: make 400", "'application/geo+json' format = 'geojson' def render(self, data, media_type=None, renderer_context=None): return", "request, parsers): return super(GeoJSONContentNegotiation, self).select_parser(request, parsers) def select_renderer(self, request: Request,", "from django.views.decorators.cache import cache_page from rest_framework import viewsets, filters from", "import DataLayer from maps.serializers import DataLayerSerializer, 
DataLayerDetailsSerializer from profiles.settings import", "types raise NotFound except ObjectDoesNotExist as e: raise NotFound if", "class DataLayerViewSet(viewsets.ModelViewSet): queryset = DataLayer.objects.all() serializer_class = DataLayerSerializer permission_classes =", "method_decorator from django.views.decorators.cache import cache_page from rest_framework import viewsets, filters", "content negotiation scheme for GeoJSON files. `GeoJSONRenderer` is used for", "@method_decorator(cache_page(VIEW_CACHE_TTL)) def get(self, request: Request, map_slug=None): try: data_layer: DataLayer =", "def get(self, request: Request, map_slug=None): try: data_layer: DataLayer = DataLayer.objects.get(slug=map_slug)", "import get_geog_model from indicators.views import GeoJSONRenderer from maps.models import DataLayer", "ObjectDoesNotExist as e: raise NotFound if request.query_params.get('download', False): headers =", "downloading geojson files `JSONRenderer` is used for ajax calls. \"\"\"", "rest_framework.request import Request from rest_framework.response import Response from rest_framework.views import", "[AllowAny, ] content_negotiation_class = GeoJSONContentNegotiation @method_decorator(cache_page(VIEW_CACHE_TTL)) def get(self, request: Request,", "DataLayer.objects.get(slug=map_slug) geojson = data_layer.as_geojson() except KeyError as e: # when", "indicators.models.viz import MiniMap class DataLayerViewSet(viewsets.ModelViewSet): queryset = DataLayer.objects.all() serializer_class =", "rest_framework.negotiation import BaseContentNegotiation from rest_framework.permissions import IsAuthenticatedOrReadOnly, AllowAny from rest_framework.request", "from rest_framework.response import Response from rest_framework.views import APIView from indicators.models", "parsers) def select_renderer(self, request: Request, renderers, format_suffix=None): renderer = renderers[0]", "import Variable, DataViz from indicators.utils import get_geog_model from indicators.views import", 
"maps.serializers import DataLayerSerializer, DataLayerDetailsSerializer from profiles.settings import VIEW_CACHE_TTL if TYPE_CHECKING:", "import NotFound from rest_framework.negotiation import BaseContentNegotiation from rest_framework.permissions import IsAuthenticatedOrReadOnly," ]
[ "if op == \"__invert__\": setattr(BitsType, op, raise_mantle_import_error_unary) else: setattr(BitsType, op,", "setattr(BitsType, op, define_raise_undefined_operator_error(\"BitsType\", op, \"binary\")) for op in (\"__add__\", \"__sub__\",", "op in (\"__and__\", \"__or__\", \"__xor__\", \"__invert__\" ): if op ==", "\"__le__\", \"__gt__\", \"__ge__\" ): if op == \"__invert__\": setattr(_BitType, op,", "\"__invert__\": setattr(BitType, op, raise_mantle_import_error_unary) else: setattr(BitType, op, raise_mantle_import_error_binary) for op", "op, define_raise_undefined_operator_error(\"_BitType\", op, \"unary\")) else: setattr( _BitType, op, define_raise_undefined_operator_error(\"_BitType\", op,", "type_ == \"binary\" def wrapped(self, other): raise UndefinedOperatorError( f\"{operator} is", "(\"__and__\", \"__or__\", \"__xor__\", \"__invert__\" ): if op == \"__invert__\": setattr(BitType,", "else: setattr( _BitType, op, define_raise_undefined_operator_error(\"_BitType\", op, \"binary\")) for op in", "not defined until mantle has been imported\") def raise_mantle_import_error_binary(self, other):", "for {type_str}\") else: assert type_ == \"binary\" def wrapped(self, other):", "because it's used for assignment on inputs # \"__le__\", \"__gt__\",", "setattr( _BitType, op, define_raise_undefined_operator_error(\"_BitType\", op, \"binary\")) for op in (\"__and__\",", "pass def raise_mantle_import_error_unary(self): raise MantleImportError( \"Operators are not defined until", "wrapped(self, other): raise UndefinedOperatorError( f\"{operator} is undefined for {type_str}\") return", "for op in (\"__and__\", \"__or__\", \"__xor__\", \"__invert__\" ): if op", "\"__xor__\", \"__invert__\", \"__add__\", \"__sub__\", \"__mul__\", \"__div__\", \"__lt__\", # __le__ skipped", "\"__xor__\", \"__invert__\" ): if op == \"__invert__\": setattr(BitType, op, raise_mantle_import_error_unary)", "inputs # \"__le__\", \"__gt__\", \"__ge__\" ): setattr(BitsType, op, 
define_raise_undefined_operator_error(\"BitsType\", op,", "\"binary\")) for op in (\"__and__\", \"__or__\", \"__xor__\", \"__invert__\" ): if", "SIntType class MantleImportError(RuntimeError): pass class UndefinedOperatorError(RuntimeError): pass def raise_mantle_import_error_unary(self): raise", "until mantle has been imported\") def raise_mantle_import_error_binary(self, other): raise MantleImportError(", "raise_mantle_import_error_binary) for op in ( \"__and__\", \"__or__\", \"__xor__\", \"__invert__\", \"__add__\",", "== \"unary\": def wrapped(self): raise UndefinedOperatorError( f\"{operator} is undefined for", "{type_str}\") else: assert type_ == \"binary\" def wrapped(self, other): raise", "are not defined until mantle has been imported\") def define_raise_undefined_operator_error(type_str,", "imported\") def define_raise_undefined_operator_error(type_str, operator, type_): if type_ == \"unary\": def", "type_): if type_ == \"unary\": def wrapped(self): raise UndefinedOperatorError( f\"{operator}", "): if op == \"__invert__\": setattr(_BitType, op, define_raise_undefined_operator_error(\"_BitType\", op, \"unary\"))", "op in ( \"__and__\", \"__or__\", \"__xor__\", \"__invert__\", \"__add__\", \"__sub__\", \"__mul__\",", "else: setattr(BitsType, op, raise_mantle_import_error_binary) for op in (\"__add__\", \"__sub__\", \"__mul__\",", "op, raise_mantle_import_error_unary) else: setattr(BitType, op, raise_mantle_import_error_binary) for op in (\"__and__\",", "raise_mantle_import_error_unary) else: setattr(BitType, op, raise_mantle_import_error_binary) for op in (\"__and__\", \"__or__\",", "( \"__and__\", \"__or__\", \"__xor__\", \"__invert__\", \"__add__\", \"__sub__\", \"__mul__\", \"__div__\", \"__lt__\",", "until mantle has been imported\") def define_raise_undefined_operator_error(type_str, operator, type_): if", "\"binary\")) for op in (\"__add__\", \"__sub__\", \"__mul__\", \"__div__\", \"__lt__\", #", "def define_raise_undefined_operator_error(type_str, 
operator, type_): if type_ == \"unary\": def wrapped(self):", "\"__lt__\", # __le__ skipped because it's used for assignment on", "\"__xor__\", \"__invert__\", \"__lshift__\", \"__rshift__\", ): if op == \"__invert__\": setattr(BitsType,", "for assignment on inputs # \"__le__\", \"__gt__\", \"__ge__\" ): setattr(SIntType,", "op, raise_mantle_import_error_binary) for op in (\"__add__\", \"__sub__\", \"__mul__\", \"__div__\", \"__lt__\",", "\"__and__\", \"__or__\", \"__xor__\", \"__invert__\", \"__add__\", \"__sub__\", \"__mul__\", \"__div__\", \"__lt__\", #", "used for assignment on inputs # \"__le__\", \"__gt__\", \"__ge__\" ):", "\"unary\": def wrapped(self): raise UndefinedOperatorError( f\"{operator} is undefined for {type_str}\")", "define_raise_undefined_operator_error(type_str, operator, type_): if type_ == \"unary\": def wrapped(self): raise", "are not defined until mantle has been imported\") def raise_mantle_import_error_binary(self,", "setattr(BitType, op, raise_mantle_import_error_unary) else: setattr(BitType, op, raise_mantle_import_error_binary) for op in", "# \"__le__\", \"__gt__\", \"__ge__\" ): setattr(SIntType, op, raise_mantle_import_error_binary) setattr(UIntType, op,", "in (\"__and__\", \"__or__\", \"__xor__\", \"__invert__\", \"__lshift__\", \"__rshift__\", ): if op", "undefined for {type_str}\") else: assert type_ == \"binary\" def wrapped(self,", "_BitType, op, define_raise_undefined_operator_error(\"_BitType\", op, \"binary\")) for op in (\"__and__\", \"__or__\",", "op in (\"__and__\", \"__or__\", \"__xor__\", \"__invert__\", \"__lshift__\", \"__rshift__\", ): if", "been imported\") def define_raise_undefined_operator_error(type_str, operator, type_): if type_ == \"unary\":", "it's used for assignment on inputs # \"__le__\", \"__gt__\", \"__ge__\"", "for assignment on inputs # \"__le__\", \"__gt__\", \"__ge__\" ): setattr(BitsType,", "assignment on inputs # \"__le__\", \"__gt__\", \"__ge__\" ): setattr(SIntType, op,", "imported\") def 
raise_mantle_import_error_binary(self, other): raise MantleImportError( \"Operators are not defined", "return wrapped for op in (\"__eq__\", \"__ne__\"): setattr(_BitType, op, raise_mantle_import_error_binary)", "# \"__le__\", \"__gt__\", \"__ge__\" ): setattr(BitsType, op, define_raise_undefined_operator_error(\"BitsType\", op, \"binary\"))", "in (\"__and__\", \"__or__\", \"__xor__\", \"__invert__\" ): if op == \"__invert__\":", "\"__or__\", \"__xor__\", \"__invert__\" ): if op == \"__invert__\": setattr(BitType, op,", "\"__invert__\": setattr(BitsType, op, raise_mantle_import_error_unary) else: setattr(BitsType, op, raise_mantle_import_error_binary) for op", "f\"{operator} is undefined for {type_str}\") return wrapped for op in", "op, \"unary\")) else: setattr( _BitType, op, define_raise_undefined_operator_error(\"_BitType\", op, \"binary\")) for", "\"unary\")) else: setattr( _BitType, op, define_raise_undefined_operator_error(\"_BitType\", op, \"binary\")) for op", "\"__ge__\" ): if op == \"__invert__\": setattr(_BitType, op, define_raise_undefined_operator_error(\"_BitType\", op,", "from magma import _BitType, BitType, BitsType, UIntType, SIntType class MantleImportError(RuntimeError):", "mantle has been imported\") def raise_mantle_import_error_binary(self, other): raise MantleImportError( \"Operators", "in (\"__eq__\", \"__ne__\"): setattr(_BitType, op, raise_mantle_import_error_binary) for op in (", "operator, type_): if type_ == \"unary\": def wrapped(self): raise UndefinedOperatorError(", "BitType, BitsType, UIntType, SIntType class MantleImportError(RuntimeError): pass class UndefinedOperatorError(RuntimeError): pass", "== \"binary\" def wrapped(self, other): raise UndefinedOperatorError( f\"{operator} is undefined", "for assignment on inputs # \"__le__\", \"__gt__\", \"__ge__\" ): if", "op, \"binary\")) for op in (\"__and__\", \"__or__\", \"__xor__\", \"__invert__\" ):", "UndefinedOperatorError( f\"{operator} is undefined for {type_str}\") else: assert 
type_ ==", "(\"__eq__\", \"__ne__\"): setattr(_BitType, op, raise_mantle_import_error_binary) for op in ( \"__and__\",", "import _BitType, BitType, BitsType, UIntType, SIntType class MantleImportError(RuntimeError): pass class", "mantle has been imported\") def define_raise_undefined_operator_error(type_str, operator, type_): if type_", "\"__mul__\", \"__div__\", \"__lt__\", # __le__ skipped because it's used for", "define_raise_undefined_operator_error(\"_BitType\", op, \"unary\")) else: setattr( _BitType, op, define_raise_undefined_operator_error(\"_BitType\", op, \"binary\"))", "for {type_str}\") return wrapped for op in (\"__eq__\", \"__ne__\"): setattr(_BitType,", "type_ == \"unary\": def wrapped(self): raise UndefinedOperatorError( f\"{operator} is undefined", "has been imported\") def define_raise_undefined_operator_error(type_str, operator, type_): if type_ ==", "inputs # \"__le__\", \"__gt__\", \"__ge__\" ): if op == \"__invert__\":", "been imported\") def raise_mantle_import_error_binary(self, other): raise MantleImportError( \"Operators are not", "raise_mantle_import_error_unary) else: setattr(BitsType, op, raise_mantle_import_error_binary) for op in (\"__add__\", \"__sub__\",", "\"__gt__\", \"__ge__\" ): setattr(BitsType, op, define_raise_undefined_operator_error(\"BitsType\", op, \"binary\")) for op", "def raise_mantle_import_error_binary(self, other): raise MantleImportError( \"Operators are not defined until", "op == \"__invert__\": setattr(BitType, op, raise_mantle_import_error_unary) else: setattr(BitType, op, raise_mantle_import_error_binary)", "setattr(BitsType, op, raise_mantle_import_error_unary) else: setattr(BitsType, op, raise_mantle_import_error_binary) for op in", "op in (\"__add__\", \"__sub__\", \"__mul__\", \"__div__\", \"__lt__\", # __le__ skipped", "setattr(BitsType, op, raise_mantle_import_error_binary) for op in (\"__add__\", \"__sub__\", \"__mul__\", \"__div__\",", "op, raise_mantle_import_error_unary) else: setattr(BitsType, op, 
raise_mantle_import_error_binary) for op in (\"__add__\",", "op, raise_mantle_import_error_binary) for op in ( \"__and__\", \"__or__\", \"__xor__\", \"__invert__\",", "class MantleImportError(RuntimeError): pass class UndefinedOperatorError(RuntimeError): pass def raise_mantle_import_error_unary(self): raise MantleImportError(", "): if op == \"__invert__\": setattr(BitType, op, raise_mantle_import_error_unary) else: setattr(BitType,", "\"__div__\", \"__lt__\", # __le__ skipped because it's used for assignment", "op == \"__invert__\": setattr(BitsType, op, raise_mantle_import_error_unary) else: setattr(BitsType, op, raise_mantle_import_error_binary)", "op, define_raise_undefined_operator_error(\"_BitType\", op, \"binary\")) for op in (\"__and__\", \"__or__\", \"__xor__\",", "\"__rshift__\", ): if op == \"__invert__\": setattr(BitsType, op, raise_mantle_import_error_unary) else:", "raise MantleImportError( \"Operators are not defined until mantle has been", "setattr(_BitType, op, define_raise_undefined_operator_error(\"_BitType\", op, \"unary\")) else: setattr( _BitType, op, define_raise_undefined_operator_error(\"_BitType\",", "raise_mantle_import_error_binary(self, other): raise MantleImportError( \"Operators are not defined until mantle", "\"__sub__\", \"__mul__\", \"__div__\", \"__lt__\", # __le__ skipped because it's used", "else: assert type_ == \"binary\" def wrapped(self, other): raise UndefinedOperatorError(", "on inputs # \"__le__\", \"__gt__\", \"__ge__\" ): setattr(SIntType, op, raise_mantle_import_error_binary)", "\"__ne__\"): setattr(_BitType, op, raise_mantle_import_error_binary) for op in ( \"__and__\", \"__or__\",", "for op in (\"__and__\", \"__or__\", \"__xor__\", \"__invert__\", \"__lshift__\", \"__rshift__\", ):", "assignment on inputs # \"__le__\", \"__gt__\", \"__ge__\" ): setattr(BitsType, op,", "UndefinedOperatorError( f\"{operator} is undefined for {type_str}\") return wrapped for op", "wrapped for op in (\"__eq__\", \"__ne__\"): 
setattr(_BitType, op, raise_mantle_import_error_binary) for", "(\"__and__\", \"__or__\", \"__xor__\", \"__invert__\", \"__lshift__\", \"__rshift__\", ): if op ==", "\"__or__\", \"__xor__\", \"__invert__\", \"__add__\", \"__sub__\", \"__mul__\", \"__div__\", \"__lt__\", # __le__", "define_raise_undefined_operator_error(\"_BitType\", op, \"binary\")) for op in (\"__and__\", \"__or__\", \"__xor__\", \"__invert__\"", "if type_ == \"unary\": def wrapped(self): raise UndefinedOperatorError( f\"{operator} is", "assert type_ == \"binary\" def wrapped(self, other): raise UndefinedOperatorError( f\"{operator}", "== \"__invert__\": setattr(BitsType, op, raise_mantle_import_error_unary) else: setattr(BitsType, op, raise_mantle_import_error_binary) for", "== \"__invert__\": setattr(BitType, op, raise_mantle_import_error_unary) else: setattr(BitType, op, raise_mantle_import_error_binary) for", "on inputs # \"__le__\", \"__gt__\", \"__ge__\" ): if op ==", "def wrapped(self): raise UndefinedOperatorError( f\"{operator} is undefined for {type_str}\") else:", "raise UndefinedOperatorError( f\"{operator} is undefined for {type_str}\") else: assert type_", "UIntType, SIntType class MantleImportError(RuntimeError): pass class UndefinedOperatorError(RuntimeError): pass def raise_mantle_import_error_unary(self):", "raise_mantle_import_error_binary) for op in (\"__and__\", \"__or__\", \"__xor__\", \"__invert__\", \"__lshift__\", \"__rshift__\",", "pass class UndefinedOperatorError(RuntimeError): pass def raise_mantle_import_error_unary(self): raise MantleImportError( \"Operators are", "op, \"binary\")) for op in (\"__add__\", \"__sub__\", \"__mul__\", \"__div__\", \"__lt__\",", "\"__invert__\", \"__lshift__\", \"__rshift__\", ): if op == \"__invert__\": setattr(BitsType, op,", "on inputs # \"__le__\", \"__gt__\", \"__ge__\" ): setattr(BitsType, op, define_raise_undefined_operator_error(\"BitsType\",", "def wrapped(self, other): raise UndefinedOperatorError( f\"{operator} is undefined for 
{type_str}\")", "is undefined for {type_str}\") return wrapped for op in (\"__eq__\",", "setattr(BitType, op, raise_mantle_import_error_binary) for op in (\"__and__\", \"__or__\", \"__xor__\", \"__invert__\",", "in (\"__add__\", \"__sub__\", \"__mul__\", \"__div__\", \"__lt__\", # __le__ skipped because", "if op == \"__invert__\": setattr(_BitType, op, define_raise_undefined_operator_error(\"_BitType\", op, \"unary\")) else:", "\"__lshift__\", \"__rshift__\", ): if op == \"__invert__\": setattr(BitsType, op, raise_mantle_import_error_unary)", "other): raise MantleImportError( \"Operators are not defined until mantle has", "\"__le__\", \"__gt__\", \"__ge__\" ): setattr(SIntType, op, raise_mantle_import_error_binary) setattr(UIntType, op, raise_mantle_import_error_binary)", "\"__invert__\" ): if op == \"__invert__\": setattr(BitType, op, raise_mantle_import_error_unary) else:", "has been imported\") def raise_mantle_import_error_binary(self, other): raise MantleImportError( \"Operators are", "op in (\"__eq__\", \"__ne__\"): setattr(_BitType, op, raise_mantle_import_error_binary) for op in", "if op == \"__invert__\": setattr(BitType, op, raise_mantle_import_error_unary) else: setattr(BitType, op,", "MantleImportError( \"Operators are not defined until mantle has been imported\")", "f\"{operator} is undefined for {type_str}\") else: assert type_ == \"binary\"", "for op in (\"__add__\", \"__sub__\", \"__mul__\", \"__div__\", \"__lt__\", # __le__", "for op in (\"__eq__\", \"__ne__\"): setattr(_BitType, op, raise_mantle_import_error_binary) for op", "{type_str}\") return wrapped for op in (\"__eq__\", \"__ne__\"): setattr(_BitType, op,", "\"__invert__\", \"__add__\", \"__sub__\", \"__mul__\", \"__div__\", \"__lt__\", # __le__ skipped because", "): setattr(BitsType, op, define_raise_undefined_operator_error(\"BitsType\", op, \"binary\")) for op in (\"__add__\",", "raise_mantle_import_error_binary) for op in (\"__add__\", \"__sub__\", \"__mul__\", \"__div__\", \"__lt__\", 
#", "op == \"__invert__\": setattr(_BitType, op, define_raise_undefined_operator_error(\"_BitType\", op, \"unary\")) else: setattr(", "skipped because it's used for assignment on inputs # \"__le__\",", "== \"__invert__\": setattr(_BitType, op, define_raise_undefined_operator_error(\"_BitType\", op, \"unary\")) else: setattr( _BitType,", "assignment on inputs # \"__le__\", \"__gt__\", \"__ge__\" ): if op", "(\"__add__\", \"__sub__\", \"__mul__\", \"__div__\", \"__lt__\", # __le__ skipped because it's", "for op in ( \"__and__\", \"__or__\", \"__xor__\", \"__invert__\", \"__add__\", \"__sub__\",", "defined until mantle has been imported\") def define_raise_undefined_operator_error(type_str, operator, type_):", "other): raise UndefinedOperatorError( f\"{operator} is undefined for {type_str}\") return wrapped", "raise UndefinedOperatorError( f\"{operator} is undefined for {type_str}\") return wrapped for", "undefined for {type_str}\") return wrapped for op in (\"__eq__\", \"__ne__\"):", "\"__le__\", \"__gt__\", \"__ge__\" ): setattr(BitsType, op, define_raise_undefined_operator_error(\"BitsType\", op, \"binary\")) for", "inputs # \"__le__\", \"__gt__\", \"__ge__\" ): setattr(SIntType, op, raise_mantle_import_error_binary) setattr(UIntType,", "UndefinedOperatorError(RuntimeError): pass def raise_mantle_import_error_unary(self): raise MantleImportError( \"Operators are not defined", "magma import _BitType, BitType, BitsType, UIntType, SIntType class MantleImportError(RuntimeError): pass", "# \"__le__\", \"__gt__\", \"__ge__\" ): if op == \"__invert__\": setattr(_BitType,", "wrapped(self): raise UndefinedOperatorError( f\"{operator} is undefined for {type_str}\") else: assert", "not defined until mantle has been imported\") def define_raise_undefined_operator_error(type_str, operator,", "else: setattr(BitType, op, raise_mantle_import_error_binary) for op in (\"__and__\", \"__or__\", \"__xor__\",", "op, define_raise_undefined_operator_error(\"BitsType\", op, \"binary\")) 
for op in (\"__add__\", \"__sub__\", \"__mul__\",", "BitsType, UIntType, SIntType class MantleImportError(RuntimeError): pass class UndefinedOperatorError(RuntimeError): pass def", "\"__or__\", \"__xor__\", \"__invert__\", \"__lshift__\", \"__rshift__\", ): if op == \"__invert__\":", "_BitType, BitType, BitsType, UIntType, SIntType class MantleImportError(RuntimeError): pass class UndefinedOperatorError(RuntimeError):", "in ( \"__and__\", \"__or__\", \"__xor__\", \"__invert__\", \"__add__\", \"__sub__\", \"__mul__\", \"__div__\",", "setattr(_BitType, op, raise_mantle_import_error_binary) for op in ( \"__and__\", \"__or__\", \"__xor__\",", "\"__invert__\": setattr(_BitType, op, define_raise_undefined_operator_error(\"_BitType\", op, \"unary\")) else: setattr( _BitType, op,", "MantleImportError(RuntimeError): pass class UndefinedOperatorError(RuntimeError): pass def raise_mantle_import_error_unary(self): raise MantleImportError( \"Operators", "op, raise_mantle_import_error_binary) for op in (\"__and__\", \"__or__\", \"__xor__\", \"__invert__\", \"__lshift__\",", "\"__add__\", \"__sub__\", \"__mul__\", \"__div__\", \"__lt__\", # __le__ skipped because it's", "class UndefinedOperatorError(RuntimeError): pass def raise_mantle_import_error_unary(self): raise MantleImportError( \"Operators are not", "defined until mantle has been imported\") def raise_mantle_import_error_binary(self, other): raise", "): if op == \"__invert__\": setattr(BitsType, op, raise_mantle_import_error_unary) else: setattr(BitsType,", "\"__gt__\", \"__ge__\" ): if op == \"__invert__\": setattr(_BitType, op, define_raise_undefined_operator_error(\"_BitType\",", "\"binary\" def wrapped(self, other): raise UndefinedOperatorError( f\"{operator} is undefined for", "\"Operators are not defined until mantle has been imported\") def", "\"__ge__\" ): setattr(BitsType, op, define_raise_undefined_operator_error(\"BitsType\", op, \"binary\")) for op in", "define_raise_undefined_operator_error(\"BitsType\", op, 
\"binary\")) for op in (\"__add__\", \"__sub__\", \"__mul__\", \"__div__\",", "__le__ skipped because it's used for assignment on inputs #", "raise_mantle_import_error_unary(self): raise MantleImportError( \"Operators are not defined until mantle has", "def raise_mantle_import_error_unary(self): raise MantleImportError( \"Operators are not defined until mantle", "is undefined for {type_str}\") else: assert type_ == \"binary\" def", "# __le__ skipped because it's used for assignment on inputs" ]
[ "is any traceback, otherwise does nothing. :param always_print: print the", "self.__display_exception_debug_information() if self._halt_on_nonzero: raise self._exception def __display_exception_debug_information(self): def echo_debug_info(key): if", "try: self.rc = process.returncode except: pass self.__stdout = stdout.strip().splitlines() if", "not self.__stdout.empty(): try: line = self.__stdout.get_nowait() stdout.append(line) except: pass else:", "and len(self._context) > 0: self.__echo.warn(\"\\t - %s: %s\" % (key,", "self.is_complete: break else: time.sleep(0.1) pipe.close() def write_input(self): for line in", "if self._streaming: self.__stdin.put(line) @property def traceback(self): \"\"\" Converts traceback string", "or always_print: self.__echo.critical(\"--{ STDERR }---\" + \"-\" * 100) self.__format_lines_error(self.stderr)", "or always_print: self.__echo.info(\"---------------\" + \"-\" * 100) self.__format_lines_info(self.stdout) self.__echo.info(\"---------------\" +", "process, commands, context, streaming=False, exception=None, halt_on_nonzero=False): super(Result, self).__init__() self._process =", "if self._halt_on_nonzero and self.rc != 0: self.dump_exception() sys.exit() def dump_exception(self):", "% self._commands) # traceback self.print_traceback() # standard out self.print_stdout() #", "of the command was a failure. True for failure, False", "\"-\" * 100) self.__format_lines_error(self.stderr) self.__echo.critical(\"---------------\" + \"-\" * 100) def", "false) \"\"\" if self._exception or always_print: self.__echo.critical(\"--{ TRACEBACK }\" +", "stderr def stdin(self, line): \"\"\" Sends input to stdin. 
\"\"\"", "\"\"\" if self.__stdout or always_print: self.__echo.info(\"---------------\" + \"-\" * 100)", "self.__echo.critical(\"---------------\" + \"-\" * 100) def print_traceback(self, always_print=False): \"\"\" Prints", "self.__stderr return stderr def stdin(self, line): \"\"\" Sends input to", "streaming self.rc = None self._halt_on_nonzero=halt_on_nonzero if process and streaming: self.is_complete", "False self.__stdout = Queue() self.__stderr = Queue() self.__stdin = Queue()", "line = self.__stderr.get_nowait() stderr.append(line) except: pass else: stderr = self.__stderr", "def traceback(self): \"\"\" Converts traceback string to a list. \"\"\"", "* 100) self.__format_lines_info(self.stdout) self.__echo.info(\"---------------\" + \"-\" * 100) def print_stderr(self,", "self._halt_on_nonzero and self.rc != 0: print(self.stderr) raise subprocess.CalledProcessError(self.rc, ''.join(self._commands), self.stderr)", "of the command was a success. True for success, False", "\"\"\" if self._exception: return traceback.format_exc().split(\"\\n\") else: return [] @property def", "there is any stdout, otherwise does nothing. :param always_print: print", "stderr, even if there is nothing in the buffer (default:", "0: print(self.stderr) raise subprocess.CalledProcessError(self.rc, ''.join(self._commands), self.stderr) # self.dump_exception() def read_output(self,", "Result(Base): \"\"\" Class that encompasses the result of a POpen", "write_input(self): for line in iter(self.__stdin.get, None): if line.endswith(\"\\n\"): self._process.stdin.write(line) else:", "self._exception = e self.__echo.critical(\"Unable to run '%s'\" % self._commands) #", "def stdout(self): \"\"\" Converts stdout string to a list. 
\"\"\"", "= Thread(target=self.read_output, args=(process.stderr, self.__stderr)) self._stdin_t = Thread(target=self.write_input) self._wait_t = Thread(target=self.wait_on_process)", "TRACEBACK }\" + \"-\" * 100) self.__format_lines_error(self.traceback) self.__echo.critical(\"---------------\" + \"-\"", "time import traceback from queue import Queue from sultan.core import", "return stdout @property def stderr(self): \"\"\" Converts stderr string to", ":param always_print: print the traceback, even if there is nothing", "q): for line in iter(pipe.readline, b''): if line: q.put(line.strip()) elif", "are additional information that can be used to debug this", "the stdout, even if there is nothing in the buffer", "for t in (self._stdout_t, self._stderr_t, self._stdin_t): t.join() if self._halt_on_nonzero and", "traceback from queue import Queue from sultan.core import Base from", "%s\" % (key, self._context[0].get(key, 'N/A'))) self.__echo.warn(\"The following are additional information", "a POpen command. 
\"\"\" def __init__(self, process, commands, context, streaming=False,", "* 100) def print_stderr(self, always_print=False): \"\"\" Prints the stderr to", "(default: false) \"\"\" if self.__stdout or always_print: self.__echo.info(\"---------------\" + \"-\"", "= None, None try: self.rc = process.returncode except: pass self.__stdout", "context used to run:\") echo_debug_info('cwd') echo_debug_info('sudo') echo_debug_info('user') echo_debug_info('hostname') echo_debug_info('env') echo_debug_info('logging')", "self._stderr_t = Thread(target=self.read_output, args=(process.stderr, self.__stderr)) self._stdin_t = Thread(target=self.write_input) self._wait_t =", "for line in lines: self.__echo.info(self.__format_line(line)) @property def stdout(self): \"\"\" Converts", "self._context = context self._exception = exception self.__echo = Echo() self._streaming", "lines: self.__echo.info(self.__format_line(line)) @property def stdout(self): \"\"\" Converts stdout string to", "self._exception or always_print: self.__echo.critical(\"--{ TRACEBACK }\" + \"-\" * 100)", "self.is_complete = True for t in (self._stdout_t, self._stderr_t, self._stdin_t): t.join()", "echo_debug_info(key): if self._context and len(self._context) > 0: self.__echo.warn(\"\\t - %s:", "stderr string to a list. \"\"\" if self._streaming: stderr =", "self._stdin_t): t.join() if self._halt_on_nonzero and self.rc != 0: self.dump_exception() sys.exit()", "> 0: self.__echo.warn(\"\\t - %s: %s\" % (key, self._context[0].get(key, 'N/A')))", "Converts stdout string to a list. 
\"\"\" if self._streaming: stdout", "\"\"\" Returns if the result of the command was a", "== 0 @property def has_exception(self): ''' Returns True if self._exception", "'\\n'.join(self.stdout) def __format_line(self, msg): return '| %s' % msg def", "is nothing in the buffer (default: false) \"\"\" if self.__stderr", "sys import time import traceback from queue import Queue from", "print the traceback, even if there is nothing in the", "else [] self.__stderr = stderr.strip().splitlines() if stderr else [] if", "exception=None, halt_on_nonzero=False): super(Result, self).__init__() self._process = process self._commands = commands", "for line in iter(pipe.readline, b''): if line: q.put(line.strip()) elif self.is_complete:", "__format_line(self, msg): return '| %s' % msg def __format_lines_error(self, lines):", "false) \"\"\" if self.__stderr or always_print: self.__echo.critical(\"--{ STDERR }---\" +", "def __format_lines_info(self, lines): for line in lines: self.__echo.info(self.__format_line(line)) @property def", "while not self.__stderr.empty(): try: line = self.__stderr.get_nowait() stderr.append(line) except: pass", "from queue import Queue from sultan.core import Base from sultan.echo", "(self._stdout_t, self._stderr_t, self._stdin_t): t.join() if self._halt_on_nonzero and self.rc != 0:", "self.__stderr = Queue() self.__stdin = Queue() self._stdout_t = Thread(target=self.read_output, args=(process.stdout,", "import Thread class Result(Base): \"\"\" Class that encompasses the result", "stdout, otherwise does nothing. 
:param always_print: print the stdout, even", "True t.start() else: self.is_complete = True try: stdout, stderr =", "halt_on_nonzero=False): super(Result, self).__init__() self._process = process self._commands = commands self._context", "the stdout to console - if there is any stdout,", "dump_exception(self): if not self._exception: try: raise subprocess.CalledProcessError(self.rc, ''.join(self._commands), self.stderr) except", "# standard out self.print_stdout() # standard error self.print_stderr() # print", "except: pass self.__stdout = stdout.strip().splitlines() if stdout else [] self.__stderr", "sultan.echo import Echo from threading import Thread class Result(Base): \"\"\"", "\"\"\" if self._streaming: stdout = [] while not self.__stdout.empty(): try:", "stdout, otherwise does nothing. :param always_print: print the stderr, even", "Returns if the result of the command was a failure.", "nothing in the buffer (default: false) \"\"\" if self._exception or", "exception self.__echo = Echo() self._streaming = streaming self.rc = None", "None, None try: self.rc = process.returncode except: pass self.__stdout =", "q.put(line.strip()) elif self.is_complete: break else: time.sleep(0.1) pipe.close() def write_input(self): for", "line = self.__stdout.get_nowait() stdout.append(line) except: pass else: stdout = self.__stdout", "can be used to debug this exception.\") self.__echo.warn(\"The following is", "if there is nothing in the buffer (default: false) \"\"\"", "self._wait_t): t.daemon = True t.start() else: self.is_complete = True try:", "= Queue() self.__stdin = Queue() self._stdout_t = Thread(target=self.read_output, args=(process.stdout, self.__stdout))", "self.__format_lines_info(self.stdout) self.__echo.info(\"---------------\" + \"-\" * 100) def print_stderr(self, always_print=False): \"\"\"", "stderr = None, None try: self.rc = process.returncode except: pass", "if self.__stderr or always_print: self.__echo.critical(\"--{ STDERR }---\" + \"-\" *", "\"\"\" if 
self._streaming: self.__stdin.put(line) @property def traceback(self): \"\"\" Converts traceback", "= e self.__echo.critical(\"Unable to run '%s'\" % self._commands) # traceback", "self.__echo.info(\"---------------\" + \"-\" * 100) def print_stderr(self, always_print=False): \"\"\" Prints", "= Thread(target=self.read_output, args=(process.stdout, self.__stdout)) self._stderr_t = Thread(target=self.read_output, args=(process.stderr, self.__stderr)) self._stdin_t", "}\" + \"-\" * 100) self.__format_lines_error(self.traceback) self.__echo.critical(\"---------------\" + \"-\" *", "time.sleep(0.1) pipe.close() def write_input(self): for line in iter(self.__stdin.get, None): if", "echo_debug_info('hostname') echo_debug_info('env') echo_debug_info('logging') echo_debug_info('executable') echo_debug_info('ssh_config') echo_debug_info('src') def __str__(self): return '\\n'.join(self.stdout)", "if the result of the command was a success. True", "error self.print_stderr() # print debug information self.__display_exception_debug_information() if self._halt_on_nonzero: raise", "the traceback to console - if there is any traceback,", "True for failure, False for succes. \"\"\" return self.is_complete and", "self._exception is not empty. ''' return bool(self._exception) def print_stdout(self, always_print=False):", "for line in iter(self.__stdin.get, None): if line.endswith(\"\\n\"): self._process.stdin.write(line) else: self._process.stdin.write(line", "pass self.__stdout = stdout.strip().splitlines() if stdout else [] self.__stderr =", "does nothing. :param always_print: print the stdout, even if there", "len(self._context) > 0: self.__echo.warn(\"\\t - %s: %s\" % (key, self._context[0].get(key,", "Prints the stdout to console - if there is any", "stdout else [] self.__stderr = stderr.strip().splitlines() if stderr else []", "True if self._exception is not empty. 
''' return bool(self._exception) def", "subprocess.CalledProcessError(self.rc, ''.join(self._commands), self.stderr) except subprocess.CalledProcessError as e: self._exception = e", "Thread(target=self.read_output, args=(process.stderr, self.__stderr)) self._stdin_t = Thread(target=self.write_input) self._wait_t = Thread(target=self.wait_on_process) for", "(default: false) \"\"\" if self._exception or always_print: self.__echo.critical(\"--{ TRACEBACK }\"", "was a failure. True for failure, False for succes. \"\"\"", "exception.\") self.__echo.warn(\"The following is the context used to run:\") echo_debug_info('cwd')", "msg def __format_lines_error(self, lines): for line in lines: self.__echo.critical(self.__format_line(line)) def", "= True try: stdout, stderr = process.communicate() except: stdout, stderr", "def __str__(self): return '\\n'.join(self.stdout) def __format_line(self, msg): return '| %s'", "run:\") echo_debug_info('cwd') echo_debug_info('sudo') echo_debug_info('user') echo_debug_info('hostname') echo_debug_info('env') echo_debug_info('logging') echo_debug_info('executable') echo_debug_info('ssh_config') echo_debug_info('src')", "+ \"-\" * 100) self.__format_lines_error(self.stderr) self.__echo.critical(\"---------------\" + \"-\" * 100)", "[] if self._halt_on_nonzero and self.rc != 0: print(self.stderr) raise subprocess.CalledProcessError(self.rc,", "t.join() if self._halt_on_nonzero and self.rc != 0: self.dump_exception() sys.exit() def", "def __format_lines_error(self, lines): for line in lines: self.__echo.critical(self.__format_line(line)) def __format_lines_info(self,", "self.rc = None self._halt_on_nonzero=halt_on_nonzero if process and streaming: self.is_complete =", "t.daemon = True t.start() else: self.is_complete = True try: stdout,", "import subprocess import sys import time import traceback from queue", "always_print=False): \"\"\" Prints the stderr to console - if there", "self.__stdin = Queue() self._stdout_t = 
Thread(target=self.read_output, args=(process.stdout, self.__stdout)) self._stderr_t =", "def print_stdout(self, always_print=False): \"\"\" Prints the stdout to console -", "always_print: self.__echo.critical(\"--{ TRACEBACK }\" + \"-\" * 100) self.__format_lines_error(self.traceback) self.__echo.critical(\"---------------\"", "0 @property def is_failure(self): \"\"\" Returns if the result of", "result of the command was a failure. True for failure,", "- %s: %s\" % (key, self._context[0].get(key, 'N/A'))) self.__echo.warn(\"The following are", "pass else: stderr = self.__stderr return stderr def stdin(self, line):", "list. \"\"\" if self._exception: return traceback.format_exc().split(\"\\n\") else: return [] @property", "= process.communicate() except: stdout, stderr = None, None try: self.rc", "= [] while not self.__stdout.empty(): try: line = self.__stdout.get_nowait() stdout.append(line)", "a list. \"\"\" if self._streaming: stderr = [] while not", "always_print=False): \"\"\" Prints the traceback to console - if there", "# self.dump_exception() def read_output(self, pipe, q): for line in iter(pipe.readline,", "%s: %s\" % (key, self._context[0].get(key, 'N/A'))) self.__echo.warn(\"The following are additional", "[] self.__stderr = stderr.strip().splitlines() if stderr else [] if self._halt_on_nonzero", "echo_debug_info('logging') echo_debug_info('executable') echo_debug_info('ssh_config') echo_debug_info('src') def __str__(self): return '\\n'.join(self.stdout) def __format_line(self,", "True for t in (self._stdout_t, self._stderr_t, self._stdin_t): t.join() if self._halt_on_nonzero", "self).__init__() self._process = process self._commands = commands self._context = context", "else: self.is_complete = True try: stdout, stderr = process.communicate() except:", "self._halt_on_nonzero: raise self._exception def __display_exception_debug_information(self): def echo_debug_info(key): if self._context and", "= self.__stdout.get_nowait() stdout.append(line) except: 
pass else: stdout = self.__stdout return", "is nothing in the buffer (default: false) \"\"\" if self._exception", "self._stdin_t, self._wait_t): t.daemon = True t.start() else: self.is_complete = True", "100) def print_stderr(self, always_print=False): \"\"\" Prints the stderr to console", "self.__stderr.empty(): try: line = self.__stderr.get_nowait() stderr.append(line) except: pass else: stderr", "if self.__stdout or always_print: self.__echo.info(\"---------------\" + \"-\" * 100) self.__format_lines_info(self.stdout)", "= False self.__stdout = Queue() self.__stderr = Queue() self.__stdin =", "encompasses the result of a POpen command. \"\"\" def __init__(self,", "print(self.stderr) raise subprocess.CalledProcessError(self.rc, ''.join(self._commands), self.stderr) # self.dump_exception() def read_output(self, pipe,", "def __display_exception_debug_information(self): def echo_debug_info(key): if self._context and len(self._context) > 0:", "Queue() self._stdout_t = Thread(target=self.read_output, args=(process.stdout, self.__stdout)) self._stderr_t = Thread(target=self.read_output, args=(process.stderr,", "console - if there is any traceback, otherwise does nothing.", "stderr(self): \"\"\" Converts stderr string to a list. 
\"\"\" if", "@property def stdout(self): \"\"\" Converts stdout string to a list.", "the buffer (default: false) \"\"\" if self._exception or always_print: self.__echo.critical(\"--{", "debug this exception.\") self.__echo.warn(\"The following is the context used to", "= self.__stdout return stdout @property def stderr(self): \"\"\" Converts stderr", "= True t.start() else: self.is_complete = True try: stdout, stderr", "traceback to console - if there is any traceback, otherwise", "buffer (default: false) \"\"\" if self._exception or always_print: self.__echo.critical(\"--{ TRACEBACK", "args=(process.stdout, self.__stdout)) self._stderr_t = Thread(target=self.read_output, args=(process.stderr, self.__stderr)) self._stdin_t = Thread(target=self.write_input)", "for line in lines: self.__echo.critical(self.__format_line(line)) def __format_lines_info(self, lines): for line", "in (self._stdout_t, self._stderr_t, self._stdin_t): t.join() if self._halt_on_nonzero and self.rc !=", "\"\"\" Prints the stderr to console - if there is", "pipe.close() def write_input(self): for line in iter(self.__stdin.get, None): if line.endswith(\"\\n\"):", "Prints the stderr to console - if there is any", "any traceback, otherwise does nothing. :param always_print: print the traceback,", "Converts traceback string to a list. 
\"\"\" if self._exception: return", "def has_exception(self): ''' Returns True if self._exception is not empty.", "self.__stdout = stdout.strip().splitlines() if stdout else [] self.__stderr = stderr.strip().splitlines()", "stderr = self.__stderr return stderr def stdin(self, line): \"\"\" Sends", "echo_debug_info('sudo') echo_debug_info('user') echo_debug_info('hostname') echo_debug_info('env') echo_debug_info('logging') echo_debug_info('executable') echo_debug_info('ssh_config') echo_debug_info('src') def __str__(self):", "following are additional information that can be used to debug", "if self._exception or always_print: self.__echo.critical(\"--{ TRACEBACK }\" + \"-\" *", "traceback self.print_traceback() # standard out self.print_stdout() # standard error self.print_stderr()", "traceback, otherwise does nothing. :param always_print: print the traceback, even", "''.join(self._commands), self.stderr) # self.dump_exception() def read_output(self, pipe, q): for line", "buffer (default: false) \"\"\" if self.__stderr or always_print: self.__echo.critical(\"--{ STDERR", "\"-\" * 100) def print_traceback(self, always_print=False): \"\"\" Prints the traceback", "except: pass else: stderr = self.__stderr return stderr def stdin(self,", "% (key, self._context[0].get(key, 'N/A'))) self.__echo.warn(\"The following are additional information that", "self._halt_on_nonzero and self.rc != 0: self.dump_exception() sys.exit() def dump_exception(self): if", "= streaming self.rc = None self._halt_on_nonzero=halt_on_nonzero if process and streaming:", "Converts stderr string to a list. 
\"\"\" if self._streaming: stderr", "== 0 @property def is_failure(self): \"\"\" Returns if the result", "\"\"\" Prints the traceback to console - if there is", "[] @property def is_success(self): \"\"\" Returns if the result of", "out self.print_stdout() # standard error self.print_stderr() # print debug information", "self._streaming: stderr = [] while not self.__stderr.empty(): try: line =", "line in iter(self.__stdin.get, None): if line.endswith(\"\\n\"): self._process.stdin.write(line) else: self._process.stdin.write(line +", "stderr.strip().splitlines() if stderr else [] if self._halt_on_nonzero and self.rc !=", "@property def stderr(self): \"\"\" Converts stderr string to a list.", "[] while not self.__stderr.empty(): try: line = self.__stderr.get_nowait() stderr.append(line) except:", "b''): if line: q.put(line.strip()) elif self.is_complete: break else: time.sleep(0.1) pipe.close()", "args=(process.stderr, self.__stderr)) self._stdin_t = Thread(target=self.write_input) self._wait_t = Thread(target=self.wait_on_process) for t", "if stdout else [] self.__stderr = stderr.strip().splitlines() if stderr else", "stdout.strip().splitlines() if stdout else [] self.__stderr = stderr.strip().splitlines() if stderr", "stderr else [] if self._halt_on_nonzero and self.rc != 0: print(self.stderr)", "command. \"\"\" def __init__(self, process, commands, context, streaming=False, exception=None, halt_on_nonzero=False):", "is the context used to run:\") echo_debug_info('cwd') echo_debug_info('sudo') echo_debug_info('user') echo_debug_info('hostname')", "following is the context used to run:\") echo_debug_info('cwd') echo_debug_info('sudo') echo_debug_info('user')", "for failure. 
\"\"\" return self.is_complete and self.rc == 0 @property", "always_print: self.__echo.info(\"---------------\" + \"-\" * 100) self.__format_lines_info(self.stdout) self.__echo.info(\"---------------\" + \"-\"", "None try: self.rc = process.returncode except: pass self.__stdout = stdout.strip().splitlines()", "this exception.\") self.__echo.warn(\"The following is the context used to run:\")", "a list. \"\"\" if self._exception: return traceback.format_exc().split(\"\\n\") else: return []", "echo_debug_info('src') def __str__(self): return '\\n'.join(self.stdout) def __format_line(self, msg): return '|", "echo_debug_info('ssh_config') echo_debug_info('src') def __str__(self): return '\\n'.join(self.stdout) def __format_line(self, msg): return", "nothing in the buffer (default: false) \"\"\" if self.__stderr or", "self.is_complete and self.rc == 0 @property def is_failure(self): \"\"\" Returns", "False for failure. \"\"\" return self.is_complete and self.rc == 0", "self.__echo.critical(\"Unable to run '%s'\" % self._commands) # traceback self.print_traceback() #", "there is any traceback, otherwise does nothing. :param always_print: print", "pass else: stdout = self.__stdout return stdout @property def stderr(self):", "to run:\") echo_debug_info('cwd') echo_debug_info('sudo') echo_debug_info('user') echo_debug_info('hostname') echo_debug_info('env') echo_debug_info('logging') echo_debug_info('executable') echo_debug_info('ssh_config')", "def wait_on_process(self): self.rc = self._process.wait() self.__stdin.put(None) self.is_complete = True for", "for t in (self._stdout_t, self._stderr_t, self._stdin_t, self._wait_t): t.daemon = True", "wait_on_process(self): self.rc = self._process.wait() self.__stdin.put(None) self.is_complete = True for t", "(key, self._context[0].get(key, 'N/A'))) self.__echo.warn(\"The following are additional information that can", "traceback(self): \"\"\" Converts traceback string to a list. 
\"\"\" if", "is nothing in the buffer (default: false) \"\"\" if self.__stdout", "false) \"\"\" if self.__stdout or always_print: self.__echo.info(\"---------------\" + \"-\" *", "self.__echo.info(\"---------------\" + \"-\" * 100) self.__format_lines_info(self.stdout) self.__echo.info(\"---------------\" + \"-\" *", "self.__stdin.put(line) @property def traceback(self): \"\"\" Converts traceback string to a", "if there is any traceback, otherwise does nothing. :param always_print:", "\"\"\" return self.is_complete and not self.rc == 0 @property def", "self._context[0].get(key, 'N/A'))) self.__echo.warn(\"The following are additional information that can be", "if self._streaming: stderr = [] while not self.__stderr.empty(): try: line", "otherwise does nothing. :param always_print: print the stdout, even if", "not self._exception: try: raise subprocess.CalledProcessError(self.rc, ''.join(self._commands), self.stderr) except subprocess.CalledProcessError as", "return '\\n'.join(self.stdout) def __format_line(self, msg): return '| %s' % msg", "None self._halt_on_nonzero=halt_on_nonzero if process and streaming: self.is_complete = False self.__stdout", "\"\"\" Prints the stdout to console - if there is", "= Queue() self.__stderr = Queue() self.__stdin = Queue() self._stdout_t =", "!= 0: self.dump_exception() sys.exit() def dump_exception(self): if not self._exception: try:", "return self.is_complete and self.rc == 0 @property def is_failure(self): \"\"\"", "+ \"-\" * 100) self.__format_lines_error(self.traceback) self.__echo.critical(\"---------------\" + \"-\" * 100)", "not empty. 
''' return bool(self._exception) def print_stdout(self, always_print=False): \"\"\" Prints", "import Echo from threading import Thread class Result(Base): \"\"\" Class", "True try: stdout, stderr = process.communicate() except: stdout, stderr =", "and self.rc != 0: print(self.stderr) raise subprocess.CalledProcessError(self.rc, ''.join(self._commands), self.stderr) #", "self.print_traceback() # standard out self.print_stdout() # standard error self.print_stderr() #", "= stdout.strip().splitlines() if stdout else [] self.__stderr = stderr.strip().splitlines() if", "if line: q.put(line.strip()) elif self.is_complete: break else: time.sleep(0.1) pipe.close() def", "[] while not self.__stdout.empty(): try: line = self.__stdout.get_nowait() stdout.append(line) except:", "def write_input(self): for line in iter(self.__stdin.get, None): if line.endswith(\"\\n\"): self._process.stdin.write(line)", "while not self.__stdout.empty(): try: line = self.__stdout.get_nowait() stdout.append(line) except: pass", "import traceback from queue import Queue from sultan.core import Base", "if self._exception: return traceback.format_exc().split(\"\\n\") else: return [] @property def is_success(self):", "string to a list. \"\"\" if self._streaming: stdout = []", "stdout string to a list. \"\"\" if self._streaming: stdout =", "def __init__(self, process, commands, context, streaming=False, exception=None, halt_on_nonzero=False): super(Result, self).__init__()", "else: stderr = self.__stderr return stderr def stdin(self, line): \"\"\"", "\"\"\" return self.is_complete and self.rc == 0 @property def is_failure(self):", "not self.rc == 0 @property def has_exception(self): ''' Returns True", "from sultan.core import Base from sultan.echo import Echo from threading", "console - if there is any stdout, otherwise does nothing.", "streaming=False, exception=None, halt_on_nonzero=False): super(Result, self).__init__() self._process = process self._commands =", "a list. 
\"\"\" if self._streaming: stdout = [] while not", "debug information self.__display_exception_debug_information() if self._halt_on_nonzero: raise self._exception def __display_exception_debug_information(self): def", "if stderr else [] if self._halt_on_nonzero and self.rc != 0:", "and self.rc == 0 @property def is_failure(self): \"\"\" Returns if", "as e: self._exception = e self.__echo.critical(\"Unable to run '%s'\" %", "+ \"-\" * 100) def print_traceback(self, always_print=False): \"\"\" Prints the", "raise subprocess.CalledProcessError(self.rc, ''.join(self._commands), self.stderr) except subprocess.CalledProcessError as e: self._exception =", "self._exception = exception self.__echo = Echo() self._streaming = streaming self.rc", "self.dump_exception() def read_output(self, pipe, q): for line in iter(pipe.readline, b''):", "self._process.stdin.write(line) else: self._process.stdin.write(line + \"\\n\") def wait_on_process(self): self.rc = self._process.wait()", "# traceback self.print_traceback() # standard out self.print_stdout() # standard error", "for succes. \"\"\" return self.is_complete and not self.rc == 0", "from sultan.echo import Echo from threading import Thread class Result(Base):", "STDERR }---\" + \"-\" * 100) self.__format_lines_error(self.stderr) self.__echo.critical(\"---------------\" + \"-\"", "t in (self._stdout_t, self._stderr_t, self._stdin_t): t.join() if self._halt_on_nonzero and self.rc", "''' Returns True if self._exception is not empty. ''' return", "self.__echo.critical(\"--{ STDERR }---\" + \"-\" * 100) self.__format_lines_error(self.stderr) self.__echo.critical(\"---------------\" +", "a failure. True for failure, False for succes. \"\"\" return", "Base from sultan.echo import Echo from threading import Thread class", "context self._exception = exception self.__echo = Echo() self._streaming = streaming", "self.__stderr = stderr.strip().splitlines() if stderr else [] if self._halt_on_nonzero and", "stdin. 
\"\"\" if self._streaming: self.__stdin.put(line) @property def traceback(self): \"\"\" Converts", "+ \"-\" * 100) def print_stderr(self, always_print=False): \"\"\" Prints the", "does nothing. :param always_print: print the stderr, even if there", "self._stderr_t, self._stdin_t, self._wait_t): t.daemon = True t.start() else: self.is_complete =", "print debug information self.__display_exception_debug_information() if self._halt_on_nonzero: raise self._exception def __display_exception_debug_information(self):", "sultan.core import Base from sultan.echo import Echo from threading import", "def print_traceback(self, always_print=False): \"\"\" Prints the traceback to console -", "commands self._context = context self._exception = exception self.__echo = Echo()", "line in iter(pipe.readline, b''): if line: q.put(line.strip()) elif self.is_complete: break", "success. True for success, False for failure. \"\"\" return self.is_complete", "in the buffer (default: false) \"\"\" if self.__stderr or always_print:", "def read_output(self, pipe, q): for line in iter(pipe.readline, b''): if", "self.rc != 0: print(self.stderr) raise subprocess.CalledProcessError(self.rc, ''.join(self._commands), self.stderr) # self.dump_exception()", "the buffer (default: false) \"\"\" if self.__stderr or always_print: self.__echo.critical(\"--{", "echo_debug_info('executable') echo_debug_info('ssh_config') echo_debug_info('src') def __str__(self): return '\\n'.join(self.stdout) def __format_line(self, msg):", "process.communicate() except: stdout, stderr = None, None try: self.rc =", "stdout, stderr = process.communicate() except: stdout, stderr = None, None", "= Echo() self._streaming = streaming self.rc = None self._halt_on_nonzero=halt_on_nonzero if", "else [] if self._halt_on_nonzero and self.rc != 0: print(self.stderr) raise", "to console - if there is any stdout, otherwise does", "else: return [] @property def is_success(self): \"\"\" Returns if the", "if the result of the command was a 
failure. True", "def print_stderr(self, always_print=False): \"\"\" Prints the stderr to console -", "nothing. :param always_print: print the stdout, even if there is", "Returns True if self._exception is not empty. ''' return bool(self._exception)", "empty. ''' return bool(self._exception) def print_stdout(self, always_print=False): \"\"\" Prints the", "or always_print: self.__echo.critical(\"--{ TRACEBACK }\" + \"-\" * 100) self.__format_lines_error(self.traceback)", "@property def is_failure(self): \"\"\" Returns if the result of the", "!= 0: print(self.stderr) raise subprocess.CalledProcessError(self.rc, ''.join(self._commands), self.stderr) # self.dump_exception() def", "import Base from sultan.echo import Echo from threading import Thread", "to stdin. \"\"\" if self._streaming: self.__stdin.put(line) @property def traceback(self): \"\"\"", "stderr.append(line) except: pass else: stderr = self.__stderr return stderr def", "except: stdout, stderr = None, None try: self.rc = process.returncode", "e self.__echo.critical(\"Unable to run '%s'\" % self._commands) # traceback self.print_traceback()", "\"\"\" Sends input to stdin. \"\"\" if self._streaming: self.__stdin.put(line) @property", "# standard error self.print_stderr() # print debug information self.__display_exception_debug_information() if", "Sends input to stdin. \"\"\" if self._streaming: self.__stdin.put(line) @property def", "for success, False for failure. \"\"\" return self.is_complete and self.rc", "information self.__display_exception_debug_information() if self._halt_on_nonzero: raise self._exception def __display_exception_debug_information(self): def echo_debug_info(key):", "failure. \"\"\" return self.is_complete and self.rc == 0 @property def", "super(Result, self).__init__() self._process = process self._commands = commands self._context =", "False for succes. \"\"\" return self.is_complete and not self.rc ==", "to a list. 
\"\"\" if self._streaming: stderr = [] while", "self.__stdout)) self._stderr_t = Thread(target=self.read_output, args=(process.stderr, self.__stderr)) self._stdin_t = Thread(target=self.write_input) self._wait_t", "True for success, False for failure. \"\"\" return self.is_complete and", "stderr to console - if there is any stdout, otherwise", "stdout = self.__stdout return stdout @property def stderr(self): \"\"\" Converts", "context, streaming=False, exception=None, halt_on_nonzero=False): super(Result, self).__init__() self._process = process self._commands", "__display_exception_debug_information(self): def echo_debug_info(key): if self._context and len(self._context) > 0: self.__echo.warn(\"\\t", "else: self._process.stdin.write(line + \"\\n\") def wait_on_process(self): self.rc = self._process.wait() self.__stdin.put(None)", "__format_lines_info(self, lines): for line in lines: self.__echo.info(self.__format_line(line)) @property def stdout(self):", "run '%s'\" % self._commands) # traceback self.print_traceback() # standard out", "self.rc = self._process.wait() self.__stdin.put(None) self.is_complete = True for t in", "msg): return '| %s' % msg def __format_lines_error(self, lines): for", "failure. True for failure, False for succes. \"\"\" return self.is_complete", "result of a POpen command. \"\"\" def __init__(self, process, commands,", "the stderr to console - if there is any stdout,", "used to debug this exception.\") self.__echo.warn(\"The following is the context", "print_stdout(self, always_print=False): \"\"\" Prints the stdout to console - if", "- if there is any traceback, otherwise does nothing. 
:param", "information that can be used to debug this exception.\") self.__echo.warn(\"The", "self.is_complete = False self.__stdout = Queue() self.__stderr = Queue() self.__stdin", "stderr = process.communicate() except: stdout, stderr = None, None try:", "= self.__stderr return stderr def stdin(self, line): \"\"\" Sends input", "}---\" + \"-\" * 100) self.__format_lines_error(self.stderr) self.__echo.critical(\"---------------\" + \"-\" *", "__format_lines_error(self, lines): for line in lines: self.__echo.critical(self.__format_line(line)) def __format_lines_info(self, lines):", "stdout(self): \"\"\" Converts stdout string to a list. \"\"\" if", "''' return bool(self._exception) def print_stdout(self, always_print=False): \"\"\" Prints the stdout", "succes. \"\"\" return self.is_complete and not self.rc == 0 @property", "Queue() self.__stderr = Queue() self.__stdin = Queue() self._stdout_t = Thread(target=self.read_output,", "\"\"\" if self.__stderr or always_print: self.__echo.critical(\"--{ STDERR }---\" + \"-\"", "self.__stdin.put(None) self.is_complete = True for t in (self._stdout_t, self._stderr_t, self._stdin_t):", "= self._process.wait() self.__stdin.put(None) self.is_complete = True for t in (self._stdout_t,", "is not empty. ''' return bool(self._exception) def print_stdout(self, always_print=False): \"\"\"", "used to run:\") echo_debug_info('cwd') echo_debug_info('sudo') echo_debug_info('user') echo_debug_info('hostname') echo_debug_info('env') echo_debug_info('logging') echo_debug_info('executable')", "otherwise does nothing. :param always_print: print the traceback, even if", "iter(pipe.readline, b''): if line: q.put(line.strip()) elif self.is_complete: break else: time.sleep(0.1)", "in iter(pipe.readline, b''): if line: q.put(line.strip()) elif self.is_complete: break else:", "else: stdout = self.__stdout return stdout @property def stderr(self): \"\"\"", "of a POpen command. 
\"\"\" def __init__(self, process, commands, context,", "traceback, even if there is nothing in the buffer (default:", "Thread(target=self.wait_on_process) for t in (self._stdout_t, self._stderr_t, self._stdin_t, self._wait_t): t.daemon =", "command was a success. True for success, False for failure.", "# print debug information self.__display_exception_debug_information() if self._halt_on_nonzero: raise self._exception def", "0: self.dump_exception() sys.exit() def dump_exception(self): if not self._exception: try: raise", "otherwise does nothing. :param always_print: print the stderr, even if", "@property def has_exception(self): ''' Returns True if self._exception is not", "string to a list. \"\"\" if self._exception: return traceback.format_exc().split(\"\\n\") else:", "print the stdout, even if there is nothing in the", "lines): for line in lines: self.__echo.info(self.__format_line(line)) @property def stdout(self): \"\"\"", "self.stderr) except subprocess.CalledProcessError as e: self._exception = e self.__echo.critical(\"Unable to", "the result of the command was a failure. True for", "0: self.__echo.warn(\"\\t - %s: %s\" % (key, self._context[0].get(key, 'N/A'))) self.__echo.warn(\"The", "self.rc == 0 @property def has_exception(self): ''' Returns True if", "print_stderr(self, always_print=False): \"\"\" Prints the stderr to console - if", "@property def traceback(self): \"\"\" Converts traceback string to a list.", "list. 
\"\"\" if self._streaming: stdout = [] while not self.__stdout.empty():", "if process and streaming: self.is_complete = False self.__stdout = Queue()", "self._exception: try: raise subprocess.CalledProcessError(self.rc, ''.join(self._commands), self.stderr) except subprocess.CalledProcessError as e:", "'| %s' % msg def __format_lines_error(self, lines): for line in", "= [] while not self.__stderr.empty(): try: line = self.__stderr.get_nowait() stderr.append(line)", "traceback.format_exc().split(\"\\n\") else: return [] @property def is_success(self): \"\"\" Returns if", "in lines: self.__echo.critical(self.__format_line(line)) def __format_lines_info(self, lines): for line in lines:", "try: line = self.__stdout.get_nowait() stdout.append(line) except: pass else: stdout =", "def dump_exception(self): if not self._exception: try: raise subprocess.CalledProcessError(self.rc, ''.join(self._commands), self.stderr)", "self._commands = commands self._context = context self._exception = exception self.__echo", "always_print: print the stderr, even if there is nothing in", "line in lines: self.__echo.critical(self.__format_line(line)) def __format_lines_info(self, lines): for line in", "self.__stderr)) self._stdin_t = Thread(target=self.write_input) self._wait_t = Thread(target=self.wait_on_process) for t in", "\"\"\" def __init__(self, process, commands, context, streaming=False, exception=None, halt_on_nonzero=False): super(Result,", "line: q.put(line.strip()) elif self.is_complete: break else: time.sleep(0.1) pipe.close() def write_input(self):", "+ \"\\n\") def wait_on_process(self): self.rc = self._process.wait() self.__stdin.put(None) self.is_complete =", "to a list. 
\"\"\" if self._streaming: stdout = [] while", "line in lines: self.__echo.info(self.__format_line(line)) @property def stdout(self): \"\"\" Converts stdout", "'N/A'))) self.__echo.warn(\"The following are additional information that can be used", "lines: self.__echo.critical(self.__format_line(line)) def __format_lines_info(self, lines): for line in lines: self.__echo.info(self.__format_line(line))", "subprocess.CalledProcessError(self.rc, ''.join(self._commands), self.stderr) # self.dump_exception() def read_output(self, pipe, q): for", "nothing in the buffer (default: false) \"\"\" if self.__stdout or", "return bool(self._exception) def print_stdout(self, always_print=False): \"\"\" Prints the stdout to", "def stderr(self): \"\"\" Converts stderr string to a list. \"\"\"", "stdout to console - if there is any stdout, otherwise", "= None self._halt_on_nonzero=halt_on_nonzero if process and streaming: self.is_complete = False", "self.rc == 0 @property def is_failure(self): \"\"\" Returns if the", "= exception self.__echo = Echo() self._streaming = streaming self.rc =", "commands, context, streaming=False, exception=None, halt_on_nonzero=False): super(Result, self).__init__() self._process = process", "from threading import Thread class Result(Base): \"\"\" Class that encompasses", "except subprocess.CalledProcessError as e: self._exception = e self.__echo.critical(\"Unable to run", "self.__stdout.empty(): try: line = self.__stdout.get_nowait() stdout.append(line) except: pass else: stdout", "the stderr, even if there is nothing in the buffer", "self._process = process self._commands = commands self._context = context self._exception", "to debug this exception.\") self.__echo.warn(\"The following is the context used", "always_print: self.__echo.critical(\"--{ STDERR }---\" + \"-\" * 100) self.__format_lines_error(self.stderr) self.__echo.critical(\"---------------\"", "= process.returncode except: pass self.__stdout = stdout.strip().splitlines() if stdout else", "= 
Thread(target=self.write_input) self._wait_t = Thread(target=self.wait_on_process) for t in (self._stdout_t, self._stderr_t,", "the buffer (default: false) \"\"\" if self.__stdout or always_print: self.__echo.info(\"---------------\"", "to run '%s'\" % self._commands) # traceback self.print_traceback() # standard", "self.rc != 0: self.dump_exception() sys.exit() def dump_exception(self): if not self._exception:", "there is nothing in the buffer (default: false) \"\"\" if", "string to a list. \"\"\" if self._streaming: stderr = []", "t in (self._stdout_t, self._stderr_t, self._stdin_t, self._wait_t): t.daemon = True t.start()", "traceback string to a list. \"\"\" if self._exception: return traceback.format_exc().split(\"\\n\")", "stdout, even if there is nothing in the buffer (default:", "if self._halt_on_nonzero and self.rc != 0: print(self.stderr) raise subprocess.CalledProcessError(self.rc, ''.join(self._commands),", "def is_failure(self): \"\"\" Returns if the result of the command", "* 100) self.__format_lines_error(self.stderr) self.__echo.critical(\"---------------\" + \"-\" * 100) def print_traceback(self,", "self.__stdout = Queue() self.__stderr = Queue() self.__stdin = Queue() self._stdout_t", "def stdin(self, line): \"\"\" Sends input to stdin. 
\"\"\" if", "__init__(self, process, commands, context, streaming=False, exception=None, halt_on_nonzero=False): super(Result, self).__init__() self._process", ":param always_print: print the stdout, even if there is nothing", "\"\\n\") def wait_on_process(self): self.rc = self._process.wait() self.__stdin.put(None) self.is_complete = True", "Echo from threading import Thread class Result(Base): \"\"\" Class that", "self._stdout_t = Thread(target=self.read_output, args=(process.stdout, self.__stdout)) self._stderr_t = Thread(target=self.read_output, args=(process.stderr, self.__stderr))", "streaming: self.is_complete = False self.__stdout = Queue() self.__stderr = Queue()", "self.__stderr or always_print: self.__echo.critical(\"--{ STDERR }---\" + \"-\" * 100)", "\"\"\" Converts stdout string to a list. \"\"\" if self._streaming:", "to console - if there is any traceback, otherwise does", "is any stdout, otherwise does nothing. :param always_print: print the", "= commands self._context = context self._exception = exception self.__echo =", "any stdout, otherwise does nothing. 
:param always_print: print the stderr,", "bool(self._exception) def print_stdout(self, always_print=False): \"\"\" Prints the stdout to console", "print the stderr, even if there is nothing in the", "is_failure(self): \"\"\" Returns if the result of the command was", "Echo() self._streaming = streaming self.rc = None self._halt_on_nonzero=halt_on_nonzero if process", "read_output(self, pipe, q): for line in iter(pipe.readline, b''): if line:", "in the buffer (default: false) \"\"\" if self.__stdout or always_print:", "print_traceback(self, always_print=False): \"\"\" Prints the traceback to console - if", "subprocess.CalledProcessError as e: self._exception = e self.__echo.critical(\"Unable to run '%s'\"", "if self._context and len(self._context) > 0: self.__echo.warn(\"\\t - %s: %s\"", "(self._stdout_t, self._stderr_t, self._stdin_t, self._wait_t): t.daemon = True t.start() else: self.is_complete", "%s' % msg def __format_lines_error(self, lines): for line in lines:", "self._context and len(self._context) > 0: self.__echo.warn(\"\\t - %s: %s\" %", "'%s'\" % self._commands) # traceback self.print_traceback() # standard out self.print_stdout()", "the context used to run:\") echo_debug_info('cwd') echo_debug_info('sudo') echo_debug_info('user') echo_debug_info('hostname') echo_debug_info('env')", "raise subprocess.CalledProcessError(self.rc, ''.join(self._commands), self.stderr) # self.dump_exception() def read_output(self, pipe, q):", "= Thread(target=self.wait_on_process) for t in (self._stdout_t, self._stderr_t, self._stdin_t, self._wait_t): t.daemon", "None): if line.endswith(\"\\n\"): self._process.stdin.write(line) else: self._process.stdin.write(line + \"\\n\") def wait_on_process(self):", "the result of a POpen command. \"\"\" def __init__(self, process,", "\"\"\" if self._streaming: stderr = [] while not self.__stderr.empty(): try:", "POpen command. 
\"\"\" def __init__(self, process, commands, context, streaming=False, exception=None,", "self._process.wait() self.__stdin.put(None) self.is_complete = True for t in (self._stdout_t, self._stderr_t,", "not self.__stderr.empty(): try: line = self.__stderr.get_nowait() stderr.append(line) except: pass else:", "is_success(self): \"\"\" Returns if the result of the command was", "always_print=False): \"\"\" Prints the stdout to console - if there", "queue import Queue from sultan.core import Base from sultan.echo import", "self.__stderr.get_nowait() stderr.append(line) except: pass else: stderr = self.__stderr return stderr", "result of the command was a success. True for success,", "import time import traceback from queue import Queue from sultan.core", "additional information that can be used to debug this exception.\")", "threading import Thread class Result(Base): \"\"\" Class that encompasses the", "100) self.__format_lines_info(self.stdout) self.__echo.info(\"---------------\" + \"-\" * 100) def print_stderr(self, always_print=False):", "Prints the traceback to console - if there is any", "self._streaming = streaming self.rc = None self._halt_on_nonzero=halt_on_nonzero if process and", "\"\"\" Class that encompasses the result of a POpen command.", "in (self._stdout_t, self._stderr_t, self._stdin_t, self._wait_t): t.daemon = True t.start() else:", "Thread(target=self.write_input) self._wait_t = Thread(target=self.wait_on_process) for t in (self._stdout_t, self._stderr_t, self._stdin_t,", "and streaming: self.is_complete = False self.__stdout = Queue() self.__stderr =", "self.__stdout or always_print: self.__echo.info(\"---------------\" + \"-\" * 100) self.__format_lines_info(self.stdout) self.__echo.info(\"---------------\"", "buffer (default: false) \"\"\" if self.__stdout or always_print: self.__echo.info(\"---------------\" +", "iter(self.__stdin.get, None): if line.endswith(\"\\n\"): self._process.stdin.write(line) else: self._process.stdin.write(line + 
\"\\n\") def", "self.__echo.warn(\"\\t - %s: %s\" % (key, self._context[0].get(key, 'N/A'))) self.__echo.warn(\"The following", "a success. True for success, False for failure. \"\"\" return", "elif self.is_complete: break else: time.sleep(0.1) pipe.close() def write_input(self): for line", "return traceback.format_exc().split(\"\\n\") else: return [] @property def is_success(self): \"\"\" Returns", "self._exception def __display_exception_debug_information(self): def echo_debug_info(key): if self._context and len(self._context) >", "if not self._exception: try: raise subprocess.CalledProcessError(self.rc, ''.join(self._commands), self.stderr) except subprocess.CalledProcessError", "= process self._commands = commands self._context = context self._exception =", "any stdout, otherwise does nothing. :param always_print: print the stdout,", "sys.exit() def dump_exception(self): if not self._exception: try: raise subprocess.CalledProcessError(self.rc, ''.join(self._commands),", "in the buffer (default: false) \"\"\" if self._exception or always_print:", "does nothing. 
:param always_print: print the traceback, even if there", "def is_success(self): \"\"\" Returns if the result of the command", "standard out self.print_stdout() # standard error self.print_stderr() # print debug", "self.__echo = Echo() self._streaming = streaming self.rc = None self._halt_on_nonzero=halt_on_nonzero", "self.print_stdout() # standard error self.print_stderr() # print debug information self.__display_exception_debug_information()", "in iter(self.__stdin.get, None): if line.endswith(\"\\n\"): self._process.stdin.write(line) else: self._process.stdin.write(line + \"\\n\")", "if self._streaming: stdout = [] while not self.__stdout.empty(): try: line", "Returns if the result of the command was a success.", "import Queue from sultan.core import Base from sultan.echo import Echo", "stdout, stderr = None, None try: self.rc = process.returncode except:", "lines): for line in lines: self.__echo.critical(self.__format_line(line)) def __format_lines_info(self, lines): for", "to a list. \"\"\" if self._exception: return traceback.format_exc().split(\"\\n\") else: return", "standard error self.print_stderr() # print debug information self.__display_exception_debug_information() if self._halt_on_nonzero:", "self.__echo.warn(\"The following are additional information that can be used to", "except: pass else: stdout = self.__stdout return stdout @property def", "def __format_line(self, msg): return '| %s' % msg def __format_lines_error(self,", "- if there is any stdout, otherwise does nothing. :param", "+ \"-\" * 100) self.__format_lines_info(self.stdout) self.__echo.info(\"---------------\" + \"-\" * 100)", "line.endswith(\"\\n\"): self._process.stdin.write(line) else: self._process.stdin.write(line + \"\\n\") def wait_on_process(self): self.rc =", "pipe, q): for line in iter(pipe.readline, b''): if line: q.put(line.strip())", "the command was a success. 
True for success, False for", ":param always_print: print the stderr, even if there is nothing", "the command was a failure. True for failure, False for", "self.print_stderr() # print debug information self.__display_exception_debug_information() if self._halt_on_nonzero: raise self._exception", "* 100) def print_traceback(self, always_print=False): \"\"\" Prints the traceback to", "be used to debug this exception.\") self.__echo.warn(\"The following is the", "self.__stdout return stdout @property def stderr(self): \"\"\" Converts stderr string", "t.start() else: self.is_complete = True try: stdout, stderr = process.communicate()", "return [] @property def is_success(self): \"\"\" Returns if the result", "self._exception: return traceback.format_exc().split(\"\\n\") else: return [] @property def is_success(self): \"\"\"", "the traceback, even if there is nothing in the buffer", "self._process.stdin.write(line + \"\\n\") def wait_on_process(self): self.rc = self._process.wait() self.__stdin.put(None) self.is_complete", "stdin(self, line): \"\"\" Sends input to stdin. \"\"\" if self._streaming:", "stderr = [] while not self.__stderr.empty(): try: line = self.__stderr.get_nowait()", "self.__echo.warn(\"The following is the context used to run:\") echo_debug_info('cwd') echo_debug_info('sudo')", "@property def is_success(self): \"\"\" Returns if the result of the", "self.__stdout.get_nowait() stdout.append(line) except: pass else: stdout = self.__stdout return stdout", "if line.endswith(\"\\n\"): self._process.stdin.write(line) else: self._process.stdin.write(line + \"\\n\") def wait_on_process(self): self.rc", "\"-\" * 100) def print_stderr(self, always_print=False): \"\"\" Prints the stderr", "self._stdin_t = Thread(target=self.write_input) self._wait_t = Thread(target=self.wait_on_process) for t in (self._stdout_t,", "input to stdin. 
\"\"\" if self._streaming: self.__stdin.put(line) @property def traceback(self):", "100) self.__format_lines_error(self.stderr) self.__echo.critical(\"---------------\" + \"-\" * 100) def print_traceback(self, always_print=False):", "return self.is_complete and not self.rc == 0 @property def has_exception(self):", "class Result(Base): \"\"\" Class that encompasses the result of a", "process and streaming: self.is_complete = False self.__stdout = Queue() self.__stderr", "self.rc = process.returncode except: pass self.__stdout = stdout.strip().splitlines() if stdout", "\"-\" * 100) self.__format_lines_info(self.stdout) self.__echo.info(\"---------------\" + \"-\" * 100) def", "self._wait_t = Thread(target=self.wait_on_process) for t in (self._stdout_t, self._stderr_t, self._stdin_t, self._wait_t):", "self.stderr) # self.dump_exception() def read_output(self, pipe, q): for line in", "echo_debug_info('env') echo_debug_info('logging') echo_debug_info('executable') echo_debug_info('ssh_config') echo_debug_info('src') def __str__(self): return '\\n'.join(self.stdout) def", "in lines: self.__echo.info(self.__format_line(line)) @property def stdout(self): \"\"\" Converts stdout string", "else: time.sleep(0.1) pipe.close() def write_input(self): for line in iter(self.__stdin.get, None):", "if there is any stdout, otherwise does nothing. :param always_print:", "self.__echo.critical(self.__format_line(line)) def __format_lines_info(self, lines): for line in lines: self.__echo.info(self.__format_line(line)) @property", "Class that encompasses the result of a POpen command. \"\"\"", "always_print: print the traceback, even if there is nothing in", "was a success. True for success, False for failure. \"\"\"", "the result of the command was a success. True for", "for failure, False for succes. 
\"\"\" return self.is_complete and not", "self.__format_lines_error(self.stderr) self.__echo.critical(\"---------------\" + \"-\" * 100) def print_traceback(self, always_print=False): \"\"\"", "nothing. :param always_print: print the traceback, even if there is", "success, False for failure. \"\"\" return self.is_complete and self.rc ==", "failure, False for succes. \"\"\" return self.is_complete and not self.rc", "import sys import time import traceback from queue import Queue", "line): \"\"\" Sends input to stdin. \"\"\" if self._streaming: self.__stdin.put(line)", "if self._halt_on_nonzero: raise self._exception def __display_exception_debug_information(self): def echo_debug_info(key): if self._context", "raise self._exception def __display_exception_debug_information(self): def echo_debug_info(key): if self._context and len(self._context)", "% msg def __format_lines_error(self, lines): for line in lines: self.__echo.critical(self.__format_line(line))", "= True for t in (self._stdout_t, self._stderr_t, self._stdin_t): t.join() if", "\"\"\" Converts traceback string to a list. \"\"\" if self._exception:", "stdout.append(line) except: pass else: stdout = self.__stdout return stdout @property", "nothing. :param always_print: print the stderr, even if there is", "that encompasses the result of a POpen command. 
\"\"\" def", "try: stdout, stderr = process.communicate() except: stdout, stderr = None,", "try: raise subprocess.CalledProcessError(self.rc, ''.join(self._commands), self.stderr) except subprocess.CalledProcessError as e: self._exception", "process.returncode except: pass self.__stdout = stdout.strip().splitlines() if stdout else []", "Queue() self.__stdin = Queue() self._stdout_t = Thread(target=self.read_output, args=(process.stdout, self.__stdout)) self._stderr_t", "= stderr.strip().splitlines() if stderr else [] if self._halt_on_nonzero and self.rc", "return stderr def stdin(self, line): \"\"\" Sends input to stdin.", "self.is_complete and not self.rc == 0 @property def has_exception(self): '''", "= Queue() self._stdout_t = Thread(target=self.read_output, args=(process.stdout, self.__stdout)) self._stderr_t = Thread(target=self.read_output,", "def echo_debug_info(key): if self._context and len(self._context) > 0: self.__echo.warn(\"\\t -", "that can be used to debug this exception.\") self.__echo.warn(\"The following", "self._streaming: stdout = [] while not self.__stdout.empty(): try: line =", "if self._exception is not empty. 
''' return bool(self._exception) def print_stdout(self,", "and self.rc != 0: self.dump_exception() sys.exit() def dump_exception(self): if not", "always_print: print the stdout, even if there is nothing in", "stdout @property def stderr(self): \"\"\" Converts stderr string to a", "\"\"\" if self._exception or always_print: self.__echo.critical(\"--{ TRACEBACK }\" + \"-\"", "process self._commands = commands self._context = context self._exception = exception", "Queue from sultan.core import Base from sultan.echo import Echo from", "''.join(self._commands), self.stderr) except subprocess.CalledProcessError as e: self._exception = e self.__echo.critical(\"Unable", "echo_debug_info('cwd') echo_debug_info('sudo') echo_debug_info('user') echo_debug_info('hostname') echo_debug_info('env') echo_debug_info('logging') echo_debug_info('executable') echo_debug_info('ssh_config') echo_debug_info('src') def", "list. \"\"\" if self._streaming: stderr = [] while not self.__stderr.empty():", "self._commands) # traceback self.print_traceback() # standard out self.print_stdout() # standard", "(default: false) \"\"\" if self.__stderr or always_print: self.__echo.critical(\"--{ STDERR }---\"", "<gh_stars>0 import subprocess import sys import time import traceback from", "and not self.rc == 0 @property def has_exception(self): ''' Returns", "self.__echo.critical(\"--{ TRACEBACK }\" + \"-\" * 100) self.__format_lines_error(self.traceback) self.__echo.critical(\"---------------\" +", "self.dump_exception() sys.exit() def dump_exception(self): if not self._exception: try: raise subprocess.CalledProcessError(self.rc,", "e: self._exception = e self.__echo.critical(\"Unable to run '%s'\" % self._commands)", "Thread(target=self.read_output, args=(process.stdout, self.__stdout)) self._stderr_t = Thread(target=self.read_output, args=(process.stderr, self.__stderr)) self._stdin_t =", "echo_debug_info('user') echo_debug_info('hostname') echo_debug_info('env') echo_debug_info('logging') 
echo_debug_info('executable') echo_debug_info('ssh_config') echo_debug_info('src') def __str__(self): return", "even if there is nothing in the buffer (default: false)", "break else: time.sleep(0.1) pipe.close() def write_input(self): for line in iter(self.__stdin.get,", "command was a failure. True for failure, False for succes.", "self.is_complete = True try: stdout, stderr = process.communicate() except: stdout,", "Thread class Result(Base): \"\"\" Class that encompasses the result of", "self.__echo.info(self.__format_line(line)) @property def stdout(self): \"\"\" Converts stdout string to a", "self._stderr_t, self._stdin_t): t.join() if self._halt_on_nonzero and self.rc != 0: self.dump_exception()", "self._streaming: self.__stdin.put(line) @property def traceback(self): \"\"\" Converts traceback string to", "= context self._exception = exception self.__echo = Echo() self._streaming =", "__str__(self): return '\\n'.join(self.stdout) def __format_line(self, msg): return '| %s' %", "try: line = self.__stderr.get_nowait() stderr.append(line) except: pass else: stderr =", "= self.__stderr.get_nowait() stderr.append(line) except: pass else: stderr = self.__stderr return", "\"\"\" Converts stderr string to a list. \"\"\" if self._streaming:", "subprocess import sys import time import traceback from queue import", "100) def print_traceback(self, always_print=False): \"\"\" Prints the traceback to console", "return '| %s' % msg def __format_lines_error(self, lines): for line", "has_exception(self): ''' Returns True if self._exception is not empty. '''", "0 @property def has_exception(self): ''' Returns True if self._exception is", "self._halt_on_nonzero=halt_on_nonzero if process and streaming: self.is_complete = False self.__stdout =", "stdout = [] while not self.__stdout.empty(): try: line = self.__stdout.get_nowait()" ]
[ "is the port for the sqlalchemy connection?\", default=\"5432\", show_default=True) username", "== \"5\": # dbt # dbt_profile = click.prompt(msg_prompt_dbt_choose_profile) # log_message(msg_dbt_go_to_notebook,", "run_id)['filepath'])) cli_message( \"\"\" ========== Data Documentation ========== To generate documentation", "msg_prompt_datasource_name, default=default_data_source_name, show_default=True ) context.add_datasource(data_source_name, \"pandas\", base_directory=os.path.join(\"..\", path)) elif data_source_selection", "data start Jupyter and open the notebook that will walk", "data_source_name)) drivername = click.prompt(\"What is the driver for the sqlalchemy", "if data_source_name != None: cli_message( \"\"\" ========== Profiling ========== Would", "source (without sampling) and may be very time consuming. As", "results are saved here:\") for profiling_result in profiling_results: data_asset_name =", "run_id) cli_message(\"\\nDone.\") cli_message(\"\\nBuilding documentation...\") context.render_full_static_site() cli_message( \"\"\" To view the", "a short name. \"\"\" msg_sqlalchemy_config_connection = \"\"\" Great Expectations relies", "port for the sqlalchemy connection?\", default=\"5432\", show_default=True) username = click.prompt(\"What", "\"\"\" msg_spark_go_to_notebook = \"\"\" To create expectations for your data", "profile`.\" ) if data_source_selection == \"1\": # Pandas cli_message(msg_filesys_go_to_notebook) elif", "None of the above cli_message(msg_unknown_data_source) print(\"Skipping datasource configuration. 
You can", "results should be moved from great_expectations/uncommitted (ignored by git) to", "import __version__ as __version__ def add_datasource(context): cli_message( \"\"\" ========== Datasources", "web browser: <green>great_expectations/uncommitted/documentation/index.html</green> \"\"\") else: cli_message( \"Okay, skipping HTML documentation", "========== Data Documentation ========== To generate documentation from the data", "data_source_selection == \"1\": # Pandas cli_message(msg_filesys_go_to_notebook) elif data_source_selection == \"2\":", "dbt profile (from your ~/.dbt/profiles.yml file Great Expectations \\ #", "(from your ~/.dbt/profiles.yml file Great Expectations \\ # should use", "learn more: <blue>https://docs.greatexpectations.io/en/latest/guides/data_documentation.html?utm_source=cli&utm_medium=init&utm_campaign={0:s}</blue> \"\"\".format(__version__.replace(\".\", \"_\")) ) if click.confirm(\"Move the profiled", "# dbt # dbt_profile = click.prompt(msg_prompt_dbt_choose_profile) # log_message(msg_dbt_go_to_notebook, color=\"blue\") #", "file_okay=False, dir_okay=True, readable=True ), show_default=True ) if path.startswith(\"./\"): path =", "through configuring the database connection and next steps. \"\"\" msg_filesys_go_to_notebook", "\"2\": # SQL cli_message(msg_sqlalchemy_go_to_notebook) elif data_source_selection == \"3\": # Spark", "\"\"\" msg_sqlalchemy_config_connection = \"\"\" Great Expectations relies on sqlalchemy to", "path.startswith(\"./\"): path = path[2:] if path.endswith(\"/\"): basenamepath = path[:-1] default_data_source_name", "'{0:s}' to create candidate expectations and documentation? Please note: As", "print(\" {0:s}\".format(context.get_validation_location( data_asset_name, expectation_suite_name, run_id)['filepath'])) cli_message( \"\"\" ========== Data Documentation", "from the data you just profiled, the profiling results should", "looking for more types of data types to support. 
Please", "that this data does not contain sensitive information! To learn", "and open the notebook that will walk you through next", "about profiling, visit <blue>https://docs.greatexpectations.io/en/latest/guides/profiling.html?utm_source=cli&utm_medium=init&utm_campaign={1:s}</blue>. \"\"\".format(data_source_name, __version__.replace(\".\", \"_\")) ) if click.confirm(\"Proceed?\",", "if data_source_selection == \"1\": # pandas path = click.prompt( msg_prompt_filesys_enter_base_path,", "have it installed. Next, we will configure database credentials and", "default=default_data_source_name, show_default=True) context.add_datasource(data_source_name, \"spark\", base_directory=path) # if data_source_selection == \"5\":", "as __version__ def add_datasource(context): cli_message( \"\"\" ========== Datasources ========== See", "# Spark path = click.prompt( msg_prompt_filesys_enter_base_path, default='/data/', type=click.Path( exists=True, file_okay=False,", "this later by running `great_expectations profile`.\" ) if data_source_selection ==", ") if data_source_selection == \"1\": # Pandas cli_message(msg_filesys_go_to_notebook) elif data_source_selection", "and open a tutorial notebook: To launch with jupyter notebooks:", "context.move_validation_to_fixtures( data_asset_name, expectation_suite_name, run_id) cli_message(\"\\nDone.\") cli_message(\"\\nBuilding documentation...\") context.render_full_static_site() cli_message( \"\"\"", "= click.prompt(\"What is the driver for the sqlalchemy connection?\", default=\"postgres\",", "your new data source a short name. \"\"\" msg_sqlalchemy_config_connection =", "do this later by running `great_expectations profile`.\" ) if data_source_selection", "show_default=True) port = click.prompt(\"What is the port for the sqlalchemy", "# Spark cli_message(msg_spark_go_to_notebook) msg_prompt_choose_data_source = \"\"\" Configure a datasource: 1.", "of data types to support. 
Please create a GitHub issue", "data_source_selection = click.prompt( msg_prompt_choose_data_source, type=click.Choice([\"1\", \"2\", \"3\", \"4\"]), show_choices=False )", "file in a web browser: <green>great_expectations/uncommitted/documentation/index.html</green> \"\"\") else: cli_message( \"Okay,", "the great_expectations.yml file.\") return None if data_source_name != None: cli_message(", "Expectations relies on sqlalchemy to connect to relational databases. Please", "either absolute or relative to current directory.) \"\"\" msg_prompt_datasource_name =", "create expectations for your dbt models start Jupyter and open", "your CSV files start Jupyter and open notebook great_expectations/notebooks/using_great_expectations_with_pandas.ipynb -", "a web browser: <green>great_expectations/uncommitted/documentation/index.html</green> \"\"\") else: cli_message( \"Okay, skipping HTML", ") else: cli_message( \"Okay, skipping profiling for now. You can", "and next steps. \"\"\" msg_filesys_go_to_notebook = \"\"\" To create expectations", "\\ # should use to connect to the database #", "\"4\": # None of the above cli_message(msg_unknown_data_source) print(\"Skipping datasource configuration.", "\"\"\" ========== Profiling ========== Would you like to profile '{0:s}'", "\"\"\".format(data_source_name, __version__.replace(\".\", \"_\")) ) if click.confirm(\"Proceed?\", default=True ): profiling_results =", "we recommend starting with data smaller than 100MB. To learn", "and documentation? 
Please note: As of v0.7.0, profiling is still", "database } context.add_profile_credentials(data_source_name, **credentials) context.add_datasource( data_source_name, \"sqlalchemy\", profile=data_source_name) elif data_source_selection", "click from .util import cli_message from great_expectations.render import DefaultJinjaPageView from", "is the host for the sqlalchemy connection?\", default=\"localhost\", show_default=True) port", "click.prompt( msg_prompt_datasource_name, default=default_data_source_name, show_default=True) context.add_datasource(data_source_name, \"spark\", base_directory=path) # if data_source_selection", "great_expectations/fixtures. Before committing, please make sure that this data does", "will evaluate the entire data source (without sampling) and may", "Jupyter and open a tutorial notebook: To launch with jupyter", "connection?\", default=\"\", show_default=False, hide_input=True) database = click.prompt(\"What is the database", "configuring the database connection and next steps. \"\"\" msg_filesys_go_to_notebook =", "credentials = { \"drivername\": drivername, \"host\": host, \"port\": port, \"username\":", "beta feature in Great Expectations. 
This generation of profilers will", "os.path.basename(basenamepath) + \"__dir\" data_source_name = click.prompt( msg_prompt_datasource_name, default=default_data_source_name, show_default=True )", "path[:-1] else: basenamepath = path default_data_source_name = os.path.basename(basenamepath) + \"__dir\"", "password, \"database\": database } context.add_profile_credentials(data_source_name, **credentials) context.add_datasource( data_source_name, \"sqlalchemy\", profile=data_source_name)", "path.startswith(\"./\"): path = path[2:] if path.endswith(\"/\"): basenamepath = path[:-1] else:", "password = click.prompt(\"What is the password for the sqlalchemy connection?\",", "\"\"\" To view the generated data documentation, open this file", "Would you like to profile '{0:s}' to create candidate expectations", "and open notebook # great_expectations/notebooks/using_great_expectations_with_dbt.ipynb - # it will walk", "__version__ def add_datasource(context): cli_message( \"\"\" ========== Datasources ========== See <blue>https://docs.greatexpectations.io/en/latest/core_concepts/datasource.html?utm_source=cli&utm_medium=init&utm_campaign={0:s}</blue>", "cli_message(msg_sqlalchemy_go_to_notebook) elif data_source_selection == \"3\": # Spark cli_message(msg_spark_go_to_notebook) msg_prompt_choose_data_source =", "Great Expectations \\ # should use to connect to the", "port = click.prompt(\"What is the port for the sqlalchemy connection?\",", "click.confirm(\"Move the profiled data and build HTML documentation?\", default=True ):", "documentation, open this file in a web browser: <green>great_expectations/uncommitted/documentation/index.html</green> \"\"\")", "file Great Expectations \\ # should use to connect to", "datasource later by editing the great_expectations.yml file.\") return None if", "more information about datasources. 
\"\"\".format(__version__.replace(\".\", \"_\")) ) data_source_selection = click.prompt(", "dbt # dbt_profile = click.prompt(msg_prompt_dbt_choose_profile) # log_message(msg_dbt_go_to_notebook, color=\"blue\") # context.add_datasource(\"dbt\",", "\"\"\" To create expectations for your data start Jupyter and", "path default_data_source_name = os.path.basename(basenamepath) + \"__dir\" data_source_name = click.prompt( msg_prompt_datasource_name,", "create expectations for your CSV files start Jupyter and open", "path[2:] if path.endswith(\"/\"): basenamepath = path[:-1] default_data_source_name = os.path.basename(basenamepath) data_source_name", "cli_message(msg_sqlalchemy_config_connection.format( data_source_name)) drivername = click.prompt(\"What is the driver for the", "\"\"\".format(__version__.replace(\".\", \"_\")) ) data_source_selection = click.prompt( msg_prompt_choose_data_source, type=click.Choice([\"1\", \"2\", \"3\",", "DefaultJinjaPageView from great_expectations.version import __version__ as __version__ def add_datasource(context): cli_message(", "editing the great_expectations.yml file.\") return None if data_source_name != None:", "data_source_selection == \"2\": # sqlalchemy data_source_name = click.prompt( msg_prompt_datasource_name, default=\"mydb\",", "\"\"\" Great Expectations relies on sqlalchemy to connect to relational", "data types to support. Please create a GitHub issue here:", "support. Please create a GitHub issue here: https://github.com/great-expectations/great_expectations/issues/new In the", "import cli_message from great_expectations.render import DefaultJinjaPageView from great_expectations.version import __version__", "<green>jupyter notebook great_expectations/notebooks/create_expectations.ipynb</green> To launch with jupyter lab: <green>jupyter lab", "(ignored by git) to great_expectations/fixtures. 
Before committing, please make sure", "== \"4\": # None of the above cli_message(msg_unknown_data_source) print(\"Skipping datasource", "show_default=True) password = click.prompt(\"What is the password for the sqlalchemy", "lab: <green>jupyter lab great_expectations/notebooks/create_expectations.ipynb</green> \"\"\" msg_spark_go_to_notebook = \"\"\" To create", "~/.dbt/profiles.yml file Great Expectations \\ # should use to connect", "still a beta feature in Great Expectations. This generation of", "we will configure database credentials and store them in the", "of the dbt profile (from your ~/.dbt/profiles.yml file Great Expectations", "v0.7.0, profiling is still a beta feature in Great Expectations.", "click.prompt(\"What is the port for the sqlalchemy connection?\", default=\"5432\", show_default=True)", "if click.confirm(\"Move the profiled data and build HTML documentation?\", default=True", "contain sensitive information! To learn more: <blue>https://docs.greatexpectations.io/en/latest/guides/data_documentation.html?utm_source=cli&utm_medium=init&utm_campaign={0:s}</blue> \"\"\".format(__version__.replace(\".\", \"_\")) )", "elif data_source_selection == \"2\": # sqlalchemy data_source_name = click.prompt( msg_prompt_datasource_name,", "default=\"mydb\", show_default=True) cli_message(msg_sqlalchemy_config_connection.format( data_source_name)) drivername = click.prompt(\"What is the driver", "for the sqlalchemy connection?\", default=\"localhost\", show_default=True) port = click.prompt(\"What is", "the password for the sqlalchemy connection?\", default=\"\", show_default=False, hide_input=True) database", "connection?\", default=\"5432\", show_default=True) username = click.prompt(\"What is the username for", "base_directory=os.path.join(\"..\", path)) elif data_source_selection == \"2\": # sqlalchemy data_source_name =", "host, \"port\": port, \"username\": username, \"password\": password, \"database\": database }", "\"3\": # Spark 
cli_message(msg_spark_go_to_notebook) msg_prompt_choose_data_source = \"\"\" Configure a datasource:", "configuration \"\"\" # msg_prompt_dbt_choose_profile = \"\"\" # Please specify the", "# it will walk you through next steps. # \"\"\"", "the \"{0:s}\" section of this config file: great_expectations/uncommitted/credentials/profiles.yml: \"\"\" msg_unknown_data_source", "about datasources. \"\"\".format(__version__.replace(\".\", \"_\")) ) data_source_selection = click.prompt( msg_prompt_choose_data_source, type=click.Choice([\"1\",", "= click.prompt(\"What is the host for the sqlalchemy connection?\", default=\"localhost\",", "expectations for your dbt models start Jupyter and open notebook", "use to connect to the database # \"\"\" # msg_dbt_go_to_notebook", "connection?\", default=\"localhost\", show_default=True) port = click.prompt(\"What is the port for", "entire data source (without sampling) and may be very time", "CSV files start Jupyter and open notebook great_expectations/notebooks/using_great_expectations_with_pandas.ipynb - it", "with jupyter lab: <green>jupyter lab great_expectations/notebooks/create_expectations.ipynb</green> \"\"\" msg_sqlalchemy_go_to_notebook = \"\"\"", "sqlalchemy connection?\", default=\"postgres\", show_default=True) password = click.prompt(\"What is the password", "information! To learn more: <blue>https://docs.greatexpectations.io/en/latest/guides/data_documentation.html?utm_source=cli&utm_medium=init&utm_campaign={0:s}</blue> \"\"\".format(__version__.replace(\".\", \"_\")) ) if click.confirm(\"Move", "do on CSV files. 
To create expectations for your CSV", "for your dbt models start Jupyter and open notebook #", "\"drivername\": drivername, \"host\": host, \"port\": port, \"username\": username, \"password\": password,", "documentation...\") context.render_full_static_site() cli_message( \"\"\" To view the generated data documentation,", "hide_input=True) database = click.prompt(\"What is the database name for the", "sure that you have it installed. Next, we will configure", "expectations for your data start Jupyter and open the notebook", "data documentation, open this file in a web browser: <green>great_expectations/uncommitted/documentation/index.html</green>", "launch with jupyter lab: <green>jupyter lab great_expectations/notebooks/create_expectations.ipynb</green> \"\"\" msg_sqlalchemy_go_to_notebook =", "here: https://github.com/great-expectations/great_expectations/issues/new In the meantime you can see what Great", "= click.prompt( msg_prompt_filesys_enter_base_path, default='/data/', type=click.Path( exists=True, file_okay=False, dir_okay=True, readable=True ),", "== \"1\": # Pandas cli_message(msg_filesys_go_to_notebook) elif data_source_selection == \"2\": #", "build HTML documentation?\", default=True ): cli_message(\"\\nMoving files...\") for profiling_result in", "section of this config file: great_expectations/uncommitted/credentials/profiles.yml: \"\"\" msg_unknown_data_source = \"\"\"", "profiling_result[1]['meta']['expectation_suite_name'] run_id = profiling_result[1]['meta']['run_id'] print(\" {0:s}\".format(context.get_validation_location( data_asset_name, expectation_suite_name, run_id)['filepath'])) cli_message(", "= path[2:] if path.endswith(\"/\"): basenamepath = path[:-1] default_data_source_name = os.path.basename(basenamepath)", "the profiled data and build HTML documentation?\", default=True ): cli_message(\"\\nMoving", "= \"\"\" # To create expectations for your dbt models", "to relational databases. 
Please make sure that you have it", "for your data, start Jupyter and open a tutorial notebook:", "relative to current directory.) \"\"\" msg_prompt_datasource_name = \"\"\" Give your", "Relational database (SQL) 3. Spark DataFrame 4. Skip datasource configuration", "Great Expectations. This generation of profilers will evaluate the entire", "datasource configuration \"\"\" # msg_prompt_dbt_choose_profile = \"\"\" # Please specify", "meantime you can see what Great Expectations can do on", "in the \"{0:s}\" section of this config file: great_expectations/uncommitted/credentials/profiles.yml: \"\"\"", "Enter the path of the root directory where the data", "<filename>great_expectations/cli/datasource.py<gh_stars>0 import os import click from .util import cli_message from", "msg_prompt_choose_data_source, type=click.Choice([\"1\", \"2\", \"3\", \"4\"]), show_choices=False ) cli_message(data_source_selection) if data_source_selection", "SQL cli_message(msg_sqlalchemy_go_to_notebook) elif data_source_selection == \"3\": # Spark cli_message(msg_spark_go_to_notebook) msg_prompt_choose_data_source", "path.endswith(\"/\"): basenamepath = path[:-1] default_data_source_name = os.path.basename(basenamepath) data_source_name = click.prompt(", "with jupyter notebooks: <green>jupyter notebook great_expectations/notebooks/create_expectations.ipynb</green> To launch with jupyter", "\"1\": # pandas path = click.prompt( msg_prompt_filesys_enter_base_path, # default='/data/', type=click.Path(", "\"_\")) ) if click.confirm(\"Proceed?\", default=True ): profiling_results = context.profile_datasource( data_source_name,", "add a datasource later by editing the great_expectations.yml file.\") return", ") if click.confirm(\"Proceed?\", default=True ): profiling_results = context.profile_datasource( data_source_name, max_data_assets=20", "Please note: As of v0.7.0, profiling is still a beta", "show_default=True) context.add_datasource(data_source_name, \"spark\", base_directory=path) # if 
data_source_selection == \"5\": #", "profile=dbt_profile) if data_source_selection == \"4\": # None of the above", "Please specify the name of the dbt profile (from your", "to connect to relational databases. Please make sure that you", "sqlalchemy data_source_name = click.prompt( msg_prompt_datasource_name, default=\"mydb\", show_default=True) cli_message(msg_sqlalchemy_config_connection.format( data_source_name)) drivername", "path of the root directory where the data files are", "data_asset_name, expectation_suite_name, run_id)['filepath'])) cli_message( \"\"\" ========== Data Documentation ========== To", "default=\"postgres\", show_default=True) credentials = { \"drivername\": drivername, \"host\": host, \"port\":", "by editing the great_expectations.yml file.\") return None if data_source_name !=", "\"3\": # Spark path = click.prompt( msg_prompt_filesys_enter_base_path, default='/data/', type=click.Path( exists=True,", "# SQL cli_message(msg_sqlalchemy_go_to_notebook) elif data_source_selection == \"3\": # Spark cli_message(msg_spark_go_to_notebook)", "= click.prompt( msg_prompt_datasource_name, default=default_data_source_name, show_default=True ) context.add_datasource(data_source_name, \"pandas\", base_directory=os.path.join(\"..\", path))", "msg_prompt_datasource_name, default=default_data_source_name, show_default=True) context.add_datasource(data_source_name, \"spark\", base_directory=path) # if data_source_selection ==", "dbt models start Jupyter and open notebook # great_expectations/notebooks/using_great_expectations_with_dbt.ipynb -", "installed. Next, we will configure database credentials and store them", "click.prompt(\"What is the host for the sqlalchemy connection?\", default=\"localhost\", show_default=True)", "cli_message( \"Okay, skipping HTML documentation for now.`.\" ) else: cli_message(", "please make sure that this data does not contain sensitive", "to support. 
Please create a GitHub issue here: https://github.com/great-expectations/great_expectations/issues/new In", "be very time consuming. As a rule of thumb, we", "for now.`.\" ) else: cli_message( \"Okay, skipping profiling for now.", "<green>jupyter lab great_expectations/notebooks/create_expectations.ipynb</green> \"\"\" msg_spark_go_to_notebook = \"\"\" To create expectations", "(without sampling) and may be very time consuming. As a", "data_asset_name = profiling_result[1]['meta']['data_asset_name'] expectation_suite_name = profiling_result[1]['meta']['expectation_suite_name'] run_id = profiling_result[1]['meta']['run_id'] context.move_validation_to_fixtures(", "you through configuring the database connection and next steps. \"\"\"", "\"__dir\" data_source_name = click.prompt( msg_prompt_datasource_name, default=default_data_source_name, show_default=True ) context.add_datasource(data_source_name, \"pandas\",", "open notebook great_expectations/notebooks/using_great_expectations_with_pandas.ipynb - it will walk you through configuring", "), show_default=True ) if path.startswith(\"./\"): path = path[2:] if path.endswith(\"/\"):", "os import click from .util import cli_message from great_expectations.render import", "To create expectations for your data, start Jupyter and open", "= context.profile_datasource( data_source_name, max_data_assets=20 ) print(\"\\nDone.\\n\\nProfiling results are saved here:\")", "will walk you through next steps. 
# \"\"\" msg_prompt_filesys_enter_base_path =", "great_expectations/notebooks/create_expectations.ipynb</green> \"\"\" msg_spark_go_to_notebook = \"\"\" To create expectations for your", "= path[:-1] else: basenamepath = path default_data_source_name = os.path.basename(basenamepath) +", "None if data_source_name != None: cli_message( \"\"\" ========== Profiling ==========", "models start Jupyter and open notebook # great_expectations/notebooks/using_great_expectations_with_dbt.ipynb - #", "show_default=True) cli_message(msg_sqlalchemy_config_connection.format( data_source_name)) drivername = click.prompt(\"What is the driver for", "\"\"\" msg_sqlalchemy_go_to_notebook = \"\"\" To create expectations for your data", "To view the generated data documentation, open this file in", "\"sqlalchemy\", profile=data_source_name) elif data_source_selection == \"3\": # Spark path =", "\"5\": # dbt # dbt_profile = click.prompt(msg_prompt_dbt_choose_profile) # log_message(msg_dbt_go_to_notebook, color=\"blue\")", "else: basenamepath = path default_data_source_name = os.path.basename(basenamepath) + \"__dir\" data_source_name", "dir_okay=True, readable=True ), show_default=True ) if path.startswith(\"./\"): path = path[2:]", "= click.prompt( msg_prompt_datasource_name, default=\"mydb\", show_default=True) cli_message(msg_sqlalchemy_config_connection.format( data_source_name)) drivername = click.prompt(\"What", "documentation from the data you just profiled, the profiling results", "sensitive information! 
To learn more: <blue>https://docs.greatexpectations.io/en/latest/guides/data_documentation.html?utm_source=cli&utm_medium=init&utm_campaign={0:s}</blue> \"\"\".format(__version__.replace(\".\", \"_\")) ) if", "default=\"postgres\", show_default=True) host = click.prompt(\"What is the host for the", ") if click.confirm(\"Move the profiled data and build HTML documentation?\",", "run_id = profiling_result[1]['meta']['run_id'] context.move_validation_to_fixtures( data_asset_name, expectation_suite_name, run_id) cli_message(\"\\nDone.\") cli_message(\"\\nBuilding documentation...\")", "# Pandas cli_message(msg_filesys_go_to_notebook) elif data_source_selection == \"2\": # SQL cli_message(msg_sqlalchemy_go_to_notebook)", "steps. To launch with jupyter notebooks: <green>jupyter notebook great_expectations/notebooks/create_expectations.ipynb</green> To", "click.prompt(msg_prompt_dbt_choose_profile) # log_message(msg_dbt_go_to_notebook, color=\"blue\") # context.add_datasource(\"dbt\", \"dbt\", profile=dbt_profile) if data_source_selection", "click.confirm(\"Proceed?\", default=True ): profiling_results = context.profile_datasource( data_source_name, max_data_assets=20 ) print(\"\\nDone.\\n\\nProfiling", "new data source a short name. 
\"\"\" msg_sqlalchemy_config_connection = \"\"\"", "Spark path = click.prompt( msg_prompt_filesys_enter_base_path, default='/data/', type=click.Path( exists=True, file_okay=False, dir_okay=True,", "- it will walk you through configuring the database connection", "= \"\"\" To create expectations for your data, start Jupyter", "import click from .util import cli_message from great_expectations.render import DefaultJinjaPageView", "data you just profiled, the profiling results should be moved", "for the sqlalchemy connection?\", default=\"5432\", show_default=True) username = click.prompt(\"What is", "cli_message(\"\\nDone.\") cli_message(\"\\nBuilding documentation...\") context.render_full_static_site() cli_message( \"\"\" To view the generated", "configure database credentials and store them in the \"{0:s}\" section", "may be very time consuming. As a rule of thumb,", "msg_unknown_data_source = \"\"\" We are looking for more types of", "notebook great_expectations/notebooks/create_expectations.ipynb</green> To launch with jupyter lab: <green>jupyter lab great_expectations/notebooks/create_expectations.ipynb</green>", "\"\"\" msg_prompt_datasource_name = \"\"\" Give your new data source a", "= click.prompt(\"What is the username for the sqlalchemy connection?\", default=\"postgres\",", "and may be very time consuming. As a rule of", "data_source_name, max_data_assets=20 ) print(\"\\nDone.\\n\\nProfiling results are saved here:\") for profiling_result", "click.prompt(\"What is the password for the sqlalchemy connection?\", default=\"\", show_default=False,", "next steps. # \"\"\" msg_prompt_filesys_enter_base_path = \"\"\" Enter the path", "can always do this later by running `great_expectations profile`.\" )", "this config file: great_expectations/uncommitted/credentials/profiles.yml: \"\"\" msg_unknown_data_source = \"\"\" We are", "to current directory.) \"\"\" msg_prompt_datasource_name = \"\"\" Give your new", "walk you through next steps. 
# \"\"\" msg_prompt_filesys_enter_base_path = \"\"\"", "Before committing, please make sure that this data does not", "data source (without sampling) and may be very time consuming.", "make sure that you have it installed. Next, we will", "\"\"\".format(__version__.replace(\".\", \"_\")) ) if click.confirm(\"Move the profiled data and build", "you through next steps. To launch with jupyter notebooks: <green>jupyter", "a datasource later by editing the great_expectations.yml file.\") return None", "and build HTML documentation?\", default=True ): cli_message(\"\\nMoving files...\") for profiling_result", "<blue>https://docs.greatexpectations.io/en/latest/guides/profiling.html?utm_source=cli&utm_medium=init&utm_campaign={1:s}</blue>. \"\"\".format(data_source_name, __version__.replace(\".\", \"_\")) ) if click.confirm(\"Proceed?\", default=True ): profiling_results", ") context.add_datasource(data_source_name, \"pandas\", base_directory=os.path.join(\"..\", path)) elif data_source_selection == \"2\": #", "Give your new data source a short name. 
\"\"\" msg_sqlalchemy_config_connection", "if path.endswith(\"/\"): basenamepath = path[:-1] else: basenamepath = path default_data_source_name", "= path[2:] if path.endswith(\"/\"): basenamepath = path[:-1] else: basenamepath =", "a rule of thumb, we recommend starting with data smaller", "for your CSV files start Jupyter and open notebook great_expectations/notebooks/using_great_expectations_with_pandas.ipynb", "just profiled, the profiling results should be moved from great_expectations/uncommitted", "of the root directory where the data files are stored.", "them in the \"{0:s}\" section of this config file: great_expectations/uncommitted/credentials/profiles.yml:", "sqlalchemy connection?\", default=\"postgres\", show_default=True) credentials = { \"drivername\": drivername, \"host\":", "Jupyter and open notebook great_expectations/notebooks/using_great_expectations_with_pandas.ipynb - it will walk you", "datasource configuration. You can add a datasource later by editing", "that will walk you through next steps. To launch with", "default=True ): profiling_results = context.profile_datasource( data_source_name, max_data_assets=20 ) print(\"\\nDone.\\n\\nProfiling results", "the sqlalchemy connection?\", default=\"postgres\", show_default=True) password = click.prompt(\"What is the", "cli_message(msg_unknown_data_source) print(\"Skipping datasource configuration. You can add a datasource later", "great_expectations/uncommitted (ignored by git) to great_expectations/fixtures. Before committing, please make", "Please create a GitHub issue here: https://github.com/great-expectations/great_expectations/issues/new In the meantime", "a beta feature in Great Expectations. This generation of profilers", "database (SQL) 3. Spark DataFrame 4. 
Skip datasource configuration \"\"\"", "the username for the sqlalchemy connection?\", default=\"postgres\", show_default=True) password =", "cli_message(msg_spark_go_to_notebook) msg_prompt_choose_data_source = \"\"\" Configure a datasource: 1. Pandas DataFrame", "database name for the sqlalchemy connection?\", default=\"postgres\", show_default=True) credentials =", "is the driver for the sqlalchemy connection?\", default=\"postgres\", show_default=True) host", "profilers will evaluate the entire data source (without sampling) and", ") data_source_selection = click.prompt( msg_prompt_choose_data_source, type=click.Choice([\"1\", \"2\", \"3\", \"4\"]), show_choices=False", "rule of thumb, we recommend starting with data smaller than", "through next steps. To launch with jupyter notebooks: <green>jupyter notebook", "feature in Great Expectations. This generation of profilers will evaluate", "context.add_datasource(\"dbt\", \"dbt\", profile=dbt_profile) if data_source_selection == \"4\": # None of", "= profiling_result[1]['meta']['run_id'] print(\" {0:s}\".format(context.get_validation_location( data_asset_name, expectation_suite_name, run_id)['filepath'])) cli_message( \"\"\" ==========", "be moved from great_expectations/uncommitted (ignored by git) to great_expectations/fixtures. Before", "on sqlalchemy to connect to relational databases. Please make sure", "you have it installed. 
Next, we will configure database credentials", "with jupyter lab: <green>jupyter lab great_expectations/notebooks/create_expectations.ipynb</green> \"\"\" msg_spark_go_to_notebook = \"\"\"", "click.prompt(\"What is the driver for the sqlalchemy connection?\", default=\"postgres\", show_default=True)", "evaluate the entire data source (without sampling) and may be", "saved here:\") for profiling_result in profiling_results: data_asset_name = profiling_result[1]['meta']['data_asset_name'] expectation_suite_name", "the database name for the sqlalchemy connection?\", default=\"postgres\", show_default=True) credentials", "default=\"5432\", show_default=True) username = click.prompt(\"What is the username for the", "Expectations. This generation of profilers will evaluate the entire data", "you through next steps. # \"\"\" msg_prompt_filesys_enter_base_path = \"\"\" Enter", "databases. Please make sure that you have it installed. Next,", "later by editing the great_expectations.yml file.\") return None if data_source_name", "\"\"\" Configure a datasource: 1. Pandas DataFrame 2. Relational database", "where the data files are stored. (The path may be", "# sqlalchemy data_source_name = click.prompt( msg_prompt_datasource_name, default=\"mydb\", show_default=True) cli_message(msg_sqlalchemy_config_connection.format( data_source_name))", "data does not contain sensitive information! To learn more: <blue>https://docs.greatexpectations.io/en/latest/guides/data_documentation.html?utm_source=cli&utm_medium=init&utm_campaign={0:s}</blue>", "of profilers will evaluate the entire data source (without sampling)", "== \"1\": # pandas path = click.prompt( msg_prompt_filesys_enter_base_path, # default='/data/',", "== \"2\": # SQL cli_message(msg_sqlalchemy_go_to_notebook) elif data_source_selection == \"3\": #", "of thumb, we recommend starting with data smaller than 100MB.", "sampling) and may be very time consuming. 
As a rule", "later by running `great_expectations profile`.\" ) if data_source_selection == \"1\":", "= \"\"\" # Please specify the name of the dbt", "data_source_name = click.prompt( msg_prompt_datasource_name, default=default_data_source_name, show_default=True) context.add_datasource(data_source_name, \"spark\", base_directory=path) #", "= profiling_result[1]['meta']['data_asset_name'] expectation_suite_name = profiling_result[1]['meta']['expectation_suite_name'] run_id = profiling_result[1]['meta']['run_id'] context.move_validation_to_fixtures( data_asset_name,", "the sqlalchemy connection?\", default=\"postgres\", show_default=True) host = click.prompt(\"What is the", "HTML documentation?\", default=True ): cli_message(\"\\nMoving files...\") for profiling_result in profiling_results:", "= os.path.basename(basenamepath) + \"__dir\" data_source_name = click.prompt( msg_prompt_datasource_name, default=default_data_source_name, show_default=True", "files. To create expectations for your CSV files start Jupyter", "cli_message( \"\"\" ========== Profiling ========== Would you like to profile", "To create expectations for your dbt models start Jupyter and", "= click.prompt(\"What is the port for the sqlalchemy connection?\", default=\"5432\",", "========== Profiling ========== Would you like to profile '{0:s}' to", "steps. 
# \"\"\" msg_prompt_filesys_enter_base_path = \"\"\" Enter the path of", "\"password\": password, \"database\": database } context.add_profile_credentials(data_source_name, **credentials) context.add_datasource( data_source_name, \"sqlalchemy\",", "elif data_source_selection == \"3\": # Spark cli_message(msg_spark_go_to_notebook) msg_prompt_choose_data_source = \"\"\"", "the meantime you can see what Great Expectations can do", "elif data_source_selection == \"3\": # Spark path = click.prompt( msg_prompt_filesys_enter_base_path,", "========== Would you like to profile '{0:s}' to create candidate", "show_default=False, hide_input=True) database = click.prompt(\"What is the database name for", "\"database\": database } context.add_profile_credentials(data_source_name, **credentials) context.add_datasource( data_source_name, \"sqlalchemy\", profile=data_source_name) elif", "<green>jupyter lab great_expectations/notebooks/create_expectations.ipynb</green> \"\"\" msg_sqlalchemy_go_to_notebook = \"\"\" To create expectations", "name for the sqlalchemy connection?\", default=\"postgres\", show_default=True) credentials = {", "credentials and store them in the \"{0:s}\" section of this", "color=\"blue\") # context.add_datasource(\"dbt\", \"dbt\", profile=dbt_profile) if data_source_selection == \"4\": #", "elif data_source_selection == \"2\": # SQL cli_message(msg_sqlalchemy_go_to_notebook) elif data_source_selection ==", "msg_spark_go_to_notebook = \"\"\" To create expectations for your data start", "this file in a web browser: <green>great_expectations/uncommitted/documentation/index.html</green> \"\"\") else: cli_message(", "for more information about datasources. 
\"\"\".format(__version__.replace(\".\", \"_\")) ) data_source_selection =", "cli_message( \"\"\" ========== Data Documentation ========== To generate documentation from", "great_expectations/notebooks/create_expectations.ipynb</green> To launch with jupyter lab: <green>jupyter lab great_expectations/notebooks/create_expectations.ipynb</green> \"\"\"", "msg_prompt_dbt_choose_profile = \"\"\" # Please specify the name of the", "be either absolute or relative to current directory.) \"\"\" msg_prompt_datasource_name", "connect to the database # \"\"\" # msg_dbt_go_to_notebook = \"\"\"", "\"pandas\", base_directory=os.path.join(\"..\", path)) elif data_source_selection == \"2\": # sqlalchemy data_source_name", "= click.prompt(msg_prompt_dbt_choose_profile) # log_message(msg_dbt_go_to_notebook, color=\"blue\") # context.add_datasource(\"dbt\", \"dbt\", profile=dbt_profile) if", "__version__.replace(\".\", \"_\")) ) if click.confirm(\"Proceed?\", default=True ): profiling_results = context.profile_datasource(", "the data files are stored. 
(The path may be either", "expectations for your CSV files start Jupyter and open notebook", "show_default=True) username = click.prompt(\"What is the username for the sqlalchemy", "\"\"\" msg_filesys_go_to_notebook = \"\"\" To create expectations for your data,", "cli_message( \"\"\" ========== Datasources ========== See <blue>https://docs.greatexpectations.io/en/latest/core_concepts/datasource.html?utm_source=cli&utm_medium=init&utm_campaign={0:s}</blue> for more information", "create expectations for your data start Jupyter and open the", "like to profile '{0:s}' to create candidate expectations and documentation?", "msg_prompt_filesys_enter_base_path, # default='/data/', type=click.Path( exists=False, file_okay=False, dir_okay=True, readable=True ), show_default=True", "default=default_data_source_name, show_default=True ) context.add_datasource(data_source_name, \"pandas\", base_directory=os.path.join(\"..\", path)) elif data_source_selection ==", "type=click.Path( exists=False, file_okay=False, dir_okay=True, readable=True ), show_default=True ) if path.startswith(\"./\"):", "msg_sqlalchemy_config_connection = \"\"\" Great Expectations relies on sqlalchemy to connect", "should be moved from great_expectations/uncommitted (ignored by git) to great_expectations/fixtures.", "# msg_prompt_dbt_choose_profile = \"\"\" # Please specify the name of", "for more types of data types to support. Please create", "driver for the sqlalchemy connection?\", default=\"postgres\", show_default=True) host = click.prompt(\"What", "username for the sqlalchemy connection?\", default=\"postgres\", show_default=True) password = click.prompt(\"What", "data source a short name. \"\"\" msg_sqlalchemy_config_connection = \"\"\" Great", "(SQL) 3. Spark DataFrame 4. 
Skip datasource configuration \"\"\" #", "sqlalchemy connection?\", default=\"localhost\", show_default=True) port = click.prompt(\"What is the port", "data_source_selection == \"3\": # Spark cli_message(msg_spark_go_to_notebook) msg_prompt_choose_data_source = \"\"\" Configure", "drivername = click.prompt(\"What is the driver for the sqlalchemy connection?\",", "Spark DataFrame 4. Skip datasource configuration \"\"\" # msg_prompt_dbt_choose_profile =", "profiling_result[1]['meta']['data_asset_name'] expectation_suite_name = profiling_result[1]['meta']['expectation_suite_name'] run_id = profiling_result[1]['meta']['run_id'] print(\" {0:s}\".format(context.get_validation_location( data_asset_name,", "to great_expectations/fixtures. Before committing, please make sure that this data", "{ \"drivername\": drivername, \"host\": host, \"port\": port, \"username\": username, \"password\":", "<blue>https://docs.greatexpectations.io/en/latest/guides/data_documentation.html?utm_source=cli&utm_medium=init&utm_campaign={0:s}</blue> \"\"\".format(__version__.replace(\".\", \"_\")) ) if click.confirm(\"Move the profiled data and", "add_datasource(context): cli_message( \"\"\" ========== Datasources ========== See <blue>https://docs.greatexpectations.io/en/latest/core_concepts/datasource.html?utm_source=cli&utm_medium=init&utm_campaign={0:s}</blue> for more", "drivername, \"host\": host, \"port\": port, \"username\": username, \"password\": password, \"database\":", "connection?\", default=\"postgres\", show_default=True) host = click.prompt(\"What is the host for", "the sqlalchemy connection?\", default=\"localhost\", show_default=True) port = click.prompt(\"What is the", "3. Spark DataFrame 4. Skip datasource configuration \"\"\" # msg_prompt_dbt_choose_profile", "Great Expectations can do on CSV files. 
To create expectations", "Documentation ========== To generate documentation from the data you just", "running `great_expectations profile`.\" ) if data_source_selection == \"1\": # Pandas", "can see what Great Expectations can do on CSV files.", "can do on CSV files. To create expectations for your", "path = click.prompt( msg_prompt_filesys_enter_base_path, default='/data/', type=click.Path( exists=True, file_okay=False, dir_okay=True, readable=True", "the notebook that will walk you through next steps. To", "your data start Jupyter and open the notebook that will", "config file: great_expectations/uncommitted/credentials/profiles.yml: \"\"\" msg_unknown_data_source = \"\"\" We are looking", "the driver for the sqlalchemy connection?\", default=\"postgres\", show_default=True) host =", "if data_source_selection == \"5\": # dbt # dbt_profile = click.prompt(msg_prompt_dbt_choose_profile)", "data_source_selection == \"4\": # None of the above cli_message(msg_unknown_data_source) print(\"Skipping", "in Great Expectations. This generation of profilers will evaluate the", "is the password for the sqlalchemy connection?\", default=\"\", show_default=False, hide_input=True)", "show_default=True) credentials = { \"drivername\": drivername, \"host\": host, \"port\": port,", "\"\"\" # msg_dbt_go_to_notebook = \"\"\" # To create expectations for", "connection and next steps. \"\"\" msg_filesys_go_to_notebook = \"\"\" To create", "the port for the sqlalchemy connection?\", default=\"5432\", show_default=True) username =", "here:\") for profiling_result in profiling_results: data_asset_name = profiling_result[1]['meta']['data_asset_name'] expectation_suite_name =", "git) to great_expectations/fixtures. Before committing, please make sure that this", "default=\"\", show_default=False, hide_input=True) database = click.prompt(\"What is the database name", "time consuming. 
As a rule of thumb, we recommend starting", "from great_expectations.render import DefaultJinjaPageView from great_expectations.version import __version__ as __version__", "with data smaller than 100MB. To learn more about profiling,", "========== Datasources ========== See <blue>https://docs.greatexpectations.io/en/latest/core_concepts/datasource.html?utm_source=cli&utm_medium=init&utm_campaign={0:s}</blue> for more information about datasources.", "Expectations can do on CSV files. To create expectations for", "username = click.prompt(\"What is the username for the sqlalchemy connection?\",", "show_default=True ) context.add_datasource(data_source_name, \"pandas\", base_directory=os.path.join(\"..\", path)) elif data_source_selection == \"2\":", "To create expectations for your CSV files start Jupyter and", "open this file in a web browser: <green>great_expectations/uncommitted/documentation/index.html</green> \"\"\") else:", "the name of the dbt profile (from your ~/.dbt/profiles.yml file", "path)) elif data_source_selection == \"2\": # sqlalchemy data_source_name = click.prompt(", "os.path.basename(basenamepath) data_source_name = click.prompt( msg_prompt_datasource_name, default=default_data_source_name, show_default=True) context.add_datasource(data_source_name, \"spark\", base_directory=path)", "\"{0:s}\" section of this config file: great_expectations/uncommitted/credentials/profiles.yml: \"\"\" msg_unknown_data_source =", "data and build HTML documentation?\", default=True ): cli_message(\"\\nMoving files...\") for", "great_expectations/notebooks/using_great_expectations_with_dbt.ipynb - # it will walk you through next steps.", "cli_message(\"\\nMoving files...\") for profiling_result in profiling_results: data_asset_name = profiling_result[1]['meta']['data_asset_name'] expectation_suite_name", "profiling for now. 
You can always do this later by", "specify the name of the dbt profile (from your ~/.dbt/profiles.yml", "directory where the data files are stored. (The path may", "more about profiling, visit <blue>https://docs.greatexpectations.io/en/latest/guides/profiling.html?utm_source=cli&utm_medium=init&utm_campaign={1:s}</blue>. \"\"\".format(data_source_name, __version__.replace(\".\", \"_\")) ) if", "CSV files. To create expectations for your CSV files start", "datasource: 1. Pandas DataFrame 2. Relational database (SQL) 3. Spark", "cli_message from great_expectations.render import DefaultJinjaPageView from great_expectations.version import __version__ as", "make sure that this data does not contain sensitive information!", "100MB. To learn more about profiling, visit <blue>https://docs.greatexpectations.io/en/latest/guides/profiling.html?utm_source=cli&utm_medium=init&utm_campaign={1:s}</blue>. \"\"\".format(data_source_name, __version__.replace(\".\",", "= path default_data_source_name = os.path.basename(basenamepath) + \"__dir\" data_source_name = click.prompt(", "data_source_name = click.prompt( msg_prompt_datasource_name, default=\"mydb\", show_default=True) cli_message(msg_sqlalchemy_config_connection.format( data_source_name)) drivername =", "database credentials and store them in the \"{0:s}\" section of", "for the sqlalchemy connection?\", default=\"postgres\", show_default=True) password = click.prompt(\"What is", "# None of the above cli_message(msg_unknown_data_source) print(\"Skipping datasource configuration. 
You", "default_data_source_name = os.path.basename(basenamepath) data_source_name = click.prompt( msg_prompt_datasource_name, default=default_data_source_name, show_default=True) context.add_datasource(data_source_name,", "Jupyter and open the notebook that will walk you through", "profile (from your ~/.dbt/profiles.yml file Great Expectations \\ # should", "\"3\", \"4\"]), show_choices=False ) cli_message(data_source_selection) if data_source_selection == \"1\": #", "database = click.prompt(\"What is the database name for the sqlalchemy", "port, \"username\": username, \"password\": password, \"database\": database } context.add_profile_credentials(data_source_name, **credentials)", "cli_message(data_source_selection) if data_source_selection == \"1\": # pandas path = click.prompt(", "great_expectations/notebooks/using_great_expectations_with_pandas.ipynb - it will walk you through configuring the database", "path[:-1] default_data_source_name = os.path.basename(basenamepath) data_source_name = click.prompt( msg_prompt_datasource_name, default=default_data_source_name, show_default=True)", "expectation_suite_name = profiling_result[1]['meta']['expectation_suite_name'] run_id = profiling_result[1]['meta']['run_id'] context.move_validation_to_fixtures( data_asset_name, expectation_suite_name, run_id)", "show_choices=False ) cli_message(data_source_selection) if data_source_selection == \"1\": # pandas path", "relies on sqlalchemy to connect to relational databases. Please make", "sqlalchemy connection?\", default=\"5432\", show_default=True) username = click.prompt(\"What is the username", "Configure a datasource: 1. Pandas DataFrame 2. Relational database (SQL)", "on CSV files. To create expectations for your CSV files", "notebook that will walk you through next steps. 
To launch", "def add_datasource(context): cli_message( \"\"\" ========== Datasources ========== See <blue>https://docs.greatexpectations.io/en/latest/core_concepts/datasource.html?utm_source=cli&utm_medium=init&utm_campaign={0:s}</blue> for", "store them in the \"{0:s}\" section of this config file:", "connection?\", default=\"postgres\", show_default=True) password = click.prompt(\"What is the password for", "in a web browser: <green>great_expectations/uncommitted/documentation/index.html</green> \"\"\") else: cli_message( \"Okay, skipping", "# \"\"\" msg_prompt_filesys_enter_base_path = \"\"\" Enter the path of the", "a GitHub issue here: https://github.com/great-expectations/great_expectations/issues/new In the meantime you can", "more types of data types to support. Please create a", "configuration. You can add a datasource later by editing the", "\"dbt\", profile=dbt_profile) if data_source_selection == \"4\": # None of the", "run_id = profiling_result[1]['meta']['run_id'] print(\" {0:s}\".format(context.get_validation_location( data_asset_name, expectation_suite_name, run_id)['filepath'])) cli_message( \"\"\"", "- # it will walk you through next steps. #", "= \"\"\" To create expectations for your data start Jupyter", "the above cli_message(msg_unknown_data_source) print(\"Skipping datasource configuration. You can add a", "if data_source_selection == \"1\": # Pandas cli_message(msg_filesys_go_to_notebook) elif data_source_selection ==", "msg_sqlalchemy_go_to_notebook = \"\"\" To create expectations for your data start", "context.add_datasource(data_source_name, \"pandas\", base_directory=os.path.join(\"..\", path)) elif data_source_selection == \"2\": # sqlalchemy", "if path.startswith(\"./\"): path = path[2:] if path.endswith(\"/\"): basenamepath = path[:-1]", "sqlalchemy to connect to relational databases. 
Please make sure that", "base_directory=path) # if data_source_selection == \"5\": # dbt # dbt_profile", "type=click.Choice([\"1\", \"2\", \"3\", \"4\"]), show_choices=False ) cli_message(data_source_selection) if data_source_selection ==", "is the database name for the sqlalchemy connection?\", default=\"postgres\", show_default=True)", "if path.endswith(\"/\"): basenamepath = path[:-1] default_data_source_name = os.path.basename(basenamepath) data_source_name =", "import os import click from .util import cli_message from great_expectations.render", "open a tutorial notebook: To launch with jupyter notebooks: <green>jupyter", "not contain sensitive information! To learn more: <blue>https://docs.greatexpectations.io/en/latest/guides/data_documentation.html?utm_source=cli&utm_medium=init&utm_campaign={0:s}</blue> \"\"\".format(__version__.replace(\".\", \"_\"))", "<green>great_expectations/uncommitted/documentation/index.html</green> \"\"\") else: cli_message( \"Okay, skipping HTML documentation for now.`.\"", "lab: <green>jupyter lab great_expectations/notebooks/create_expectations.ipynb</green> \"\"\" msg_sqlalchemy_go_to_notebook = \"\"\" To create", "profiling, visit <blue>https://docs.greatexpectations.io/en/latest/guides/profiling.html?utm_source=cli&utm_medium=init&utm_campaign={1:s}</blue>. 
\"\"\".format(data_source_name, __version__.replace(\".\", \"_\")) ) if click.confirm(\"Proceed?\", default=True", "Jupyter and open notebook # great_expectations/notebooks/using_great_expectations_with_dbt.ipynb - # it will", "data_source_name != None: cli_message( \"\"\" ========== Profiling ========== Would you", "your dbt models start Jupyter and open notebook # great_expectations/notebooks/using_great_expectations_with_dbt.ipynb", "will configure database credentials and store them in the \"{0:s}\"", "= profiling_result[1]['meta']['run_id'] context.move_validation_to_fixtures( data_asset_name, expectation_suite_name, run_id) cli_message(\"\\nDone.\") cli_message(\"\\nBuilding documentation...\") context.render_full_static_site()", "default_data_source_name = os.path.basename(basenamepath) + \"__dir\" data_source_name = click.prompt( msg_prompt_datasource_name, default=default_data_source_name,", "\"2\": # sqlalchemy data_source_name = click.prompt( msg_prompt_datasource_name, default=\"mydb\", show_default=True) cli_message(msg_sqlalchemy_config_connection.format(", "for the sqlalchemy connection?\", default=\"postgres\", show_default=True) credentials = { \"drivername\":", "**credentials) context.add_datasource( data_source_name, \"sqlalchemy\", profile=data_source_name) elif data_source_selection == \"3\": #", "database # \"\"\" # msg_dbt_go_to_notebook = \"\"\" # To create", "# \"\"\" # msg_dbt_go_to_notebook = \"\"\" # To create expectations", "data files are stored. (The path may be either absolute", "source a short name. \"\"\" msg_sqlalchemy_config_connection = \"\"\" Great Expectations", "2. Relational database (SQL) 3. Spark DataFrame 4. Skip datasource", "now.`.\" ) else: cli_message( \"Okay, skipping profiling for now. You", "Please make sure that you have it installed. 
Next, we", "files start Jupyter and open notebook great_expectations/notebooks/using_great_expectations_with_pandas.ipynb - it will", "\"_\")) ) data_source_selection = click.prompt( msg_prompt_choose_data_source, type=click.Choice([\"1\", \"2\", \"3\", \"4\"]),", "<blue>https://docs.greatexpectations.io/en/latest/core_concepts/datasource.html?utm_source=cli&utm_medium=init&utm_campaign={0:s}</blue> for more information about datasources. \"\"\".format(__version__.replace(\".\", \"_\")) ) data_source_selection", "if click.confirm(\"Proceed?\", default=True ): profiling_results = context.profile_datasource( data_source_name, max_data_assets=20 )", "does not contain sensitive information! To learn more: <blue>https://docs.greatexpectations.io/en/latest/guides/data_documentation.html?utm_source=cli&utm_medium=init&utm_campaign={0:s}</blue> \"\"\".format(__version__.replace(\".\",", "for now. You can always do this later by running", "): profiling_results = context.profile_datasource( data_source_name, max_data_assets=20 ) print(\"\\nDone.\\n\\nProfiling results are", "# dbt_profile = click.prompt(msg_prompt_dbt_choose_profile) # log_message(msg_dbt_go_to_notebook, color=\"blue\") # context.add_datasource(\"dbt\", \"dbt\",", "data_asset_name, expectation_suite_name, run_id) cli_message(\"\\nDone.\") cli_message(\"\\nBuilding documentation...\") context.render_full_static_site() cli_message( \"\"\" To", "consuming. 
As a rule of thumb, we recommend starting with", "# default='/data/', type=click.Path( exists=False, file_okay=False, dir_okay=True, readable=True ), show_default=True )", "tutorial notebook: To launch with jupyter notebooks: <green>jupyter notebook great_expectations/notebooks/create_expectations.ipynb</green>", "else: cli_message( \"Okay, skipping HTML documentation for now.`.\" ) else:", "# To create expectations for your dbt models start Jupyter", "click.prompt(\"What is the database name for the sqlalchemy connection?\", default=\"postgres\",", "notebook # great_expectations/notebooks/using_great_expectations_with_dbt.ipynb - # it will walk you through", "To learn more: <blue>https://docs.greatexpectations.io/en/latest/guides/data_documentation.html?utm_source=cli&utm_medium=init&utm_campaign={0:s}</blue> \"\"\".format(__version__.replace(\".\", \"_\")) ) if click.confirm(\"Move the", "note: As of v0.7.0, profiling is still a beta feature", "expectation_suite_name = profiling_result[1]['meta']['expectation_suite_name'] run_id = profiling_result[1]['meta']['run_id'] print(\" {0:s}\".format(context.get_validation_location( data_asset_name, expectation_suite_name,", "path = path[2:] if path.endswith(\"/\"): basenamepath = path[:-1] else: basenamepath", "view the generated data documentation, open this file in a", "click.prompt( msg_prompt_filesys_enter_base_path, default='/data/', type=click.Path( exists=True, file_okay=False, dir_okay=True, readable=True ), show_default=True", "you like to profile '{0:s}' to create candidate expectations and", "walk you through configuring the database connection and next steps.", "your ~/.dbt/profiles.yml file Great Expectations \\ # should use to", "DataFrame 2. Relational database (SQL) 3. Spark DataFrame 4. Skip", "= \"\"\" Great Expectations relies on sqlalchemy to connect to", "smaller than 100MB. 
To learn more about profiling, visit <blue>https://docs.greatexpectations.io/en/latest/guides/profiling.html?utm_source=cli&utm_medium=init&utm_campaign={1:s}</blue>.", "show_default=True ) if path.startswith(\"./\"): path = path[2:] if path.endswith(\"/\"): basenamepath", "data_source_name = click.prompt( msg_prompt_datasource_name, default=default_data_source_name, show_default=True ) context.add_datasource(data_source_name, \"pandas\", base_directory=os.path.join(\"..\",", "files...\") for profiling_result in profiling_results: data_asset_name = profiling_result[1]['meta']['data_asset_name'] expectation_suite_name =", "by running `great_expectations profile`.\" ) if data_source_selection == \"1\": #", "msg_filesys_go_to_notebook = \"\"\" To create expectations for your data, start", "4. Skip datasource configuration \"\"\" # msg_prompt_dbt_choose_profile = \"\"\" #", "by git) to great_expectations/fixtures. Before committing, please make sure that", "\"username\": username, \"password\": password, \"database\": database } context.add_profile_credentials(data_source_name, **credentials) context.add_datasource(", "= click.prompt( msg_prompt_choose_data_source, type=click.Choice([\"1\", \"2\", \"3\", \"4\"]), show_choices=False ) cli_message(data_source_selection)", "cli_message(msg_filesys_go_to_notebook) elif data_source_selection == \"2\": # SQL cli_message(msg_sqlalchemy_go_to_notebook) elif data_source_selection", "a datasource: 1. Pandas DataFrame 2. Relational database (SQL) 3.", "profiling is still a beta feature in Great Expectations. 
This", "generated data documentation, open this file in a web browser:", "show_default=True) host = click.prompt(\"What is the host for the sqlalchemy", "\"\"\" msg_prompt_filesys_enter_base_path = \"\"\" Enter the path of the root", "from .util import cli_message from great_expectations.render import DefaultJinjaPageView from great_expectations.version", "msg_dbt_go_to_notebook = \"\"\" # To create expectations for your dbt", "context.add_profile_credentials(data_source_name, **credentials) context.add_datasource( data_source_name, \"sqlalchemy\", profile=data_source_name) elif data_source_selection == \"3\":", "data_source_selection == \"1\": # pandas path = click.prompt( msg_prompt_filesys_enter_base_path, #", "profile '{0:s}' to create candidate expectations and documentation? Please note:", "profiled, the profiling results should be moved from great_expectations/uncommitted (ignored", "database connection and next steps. \"\"\" msg_filesys_go_to_notebook = \"\"\" To", "start Jupyter and open notebook great_expectations/notebooks/using_great_expectations_with_pandas.ipynb - it will walk", "walk you through next steps. To launch with jupyter notebooks:", "\"\"\" Give your new data source a short name. \"\"\"", "(The path may be either absolute or relative to current", "profiling_result[1]['meta']['data_asset_name'] expectation_suite_name = profiling_result[1]['meta']['expectation_suite_name'] run_id = profiling_result[1]['meta']['run_id'] context.move_validation_to_fixtures( data_asset_name, expectation_suite_name,", "data_source_selection == \"5\": # dbt # dbt_profile = click.prompt(msg_prompt_dbt_choose_profile) #", "directory.) \"\"\" msg_prompt_datasource_name = \"\"\" Give your new data source", "\"port\": port, \"username\": username, \"password\": password, \"database\": database } context.add_profile_credentials(data_source_name,", "to profile '{0:s}' to create candidate expectations and documentation? 
Please", "are saved here:\") for profiling_result in profiling_results: data_asset_name = profiling_result[1]['meta']['data_asset_name']", "stored. (The path may be either absolute or relative to", "DataFrame 4. Skip datasource configuration \"\"\" # msg_prompt_dbt_choose_profile = \"\"\"", "= click.prompt(\"What is the password for the sqlalchemy connection?\", default=\"\",", "data_source_selection == \"3\": # Spark path = click.prompt( msg_prompt_filesys_enter_base_path, default='/data/',", "= os.path.basename(basenamepath) data_source_name = click.prompt( msg_prompt_datasource_name, default=default_data_source_name, show_default=True) context.add_datasource(data_source_name, \"spark\",", "of v0.7.0, profiling is still a beta feature in Great", "jupyter notebooks: <green>jupyter notebook great_expectations/notebooks/create_expectations.ipynb</green> To launch with jupyter lab:", "next steps. To launch with jupyter notebooks: <green>jupyter notebook great_expectations/notebooks/create_expectations.ipynb</green>", "notebook great_expectations/notebooks/using_great_expectations_with_pandas.ipynb - it will walk you through configuring the", "default=True ): cli_message(\"\\nMoving files...\") for profiling_result in profiling_results: data_asset_name =", "basenamepath = path[:-1] default_data_source_name = os.path.basename(basenamepath) data_source_name = click.prompt( msg_prompt_datasource_name,", "To create expectations for your data start Jupyter and open", "import DefaultJinjaPageView from great_expectations.version import __version__ as __version__ def add_datasource(context):", "Expectations \\ # should use to connect to the database", "profiled data and build HTML documentation?\", default=True ): cli_message(\"\\nMoving files...\")", "connection?\", default=\"postgres\", show_default=True) credentials = { \"drivername\": drivername, \"host\": host,", "if data_source_selection == \"4\": # None of the above cli_message(msg_unknown_data_source)", "To learn more 
about profiling, visit <blue>https://docs.greatexpectations.io/en/latest/guides/profiling.html?utm_source=cli&utm_medium=init&utm_campaign={1:s}</blue>. \"\"\".format(data_source_name, __version__.replace(\".\", \"_\"))", "it will walk you through configuring the database connection and", "the generated data documentation, open this file in a web", "As a rule of thumb, we recommend starting with data", "great_expectations/notebooks/create_expectations.ipynb</green> \"\"\" msg_sqlalchemy_go_to_notebook = \"\"\" To create expectations for your", "default='/data/', type=click.Path( exists=True, file_okay=False, dir_okay=True, readable=True ), show_default=True ) if", "above cli_message(msg_unknown_data_source) print(\"Skipping datasource configuration. You can add a datasource", "documentation? Please note: As of v0.7.0, profiling is still a", "root directory where the data files are stored. (The path", "basenamepath = path[:-1] else: basenamepath = path default_data_source_name = os.path.basename(basenamepath)", "is the username for the sqlalchemy connection?\", default=\"postgres\", show_default=True) password", "Data Documentation ========== To generate documentation from the data you", "name of the dbt profile (from your ~/.dbt/profiles.yml file Great", "the sqlalchemy connection?\", default=\"postgres\", show_default=True) credentials = { \"drivername\": drivername,", "context.profile_datasource( data_source_name, max_data_assets=20 ) print(\"\\nDone.\\n\\nProfiling results are saved here:\") for", "msg_prompt_filesys_enter_base_path, default='/data/', type=click.Path( exists=True, file_okay=False, dir_okay=True, readable=True ), show_default=True )", "default='/data/', type=click.Path( exists=False, file_okay=False, dir_okay=True, readable=True ), show_default=True ) if", "\"\"\") else: cli_message( \"Okay, skipping HTML documentation for now.`.\" )", "path.endswith(\"/\"): basenamepath = path[:-1] else: basenamepath = path default_data_source_name =", 
"data_asset_name = profiling_result[1]['meta']['data_asset_name'] expectation_suite_name = profiling_result[1]['meta']['expectation_suite_name'] run_id = profiling_result[1]['meta']['run_id'] print(\"", "sure that this data does not contain sensitive information! To", "password for the sqlalchemy connection?\", default=\"\", show_default=False, hide_input=True) database =", ") cli_message(data_source_selection) if data_source_selection == \"1\": # pandas path =", "always do this later by running `great_expectations profile`.\" ) if", "print(\"Skipping datasource configuration. You can add a datasource later by", "= \"\"\" Enter the path of the root directory where", "print(\"\\nDone.\\n\\nProfiling results are saved here:\") for profiling_result in profiling_results: data_asset_name", "Spark cli_message(msg_spark_go_to_notebook) msg_prompt_choose_data_source = \"\"\" Configure a datasource: 1. Pandas", "\"\"\" msg_unknown_data_source = \"\"\" We are looking for more types", "profile=data_source_name) elif data_source_selection == \"3\": # Spark path = click.prompt(", "default=\"postgres\", show_default=True) password = click.prompt(\"What is the password for the", "== \"3\": # Spark path = click.prompt( msg_prompt_filesys_enter_base_path, default='/data/', type=click.Path(", "relational databases. Please make sure that you have it installed.", "that you have it installed. Next, we will configure database", "types of data types to support. Please create a GitHub", "In the meantime you can see what Great Expectations can", "Skip datasource configuration \"\"\" # msg_prompt_dbt_choose_profile = \"\"\" # Please", "open notebook # great_expectations/notebooks/using_great_expectations_with_dbt.ipynb - # it will walk you", "or relative to current directory.) 
\"\"\" msg_prompt_datasource_name = \"\"\" Give", "!= None: cli_message( \"\"\" ========== Profiling ========== Would you like", "= \"\"\" We are looking for more types of data", "# Please specify the name of the dbt profile (from", "To launch with jupyter lab: <green>jupyter lab great_expectations/notebooks/create_expectations.ipynb</green> \"\"\" msg_sqlalchemy_go_to_notebook", "path = path[2:] if path.endswith(\"/\"): basenamepath = path[:-1] default_data_source_name =", "context.render_full_static_site() cli_message( \"\"\" To view the generated data documentation, open", "= click.prompt( msg_prompt_datasource_name, default=default_data_source_name, show_default=True) context.add_datasource(data_source_name, \"spark\", base_directory=path) # if", "documentation for now.`.\" ) else: cli_message( \"Okay, skipping profiling for", "very time consuming. As a rule of thumb, we recommend", "HTML documentation for now.`.\" ) else: cli_message( \"Okay, skipping profiling", "You can always do this later by running `great_expectations profile`.\"", "for your data start Jupyter and open the notebook that", "profiling_result in profiling_results: data_asset_name = profiling_result[1]['meta']['data_asset_name'] expectation_suite_name = profiling_result[1]['meta']['expectation_suite_name'] run_id", "starting with data smaller than 100MB. To learn more about", "skipping HTML documentation for now.`.\" ) else: cli_message( \"Okay, skipping", "msg_prompt_filesys_enter_base_path = \"\"\" Enter the path of the root directory", "to the database # \"\"\" # msg_dbt_go_to_notebook = \"\"\" #", "moved from great_expectations/uncommitted (ignored by git) to great_expectations/fixtures. Before committing,", "than 100MB. To learn more about profiling, visit <blue>https://docs.greatexpectations.io/en/latest/guides/profiling.html?utm_source=cli&utm_medium=init&utm_campaign={1:s}</blue>. \"\"\".format(data_source_name,", "candidate expectations and documentation? 
Please note: As of v0.7.0, profiling", "msg_prompt_datasource_name = \"\"\" Give your new data source a short", "profiling_result[1]['meta']['run_id'] print(\" {0:s}\".format(context.get_validation_location( data_asset_name, expectation_suite_name, run_id)['filepath'])) cli_message( \"\"\" ========== Data", "expectations and documentation? Please note: As of v0.7.0, profiling is", "from great_expectations.version import __version__ as __version__ def add_datasource(context): cli_message( \"\"\"", "profiling_result[1]['meta']['expectation_suite_name'] run_id = profiling_result[1]['meta']['run_id'] context.move_validation_to_fixtures( data_asset_name, expectation_suite_name, run_id) cli_message(\"\\nDone.\") cli_message(\"\\nBuilding", "the database connection and next steps. \"\"\" msg_filesys_go_to_notebook = \"\"\"", "jupyter lab: <green>jupyter lab great_expectations/notebooks/create_expectations.ipynb</green> \"\"\" msg_spark_go_to_notebook = \"\"\" To", "name. \"\"\" msg_sqlalchemy_config_connection = \"\"\" Great Expectations relies on sqlalchemy", "To launch with jupyter lab: <green>jupyter lab great_expectations/notebooks/create_expectations.ipynb</green> \"\"\" msg_spark_go_to_notebook", ") print(\"\\nDone.\\n\\nProfiling results are saved here:\") for profiling_result in profiling_results:", "and open notebook great_expectations/notebooks/using_great_expectations_with_pandas.ipynb - it will walk you through", "see what Great Expectations can do on CSV files. To", "lab great_expectations/notebooks/create_expectations.ipynb</green> \"\"\" msg_sqlalchemy_go_to_notebook = \"\"\" To create expectations for", "it will walk you through next steps. # \"\"\" msg_prompt_filesys_enter_base_path", "from great_expectations/uncommitted (ignored by git) to great_expectations/fixtures. 
Before committing, please", "# context.add_datasource(\"dbt\", \"dbt\", profile=dbt_profile) if data_source_selection == \"4\": # None", "max_data_assets=20 ) print(\"\\nDone.\\n\\nProfiling results are saved here:\") for profiling_result in", "file.\") return None if data_source_name != None: cli_message( \"\"\" ==========", "the data you just profiled, the profiling results should be", "the sqlalchemy connection?\", default=\"5432\", show_default=True) username = click.prompt(\"What is the", "the host for the sqlalchemy connection?\", default=\"localhost\", show_default=True) port =", "\"Okay, skipping profiling for now. You can always do this", "are stored. (The path may be either absolute or relative", "# if data_source_selection == \"5\": # dbt # dbt_profile =", "open the notebook that will walk you through next steps.", "\"spark\", base_directory=path) # if data_source_selection == \"5\": # dbt #", "else: cli_message( \"Okay, skipping profiling for now. You can always", "# should use to connect to the database # \"\"\"", "context.add_datasource(data_source_name, \"spark\", base_directory=path) # if data_source_selection == \"5\": # dbt", "the root directory where the data files are stored. (The", "): cli_message(\"\\nMoving files...\") for profiling_result in profiling_results: data_asset_name = profiling_result[1]['meta']['data_asset_name']", "pandas path = click.prompt( msg_prompt_filesys_enter_base_path, # default='/data/', type=click.Path( exists=False, file_okay=False,", "to create candidate expectations and documentation? Please note: As of", "\"\"\" ========== Data Documentation ========== To generate documentation from the", "create candidate expectations and documentation? Please note: As of v0.7.0,", "= \"\"\" Configure a datasource: 1. Pandas DataFrame 2. 
Relational", "== \"2\": # sqlalchemy data_source_name = click.prompt( msg_prompt_datasource_name, default=\"mydb\", show_default=True)", "\"Okay, skipping HTML documentation for now.`.\" ) else: cli_message( \"Okay,", "GitHub issue here: https://github.com/great-expectations/great_expectations/issues/new In the meantime you can see", "path[2:] if path.endswith(\"/\"): basenamepath = path[:-1] else: basenamepath = path", "context.add_datasource( data_source_name, \"sqlalchemy\", profile=data_source_name) elif data_source_selection == \"3\": # Spark", "# log_message(msg_dbt_go_to_notebook, color=\"blue\") # context.add_datasource(\"dbt\", \"dbt\", profile=dbt_profile) if data_source_selection ==", "default=\"localhost\", show_default=True) port = click.prompt(\"What is the port for the", "click.prompt(\"What is the username for the sqlalchemy connection?\", default=\"postgres\", show_default=True)", "great_expectations/uncommitted/credentials/profiles.yml: \"\"\" msg_unknown_data_source = \"\"\" We are looking for more", "== \"3\": # Spark cli_message(msg_spark_go_to_notebook) msg_prompt_choose_data_source = \"\"\" Configure a", "notebook: To launch with jupyter notebooks: <green>jupyter notebook great_expectations/notebooks/create_expectations.ipynb</green> To", "this data does not contain sensitive information! 
To learn more:", "file: great_expectations/uncommitted/credentials/profiles.yml: \"\"\" msg_unknown_data_source = \"\"\" We are looking for", "cli_message( \"\"\" To view the generated data documentation, open this", "a tutorial notebook: To launch with jupyter notebooks: <green>jupyter notebook", "expectation_suite_name, run_id)['filepath'])) cli_message( \"\"\" ========== Data Documentation ========== To generate", "data_source_name, \"sqlalchemy\", profile=data_source_name) elif data_source_selection == \"3\": # Spark path", "launch with jupyter notebooks: <green>jupyter notebook great_expectations/notebooks/create_expectations.ipynb</green> To launch with", "click.prompt( msg_prompt_choose_data_source, type=click.Choice([\"1\", \"2\", \"3\", \"4\"]), show_choices=False ) cli_message(data_source_selection) if", "visit <blue>https://docs.greatexpectations.io/en/latest/guides/profiling.html?utm_source=cli&utm_medium=init&utm_campaign={1:s}</blue>. \"\"\".format(data_source_name, __version__.replace(\".\", \"_\")) ) if click.confirm(\"Proceed?\", default=True ):", "the sqlalchemy connection?\", default=\"\", show_default=False, hide_input=True) database = click.prompt(\"What is", "log_message(msg_dbt_go_to_notebook, color=\"blue\") # context.add_datasource(\"dbt\", \"dbt\", profile=dbt_profile) if data_source_selection == \"4\":", "path may be either absolute or relative to current directory.)", "will walk you through configuring the database connection and next", "steps. 
\"\"\" msg_filesys_go_to_notebook = \"\"\" To create expectations for your", "the profiling results should be moved from great_expectations/uncommitted (ignored by", "Great Expectations relies on sqlalchemy to connect to relational databases.", "= profiling_result[1]['meta']['expectation_suite_name'] run_id = profiling_result[1]['meta']['run_id'] context.move_validation_to_fixtures( data_asset_name, expectation_suite_name, run_id) cli_message(\"\\nDone.\")", "may be either absolute or relative to current directory.) \"\"\"", "https://github.com/great-expectations/great_expectations/issues/new In the meantime you can see what Great Expectations", "short name. \"\"\" msg_sqlalchemy_config_connection = \"\"\" Great Expectations relies on", "jupyter lab: <green>jupyter lab great_expectations/notebooks/create_expectations.ipynb</green> \"\"\" msg_sqlalchemy_go_to_notebook = \"\"\" To", "cli_message(\"\\nBuilding documentation...\") context.render_full_static_site() cli_message( \"\"\" To view the generated data", "for the sqlalchemy connection?\", default=\"postgres\", show_default=True) host = click.prompt(\"What is", "= path[:-1] default_data_source_name = os.path.basename(basenamepath) data_source_name = click.prompt( msg_prompt_datasource_name, default=default_data_source_name,", "generation of profilers will evaluate the entire data source (without", "expectations for your data, start Jupyter and open a tutorial", "should use to connect to the database # \"\"\" #", "to connect to the database # \"\"\" # msg_dbt_go_to_notebook =", "= profiling_result[1]['meta']['data_asset_name'] expectation_suite_name = profiling_result[1]['meta']['expectation_suite_name'] run_id = profiling_result[1]['meta']['run_id'] print(\" {0:s}\".format(context.get_validation_location(", "the entire data source (without sampling) and may be very", "great_expectations.yml file.\") return None if data_source_name != None: cli_message( \"\"\"", "dbt_profile = 
click.prompt(msg_prompt_dbt_choose_profile) # log_message(msg_dbt_go_to_notebook, color=\"blue\") # context.add_datasource(\"dbt\", \"dbt\", profile=dbt_profile)", "data, start Jupyter and open a tutorial notebook: To launch", "and store them in the \"{0:s}\" section of this config", "None: cli_message( \"\"\" ========== Profiling ========== Would you like to", "We are looking for more types of data types to", "exists=False, file_okay=False, dir_okay=True, readable=True ), show_default=True ) if path.startswith(\"./\"): path", ") if path.startswith(\"./\"): path = path[2:] if path.endswith(\"/\"): basenamepath =", "This generation of profilers will evaluate the entire data source", "is still a beta feature in Great Expectations. This generation", "profiling_results = context.profile_datasource( data_source_name, max_data_assets=20 ) print(\"\\nDone.\\n\\nProfiling results are saved", "recommend starting with data smaller than 100MB. To learn more", "more: <blue>https://docs.greatexpectations.io/en/latest/guides/data_documentation.html?utm_source=cli&utm_medium=init&utm_campaign={0:s}</blue> \"\"\".format(__version__.replace(\".\", \"_\")) ) if click.confirm(\"Move the profiled data", "profiling_result[1]['meta']['run_id'] context.move_validation_to_fixtures( data_asset_name, expectation_suite_name, run_id) cli_message(\"\\nDone.\") cli_message(\"\\nBuilding documentation...\") context.render_full_static_site() cli_message(", "========== See <blue>https://docs.greatexpectations.io/en/latest/core_concepts/datasource.html?utm_source=cli&utm_medium=init&utm_campaign={0:s}</blue> for more information about datasources. \"\"\".format(__version__.replace(\".\", \"_\"))", "sqlalchemy connection?\", default=\"postgres\", show_default=True) host = click.prompt(\"What is the host", "Profiling ========== Would you like to profile '{0:s}' to create", "data smaller than 100MB. 
To learn more about profiling, visit", "path = click.prompt( msg_prompt_filesys_enter_base_path, # default='/data/', type=click.Path( exists=False, file_okay=False, dir_okay=True,", "learn more about profiling, visit <blue>https://docs.greatexpectations.io/en/latest/guides/profiling.html?utm_source=cli&utm_medium=init&utm_campaign={1:s}</blue>. \"\"\".format(data_source_name, __version__.replace(\".\", \"_\")) )", "msg_prompt_choose_data_source = \"\"\" Configure a datasource: 1. Pandas DataFrame 2.", "profiling results should be moved from great_expectations/uncommitted (ignored by git)", "+ \"__dir\" data_source_name = click.prompt( msg_prompt_datasource_name, default=default_data_source_name, show_default=True ) context.add_datasource(data_source_name,", "`great_expectations profile`.\" ) if data_source_selection == \"1\": # Pandas cli_message(msg_filesys_go_to_notebook)", "= click.prompt( msg_prompt_filesys_enter_base_path, # default='/data/', type=click.Path( exists=False, file_okay=False, dir_okay=True, readable=True", "Pandas cli_message(msg_filesys_go_to_notebook) elif data_source_selection == \"2\": # SQL cli_message(msg_sqlalchemy_go_to_notebook) elif", "data_source_selection == \"2\": # SQL cli_message(msg_sqlalchemy_go_to_notebook) elif data_source_selection == \"3\":", "start Jupyter and open a tutorial notebook: To launch with", "\"2\", \"3\", \"4\"]), show_choices=False ) cli_message(data_source_selection) if data_source_selection == \"1\":", "you can see what Great Expectations can do on CSV", "next steps. 
\"\"\" msg_filesys_go_to_notebook = \"\"\" To create expectations for", ".util import cli_message from great_expectations.render import DefaultJinjaPageView from great_expectations.version import", "host = click.prompt(\"What is the host for the sqlalchemy connection?\",", "msg_prompt_datasource_name, default=\"mydb\", show_default=True) cli_message(msg_sqlalchemy_config_connection.format( data_source_name)) drivername = click.prompt(\"What is the", "notebooks: <green>jupyter notebook great_expectations/notebooks/create_expectations.ipynb</green> To launch with jupyter lab: <green>jupyter", "for the sqlalchemy connection?\", default=\"\", show_default=False, hide_input=True) database = click.prompt(\"What", "{0:s}\".format(context.get_validation_location( data_asset_name, expectation_suite_name, run_id)['filepath'])) cli_message( \"\"\" ========== Data Documentation ==========", "\"host\": host, \"port\": port, \"username\": username, \"password\": password, \"database\": database", "You can add a datasource later by editing the great_expectations.yml", "To launch with jupyter notebooks: <green>jupyter notebook great_expectations/notebooks/create_expectations.ipynb</green> To launch", "========== To generate documentation from the data you just profiled,", "exists=True, file_okay=False, dir_okay=True, readable=True ), show_default=True ) if path.startswith(\"./\"): path", "lab great_expectations/notebooks/create_expectations.ipynb</green> \"\"\" msg_spark_go_to_notebook = \"\"\" To create expectations for", "your data, start Jupyter and open a tutorial notebook: To", "username, \"password\": password, \"database\": database } context.add_profile_credentials(data_source_name, **credentials) context.add_datasource( data_source_name,", "launch with jupyter lab: <green>jupyter lab great_expectations/notebooks/create_expectations.ipynb</green> \"\"\" msg_spark_go_to_notebook =", "\"\"\" Enter the path of the root directory where the", "= \"\"\" Give your new data source 
a short name.", "will walk you through next steps. To launch with jupyter", "\"1\": # Pandas cli_message(msg_filesys_go_to_notebook) elif data_source_selection == \"2\": # SQL", "start Jupyter and open notebook # great_expectations/notebooks/using_great_expectations_with_dbt.ipynb - # it", "\"_\")) ) if click.confirm(\"Move the profiled data and build HTML", "\"\"\" # msg_prompt_dbt_choose_profile = \"\"\" # Please specify the name", "current directory.) \"\"\" msg_prompt_datasource_name = \"\"\" Give your new data", "cli_message( \"Okay, skipping profiling for now. You can always do", "what Great Expectations can do on CSV files. To create", "Next, we will configure database credentials and store them in", "expectation_suite_name, run_id) cli_message(\"\\nDone.\") cli_message(\"\\nBuilding documentation...\") context.render_full_static_site() cli_message( \"\"\" To view", "it installed. Next, we will configure database credentials and store", "thumb, we recommend starting with data smaller than 100MB. To", "= { \"drivername\": drivername, \"host\": host, \"port\": port, \"username\": username,", "\"\"\" To create expectations for your data, start Jupyter and", "1. Pandas DataFrame 2. Relational database (SQL) 3. Spark DataFrame", "skipping profiling for now. You can always do this later", "files are stored. 
(The path may be either absolute or", "generate documentation from the data you just profiled, the profiling", "browser: <green>great_expectations/uncommitted/documentation/index.html</green> \"\"\") else: cli_message( \"Okay, skipping HTML documentation for", "__version__ as __version__ def add_datasource(context): cli_message( \"\"\" ========== Datasources ==========", "the path of the root directory where the data files", "start Jupyter and open the notebook that will walk you", "profiling_results: data_asset_name = profiling_result[1]['meta']['data_asset_name'] expectation_suite_name = profiling_result[1]['meta']['expectation_suite_name'] run_id = profiling_result[1]['meta']['run_id']", "type=click.Path( exists=True, file_okay=False, dir_okay=True, readable=True ), show_default=True ) if path.startswith(\"./\"):", "\"\"\" # Please specify the name of the dbt profile", "\"\"\" # To create expectations for your dbt models start", "} context.add_profile_credentials(data_source_name, **credentials) context.add_datasource( data_source_name, \"sqlalchemy\", profile=data_source_name) elif data_source_selection ==", "the dbt profile (from your ~/.dbt/profiles.yml file Great Expectations \\", "create a GitHub issue here: https://github.com/great-expectations/great_expectations/issues/new In the meantime you", "Datasources ========== See <blue>https://docs.greatexpectations.io/en/latest/core_concepts/datasource.html?utm_source=cli&utm_medium=init&utm_campaign={0:s}</blue> for more information about datasources. 
\"\"\".format(__version__.replace(\".\",", "click.prompt( msg_prompt_filesys_enter_base_path, # default='/data/', type=click.Path( exists=False, file_okay=False, dir_okay=True, readable=True ),", "= profiling_result[1]['meta']['expectation_suite_name'] run_id = profiling_result[1]['meta']['run_id'] print(\" {0:s}\".format(context.get_validation_location( data_asset_name, expectation_suite_name, run_id)['filepath']))", "of the above cli_message(msg_unknown_data_source) print(\"Skipping datasource configuration. You can add", "committing, please make sure that this data does not contain", "# msg_dbt_go_to_notebook = \"\"\" # To create expectations for your", "connect to relational databases. Please make sure that you have", "click.prompt( msg_prompt_datasource_name, default=\"mydb\", show_default=True) cli_message(msg_sqlalchemy_config_connection.format( data_source_name)) drivername = click.prompt(\"What is", "great_expectations.render import DefaultJinjaPageView from great_expectations.version import __version__ as __version__ def", "\"4\"]), show_choices=False ) cli_message(data_source_selection) if data_source_selection == \"1\": # pandas", "basenamepath = path default_data_source_name = os.path.basename(basenamepath) + \"__dir\" data_source_name =", "can add a datasource later by editing the great_expectations.yml file.\")", "To generate documentation from the data you just profiled, the", "\"\"\" ========== Datasources ========== See <blue>https://docs.greatexpectations.io/en/latest/core_concepts/datasource.html?utm_source=cli&utm_medium=init&utm_campaign={0:s}</blue> for more information about", "are looking for more types of data types to support.", "datasources. \"\"\".format(__version__.replace(\".\", \"_\")) ) data_source_selection = click.prompt( msg_prompt_choose_data_source, type=click.Choice([\"1\", \"2\",", "you just profiled, the profiling results should be moved from", "information about datasources. 
\"\"\".format(__version__.replace(\".\", \"_\")) ) data_source_selection = click.prompt( msg_prompt_choose_data_source,", "create expectations for your data, start Jupyter and open a", "As of v0.7.0, profiling is still a beta feature in", "for profiling_result in profiling_results: data_asset_name = profiling_result[1]['meta']['data_asset_name'] expectation_suite_name = profiling_result[1]['meta']['expectation_suite_name']", "# pandas path = click.prompt( msg_prompt_filesys_enter_base_path, # default='/data/', type=click.Path( exists=False,", "readable=True ), show_default=True ) if path.startswith(\"./\"): path = path[2:] if", "\"\"\" We are looking for more types of data types", "through next steps. # \"\"\" msg_prompt_filesys_enter_base_path = \"\"\" Enter the", "issue here: https://github.com/great-expectations/great_expectations/issues/new In the meantime you can see what", "= click.prompt(\"What is the database name for the sqlalchemy connection?\",", "click.prompt( msg_prompt_datasource_name, default=default_data_source_name, show_default=True ) context.add_datasource(data_source_name, \"pandas\", base_directory=os.path.join(\"..\", path)) elif", "absolute or relative to current directory.) \"\"\" msg_prompt_datasource_name = \"\"\"", "types to support. Please create a GitHub issue here: https://github.com/great-expectations/great_expectations/issues/new", "See <blue>https://docs.greatexpectations.io/en/latest/core_concepts/datasource.html?utm_source=cli&utm_medium=init&utm_campaign={0:s}</blue> for more information about datasources. \"\"\".format(__version__.replace(\".\", \"_\")) )", "of this config file: great_expectations/uncommitted/credentials/profiles.yml: \"\"\" msg_unknown_data_source = \"\"\" We", "great_expectations.version import __version__ as __version__ def add_datasource(context): cli_message( \"\"\" ==========", "Pandas DataFrame 2. Relational database (SQL) 3. 
Spark DataFrame 4.", "return None if data_source_name != None: cli_message( \"\"\" ========== Profiling", "in profiling_results: data_asset_name = profiling_result[1]['meta']['data_asset_name'] expectation_suite_name = profiling_result[1]['meta']['expectation_suite_name'] run_id =", "host for the sqlalchemy connection?\", default=\"localhost\", show_default=True) port = click.prompt(\"What", "sqlalchemy connection?\", default=\"\", show_default=False, hide_input=True) database = click.prompt(\"What is the", "the database # \"\"\" # msg_dbt_go_to_notebook = \"\"\" # To", "documentation?\", default=True ): cli_message(\"\\nMoving files...\") for profiling_result in profiling_results: data_asset_name", "# great_expectations/notebooks/using_great_expectations_with_dbt.ipynb - # it will walk you through next", "now. You can always do this later by running `great_expectations" ]
[ "= None return result def prepare_request(self, url, params=None): \"\"\"Build requests", "between downloads for each domain self.delay = delay # timestamp", "timeout: Download timeout \"\"\" def __init__(self, delay=5, user_agent='awsl', proxies=None, num_retries=1,", "web content in text format or html.\"\"\" request = self.prepare_request(url,", "= None return response def text(self, url, params=None, encoding=None): \"\"\"Download", "= self.send_request(request, self.num_retries) if response: if encoding: response.encoding = encoding", "content in text format or html.\"\"\" request = self.prepare_request(url, params)", "params=None): \"\"\"Access the api and return the json object.\"\"\" request", "or caller to call api. Args: delay: Interval between downloads", "self.session.send(request, timeout=self.timeout) response.raise_for_status() except requests.exceptions.HTTPError as e: logging.warn('Download error: %s'", "bs4 import BeautifulSoup class Throttle(object): \"\"\"Throttle downloading by sleeping between", "proxies=None, num_retries=1, timeout=60, cache=None, auth=None): self.session = requests.Session() self.session.headers.update({'user-agent': user_agent})", "delay # timestamp of when a domain was last accessed", "% e) if num_retries > 0 and 500 <= response.status_code", "on the provided url and parameters.\"\"\" request = requests.Request('GET', url,", "from the cache.\"\"\" result = None if self.cache: result =", "= self.domains.get(domain) if self.delay > 0 and last_accessed is not", "download of web pages or caller to call api. 
Args:", "datetime import logging import time import urllib.parse import requests from", "auth=None): self.session = requests.Session() self.session.headers.update({'user-agent': user_agent}) self.session.proxies = proxies self.session.auth", "logging import time import urllib.parse import requests from bs4 import", "num_retries self.timeout = timeout self.cache = cache def get_from_cache(self, request):", "last accessed self.domains = {} def wait(self, url): domain =", "= cache def get_from_cache(self, request): \"\"\"Try to get the result", "\"\"\"Access the api and return the json object.\"\"\" request =", "def __init__(self, delay): # amount of delay between downloads for", "time.sleep(sleep_secs) self.domains[domain] = datetime.now() class Downloader(object): \"\"\"Convenient download of web", "send_request(self, request, num_retries): \"\"\"Send request and return response object.\"\"\" self.throttle.wait(request.url)", "num_retries): \"\"\"Send request and return response object.\"\"\" self.throttle.wait(request.url) try: logging.info('Downloading:", "of when a domain was last accessed self.domains = {}", "%s' % request.url) response = None return response def text(self,", "cache.\"\"\" result = None if self.cache: result = self.cache.get(request.url) if", "\"\"\" Provide download function by request \"\"\" from datetime import", "self.get_from_cache(request) if result is None: response = self.send_request(request, self.num_retries) if", "%s' % request.url) response = self.session.send(request, timeout=self.timeout) response.raise_for_status() except requests.exceptions.HTTPError", "by request \"\"\" from datetime import datetime import logging import", "object.\"\"\" self.throttle.wait(request.url) try: logging.info('Downloading: %s' % request.url) response = self.session.send(request,", "(datetime.now() - last_accessed).seconds if sleep_secs > 0: time.sleep(sleep_secs) self.domains[domain] =", "def json(self, url, params=None): \"\"\"Access the api and 
return the", "call api. Args: delay: Interval between downloads (seconds) num_retries: Number", "timeout self.cache = cache def get_from_cache(self, request): \"\"\"Try to get", "e: logging.warn('Download error: %s' % e) if num_retries > 0", "wait(self, url): domain = urllib.parse.urlparse(url).netloc last_accessed = self.domains.get(domain) if self.delay", "= result return result['text'] def json(self, url, params=None): \"\"\"Access the", "utf-8 -*- \"\"\" Provide download function by request \"\"\" from", "the json object.\"\"\" request = self.prepare_request(url, params) result = self.get_from_cache(request)", "result = self.get_from_cache(request) if result is None: response = self.send_request(request,", "= delay # timestamp of when a domain was last", "when a domain was last accessed self.domains = {} def", "= requests.Session() self.session.headers.update({'user-agent': user_agent}) self.session.proxies = proxies self.session.auth = auth", "def text(self, url, params=None, encoding=None): \"\"\"Download web content in text", "request.url) response = self.session.send(request, timeout=self.timeout) response.raise_for_status() except requests.exceptions.HTTPError as e:", "import logging import time import urllib.parse import requests from bs4", "json(self, url, params=None): \"\"\"Access the api and return the json", "= self.session.send(request, timeout=self.timeout) response.raise_for_status() except requests.exceptions.HTTPError as e: logging.warn('Download error:", "and 500 <= response.status_code < 600: return self.send_request(request, num_retries -", "result = {'text': response.text, 'code': response.status_code} if self.cache: self.cache[request.url] =", "= {'text': response.text, 'code': response.status_code} if self.cache: self.cache[request.url] = result", "self.send_request(request, self.num_retries) if response: if encoding: response.encoding = encoding result", "> 0 and last_accessed is not None: sleep_secs = self.delay", "> 0 and 500 <= 
result['code'] < 600: result =", "self.timeout = timeout self.cache = cache def get_from_cache(self, request): \"\"\"Try", "# -*- coding: utf-8 -*- \"\"\" Provide download function by", "downloads for each domain self.delay = delay # timestamp of", "format or html.\"\"\" request = self.prepare_request(url, params) result = self.get_from_cache(request)", "self.delay > 0 and last_accessed is not None: sleep_secs =", "self.cache: result = self.cache.get(request.url) if result and self.num_retries > 0", "None return response def text(self, url, params=None, encoding=None): \"\"\"Download web", "500 <= result['code'] < 600: result = None return result", "\"\"\" from datetime import datetime import logging import time import", "coding: utf-8 -*- \"\"\" Provide download function by request \"\"\"", "Number of retries when downloading errors timeout: Download timeout \"\"\"", "domain was last accessed self.domains = {} def wait(self, url):", "-*- \"\"\" Provide download function by request \"\"\" from datetime", "result['text'] def json(self, url, params=None): \"\"\"Access the api and return", "if result and self.num_retries > 0 and 500 <= result['code']", "same domain.\"\"\" def __init__(self, delay): # amount of delay between", "return result def prepare_request(self, url, params=None): \"\"\"Build requests based on", "cache def get_from_cache(self, request): \"\"\"Try to get the result of", "except requests.exceptions.HTTPError as e: logging.warn('Download error: %s' % e) if", "when downloading errors timeout: Download timeout \"\"\" def __init__(self, delay=5,", "domain.\"\"\" def __init__(self, delay): # amount of delay between downloads", "- last_accessed).seconds if sleep_secs > 0: time.sleep(sleep_secs) self.domains[domain] = datetime.now()", "0 and 500 <= response.status_code < 600: return self.send_request(request, num_retries", "(seconds) num_retries: Number of retries when downloading errors timeout: Download", "requests from bs4 import BeautifulSoup class 
Throttle(object): \"\"\"Throttle downloading by", "response = self.session.send(request, timeout=self.timeout) response.raise_for_status() except requests.exceptions.HTTPError as e: logging.warn('Download", "self.cache = cache def get_from_cache(self, request): \"\"\"Try to get the", "a domain was last accessed self.domains = {} def wait(self,", "timeout \"\"\" def __init__(self, delay=5, user_agent='awsl', proxies=None, num_retries=1, timeout=60, cache=None,", "requests.Session() self.session.headers.update({'user-agent': user_agent}) self.session.proxies = proxies self.session.auth = auth self.throttle", "response = self.send_request(request, self.num_retries) if response: result = {'json': response.json(),", "last_accessed is not None: sleep_secs = self.delay - (datetime.now() -", "__init__(self, delay): # amount of delay between downloads for each", "- (datetime.now() - last_accessed).seconds if sleep_secs > 0: time.sleep(sleep_secs) self.domains[domain]", "Args: delay: Interval between downloads (seconds) num_retries: Number of retries", "< 600: return self.send_request(request, num_retries - 1) except requests.exceptions.RequestException: logging.error('Download", "and parameters.\"\"\" request = requests.Request('GET', url, params=params) return self.session.prepare_request(request) def", "is not None: sleep_secs = self.delay - (datetime.now() - last_accessed).seconds", "= timeout self.cache = cache def get_from_cache(self, request): \"\"\"Try to", "< 600: result = None return result def prepare_request(self, url,", "class Downloader(object): \"\"\"Convenient download of web pages or caller to", "sleep_secs > 0: time.sleep(sleep_secs) self.domains[domain] = datetime.now() class Downloader(object): \"\"\"Convenient", "self.num_retries = num_retries self.timeout = timeout self.cache = cache def", "last_accessed = self.domains.get(domain) if self.delay > 0 and last_accessed is", "response = self.send_request(request, self.num_retries) if response: if encoding: 
response.encoding =", "and 500 <= result['code'] < 600: result = None return", "auth self.throttle = Throttle(delay) self.num_retries = num_retries self.timeout = timeout", "result of the request from the cache.\"\"\" result = None", "params=None, encoding=None): \"\"\"Download web content in text format or html.\"\"\"", "self.throttle = Throttle(delay) self.num_retries = num_retries self.timeout = timeout self.cache", "is None: response = self.send_request(request, self.num_retries) if response: if encoding:", "request and return response object.\"\"\" self.throttle.wait(request.url) try: logging.info('Downloading: %s' %", "user_agent}) self.session.proxies = proxies self.session.auth = auth self.throttle = Throttle(delay)", "= self.send_request(request, self.num_retries) if response: result = {'json': response.json(), 'code':", "= self.get_from_cache(request) if result is None: response = self.send_request(request, self.num_retries)", "and last_accessed is not None: sleep_secs = self.delay - (datetime.now()", "response.status_code < 600: return self.send_request(request, num_retries - 1) except requests.exceptions.RequestException:", "url, params=None, encoding=None): \"\"\"Download web content in text format or", "None: sleep_secs = self.delay - (datetime.now() - last_accessed).seconds if sleep_secs", "sleeping between requests to same domain.\"\"\" def __init__(self, delay): #", "delay: Interval between downloads (seconds) num_retries: Number of retries when", "import urllib.parse import requests from bs4 import BeautifulSoup class Throttle(object):", "user_agent='awsl', proxies=None, num_retries=1, timeout=60, cache=None, auth=None): self.session = requests.Session() self.session.headers.update({'user-agent':", "result is None: response = self.send_request(request, self.num_retries) if response: if", "the cache.\"\"\" result = None if self.cache: result = self.cache.get(request.url)", "{'json': response.json(), 'code': response.status_code} if self.cache: 
self.cache[request.url] = result return", "response: result = {'json': response.json(), 'code': response.status_code} if self.cache: self.cache[request.url]", "request): \"\"\"Try to get the result of the request from", "of delay between downloads for each domain self.delay = delay", "if result is None: response = self.send_request(request, self.num_retries) if response:", "= {} def wait(self, url): domain = urllib.parse.urlparse(url).netloc last_accessed =", "None return result def prepare_request(self, url, params=None): \"\"\"Build requests based", "{'text': response.text, 'code': response.status_code} if self.cache: self.cache[request.url] = result return", "get_from_cache(self, request): \"\"\"Try to get the result of the request", "by sleeping between requests to same domain.\"\"\" def __init__(self, delay):", "requests to same domain.\"\"\" def __init__(self, delay): # amount of", "1) except requests.exceptions.RequestException: logging.error('Download faild: %s' % request.url) response =", "url, params=None): \"\"\"Access the api and return the json object.\"\"\"", "import datetime import logging import time import urllib.parse import requests", "None: response = self.send_request(request, self.num_retries) if response: result = {'json':", "timestamp of when a domain was last accessed self.domains =", "not None: sleep_secs = self.delay - (datetime.now() - last_accessed).seconds if", "num_retries > 0 and 500 <= response.status_code < 600: return", "pages or caller to call api. 
Args: delay: Interval between", "in text format or html.\"\"\" request = self.prepare_request(url, params) result", "= encoding result = {'text': response.text, 'code': response.status_code} if self.cache:", "__init__(self, delay=5, user_agent='awsl', proxies=None, num_retries=1, timeout=60, cache=None, auth=None): self.session =", "BeautifulSoup class Throttle(object): \"\"\"Throttle downloading by sleeping between requests to", "self.send_request(request, self.num_retries) if response: result = {'json': response.json(), 'code': response.status_code}", "between requests to same domain.\"\"\" def __init__(self, delay): # amount", "urllib.parse.urlparse(url).netloc last_accessed = self.domains.get(domain) if self.delay > 0 and last_accessed", "result = {'json': response.json(), 'code': response.status_code} if self.cache: self.cache[request.url] =", "json object.\"\"\" request = self.prepare_request(url, params) result = self.get_from_cache(request) if", "text(self, url, params=None, encoding=None): \"\"\"Download web content in text format", "request from the cache.\"\"\" result = None if self.cache: result", "self.session.prepare_request(request) def send_request(self, request, num_retries): \"\"\"Send request and return response", "return response object.\"\"\" self.throttle.wait(request.url) try: logging.info('Downloading: %s' % request.url) response", "return the json object.\"\"\" request = self.prepare_request(url, params) result =", "html.\"\"\" request = self.prepare_request(url, params) result = self.get_from_cache(request) if result", "result = None if self.cache: result = self.cache.get(request.url) if result", "% request.url) response = None return response def text(self, url,", "request = self.prepare_request(url, params) result = self.get_from_cache(request) if result is", "def wait(self, url): domain = urllib.parse.urlparse(url).netloc last_accessed = self.domains.get(domain) if", "None if self.cache: result = self.cache.get(request.url) if result 
and self.num_retries", "response.status_code} if self.cache: self.cache[request.url] = result return result['text'] def json(self,", "0 and last_accessed is not None: sleep_secs = self.delay -", "self.num_retries) if response: result = {'json': response.json(), 'code': response.status_code} if", "= self.cache.get(request.url) if result and self.num_retries > 0 and 500", "self.delay = delay # timestamp of when a domain was", "'code': response.status_code} if self.cache: self.cache[request.url] = result return result['text'] def", "\"\"\"Convenient download of web pages or caller to call api.", "self.throttle.wait(request.url) try: logging.info('Downloading: %s' % request.url) response = self.session.send(request, timeout=self.timeout)", "result def prepare_request(self, url, params=None): \"\"\"Build requests based on the", "num_retries: Number of retries when downloading errors timeout: Download timeout", "<= response.status_code < 600: return self.send_request(request, num_retries - 1) except", "object.\"\"\" request = self.prepare_request(url, params) result = self.get_from_cache(request) if result", "domain self.delay = delay # timestamp of when a domain", "retries when downloading errors timeout: Download timeout \"\"\" def __init__(self,", "delay between downloads for each domain self.delay = delay #", "self.session.auth = auth self.throttle = Throttle(delay) self.num_retries = num_retries self.timeout", "\"\"\"Download web content in text format or html.\"\"\" request =", "of the request from the cache.\"\"\" result = None if", "def get_from_cache(self, request): \"\"\"Try to get the result of the", "last_accessed).seconds if sleep_secs > 0: time.sleep(sleep_secs) self.domains[domain] = datetime.now() class", "from bs4 import BeautifulSoup class Throttle(object): \"\"\"Throttle downloading by sleeping", "# timestamp of when a domain was last accessed self.domains", "600: result = None return result def prepare_request(self, url, params=None):", "url, 
params=None): \"\"\"Build requests based on the provided url and", "= {'json': response.json(), 'code': response.status_code} if self.cache: self.cache[request.url] = result", "timeout=60, cache=None, auth=None): self.session = requests.Session() self.session.headers.update({'user-agent': user_agent}) self.session.proxies =", "self.session = requests.Session() self.session.headers.update({'user-agent': user_agent}) self.session.proxies = proxies self.session.auth =", "for each domain self.delay = delay # timestamp of when", "url and parameters.\"\"\" request = requests.Request('GET', url, params=params) return self.session.prepare_request(request)", "return response def text(self, url, params=None, encoding=None): \"\"\"Download web content", "sleep_secs = self.delay - (datetime.now() - last_accessed).seconds if sleep_secs >", "= Throttle(delay) self.num_retries = num_retries self.timeout = timeout self.cache =", "and return response object.\"\"\" self.throttle.wait(request.url) try: logging.info('Downloading: %s' % request.url)", "downloading by sleeping between requests to same domain.\"\"\" def __init__(self,", "request, num_retries): \"\"\"Send request and return response object.\"\"\" self.throttle.wait(request.url) try:", "result = None return result def prepare_request(self, url, params=None): \"\"\"Build", "response.text, 'code': response.status_code} if self.cache: self.cache[request.url] = result return result['text']", "or html.\"\"\" request = self.prepare_request(url, params) result = self.get_from_cache(request) if", "proxies self.session.auth = auth self.throttle = Throttle(delay) self.num_retries = num_retries", "datetime.now() class Downloader(object): \"\"\"Convenient download of web pages or caller", "text format or html.\"\"\" request = self.prepare_request(url, params) result =", "each domain self.delay = delay # timestamp of when a", "download function by request \"\"\" from datetime import datetime import", "of web pages or caller to call 
api. Args: delay:", "caller to call api. Args: delay: Interval between downloads (seconds)", "if sleep_secs > 0: time.sleep(sleep_secs) self.domains[domain] = datetime.now() class Downloader(object):", "logging.warn('Download error: %s' % e) if num_retries > 0 and", "= requests.Request('GET', url, params=params) return self.session.prepare_request(request) def send_request(self, request, num_retries):", "import BeautifulSoup class Throttle(object): \"\"\"Throttle downloading by sleeping between requests", "response: if encoding: response.encoding = encoding result = {'text': response.text,", "the api and return the json object.\"\"\" request = self.prepare_request(url,", "the result of the request from the cache.\"\"\" result =", "return self.send_request(request, num_retries - 1) except requests.exceptions.RequestException: logging.error('Download faild: %s'", "downloading errors timeout: Download timeout \"\"\" def __init__(self, delay=5, user_agent='awsl',", "api. Args: delay: Interval between downloads (seconds) num_retries: Number of", "result and self.num_retries > 0 and 500 <= result['code'] <", "provided url and parameters.\"\"\" request = requests.Request('GET', url, params=params) return", "datetime import datetime import logging import time import urllib.parse import", "= urllib.parse.urlparse(url).netloc last_accessed = self.domains.get(domain) if self.delay > 0 and", "0 and 500 <= result['code'] < 600: result = None", "num_retries=1, timeout=60, cache=None, auth=None): self.session = requests.Session() self.session.headers.update({'user-agent': user_agent}) self.session.proxies", "if self.cache: result = self.cache.get(request.url) if result and self.num_retries >", "request = requests.Request('GET', url, params=params) return self.session.prepare_request(request) def send_request(self, request,", "result is None: response = self.send_request(request, self.num_retries) if response: result", "response.encoding = encoding result = {'text': response.text, 
'code': response.status_code} if", "self.session.proxies = proxies self.session.auth = auth self.throttle = Throttle(delay) self.num_retries", "None: response = self.send_request(request, self.num_retries) if response: if encoding: response.encoding", "self.num_retries > 0 and 500 <= result['code'] < 600: result", "get the result of the request from the cache.\"\"\" result", "class Throttle(object): \"\"\"Throttle downloading by sleeping between requests to same", "= self.delay - (datetime.now() - last_accessed).seconds if sleep_secs > 0:", "the provided url and parameters.\"\"\" request = requests.Request('GET', url, params=params)", "% request.url) response = self.session.send(request, timeout=self.timeout) response.raise_for_status() except requests.exceptions.HTTPError as", "if self.cache: self.cache[request.url] = result return result['text'] def json(self, url,", "request \"\"\" from datetime import datetime import logging import time", "\"\"\" def __init__(self, delay=5, user_agent='awsl', proxies=None, num_retries=1, timeout=60, cache=None, auth=None):", "%s' % e) if num_retries > 0 and 500 <=", "Downloader(object): \"\"\"Convenient download of web pages or caller to call", "error: %s' % e) if num_retries > 0 and 500", "self.cache.get(request.url) if result and self.num_retries > 0 and 500 <=", "return self.session.prepare_request(request) def send_request(self, request, num_retries): \"\"\"Send request and return", "urllib.parse import requests from bs4 import BeautifulSoup class Throttle(object): \"\"\"Throttle", "\"\"\"Try to get the result of the request from the", "logging.error('Download faild: %s' % request.url) response = None return response", "params=params) return self.session.prepare_request(request) def send_request(self, request, num_retries): \"\"\"Send request and", "accessed self.domains = {} def wait(self, url): domain = urllib.parse.urlparse(url).netloc", "function by request \"\"\" from datetime import datetime import logging", "and 
self.num_retries > 0 and 500 <= result['code'] < 600:", "\"\"\"Throttle downloading by sleeping between requests to same domain.\"\"\" def", "= self.prepare_request(url, params) result = self.get_from_cache(request) if result is None:", "web pages or caller to call api. Args: delay: Interval", "= num_retries self.timeout = timeout self.cache = cache def get_from_cache(self,", "num_retries - 1) except requests.exceptions.RequestException: logging.error('Download faild: %s' % request.url)", "amount of delay between downloads for each domain self.delay =", "of retries when downloading errors timeout: Download timeout \"\"\" def", "params=None): \"\"\"Build requests based on the provided url and parameters.\"\"\"", "api and return the json object.\"\"\" request = self.prepare_request(url, params)", "Throttle(delay) self.num_retries = num_retries self.timeout = timeout self.cache = cache", "= datetime.now() class Downloader(object): \"\"\"Convenient download of web pages or", "result = self.cache.get(request.url) if result and self.num_retries > 0 and", "to get the result of the request from the cache.\"\"\"", "url, params=params) return self.session.prepare_request(request) def send_request(self, request, num_retries): \"\"\"Send request", "import requests from bs4 import BeautifulSoup class Throttle(object): \"\"\"Throttle downloading", "cache=None, auth=None): self.session = requests.Session() self.session.headers.update({'user-agent': user_agent}) self.session.proxies = proxies", "encoding: response.encoding = encoding result = {'text': response.text, 'code': response.status_code}", "self.domains.get(domain) if self.delay > 0 and last_accessed is not None:", "> 0: time.sleep(sleep_secs) self.domains[domain] = datetime.now() class Downloader(object): \"\"\"Convenient download", "to same domain.\"\"\" def __init__(self, delay): # amount of delay", "if num_retries > 0 and 500 <= response.status_code < 600:", "0: time.sleep(sleep_secs) self.domains[domain] = 
datetime.now() class Downloader(object): \"\"\"Convenient download of", "Interval between downloads (seconds) num_retries: Number of retries when downloading", "the request from the cache.\"\"\" result = None if self.cache:", "self.session.headers.update({'user-agent': user_agent}) self.session.proxies = proxies self.session.auth = auth self.throttle =", "response.raise_for_status() except requests.exceptions.HTTPError as e: logging.warn('Download error: %s' % e)", "\"\"\"Build requests based on the provided url and parameters.\"\"\" request", "response def text(self, url, params=None, encoding=None): \"\"\"Download web content in", "based on the provided url and parameters.\"\"\" request = requests.Request('GET',", "requests.exceptions.RequestException: logging.error('Download faild: %s' % request.url) response = None return", "encoding=None): \"\"\"Download web content in text format or html.\"\"\" request", "if encoding: response.encoding = encoding result = {'text': response.text, 'code':", "600: return self.send_request(request, num_retries - 1) except requests.exceptions.RequestException: logging.error('Download faild:", "500 <= response.status_code < 600: return self.send_request(request, num_retries - 1)", "delay): # amount of delay between downloads for each domain", "self.domains[domain] = datetime.now() class Downloader(object): \"\"\"Convenient download of web pages", "<= result['code'] < 600: result = None return result def", "response = None return response def text(self, url, params=None, encoding=None):", "Provide download function by request \"\"\" from datetime import datetime", "> 0 and 500 <= response.status_code < 600: return self.send_request(request,", "between downloads (seconds) num_retries: Number of retries when downloading errors", "as e: logging.warn('Download error: %s' % e) if num_retries >", "if self.delay > 0 and last_accessed is not None: sleep_secs", "except requests.exceptions.RequestException: logging.error('Download faild: %s' 
% request.url) response = None", "self.prepare_request(url, params) result = self.get_from_cache(request) if result is None: response", "return result['text'] def json(self, url, params=None): \"\"\"Access the api and", "faild: %s' % request.url) response = None return response def", "self.cache: self.cache[request.url] = result return result['text'] def json(self, url, params=None):", "from datetime import datetime import logging import time import urllib.parse", "was last accessed self.domains = {} def wait(self, url): domain", "prepare_request(self, url, params=None): \"\"\"Build requests based on the provided url", "# amount of delay between downloads for each domain self.delay", "errors timeout: Download timeout \"\"\" def __init__(self, delay=5, user_agent='awsl', proxies=None,", "def prepare_request(self, url, params=None): \"\"\"Build requests based on the provided", "parameters.\"\"\" request = requests.Request('GET', url, params=params) return self.session.prepare_request(request) def send_request(self,", "\"\"\"Send request and return response object.\"\"\" self.throttle.wait(request.url) try: logging.info('Downloading: %s'", "Download timeout \"\"\" def __init__(self, delay=5, user_agent='awsl', proxies=None, num_retries=1, timeout=60,", "downloads (seconds) num_retries: Number of retries when downloading errors timeout:", "import time import urllib.parse import requests from bs4 import BeautifulSoup", "self.cache[request.url] = result return result['text'] def json(self, url, params=None): \"\"\"Access", "Throttle(object): \"\"\"Throttle downloading by sleeping between requests to same domain.\"\"\"", "try: logging.info('Downloading: %s' % request.url) response = self.session.send(request, timeout=self.timeout) response.raise_for_status()", "response.json(), 'code': response.status_code} if self.cache: self.cache[request.url] = result return result['json']", "self.delay - (datetime.now() - last_accessed).seconds if sleep_secs > 0: 
time.sleep(sleep_secs)", "request.url) response = None return response def text(self, url, params=None,", "result['code'] < 600: result = None return result def prepare_request(self,", "and return the json object.\"\"\" request = self.prepare_request(url, params) result", "requests.exceptions.HTTPError as e: logging.warn('Download error: %s' % e) if num_retries", "url): domain = urllib.parse.urlparse(url).netloc last_accessed = self.domains.get(domain) if self.delay >", "{} def wait(self, url): domain = urllib.parse.urlparse(url).netloc last_accessed = self.domains.get(domain)", "if response: if encoding: response.encoding = encoding result = {'text':", "self.num_retries) if response: if encoding: response.encoding = encoding result =", "requests based on the provided url and parameters.\"\"\" request =", "= auth self.throttle = Throttle(delay) self.num_retries = num_retries self.timeout =", "result return result['text'] def json(self, url, params=None): \"\"\"Access the api", "= None if self.cache: result = self.cache.get(request.url) if result and", "- 1) except requests.exceptions.RequestException: logging.error('Download faild: %s' % request.url) response", "-*- coding: utf-8 -*- \"\"\" Provide download function by request", "def send_request(self, request, num_retries): \"\"\"Send request and return response object.\"\"\"", "time import urllib.parse import requests from bs4 import BeautifulSoup class", "timeout=self.timeout) response.raise_for_status() except requests.exceptions.HTTPError as e: logging.warn('Download error: %s' %", "response object.\"\"\" self.throttle.wait(request.url) try: logging.info('Downloading: %s' % request.url) response =", "self.send_request(request, num_retries - 1) except requests.exceptions.RequestException: logging.error('Download faild: %s' %", "to call api. 
Args: delay: Interval between downloads (seconds) num_retries:", "requests.Request('GET', url, params=params) return self.session.prepare_request(request) def send_request(self, request, num_retries): \"\"\"Send", "domain = urllib.parse.urlparse(url).netloc last_accessed = self.domains.get(domain) if self.delay > 0", "delay=5, user_agent='awsl', proxies=None, num_retries=1, timeout=60, cache=None, auth=None): self.session = requests.Session()", "def __init__(self, delay=5, user_agent='awsl', proxies=None, num_retries=1, timeout=60, cache=None, auth=None): self.session", "logging.info('Downloading: %s' % request.url) response = self.session.send(request, timeout=self.timeout) response.raise_for_status() except", "params) result = self.get_from_cache(request) if result is None: response =", "encoding result = {'text': response.text, 'code': response.status_code} if self.cache: self.cache[request.url]", "if response: result = {'json': response.json(), 'code': response.status_code} if self.cache:", "e) if num_retries > 0 and 500 <= response.status_code <", "= proxies self.session.auth = auth self.throttle = Throttle(delay) self.num_retries =", "is None: response = self.send_request(request, self.num_retries) if response: result =", "self.domains = {} def wait(self, url): domain = urllib.parse.urlparse(url).netloc last_accessed" ]
[ "i == -1: word = '' j = i +", "or i == -1: word = '' j = i", "word: res.append(word) i -= 1 return ' '.join(res) s =", "s[j] != ' ': word += s[j] j += 1", "== -1: word = '' j = i + 1", "+= s[j] j += 1 if word: res.append(word) i -=", "s[j] j += 1 if word: res.append(word) i -= 1", "word = '' j = i + 1 while j", "= [] i = len(s) - 2 while i >=", "'' j = i + 1 while j < len(s)", "if s == '': return s res = [] i", "if word: res.append(word) i -= 1 return ' '.join(res) s", "-1: if s[i] == ' ' or i == -1:", "-1: word = '' j = i + 1 while", "s res = [] i = len(s) - 2 while", "class Solution(object): def reverseWords(self, s): if s == '': return", "i = len(s) - 2 while i >= -1: if", "': word += s[j] j += 1 if word: res.append(word)", "s): if s == '': return s res = []", "-= 1 return ' '.join(res) s = Solution() print s.reverseWords('a", "while j < len(s) and s[j] != ' ': word", ">= -1: if s[i] == ' ' or i ==", "#!/usr/bin/python class Solution(object): def reverseWords(self, s): if s == '':", "while i >= -1: if s[i] == ' ' or", "s[i] == ' ' or i == -1: word =", "j < len(s) and s[j] != ' ': word +=", "[] i = len(s) - 2 while i >= -1:", "return s res = [] i = len(s) - 2", "Solution(object): def reverseWords(self, s): if s == '': return s", "j = i + 1 while j < len(s) and", "1 return ' '.join(res) s = Solution() print s.reverseWords('a x')", "!= ' ': word += s[j] j += 1 if", "s == '': return s res = [] i =", "== '': return s res = [] i = len(s)", "i >= -1: if s[i] == ' ' or i", "if s[i] == ' ' or i == -1: word", "reverseWords(self, s): if s == '': return s res =", "res = [] i = len(s) - 2 while i", "res.append(word) i -= 1 return ' '.join(res) s = Solution()", "' or i == -1: word = '' j =", "2 while i >= -1: if s[i] == ' '", "== ' ' or i == -1: word = ''", "' ' or i == -1: word = '' j", "= len(s) - 2 while i >= -1: if s[i]", "and s[j] != ' ': word += s[j] j +=", "word += s[j] j += 1 if word: res.append(word) i", "def reverseWords(self, s): if s == '': return s res", "< 
len(s) and s[j] != ' ': word += s[j]", "= i + 1 while j < len(s) and s[j]", "j += 1 if word: res.append(word) i -= 1 return", "+= 1 if word: res.append(word) i -= 1 return '", "- 2 while i >= -1: if s[i] == '", "len(s) - 2 while i >= -1: if s[i] ==", "' ': word += s[j] j += 1 if word:", "1 if word: res.append(word) i -= 1 return ' '.join(res)", "i + 1 while j < len(s) and s[j] !=", "i -= 1 return ' '.join(res) s = Solution() print", "= '' j = i + 1 while j <", "len(s) and s[j] != ' ': word += s[j] j", "+ 1 while j < len(s) and s[j] != '", "'': return s res = [] i = len(s) -", "1 while j < len(s) and s[j] != ' ':" ]
[ "requests to the server. :param timeout: (int) Timeout to use", "self._s.put( urljoin(self.base_url, path), params=kwargs, data=data, headers=self.headers, timeout=self.timeout, verify=self.verify, ) except", "SSL. :param proxies: (dict) The proxies servers requests is sent", "lambda x: x # don't let requests add auth headers", "\"\"\"Return header request to the server.\"\"\" return self._headers @headers.setter def", "in use for requests to the server.\"\"\" return self._base_url @base_url.setter", "data, **kwargs): \"\"\"Submit post request to the path. :param path:", "from .exceptions import KeycloakConnectionError class ConnectionManager(object): \"\"\" Represents a simple", "parameters of the requests to the server. :param timeout: (int)", "Can't connect to server. \"\"\" try: return self._s.get( urljoin(self.base_url, path),", "**kwargs): \"\"\"Submit put request to the path. :param path: (str)", "OR IN # CONNECTION WITH THE SOFTWARE OR THE USE", ":returns: Response the request. :raises: HttpError Can't connect to server.", "def timeout(self, value): \"\"\" \"\"\" self._timeout = value @property def", "is not None def add_param_headers(self, key, value): \"\"\"Add a single", "\"\"\" self.headers.pop(key, None) def raw_get(self, path, **kwargs): \"\"\"Submit get request", "raw_put(self, path, data, **kwargs): \"\"\"Submit put request to the path.", "connect to server (%s)\" % e) def raw_put(self, path, data,", "do so, # subject to the following conditions: # #", "server connection. :param base_url: (str) The server URL. :param headers:", "deal in # the Software without restriction, including without limitation", "def del_param_headers(self, key): \"\"\"Remove a specific parameter. :param key: (str)", "to the server.\"\"\" return self._base_url @base_url.setter def base_url(self, value): \"\"\"", "x # don't let requests add auth headers # retry", "requests to the server. 
:param verify: (bool) Verify server SSL.", "= HTTPAdapter(max_retries=1) # adds POST to retry whitelist allowed_methods =", "def __init__(self, base_url, headers={}, timeout=60, verify=True, proxies=None): self._base_url = base_url", "request to the path. :param path: (str) Path for request.", "distribute, sublicense, and/or sell copies of # the Software, and", "\"\"\" \"\"\" self._timeout = value @property def verify(self): \"\"\"Return verify", "request. :raises: HttpError Can't connect to server. \"\"\" try: return", "\"\"\"Submit post request to the path. :param path: (str) Path", "of the requests to the server. :param timeout: (int) Timeout", "self._verify = verify self._s = requests.Session() self._s.auth = lambda x:", "\"\"\" return self.param_headers(key) is not None def add_param_headers(self, key, value):", "the server.\"\"\" return self._timeout @timeout.setter def timeout(self, value): \"\"\" \"\"\"", "FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER # IN", "timeout=self.timeout, verify=self.verify, ) except Exception as e: raise KeycloakConnectionError(\"Can't connect", "\"\"\" try: return self._s.put( urljoin(self.base_url, path), params=kwargs, data=data, headers=self.headers, timeout=self.timeout,", "added. \"\"\" self.headers[key] = value def del_param_headers(self, key): \"\"\"Remove a", "ARISING FROM, OUT OF OR IN # CONNECTION WITH THE", "portions of the Software. # # THE SOFTWARE IS PROVIDED", "by. 
\"\"\" def __init__(self, base_url, headers={}, timeout=60, verify=True, proxies=None): self._base_url", "\"\"\" \"\"\" self._headers = value def param_headers(self, key): \"\"\" Return", "headers: (dict) The header parameters of the requests to the", "return self.param_headers(key) is not None def add_param_headers(self, key, value): \"\"\"Add", "def raw_delete(self, path, data={}, **kwargs): \"\"\"Submit delete request to the", "# # The above copyright notice and this permission notice", "TO THE WARRANTIES OF MERCHANTABILITY, FITNESS # FOR A PARTICULAR", "DAMAGES OR OTHER LIABILITY, WHETHER # IN AN ACTION OF", "param_headers(self, key): \"\"\" Return a specific header parameter. :param key:", "see https://github.com/marcospereirampj/python-keycloak/issues/36 for protocol in (\"https://\", \"http://\"): adapter = HTTPAdapter(max_retries=1)", "self.headers = {} def exist_param_headers(self, key): \"\"\"Check if the parameter", "use for request to the server.\"\"\" return self._timeout @timeout.setter def", "self._base_url = base_url self._headers = headers self._timeout = timeout self._verify", "base_url self._headers = headers self._timeout = timeout self._verify = verify", "raise KeycloakConnectionError(\"Can't connect to server (%s)\" % e) def raw_put(self,", "self._headers @headers.setter def headers(self, value): \"\"\" \"\"\" self._headers = value", "\"\"\"Remove a specific parameter. :param key: (str) Key of the", "a specific parameter. :param key: (str) Key of the header", "path: (str) Path for request. :param data: (dict) Payload for", "import urljoin import requests from requests.adapters import HTTPAdapter from .exceptions", "key): \"\"\"Remove a specific parameter. :param key: (str) Key of", "parameters exist, return its value. \"\"\" return self.headers.get(key) def clean_headers(self):", "key: (str) Header parameters key. :param value: (str) Value to", "limitation the rights to # use, copy, modify, merge, publish,", "the header parameters. 
\"\"\" self.headers.pop(key, None) def raw_get(self, path, **kwargs):", "Can't connect to server. \"\"\" try: return self._s.post( urljoin(self.base_url, path),", "the rights to # use, copy, modify, merge, publish, distribute,", "# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR", "except ImportError: from urlparse import urljoin import requests from requests.adapters", "post request to the path. :param path: (str) Path for", "hereby granted, free of charge, to any person obtaining a", "KeycloakConnectionError(\"Can't connect to server (%s)\" % e) def raw_post(self, path,", "this permission notice shall be included in all # copies", "def raw_post(self, path, data, **kwargs): \"\"\"Submit post request to the", "\"\"\"Return timeout in use for request to the server.\"\"\" return", "x: x # don't let requests add auth headers #", "(bool) Verify server SSL. :param proxies: (dict) The proxies servers", "(str) Value to be added. \"\"\" self.headers[key] = value def", "@verify.setter def verify(self, value): \"\"\" \"\"\" self._verify = value @property", "(%s)\" % e) def raw_delete(self, path, data={}, **kwargs): \"\"\"Submit delete", "ConnectionTimeout # see https://github.com/marcospereirampj/python-keycloak/issues/36 for protocol in (\"https://\", \"http://\"): adapter", "a specific header parameter. :param key: (str) Header parameters key.", "server URL. :param headers: (dict) The header parameters of the", "\"\"\" self._base_url = value @property def timeout(self): \"\"\"Return timeout in", "utf-8 -*- # # The MIT License (MIT) # #", "raw_delete(self, path, data={}, **kwargs): \"\"\"Submit delete request to the path.", "server. \"\"\" try: return self._s.delete( urljoin(self.base_url, path), params=kwargs, data=data, headers=self.headers,", "HttpError Can't connect to server. \"\"\" try: return self._s.post( urljoin(self.base_url,", "requests from requests.adapters import HTTPAdapter from .exceptions import KeycloakConnectionError class", "(str) Path for request. 
:returns: Response the request. :raises: HttpError", "documentation files (the \"Software\"), to deal in # the Software", "value @property def headers(self): \"\"\"Return header request to the server.\"\"\"", "in use for request to the server.\"\"\" return self._timeout @timeout.setter", "including without limitation the rights to # use, copy, modify,", "# Copyright (C) 2017 <NAME> <<EMAIL>> # # Permission is", "persons to whom the Software is furnished to do so,", "base_url: (str) The server URL. :param headers: (dict) The header", "key): \"\"\"Check if the parameter exists in the header. :param", "single parameter inside the header. :param key: (str) Header parameters", "be included in all # copies or substantial portions of", "request to the server.\"\"\" return self._verify @verify.setter def verify(self, value):", "connect to server. \"\"\" try: return self._s.delete( urljoin(self.base_url, path), params=kwargs,", "path. :param path: (str) Path for request. :returns: Response the", "the Software. # # THE SOFTWARE IS PROVIDED \"AS IS\",", "(str) Header parameters key. :param value: (str) Value to be", "a simple server connection. :param base_url: (str) The server URL.", "# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING", "self._s.delete( urljoin(self.base_url, path), params=kwargs, data=data, headers=self.headers, timeout=self.timeout, verify=self.verify, ) except", "header request to the server.\"\"\" return self._headers @headers.setter def headers(self,", ":param data: (dict) Payload for request. :returns: Response the request.", "to do so, # subject to the following conditions: #", "to the server.\"\"\" return self._headers @headers.setter def headers(self, value): \"\"\"", "use for requests to the server.\"\"\" return self._base_url @base_url.setter def", "Software without restriction, including without limitation the rights to #", "value): \"\"\"Add a single parameter inside the header. 
:param key:", "copy of # this software and associated documentation files (the", "the path. :param path: (str) Path for request. :returns: Response", "add_param_headers(self, key, value): \"\"\"Add a single parameter inside the header.", "\"\"\" self._headers = value def param_headers(self, key): \"\"\" Return a", "OUT OF OR IN # CONNECTION WITH THE SOFTWARE OR", "IN NO EVENT SHALL THE AUTHORS OR # COPYRIGHT HOLDERS", "verify=self.verify, ) except Exception as e: raise KeycloakConnectionError(\"Can't connect to", "allowed_methods = set(adapter.max_retries.allowed_methods) allowed_methods.add(\"POST\") adapter.max_retries.allowed_methods = frozenset(allowed_methods) self._s.mount(protocol, adapter) if", "parameters.\"\"\" self.headers = {} def exist_param_headers(self, key): \"\"\"Check if the", "a copy of # this software and associated documentation files", "MIT License (MIT) # # Copyright (C) 2017 <NAME> <<EMAIL>>", "# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF", "base_url(self): \"\"\"Return base url in use for requests to the", "timeout(self, value): \"\"\" \"\"\" self._timeout = value @property def verify(self):", "@property def headers(self): \"\"\"Return header request to the server.\"\"\" return", "to server. \"\"\" try: return self._s.delete( urljoin(self.base_url, path), params=kwargs, data=data,", "self._s = requests.Session() self._s.auth = lambda x: x # don't", "to be added. \"\"\" self.headers[key] = value def del_param_headers(self, key):", "# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF", "the header parameters exist, return True. \"\"\" return self.param_headers(key) is", "value): \"\"\" \"\"\" self._base_url = value @property def timeout(self): \"\"\"Return", "OTHER LIABILITY, WHETHER # IN AN ACTION OF CONTRACT, TORT", "\"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #", "self._base_url = value @property def timeout(self): \"\"\"Return timeout in use", "server. 
\"\"\" try: return self._s.post( urljoin(self.base_url, path), params=kwargs, data=data, headers=self.headers,", "urljoin(self.base_url, path), params=kwargs, headers=self.headers, timeout=self.timeout, verify=self.verify, ) except Exception as", "be added. \"\"\" self.headers[key] = value def del_param_headers(self, key): \"\"\"Remove", "path), params=kwargs, headers=self.headers, timeout=self.timeout, verify=self.verify, ) except Exception as e:", "raw_post(self, path, data, **kwargs): \"\"\"Submit post request to the path.", "# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER", "servers requests is sent by. \"\"\" def __init__(self, base_url, headers={},", "try: return self._s.put( urljoin(self.base_url, path), params=kwargs, data=data, headers=self.headers, timeout=self.timeout, verify=self.verify,", "Path for request. :param data: (dict) Payload for request. :returns:", "return self._s.get( urljoin(self.base_url, path), params=kwargs, headers=self.headers, timeout=self.timeout, verify=self.verify, ) except", "so, # subject to the following conditions: # # The", "try: return self._s.post( urljoin(self.base_url, path), params=kwargs, data=data, headers=self.headers, timeout=self.timeout, verify=self.verify,", "to the path. :param path: (str) Path for request. :param", "LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS # FOR A", "(dict) The header parameters of the requests to the server.", "\"\"\" def __init__(self, base_url, headers={}, timeout=60, verify=True, proxies=None): self._base_url =", "value @property def verify(self): \"\"\"Return verify in use for request", "\"\"\"Submit get request to the path. :param path: (str) Path", "self._timeout @timeout.setter def timeout(self, value): \"\"\" \"\"\" self._timeout = value", "raw_get(self, path, **kwargs): \"\"\"Submit get request to the path. :param", "connect to server. 
\"\"\" try: return self._s.post( urljoin(self.base_url, path), params=kwargs,", "copy, modify, merge, publish, distribute, sublicense, and/or sell copies of", "specific header parameter. :param key: (str) Header parameters key. :returns:", "included in all # copies or substantial portions of the", "# The above copyright notice and this permission notice shall", "-*- coding: utf-8 -*- # # The MIT License (MIT)", "value def param_headers(self, key): \"\"\" Return a specific header parameter.", "return True. \"\"\" return self.param_headers(key) is not None def add_param_headers(self,", "allowed_methods.add(\"POST\") adapter.max_retries.allowed_methods = frozenset(allowed_methods) self._s.mount(protocol, adapter) if proxies: self._s.proxies.update(proxies) def", "@property def verify(self): \"\"\"Return verify in use for request to", "TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN #", "key: (str) Key of the header parameters. \"\"\" self.headers.pop(key, None)", "LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER #", "without restriction, including without limitation the rights to # use,", "Key of the header parameters. \"\"\" self.headers.pop(key, None) def raw_get(self,", "POST to retry whitelist allowed_methods = set(adapter.max_retries.allowed_methods) allowed_methods.add(\"POST\") adapter.max_retries.allowed_methods =", "e) def raw_post(self, path, data, **kwargs): \"\"\"Submit post request to", "clean_headers(self): \"\"\"Clear header parameters.\"\"\" self.headers = {} def exist_param_headers(self, key):", "furnished to do so, # subject to the following conditions:", "permission notice shall be included in all # copies or", "server.\"\"\" return self._base_url @base_url.setter def base_url(self, value): \"\"\" \"\"\" self._base_url", "Header parameters key. 
:returns: If the header parameters exist, return", "data=data, headers=self.headers, timeout=self.timeout, verify=self.verify, ) except Exception as e: raise", "return self._base_url @base_url.setter def base_url(self, value): \"\"\" \"\"\" self._base_url =", "IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS", "OR # COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES", "**kwargs): \"\"\"Submit get request to the path. :param path: (str)", "# subject to the following conditions: # # The above", "= value @property def verify(self): \"\"\"Return verify in use for", "server. :param verify: (bool) Verify server SSL. :param proxies: (dict)", "following conditions: # # The above copyright notice and this", "= value @property def timeout(self): \"\"\"Return timeout in use for", "OTHERWISE, ARISING FROM, OUT OF OR IN # CONNECTION WITH", "def timeout(self): \"\"\"Return timeout in use for request to the", "rights to # use, copy, modify, merge, publish, distribute, sublicense,", "conditions: # # The above copyright notice and this permission", "timeout=60, verify=True, proxies=None): self._base_url = base_url self._headers = headers self._timeout", "\"\"\" \"\"\" self._verify = value @property def headers(self): \"\"\"Return header", "(str) The server URL. :param headers: (dict) The header parameters", "if the parameter exists in the header. :param key: (str)", "the request. :raises: HttpError Can't connect to server. 
\"\"\" try:", "# don't let requests add auth headers # retry once", "server.\"\"\" return self._timeout @timeout.setter def timeout(self, value): \"\"\" \"\"\" self._timeout", "files (the \"Software\"), to deal in # the Software without", "for request to the server.\"\"\" return self._verify @verify.setter def verify(self,", "-*- # # The MIT License (MIT) # # Copyright", "self.param_headers(key) is not None def add_param_headers(self, key, value): \"\"\"Add a", "self.headers.get(key) def clean_headers(self): \"\"\"Clear header parameters.\"\"\" self.headers = {} def", "header parameters. \"\"\" self.headers.pop(key, None) def raw_get(self, path, **kwargs): \"\"\"Submit", "def headers(self): \"\"\"Return header request to the server.\"\"\" return self._headers", "@headers.setter def headers(self, value): \"\"\" \"\"\" self._headers = value def", ":returns: If the header parameters exist, return True. \"\"\" return", "Keycloak after tomcat's ConnectionTimeout # see https://github.com/marcospereirampj/python-keycloak/issues/36 for protocol in", "server.\"\"\" return self._verify @verify.setter def verify(self, value): \"\"\" \"\"\" self._verify", ":param key: (str) Key of the header parameters. \"\"\" self.headers.pop(key,", "\"\"\"Add a single parameter inside the header. :param key: (str)", "return self._headers @headers.setter def headers(self, value): \"\"\" \"\"\" self._headers =", "request to the server.\"\"\" return self._timeout @timeout.setter def timeout(self, value):", "The MIT License (MIT) # # Copyright (C) 2017 <NAME>", "ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF", "of the header parameters. 
\"\"\" self.headers.pop(key, None) def raw_get(self, path,", "return self._s.put( urljoin(self.base_url, path), params=kwargs, data=data, headers=self.headers, timeout=self.timeout, verify=self.verify, )", "after tomcat's ConnectionTimeout # see https://github.com/marcospereirampj/python-keycloak/issues/36 for protocol in (\"https://\",", "\"\"\"Return verify in use for request to the server.\"\"\" return", "all # copies or substantial portions of the Software. #", "SOFTWARE. try: from urllib.parse import urljoin except ImportError: from urlparse", "\"\"\" try: return self._s.get( urljoin(self.base_url, path), params=kwargs, headers=self.headers, timeout=self.timeout, verify=self.verify,", "return self._s.delete( urljoin(self.base_url, path), params=kwargs, data=data, headers=self.headers, timeout=self.timeout, verify=self.verify, )", "software and associated documentation files (the \"Software\"), to deal in", "whom the Software is furnished to do so, # subject", "Timeout to use for requests to the server. :param verify:", "Copyright (C) 2017 <NAME> <<EMAIL>> # # Permission is hereby", "simple server connection. :param base_url: (str) The server URL. :param", "base_url(self, value): \"\"\" \"\"\" self._base_url = value @property def timeout(self):", "# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT", "= value def param_headers(self, key): \"\"\" Return a specific header", "notice and this permission notice shall be included in all", "is hereby granted, free of charge, to any person obtaining", "connect to server. \"\"\" try: return self._s.put( urljoin(self.base_url, path), params=kwargs,", "ImportError: from urlparse import urljoin import requests from requests.adapters import", "OTHER DEALINGS IN THE SOFTWARE. try: from urllib.parse import urljoin", "server. :param timeout: (int) Timeout to use for requests to", "key: (str) Header parameters key. :returns: If the header parameters", "**kwargs): \"\"\"Submit post request to the path. 
:param path: (str)", "for requests to the server.\"\"\" return self._base_url @base_url.setter def base_url(self,", "= lambda x: x # don't let requests add auth", "use for request to the server.\"\"\" return self._verify @verify.setter def", "the Software without restriction, including without limitation the rights to", "parameter. :param key: (str) Key of the header parameters. \"\"\"", "= value @property def headers(self): \"\"\"Return header request to the", "\"http://\"): adapter = HTTPAdapter(max_retries=1) # adds POST to retry whitelist", "# -*- coding: utf-8 -*- # # The MIT License", "# # Permission is hereby granted, free of charge, to", "def clean_headers(self): \"\"\"Clear header parameters.\"\"\" self.headers = {} def exist_param_headers(self,", "OR OTHERWISE, ARISING FROM, OUT OF OR IN # CONNECTION", "self._headers = value def param_headers(self, key): \"\"\" Return a specific", "proxies servers requests is sent by. \"\"\" def __init__(self, base_url,", "and/or sell copies of # the Software, and to permit", "subject to the following conditions: # # The above copyright", "permit persons to whom the Software is furnished to do", "urlparse import urljoin import requests from requests.adapters import HTTPAdapter from", "don't let requests add auth headers # retry once to", "to whom the Software is furnished to do so, #", "of # the Software, and to permit persons to whom", "\"\"\"Submit delete request to the path. :param path: (str) Path", "= value def del_param_headers(self, key): \"\"\"Remove a specific parameter. :param", "key): \"\"\" Return a specific header parameter. :param key: (str)", "def headers(self, value): \"\"\" \"\"\" self._headers = value def param_headers(self,", "sell copies of # the Software, and to permit persons", "WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN", "for requests to the server. :param verify: (bool) Verify server", "Software is furnished to do so, # subject to the", "a single parameter inside the header. 
:param key: (str) Header", "to server (%s)\" % e) def raw_put(self, path, data, **kwargs):", "IN # CONNECTION WITH THE SOFTWARE OR THE USE OR", "License (MIT) # # Copyright (C) 2017 <NAME> <<EMAIL>> #", "THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE", ":param value: (str) Value to be added. \"\"\" self.headers[key] =", "def __del__(self): self._s.close() @property def base_url(self): \"\"\"Return base url in", "OR OTHER LIABILITY, WHETHER # IN AN ACTION OF CONTRACT,", "@base_url.setter def base_url(self, value): \"\"\" \"\"\" self._base_url = value @property", "BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS #", "or substantial portions of the Software. # # THE SOFTWARE", "<NAME> <<EMAIL>> # # Permission is hereby granted, free of", "def param_headers(self, key): \"\"\" Return a specific header parameter. :param", "OR OTHER DEALINGS IN THE SOFTWARE. try: from urllib.parse import", "True. \"\"\" return self.param_headers(key) is not None def add_param_headers(self, key,", "FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL", "e) def raw_delete(self, path, data={}, **kwargs): \"\"\"Submit delete request to", "(MIT) # # Copyright (C) 2017 <NAME> <<EMAIL>> # #", "def base_url(self, value): \"\"\" \"\"\" self._base_url = value @property def", "value. \"\"\" return self.headers.get(key) def clean_headers(self): \"\"\"Clear header parameters.\"\"\" self.headers", "THE AUTHORS OR # COPYRIGHT HOLDERS BE LIABLE FOR ANY", "\"\"\" Represents a simple server connection. :param base_url: (str) The", "\"Software\"), to deal in # the Software without restriction, including", "(%s)\" % e) def raw_put(self, path, data, **kwargs): \"\"\"Submit put", "connection. :param base_url: (str) The server URL. :param headers: (dict)", "path. :param path: (str) Path for request. 
:param data: (dict)", "import urljoin except ImportError: from urlparse import urljoin import requests", "base url in use for requests to the server.\"\"\" return", "CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS", "self._timeout = value @property def verify(self): \"\"\"Return verify in use", "(int) Timeout to use for requests to the server. :param", "WARRANTIES OF MERCHANTABILITY, FITNESS # FOR A PARTICULAR PURPOSE AND", "get request to the path. :param path: (str) Path for", "(%s)\" % e) def raw_post(self, path, data, **kwargs): \"\"\"Submit post", "server (%s)\" % e) def raw_put(self, path, data, **kwargs): \"\"\"Submit", "let requests add auth headers # retry once to reset", "verify self._s = requests.Session() self._s.auth = lambda x: x #", "# Permission is hereby granted, free of charge, to any", "timeout in use for request to the server.\"\"\" return self._timeout", "adds POST to retry whitelist allowed_methods = set(adapter.max_retries.allowed_methods) allowed_methods.add(\"POST\") adapter.max_retries.allowed_methods", "def add_param_headers(self, key, value): \"\"\"Add a single parameter inside the", "\"\"\" self.headers[key] = value def del_param_headers(self, key): \"\"\"Remove a specific", "None def add_param_headers(self, key, value): \"\"\"Add a single parameter inside", "# # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY", "THE SOFTWARE. try: from urllib.parse import urljoin except ImportError: from", "\"\"\" return self.headers.get(key) def clean_headers(self): \"\"\"Clear header parameters.\"\"\" self.headers =", "\"\"\" Return a specific header parameter. :param key: (str) Header", "self._s.post( urljoin(self.base_url, path), params=kwargs, data=data, headers=self.headers, timeout=self.timeout, verify=self.verify, ) except", "value def del_param_headers(self, key): \"\"\"Remove a specific parameter. 
:param key:", "% e) def raw_put(self, path, data, **kwargs): \"\"\"Submit put request", "self._s.get( urljoin(self.base_url, path), params=kwargs, headers=self.headers, timeout=self.timeout, verify=self.verify, ) except Exception", "to any person obtaining a copy of # this software", "the header. :param key: (str) Header parameters key. :returns: If", "self._s.auth = lambda x: x # don't let requests add", "Exception as e: raise KeycloakConnectionError(\"Can't connect to server (%s)\" %", "to permit persons to whom the Software is furnished to", "coding: utf-8 -*- # # The MIT License (MIT) #", "protocol in (\"https://\", \"http://\"): adapter = HTTPAdapter(max_retries=1) # adds POST", "server SSL. :param proxies: (dict) The proxies servers requests is", "from urlparse import urljoin import requests from requests.adapters import HTTPAdapter", "DEALINGS IN THE SOFTWARE. try: from urllib.parse import urljoin except", "try: return self._s.get( urljoin(self.base_url, path), params=kwargs, headers=self.headers, timeout=self.timeout, verify=self.verify, )", "is sent by. \"\"\" def __init__(self, base_url, headers={}, timeout=60, verify=True,", ":raises: HttpError Can't connect to server. \"\"\" try: return self._s.delete(", "AUTHORS OR # COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,", "The proxies servers requests is sent by. \"\"\" def __init__(self,", "headers=self.headers, timeout=self.timeout, verify=self.verify, ) except Exception as e: raise KeycloakConnectionError(\"Can't", "Value to be added. 
\"\"\" self.headers[key] = value def del_param_headers(self,", ":param proxies: (dict) The proxies servers requests is sent by.", "retry once to reset connection with Keycloak after tomcat's ConnectionTimeout", "def raw_get(self, path, **kwargs): \"\"\"Submit get request to the path.", "the following conditions: # # The above copyright notice and", "the server.\"\"\" return self._headers @headers.setter def headers(self, value): \"\"\" \"\"\"", "in # the Software without restriction, including without limitation the", "raise KeycloakConnectionError(\"Can't connect to server (%s)\" % e) def raw_post(self,", "% e) def raw_delete(self, path, data={}, **kwargs): \"\"\"Submit delete request", "from urllib.parse import urljoin except ImportError: from urlparse import urljoin", "the server.\"\"\" return self._base_url @base_url.setter def base_url(self, value): \"\"\" \"\"\"", ":raises: HttpError Can't connect to server. \"\"\" try: return self._s.post(", "PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR", "\"\"\"Check if the parameter exists in the header. :param key:", "put request to the path. :param path: (str) Path for", "HttpError Can't connect to server. \"\"\" try: return self._s.put( urljoin(self.base_url,", "parameters key. :returns: If the header parameters exist, return True.", "key. :param value: (str) Value to be added. \"\"\" self.headers[key]", "obtaining a copy of # this software and associated documentation", "header parameters exist, return its value. 
\"\"\" return self.headers.get(key) def", "value): \"\"\" \"\"\" self._headers = value def param_headers(self, key): \"\"\"", "any person obtaining a copy of # this software and", "the Software, and to permit persons to whom the Software", "verify(self, value): \"\"\" \"\"\" self._verify = value @property def headers(self):", "exist_param_headers(self, key): \"\"\"Check if the parameter exists in the header.", ") except Exception as e: raise KeycloakConnectionError(\"Can't connect to server", "\"\"\"Submit put request to the path. :param path: (str) Path", "tomcat's ConnectionTimeout # see https://github.com/marcospereirampj/python-keycloak/issues/36 for protocol in (\"https://\", \"http://\"):", "and associated documentation files (the \"Software\"), to deal in #", ":param verify: (bool) Verify server SSL. :param proxies: (dict) The", "**kwargs): \"\"\"Submit delete request to the path. :param path: (str)", "The header parameters of the requests to the server. :param", "add auth headers # retry once to reset connection with", "once to reset connection with Keycloak after tomcat's ConnectionTimeout #", ":param key: (str) Header parameters key. :returns: If the header", "\"\"\" try: return self._s.post( urljoin(self.base_url, path), params=kwargs, data=data, headers=self.headers, timeout=self.timeout,", "# The MIT License (MIT) # # Copyright (C) 2017", "to server. \"\"\" try: return self._s.get( urljoin(self.base_url, path), params=kwargs, headers=self.headers,", "HTTPAdapter from .exceptions import KeycloakConnectionError class ConnectionManager(object): \"\"\" Represents a", "header parameters of the requests to the server. 
:param timeout:", "% e) def raw_post(self, path, data, **kwargs): \"\"\"Submit post request", "params=kwargs, data=data, headers=self.headers, timeout=self.timeout, verify=self.verify, ) except Exception as e:", "raise KeycloakConnectionError(\"Can't connect to server (%s)\" % e) def raw_delete(self,", "= headers self._timeout = timeout self._verify = verify self._s =", "= frozenset(allowed_methods) self._s.mount(protocol, adapter) if proxies: self._s.proxies.update(proxies) def __del__(self): self._s.close()", "Software. # # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT", "\"\"\" try: return self._s.delete( urljoin(self.base_url, path), params=kwargs, data=data, headers=self.headers, timeout=self.timeout,", "requests is sent by. \"\"\" def __init__(self, base_url, headers={}, timeout=60,", "in all # copies or substantial portions of the Software.", "this software and associated documentation files (the \"Software\"), to deal", "KeycloakConnectionError(\"Can't connect to server (%s)\" % e) def raw_put(self, path,", "key. :returns: If the header parameters exist, return True. \"\"\"", "retry whitelist allowed_methods = set(adapter.max_retries.allowed_methods) allowed_methods.add(\"POST\") adapter.max_retries.allowed_methods = frozenset(allowed_methods) self._s.mount(protocol,", "(str) Key of the header parameters. \"\"\" self.headers.pop(key, None) def", "e: raise KeycloakConnectionError(\"Can't connect to server (%s)\" % e) def", "def verify(self, value): \"\"\" \"\"\" self._verify = value @property def", ":param key: (str) Header parameters key. 
:param value: (str) Value", "shall be included in all # copies or substantial portions", "(the \"Software\"), to deal in # the Software without restriction,", "headers={}, timeout=60, verify=True, proxies=None): self._base_url = base_url self._headers = headers", "headers self._timeout = timeout self._verify = verify self._s = requests.Session()", "@timeout.setter def timeout(self, value): \"\"\" \"\"\" self._timeout = value @property", "its value. \"\"\" return self.headers.get(key) def clean_headers(self): \"\"\"Clear header parameters.\"\"\"", "PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR", "= timeout self._verify = verify self._s = requests.Session() self._s.auth =", "request. :returns: Response the request. :raises: HttpError Can't connect to", "set(adapter.max_retries.allowed_methods) allowed_methods.add(\"POST\") adapter.max_retries.allowed_methods = frozenset(allowed_methods) self._s.mount(protocol, adapter) if proxies: self._s.proxies.update(proxies)", "connect to server. \"\"\" try: return self._s.get( urljoin(self.base_url, path), params=kwargs,", ":param path: (str) Path for request. :returns: Response the request.", "THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY", "import requests from requests.adapters import HTTPAdapter from .exceptions import KeycloakConnectionError", "timeout(self): \"\"\"Return timeout in use for request to the server.\"\"\"", "substantial portions of the Software. # # THE SOFTWARE IS", "def exist_param_headers(self, key): \"\"\"Check if the parameter exists in the", ":raises: HttpError Can't connect to server. \"\"\" try: return self._s.get(", "notice shall be included in all # copies or substantial", "HttpError Can't connect to server. 
\"\"\" try: return self._s.delete( urljoin(self.base_url,", "WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING", "timeout self._verify = verify self._s = requests.Session() self._s.auth = lambda", "headers(self): \"\"\"Return header request to the server.\"\"\" return self._headers @headers.setter", "value): \"\"\" \"\"\" self._timeout = value @property def verify(self): \"\"\"Return", "\"\"\"Return base url in use for requests to the server.\"\"\"", "URL. :param headers: (dict) The header parameters of the requests", ":param path: (str) Path for request. :param data: (dict) Payload", "to server (%s)\" % e) def raw_delete(self, path, data={}, **kwargs):", "verify: (bool) Verify server SSL. :param proxies: (dict) The proxies", "key. :returns: If the header parameters exist, return its value.", "SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,", "urljoin except ImportError: from urlparse import urljoin import requests from", "return its value. \"\"\" return self.headers.get(key) def clean_headers(self): \"\"\"Clear header", "BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER", "Represents a simple server connection. :param base_url: (str) The server", "in use for request to the server.\"\"\" return self._verify @verify.setter", "KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO", "self._verify @verify.setter def verify(self, value): \"\"\" \"\"\" self._verify = value", "of # this software and associated documentation files (the \"Software\"),", "OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES", "to the path. :param path: (str) Path for request. :returns:", "# # The MIT License (MIT) # # Copyright (C)", "parameter inside the header. :param key: (str) Header parameters key.", "the path. :param path: (str) Path for request. :param data:", "path, data={}, **kwargs): \"\"\"Submit delete request to the path. 
:param", "requests to the server.\"\"\" return self._base_url @base_url.setter def base_url(self, value):", "adapter.max_retries.allowed_methods = frozenset(allowed_methods) self._s.mount(protocol, adapter) if proxies: self._s.proxies.update(proxies) def __del__(self):", "@property def base_url(self): \"\"\"Return base url in use for requests", "return self._s.post( urljoin(self.base_url, path), params=kwargs, data=data, headers=self.headers, timeout=self.timeout, verify=self.verify, )", "self._headers = headers self._timeout = timeout self._verify = verify self._s", "specific parameter. :param key: (str) Key of the header parameters.", "INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS", "LIABILITY, WHETHER # IN AN ACTION OF CONTRACT, TORT OR", "as e: raise KeycloakConnectionError(\"Can't connect to server (%s)\" % e)", "server (%s)\" % e) def raw_post(self, path, data, **kwargs): \"\"\"Submit", "request to the server.\"\"\" return self._headers @headers.setter def headers(self, value):", ":param timeout: (int) Timeout to use for requests to the", "HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,", "parameter. :param key: (str) Header parameters key. :returns: If the", "copyright notice and this permission notice shall be included in", "ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED", "The server URL. :param headers: (dict) The header parameters of", "proxies=None): self._base_url = base_url self._headers = headers self._timeout = timeout", "urljoin(self.base_url, path), params=kwargs, data=data, headers=self.headers, timeout=self.timeout, verify=self.verify, ) except Exception", ":raises: HttpError Can't connect to server. \"\"\" try: return self._s.put(", "Path for request. :returns: Response the request. 
:raises: HttpError Can't", "and to permit persons to whom the Software is furnished", "FROM, OUT OF OR IN # CONNECTION WITH THE SOFTWARE", "with Keycloak after tomcat's ConnectionTimeout # see https://github.com/marcospereirampj/python-keycloak/issues/36 for protocol", "header. :param key: (str) Header parameters key. :returns: If the", "to deal in # the Software without restriction, including without", "associated documentation files (the \"Software\"), to deal in # the", "parameter exists in the header. :param key: (str) Header parameters", "= base_url self._headers = headers self._timeout = timeout self._verify =", "return self._timeout @timeout.setter def timeout(self, value): \"\"\" \"\"\" self._timeout =", "USE OR OTHER DEALINGS IN THE SOFTWARE. try: from urllib.parse", "__init__(self, base_url, headers={}, timeout=60, verify=True, proxies=None): self._base_url = base_url self._headers", "to server (%s)\" % e) def raw_post(self, path, data, **kwargs):", "(\"https://\", \"http://\"): adapter = HTTPAdapter(max_retries=1) # adds POST to retry", "IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,", "verify(self): \"\"\"Return verify in use for request to the server.\"\"\"", "Payload for request. :returns: Response the request. :raises: HttpError Can't", "If the header parameters exist, return its value. \"\"\" return", "in (\"https://\", \"http://\"): adapter = HTTPAdapter(max_retries=1) # adds POST to", "restriction, including without limitation the rights to # use, copy,", "of the Software. # # THE SOFTWARE IS PROVIDED \"AS", "parameters. \"\"\" self.headers.pop(key, None) def raw_get(self, path, **kwargs): \"\"\"Submit get", "proxies: (dict) The proxies servers requests is sent by. \"\"\"", "if proxies: self._s.proxies.update(proxies) def __del__(self): self._s.close() @property def base_url(self): \"\"\"Return", "the requests to the server. 
:param timeout: (int) Timeout to", "requests.adapters import HTTPAdapter from .exceptions import KeycloakConnectionError class ConnectionManager(object): \"\"\"", "to reset connection with Keycloak after tomcat's ConnectionTimeout # see", "HTTPAdapter(max_retries=1) # adds POST to retry whitelist allowed_methods = set(adapter.max_retries.allowed_methods)", "self._base_url @base_url.setter def base_url(self, value): \"\"\" \"\"\" self._base_url = value", "not None def add_param_headers(self, key, value): \"\"\"Add a single parameter", "2017 <NAME> <<EMAIL>> # # Permission is hereby granted, free", "header parameters exist, return True. \"\"\" return self.param_headers(key) is not", "path, data, **kwargs): \"\"\"Submit post request to the path. :param", "for request. :param data: (dict) Payload for request. :returns: Response", "requests.Session() self._s.auth = lambda x: x # don't let requests", "inside the header. :param key: (str) Header parameters key. :param", "\"\"\" \"\"\" self._base_url = value @property def timeout(self): \"\"\"Return timeout", "connection with Keycloak after tomcat's ConnectionTimeout # see https://github.com/marcospereirampj/python-keycloak/issues/36 for", "connect to server (%s)\" % e) def raw_delete(self, path, data={},", "EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE", "url in use for requests to the server.\"\"\" return self._base_url", "del_param_headers(self, key): \"\"\"Remove a specific parameter. :param key: (str) Key", "@property def timeout(self): \"\"\"Return timeout in use for request to", "data: (dict) Payload for request. :returns: Response the request. :raises:", "value): \"\"\" \"\"\" self._verify = value @property def headers(self): \"\"\"Return", "OF MERCHANTABILITY, FITNESS # FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.", "Verify server SSL. 
:param proxies: (dict) The proxies servers requests", "\"\"\" self._timeout = value @property def verify(self): \"\"\"Return verify in", "for protocol in (\"https://\", \"http://\"): adapter = HTTPAdapter(max_retries=1) # adds", "self._s.proxies.update(proxies) def __del__(self): self._s.close() @property def base_url(self): \"\"\"Return base url", "the server.\"\"\" return self._verify @verify.setter def verify(self, value): \"\"\" \"\"\"", "parameters exist, return True. \"\"\" return self.param_headers(key) is not None", "without limitation the rights to # use, copy, modify, merge,", "CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER # IN AN ACTION", "Can't connect to server. \"\"\" try: return self._s.put( urljoin(self.base_url, path),", "# retry once to reset connection with Keycloak after tomcat's", "return self._verify @verify.setter def verify(self, value): \"\"\" \"\"\" self._verify =", "(dict) The proxies servers requests is sent by. \"\"\" def", "self._verify = value @property def headers(self): \"\"\"Return header request to", "copies of # the Software, and to permit persons to", "to server. \"\"\" try: return self._s.post( urljoin(self.base_url, path), params=kwargs, data=data,", "OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. try:", "THE USE OR OTHER DEALINGS IN THE SOFTWARE. try: from", "HttpError Can't connect to server. \"\"\" try: return self._s.get( urljoin(self.base_url,", "Header parameters key. :param value: (str) Value to be added.", "__del__(self): self._s.close() @property def base_url(self): \"\"\"Return base url in use", "= {} def exist_param_headers(self, key): \"\"\"Check if the parameter exists", "timeout: (int) Timeout to use for requests to the server.", "publish, distribute, sublicense, and/or sell copies of # the Software,", "CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN", "path, **kwargs): \"\"\"Submit get request to the path. 
:param path:", "\"\"\" self._verify = value @property def headers(self): \"\"\"Return header request", "FITNESS # FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO", "data, **kwargs): \"\"\"Submit put request to the path. :param path:", "above copyright notice and this permission notice shall be included", "IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED,", "A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE", "MERCHANTABILITY, FITNESS # FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN", "proxies: self._s.proxies.update(proxies) def __del__(self): self._s.close() @property def base_url(self): \"\"\"Return base", "to # use, copy, modify, merge, publish, distribute, sublicense, and/or", "requests add auth headers # retry once to reset connection", "is furnished to do so, # subject to the following", "def verify(self): \"\"\"Return verify in use for request to the", "params=kwargs, headers=self.headers, timeout=self.timeout, verify=self.verify, ) except Exception as e: raise", "# copies or substantial portions of the Software. # #", "to use for requests to the server. :param verify: (bool)", "path, data, **kwargs): \"\"\"Submit put request to the path. :param", "NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR # COPYRIGHT", "= requests.Session() self._s.auth = lambda x: x # don't let", "copies or substantial portions of the Software. # # THE", "try: return self._s.delete( urljoin(self.base_url, path), params=kwargs, data=data, headers=self.headers, timeout=self.timeout, verify=self.verify,", "(str) Path for request. :param data: (dict) Payload for request.", "server (%s)\" % e) def raw_delete(self, path, data={}, **kwargs): \"\"\"Submit", "parameters key. :param value: (str) Value to be added. 
\"\"\"", "# see https://github.com/marcospereirampj/python-keycloak/issues/36 for protocol in (\"https://\", \"http://\"): adapter =", "frozenset(allowed_methods) self._s.mount(protocol, adapter) if proxies: self._s.proxies.update(proxies) def __del__(self): self._s.close() @property", "key, value): \"\"\"Add a single parameter inside the header. :param", "urllib.parse import urljoin except ImportError: from urlparse import urljoin import", ":param headers: (dict) The header parameters of the requests to", "OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT", "data={}, **kwargs): \"\"\"Submit delete request to the path. :param path:", "header parameter. :param key: (str) Header parameters key. :returns: If", "self._s.mount(protocol, adapter) if proxies: self._s.proxies.update(proxies) def __del__(self): self._s.close() @property def", "in the header. :param key: (str) Header parameters key. :returns:", "to the following conditions: # # The above copyright notice", "the parameter exists in the header. :param key: (str) Header", "https://github.com/marcospereirampj/python-keycloak/issues/36 for protocol in (\"https://\", \"http://\"): adapter = HTTPAdapter(max_retries=1) #", "verify=True, proxies=None): self._base_url = base_url self._headers = headers self._timeout =", "AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT", "and this permission notice shall be included in all #", "class ConnectionManager(object): \"\"\" Represents a simple server connection. :param base_url:", "WHETHER # IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,", "KeycloakConnectionError(\"Can't connect to server (%s)\" % e) def raw_delete(self, path,", "parameters key. :returns: If the header parameters exist, return its", ":returns: If the header parameters exist, return its value. \"\"\"", "the header. :param key: (str) Header parameters key. 
:param value:", "free of charge, to any person obtaining a copy of", "NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS # FOR", "header parameters.\"\"\" self.headers = {} def exist_param_headers(self, key): \"\"\"Check if", "for request. :returns: Response the request. :raises: HttpError Can't connect", "SHALL THE AUTHORS OR # COPYRIGHT HOLDERS BE LIABLE FOR", "IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,", "charge, to any person obtaining a copy of # this", "the Software is furnished to do so, # subject to", "from requests.adapters import HTTPAdapter from .exceptions import KeycloakConnectionError class ConnectionManager(object):", "import HTTPAdapter from .exceptions import KeycloakConnectionError class ConnectionManager(object): \"\"\" Represents", "Software, and to permit persons to whom the Software is", ":param base_url: (str) The server URL. :param headers: (dict) The", "SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.", "(str) Header parameters key. :returns: If the header parameters exist,", "use for requests to the server. :param verify: (bool) Verify", "auth headers # retry once to reset connection with Keycloak", "to the server. :param verify: (bool) Verify server SSL. :param", "# this software and associated documentation files (the \"Software\"), to", "to the server. :param timeout: (int) Timeout to use for", "= verify self._s = requests.Session() self._s.auth = lambda x: x", "THE WARRANTIES OF MERCHANTABILITY, FITNESS # FOR A PARTICULAR PURPOSE", "# use, copy, modify, merge, publish, distribute, sublicense, and/or sell", "modify, merge, publish, distribute, sublicense, and/or sell copies of #", "to retry whitelist allowed_methods = set(adapter.max_retries.allowed_methods) allowed_methods.add(\"POST\") adapter.max_retries.allowed_methods = frozenset(allowed_methods)", "server.\"\"\" return self._headers @headers.setter def headers(self, value): \"\"\" \"\"\" self._headers", "header. :param key: (str) Header parameters key. 
:param value: (str)", "# the Software, and to permit persons to whom the", "EVENT SHALL THE AUTHORS OR # COPYRIGHT HOLDERS BE LIABLE", "If the header parameters exist, return True. \"\"\" return self.param_headers(key)", "for request to the server.\"\"\" return self._timeout @timeout.setter def timeout(self,", "# adds POST to retry whitelist allowed_methods = set(adapter.max_retries.allowed_methods) allowed_methods.add(\"POST\")", "to the server.\"\"\" return self._verify @verify.setter def verify(self, value): \"\"\"", "urljoin import requests from requests.adapters import HTTPAdapter from .exceptions import", "adapter = HTTPAdapter(max_retries=1) # adds POST to retry whitelist allowed_methods", "COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER", "base_url, headers={}, timeout=60, verify=True, proxies=None): self._base_url = base_url self._headers =", "self.headers[key] = value def del_param_headers(self, key): \"\"\"Remove a specific parameter.", "def raw_put(self, path, data, **kwargs): \"\"\"Submit put request to the", "connect to server (%s)\" % e) def raw_post(self, path, data,", "value @property def timeout(self): \"\"\"Return timeout in use for request", "Response the request. :raises: HttpError Can't connect to server. \"\"\"", "self.headers.pop(key, None) def raw_get(self, path, **kwargs): \"\"\"Submit get request to", "use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies", "the server. :param verify: (bool) Verify server SSL. :param proxies:", "delete request to the path. :param path: (str) Path for", "None) def raw_get(self, path, **kwargs): \"\"\"Submit get request to the", "merge, publish, distribute, sublicense, and/or sell copies of # the", "OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR", "path: (str) Path for request. :returns: Response the request. :raises:", "ConnectionManager(object): \"\"\" Represents a simple server connection. 
:param base_url: (str)", "NO EVENT SHALL THE AUTHORS OR # COPYRIGHT HOLDERS BE", "granted, free of charge, to any person obtaining a copy", "whitelist allowed_methods = set(adapter.max_retries.allowed_methods) allowed_methods.add(\"POST\") adapter.max_retries.allowed_methods = frozenset(allowed_methods) self._s.mount(protocol, adapter)", "to the server.\"\"\" return self._timeout @timeout.setter def timeout(self, value): \"\"\"", "to server. \"\"\" try: return self._s.put( urljoin(self.base_url, path), params=kwargs, data=data,", "reset connection with Keycloak after tomcat's ConnectionTimeout # see https://github.com/marcospereirampj/python-keycloak/issues/36", "exists in the header. :param key: (str) Header parameters key.", "headers # retry once to reset connection with Keycloak after", "sent by. \"\"\" def __init__(self, base_url, headers={}, timeout=60, verify=True, proxies=None):", "path), params=kwargs, data=data, headers=self.headers, timeout=self.timeout, verify=self.verify, ) except Exception as", "def base_url(self): \"\"\"Return base url in use for requests to", "(C) 2017 <NAME> <<EMAIL>> # # Permission is hereby granted,", "OF OR IN # CONNECTION WITH THE SOFTWARE OR THE", "# the Software without restriction, including without limitation the rights", "PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS", "e) def raw_put(self, path, data, **kwargs): \"\"\"Submit put request to", "server. \"\"\" try: return self._s.put( urljoin(self.base_url, path), params=kwargs, data=data, headers=self.headers,", "Permission is hereby granted, free of charge, to any person", "\"\"\"Clear header parameters.\"\"\" self.headers = {} def exist_param_headers(self, key): \"\"\"Check", "self._timeout = timeout self._verify = verify self._s = requests.Session() self._s.auth", "AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR #", "value: (str) Value to be added. 
\"\"\" self.headers[key] = value", "self._s.close() @property def base_url(self): \"\"\"Return base url in use for", "exist, return its value. \"\"\" return self.headers.get(key) def clean_headers(self): \"\"\"Clear", "of charge, to any person obtaining a copy of #", "return self.headers.get(key) def clean_headers(self): \"\"\"Clear header parameters.\"\"\" self.headers = {}", "Can't connect to server. \"\"\" try: return self._s.delete( urljoin(self.base_url, path),", "The above copyright notice and this permission notice shall be", "ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER # IN AN", "the header parameters exist, return its value. \"\"\" return self.headers.get(key)", "exist, return True. \"\"\" return self.param_headers(key) is not None def", "= set(adapter.max_retries.allowed_methods) allowed_methods.add(\"POST\") adapter.max_retries.allowed_methods = frozenset(allowed_methods) self._s.mount(protocol, adapter) if proxies:", "request. :param data: (dict) Payload for request. :returns: Response the", "(dict) Payload for request. :returns: Response the request. :raises: HttpError", "sublicense, and/or sell copies of # the Software, and to", "Return a specific header parameter. :param key: (str) Header parameters", "<<EMAIL>> # # Permission is hereby granted, free of charge,", ".exceptions import KeycloakConnectionError class ConnectionManager(object): \"\"\" Represents a simple server", "KeycloakConnectionError class ConnectionManager(object): \"\"\" Represents a simple server connection. :param", "adapter) if proxies: self._s.proxies.update(proxies) def __del__(self): self._s.close() @property def base_url(self):", "# # Copyright (C) 2017 <NAME> <<EMAIL>> # # Permission", "WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT", "{} def exist_param_headers(self, key): \"\"\"Check if the parameter exists in", "server. 
\"\"\" try: return self._s.get( urljoin(self.base_url, path), params=kwargs, headers=self.headers, timeout=self.timeout,", "headers(self, value): \"\"\" \"\"\" self._headers = value def param_headers(self, key):", "person obtaining a copy of # this software and associated", "import KeycloakConnectionError class ConnectionManager(object): \"\"\" Represents a simple server connection.", "IN THE SOFTWARE. try: from urllib.parse import urljoin except ImportError:", "try: from urllib.parse import urljoin except ImportError: from urlparse import", "the server. :param timeout: (int) Timeout to use for requests", "verify in use for request to the server.\"\"\" return self._verify", "except Exception as e: raise KeycloakConnectionError(\"Can't connect to server (%s)\"" ]
[ "p,cc = {n: cs[(i+1)%len(cs)] for i,n in enumerate(cs)},cs[-1] for _", "range(8): n = p[n] a.append(str(n)) return \"\".join(a), p[1] * p[p[1]]", "a.append(str(n)) return \"\".join(a), p[1] * p[p[1]] print(\"Part 1:\", f(cs.copy(), 100)[0])", "= p[n] a.append(str(n)) return \"\".join(a), p[1] * p[p[1]] print(\"Part 1:\",", "p[dc],p[hc[-1]] = hc[0],p[dc] a,n = [],1 for _ in range(8):", "2:\", f(cs.copy() + [i for i in range(10, 1000001)], 10000000)[1])", "= [int(c) for c in open(\"inputs/23.in\", \"r\").readline().strip()] def f(cs, ts):", "for c in open(\"inputs/23.in\", \"r\").readline().strip()] def f(cs, ts): p,cc =", "in range(ts): cc,dc = p[cc],p[cc]-1 if p[cc]-1 > 0 else", "-= 1 if dc < 1: dc = max(p.keys()) p[dc],p[hc[-1]]", "else max(p.keys()) hc,p[cc] = [p[cc], p[p[cc]], p[p[p[cc]]]],p[p[p[p[cc]]]] while dc in", "100)[0]) print(\"Part 2:\", f(cs.copy() + [i for i in range(10,", "<gh_stars>1-10 #!venv/bin/python3 cs = [int(c) for c in open(\"inputs/23.in\", \"r\").readline().strip()]", "\"r\").readline().strip()] def f(cs, ts): p,cc = {n: cs[(i+1)%len(cs)] for i,n", "in open(\"inputs/23.in\", \"r\").readline().strip()] def f(cs, ts): p,cc = {n: cs[(i+1)%len(cs)]", "[],1 for _ in range(8): n = p[n] a.append(str(n)) return", "* p[p[1]] print(\"Part 1:\", f(cs.copy(), 100)[0]) print(\"Part 2:\", f(cs.copy() +", "max(p.keys()) hc,p[cc] = [p[cc], p[p[cc]], p[p[p[cc]]]],p[p[p[p[cc]]]] while dc in hc:", "in range(8): n = p[n] a.append(str(n)) return \"\".join(a), p[1] *", "hc,p[cc] = [p[cc], p[p[cc]], p[p[p[cc]]]],p[p[p[p[cc]]]] while dc in hc: dc", "ts): p,cc = {n: cs[(i+1)%len(cs)] for i,n in enumerate(cs)},cs[-1] for", "c in open(\"inputs/23.in\", \"r\").readline().strip()] def f(cs, ts): p,cc = {n:", "cs = [int(c) for c in open(\"inputs/23.in\", \"r\").readline().strip()] def f(cs,", "while dc in hc: dc -= 1 if dc <", "p[cc]-1 > 0 else max(p.keys()) hc,p[cc] = [p[cc], p[p[cc]], p[p[p[cc]]]],p[p[p[p[cc]]]]", "if dc < 1: dc = max(p.keys()) p[dc],p[hc[-1]] = 
hc[0],p[dc]", "\"\".join(a), p[1] * p[p[1]] print(\"Part 1:\", f(cs.copy(), 100)[0]) print(\"Part 2:\",", "f(cs, ts): p,cc = {n: cs[(i+1)%len(cs)] for i,n in enumerate(cs)},cs[-1]", "cc,dc = p[cc],p[cc]-1 if p[cc]-1 > 0 else max(p.keys()) hc,p[cc]", "if p[cc]-1 > 0 else max(p.keys()) hc,p[cc] = [p[cc], p[p[cc]],", "cs[(i+1)%len(cs)] for i,n in enumerate(cs)},cs[-1] for _ in range(ts): cc,dc", "= max(p.keys()) p[dc],p[hc[-1]] = hc[0],p[dc] a,n = [],1 for _", "print(\"Part 2:\", f(cs.copy() + [i for i in range(10, 1000001)],", "1:\", f(cs.copy(), 100)[0]) print(\"Part 2:\", f(cs.copy() + [i for i", "a,n = [],1 for _ in range(8): n = p[n]", "hc: dc -= 1 if dc < 1: dc =", "_ in range(ts): cc,dc = p[cc],p[cc]-1 if p[cc]-1 > 0", "dc < 1: dc = max(p.keys()) p[dc],p[hc[-1]] = hc[0],p[dc] a,n", "1 if dc < 1: dc = max(p.keys()) p[dc],p[hc[-1]] =", "dc in hc: dc -= 1 if dc < 1:", "p[cc],p[cc]-1 if p[cc]-1 > 0 else max(p.keys()) hc,p[cc] = [p[cc],", "in enumerate(cs)},cs[-1] for _ in range(ts): cc,dc = p[cc],p[cc]-1 if", "def f(cs, ts): p,cc = {n: cs[(i+1)%len(cs)] for i,n in", "i,n in enumerate(cs)},cs[-1] for _ in range(ts): cc,dc = p[cc],p[cc]-1", "for _ in range(ts): cc,dc = p[cc],p[cc]-1 if p[cc]-1 >", "1: dc = max(p.keys()) p[dc],p[hc[-1]] = hc[0],p[dc] a,n = [],1", "= hc[0],p[dc] a,n = [],1 for _ in range(8): n", "> 0 else max(p.keys()) hc,p[cc] = [p[cc], p[p[cc]], p[p[p[cc]]]],p[p[p[p[cc]]]] while", "[int(c) for c in open(\"inputs/23.in\", \"r\").readline().strip()] def f(cs, ts): p,cc", "= [p[cc], p[p[cc]], p[p[p[cc]]]],p[p[p[p[cc]]]] while dc in hc: dc -=", "n = p[n] a.append(str(n)) return \"\".join(a), p[1] * p[p[1]] print(\"Part", "hc[0],p[dc] a,n = [],1 for _ in range(8): n =", "_ in range(8): n = p[n] a.append(str(n)) return \"\".join(a), p[1]", "print(\"Part 1:\", f(cs.copy(), 100)[0]) print(\"Part 2:\", f(cs.copy() + [i for", "= p[cc],p[cc]-1 if p[cc]-1 > 0 else max(p.keys()) hc,p[cc] =", "p[n] a.append(str(n)) return \"\".join(a), p[1] * p[p[1]] print(\"Part 
1:\", f(cs.copy(),", "return \"\".join(a), p[1] * p[p[1]] print(\"Part 1:\", f(cs.copy(), 100)[0]) print(\"Part", "enumerate(cs)},cs[-1] for _ in range(ts): cc,dc = p[cc],p[cc]-1 if p[cc]-1", "range(ts): cc,dc = p[cc],p[cc]-1 if p[cc]-1 > 0 else max(p.keys())", "dc = max(p.keys()) p[dc],p[hc[-1]] = hc[0],p[dc] a,n = [],1 for", "{n: cs[(i+1)%len(cs)] for i,n in enumerate(cs)},cs[-1] for _ in range(ts):", "p[1] * p[p[1]] print(\"Part 1:\", f(cs.copy(), 100)[0]) print(\"Part 2:\", f(cs.copy()", "open(\"inputs/23.in\", \"r\").readline().strip()] def f(cs, ts): p,cc = {n: cs[(i+1)%len(cs)] for", "0 else max(p.keys()) hc,p[cc] = [p[cc], p[p[cc]], p[p[p[cc]]]],p[p[p[p[cc]]]] while dc", "p[p[p[cc]]]],p[p[p[p[cc]]]] while dc in hc: dc -= 1 if dc", "#!venv/bin/python3 cs = [int(c) for c in open(\"inputs/23.in\", \"r\").readline().strip()] def", "for _ in range(8): n = p[n] a.append(str(n)) return \"\".join(a),", "for i,n in enumerate(cs)},cs[-1] for _ in range(ts): cc,dc =", "= {n: cs[(i+1)%len(cs)] for i,n in enumerate(cs)},cs[-1] for _ in", "p[p[1]] print(\"Part 1:\", f(cs.copy(), 100)[0]) print(\"Part 2:\", f(cs.copy() + [i", "p[p[cc]], p[p[p[cc]]]],p[p[p[p[cc]]]] while dc in hc: dc -= 1 if", "f(cs.copy(), 100)[0]) print(\"Part 2:\", f(cs.copy() + [i for i in", "= [],1 for _ in range(8): n = p[n] a.append(str(n))", "< 1: dc = max(p.keys()) p[dc],p[hc[-1]] = hc[0],p[dc] a,n =", "[p[cc], p[p[cc]], p[p[p[cc]]]],p[p[p[p[cc]]]] while dc in hc: dc -= 1", "dc -= 1 if dc < 1: dc = max(p.keys())", "max(p.keys()) p[dc],p[hc[-1]] = hc[0],p[dc] a,n = [],1 for _ in", "in hc: dc -= 1 if dc < 1: dc" ]
[ "# Run the application in debug mode app.run(host='0.0.0.0', port=int(app.config['PORT']), debug=True)", "device_registry import app # Run the application in debug mode", "Import the application from device_registry import app # Run the", "# Import the application from device_registry import app # Run", "application from device_registry import app # Run the application in", "import app # Run the application in debug mode app.run(host='0.0.0.0',", "the application from device_registry import app # Run the application", "app # Run the application in debug mode app.run(host='0.0.0.0', port=int(app.config['PORT']),", "<gh_stars>10-100 # Import the application from device_registry import app #", "from device_registry import app # Run the application in debug" ]
[ "def load_stage_file(path): with open(path, \"r\", encoding=\"utf-8\") as fd: return parse_stage(fd.read(),", "\"r\", encoding=\"utf-8\") as fd: return parse_stage(fd.read(), path) def parse_stage(text, path):", "{} except yaml.error.YAMLError as exc: raise StageFileCorruptedError(path, cause=exc) def parse_stage_for_update(text,", "import CSafeLoader as SafeLoader except ImportError: from yaml import SafeLoader", "\"w\", encoding=\"utf-8\") as fd: yaml = YAML() yaml.default_flow_style = False", "attributes to store comments and line breaks. This allows us", "This allows us to preserve all of those upon dump.", "special attributes to store comments and line breaks. This allows", "ruamel.yaml import YAML from ruamel.yaml.error import YAMLError try: from yaml", "try: return yaml.load(text, Loader=SafeLoader) or {} except yaml.error.YAMLError as exc:", "YAML from ruamel.yaml.error import YAMLError try: from yaml import CSafeLoader", "\"\"\" try: yaml = YAML() return yaml.load(text) or {} except", "def dump_stage_file(path, data): with open(path, \"w\", encoding=\"utf-8\") as fd: yaml", "simple `parse_stage()`. \"\"\" try: yaml = YAML() return yaml.load(text) or", "parse_stage(fd.read(), path) def parse_stage(text, path): try: return yaml.load(text, Loader=SafeLoader) or", "yaml.load(text) or {} except YAMLError as exc: raise StageFileCorruptedError(path, cause=exc)", "or {} except YAMLError as exc: raise StageFileCorruptedError(path, cause=exc) def", "from dvc.exceptions import StageFileCorruptedError from dvc.utils.compat import open def load_stage_file(path):", "dump. This one is, however, several times slower than simple", "path): \"\"\"Parses text into Python structure. Unlike `parse_stage()` this returns", "and line breaks. 
This allows us to preserve all of", "def parse_stage(text, path): try: return yaml.load(text, Loader=SafeLoader) or {} except", "CSafeLoader as SafeLoader except ImportError: from yaml import SafeLoader from", "path) def parse_stage(text, path): try: return yaml.load(text, Loader=SafeLoader) or {}", "as fd: yaml = YAML() yaml.default_flow_style = False yaml.dump(data, fd)", "have special attributes to store comments and line breaks. This", "open(path, \"r\", encoding=\"utf-8\") as fd: return parse_stage(fd.read(), path) def parse_stage(text,", "dvc.exceptions import StageFileCorruptedError from dvc.utils.compat import open def load_stage_file(path): with", "text into Python structure. Unlike `parse_stage()` this returns ordered dicts,", "into Python structure. Unlike `parse_stage()` this returns ordered dicts, values", "path): try: return yaml.load(text, Loader=SafeLoader) or {} except yaml.error.YAMLError as", "dicts, values have special attributes to store comments and line", "from dvc.utils.compat import open def load_stage_file(path): with open(path, \"r\", encoding=\"utf-8\")", "values have special attributes to store comments and line breaks.", "parse_stage_for_update(text, path): \"\"\"Parses text into Python structure. Unlike `parse_stage()` this", "allows us to preserve all of those upon dump. This", "SafeLoader from dvc.exceptions import StageFileCorruptedError from dvc.utils.compat import open def", "except ImportError: from yaml import SafeLoader from dvc.exceptions import StageFileCorruptedError", "fd: return parse_stage(fd.read(), path) def parse_stage(text, path): try: return yaml.load(text,", "to store comments and line breaks. 
This allows us to", "yaml.error.YAMLError as exc: raise StageFileCorruptedError(path, cause=exc) def parse_stage_for_update(text, path): \"\"\"Parses", "yaml from ruamel.yaml import YAML from ruamel.yaml.error import YAMLError try:", "StageFileCorruptedError from dvc.utils.compat import open def load_stage_file(path): with open(path, \"r\",", "open(path, \"w\", encoding=\"utf-8\") as fd: yaml = YAML() yaml.default_flow_style =", "line breaks. This allows us to preserve all of those", "as exc: raise StageFileCorruptedError(path, cause=exc) def dump_stage_file(path, data): with open(path,", "slower than simple `parse_stage()`. \"\"\" try: yaml = YAML() return", "This one is, however, several times slower than simple `parse_stage()`.", "raise StageFileCorruptedError(path, cause=exc) def parse_stage_for_update(text, path): \"\"\"Parses text into Python", "= YAML() return yaml.load(text) or {} except YAMLError as exc:", "store comments and line breaks. This allows us to preserve", "Loader=SafeLoader) or {} except yaml.error.YAMLError as exc: raise StageFileCorruptedError(path, cause=exc)", "structure. Unlike `parse_stage()` this returns ordered dicts, values have special", "import YAMLError try: from yaml import CSafeLoader as SafeLoader except", "from yaml import CSafeLoader as SafeLoader except ImportError: from yaml", "of those upon dump. This one is, however, several times", "\"\"\"Parses text into Python structure. 
Unlike `parse_stage()` this returns ordered", "data): with open(path, \"w\", encoding=\"utf-8\") as fd: yaml = YAML()", "Unlike `parse_stage()` this returns ordered dicts, values have special attributes", "as fd: return parse_stage(fd.read(), path) def parse_stage(text, path): try: return", "cause=exc) def dump_stage_file(path, data): with open(path, \"w\", encoding=\"utf-8\") as fd:", "try: yaml = YAML() return yaml.load(text) or {} except YAMLError", "raise StageFileCorruptedError(path, cause=exc) def dump_stage_file(path, data): with open(path, \"w\", encoding=\"utf-8\")", "load_stage_file(path): with open(path, \"r\", encoding=\"utf-8\") as fd: return parse_stage(fd.read(), path)", "as SafeLoader except ImportError: from yaml import SafeLoader from dvc.exceptions", "than simple `parse_stage()`. \"\"\" try: yaml = YAML() return yaml.load(text)", "from yaml import SafeLoader from dvc.exceptions import StageFileCorruptedError from dvc.utils.compat", "however, several times slower than simple `parse_stage()`. \"\"\" try: yaml", "one is, however, several times slower than simple `parse_stage()`. \"\"\"", "times slower than simple `parse_stage()`. \"\"\" try: yaml = YAML()", "YAMLError try: from yaml import CSafeLoader as SafeLoader except ImportError:", "or {} except yaml.error.YAMLError as exc: raise StageFileCorruptedError(path, cause=exc) def", "import StageFileCorruptedError from dvc.utils.compat import open def load_stage_file(path): with open(path,", "several times slower than simple `parse_stage()`. \"\"\" try: yaml =", "yaml.load(text, Loader=SafeLoader) or {} except yaml.error.YAMLError as exc: raise StageFileCorruptedError(path,", "returns ordered dicts, values have special attributes to store comments", "except yaml.error.YAMLError as exc: raise StageFileCorruptedError(path, cause=exc) def parse_stage_for_update(text, path):", "ImportError: from yaml import SafeLoader from dvc.exceptions import StageFileCorruptedError from", "breaks. 
This allows us to preserve all of those upon", "YAML() return yaml.load(text) or {} except YAMLError as exc: raise", "except YAMLError as exc: raise StageFileCorruptedError(path, cause=exc) def dump_stage_file(path, data):", "is, however, several times slower than simple `parse_stage()`. \"\"\" try:", "with open(path, \"w\", encoding=\"utf-8\") as fd: yaml = YAML() yaml.default_flow_style", "yaml import SafeLoader from dvc.exceptions import StageFileCorruptedError from dvc.utils.compat import", "upon dump. This one is, however, several times slower than", "dump_stage_file(path, data): with open(path, \"w\", encoding=\"utf-8\") as fd: yaml =", "comments and line breaks. This allows us to preserve all", "exc: raise StageFileCorruptedError(path, cause=exc) def dump_stage_file(path, data): with open(path, \"w\",", "encoding=\"utf-8\") as fd: yaml = YAML() yaml.default_flow_style = False yaml.dump(data,", "those upon dump. This one is, however, several times slower", "return yaml.load(text) or {} except YAMLError as exc: raise StageFileCorruptedError(path,", "to preserve all of those upon dump. This one is,", "`parse_stage()` this returns ordered dicts, values have special attributes to", "import SafeLoader from dvc.exceptions import StageFileCorruptedError from dvc.utils.compat import open", "open def load_stage_file(path): with open(path, \"r\", encoding=\"utf-8\") as fd: return", "from ruamel.yaml.error import YAMLError try: from yaml import CSafeLoader as", "def parse_stage_for_update(text, path): \"\"\"Parses text into Python structure. Unlike `parse_stage()`", "StageFileCorruptedError(path, cause=exc) def dump_stage_file(path, data): with open(path, \"w\", encoding=\"utf-8\") as", "this returns ordered dicts, values have special attributes to store", "us to preserve all of those upon dump. 
This one", "YAMLError as exc: raise StageFileCorruptedError(path, cause=exc) def dump_stage_file(path, data): with", "import open def load_stage_file(path): with open(path, \"r\", encoding=\"utf-8\") as fd:", "parse_stage(text, path): try: return yaml.load(text, Loader=SafeLoader) or {} except yaml.error.YAMLError", "as exc: raise StageFileCorruptedError(path, cause=exc) def parse_stage_for_update(text, path): \"\"\"Parses text", "dvc.utils.compat import open def load_stage_file(path): with open(path, \"r\", encoding=\"utf-8\") as", "StageFileCorruptedError(path, cause=exc) def parse_stage_for_update(text, path): \"\"\"Parses text into Python structure.", "preserve all of those upon dump. This one is, however,", "import yaml from ruamel.yaml import YAML from ruamel.yaml.error import YAMLError", "ruamel.yaml.error import YAMLError try: from yaml import CSafeLoader as SafeLoader", "Python structure. Unlike `parse_stage()` this returns ordered dicts, values have", "ordered dicts, values have special attributes to store comments and", "cause=exc) def parse_stage_for_update(text, path): \"\"\"Parses text into Python structure. Unlike", "encoding=\"utf-8\") as fd: return parse_stage(fd.read(), path) def parse_stage(text, path): try:", "try: from yaml import CSafeLoader as SafeLoader except ImportError: from", "all of those upon dump. This one is, however, several", "return parse_stage(fd.read(), path) def parse_stage(text, path): try: return yaml.load(text, Loader=SafeLoader)", "from ruamel.yaml import YAML from ruamel.yaml.error import YAMLError try: from", "return yaml.load(text, Loader=SafeLoader) or {} except yaml.error.YAMLError as exc: raise", "with open(path, \"r\", encoding=\"utf-8\") as fd: return parse_stage(fd.read(), path) def", "import YAML from ruamel.yaml.error import YAMLError try: from yaml import", "{} except YAMLError as exc: raise StageFileCorruptedError(path, cause=exc) def dump_stage_file(path,", "`parse_stage()`. 
\"\"\" try: yaml = YAML() return yaml.load(text) or {}", "yaml = YAML() return yaml.load(text) or {} except YAMLError as", "SafeLoader except ImportError: from yaml import SafeLoader from dvc.exceptions import", "yaml import CSafeLoader as SafeLoader except ImportError: from yaml import", "exc: raise StageFileCorruptedError(path, cause=exc) def parse_stage_for_update(text, path): \"\"\"Parses text into" ]
[ "entrada = input(\"palabra\") listaDeLetras = [] for i in entrada:", "= input(\"palabra\") listaDeLetras = [] for i in entrada: listaDeLetras.append(i)" ]
[ "from sklearn.model_selection import train_test_split np.random.seed(0) data = load_breast_cancer() X_train, X_test,", "data = load_breast_cancer() X_train, X_test, y_train, y_test = train_test_split( data.data,", "= NeuralNet(loss_fn='cross_entropy') nn.add_layer(X_train.shape[1], 100, activation='linear') nn.add_layer(100, 100, activation='logistic') nn.add_layer(100, 1", "from pyad.nn import NeuralNet from sklearn.datasets import load_breast_cancer from sklearn.model_selection", "nn.train( X_train, y_train, X_test, y_test, batch_size=1, learning_rate=1e-3, epochs=20 ) print('Predictions:',", "100, activation='linear') nn.add_layer(100, 100, activation='logistic') nn.add_layer(100, 1 + np.max(y_train), activation='linear')", "nn.add_layer(100, 100, activation='logistic') nn.add_layer(100, 1 + np.max(y_train), activation='linear') nn.train( X_train,", "nn.add_layer(100, 1 + np.max(y_train), activation='linear') nn.train( X_train, y_train, X_test, y_test,", "y_test = train_test_split( data.data, data.target, train_size=0.8, random_state=0 ) nn =", "activation='linear') nn.train( X_train, y_train, X_test, y_test, batch_size=1, learning_rate=1e-3, epochs=20 )", "activation='logistic') nn.add_layer(100, 1 + np.max(y_train), activation='linear') nn.train( X_train, y_train, X_test,", "import numpy as np from pyad.nn import NeuralNet from sklearn.datasets", "sklearn.datasets import load_breast_cancer from sklearn.model_selection import train_test_split np.random.seed(0) data =", "= load_breast_cancer() X_train, X_test, y_train, y_test = train_test_split( data.data, data.target,", "train_test_split( data.data, data.target, train_size=0.8, random_state=0 ) nn = NeuralNet(loss_fn='cross_entropy') nn.add_layer(X_train.shape[1],", "nn = NeuralNet(loss_fn='cross_entropy') nn.add_layer(X_train.shape[1], 100, activation='linear') nn.add_layer(100, 100, activation='logistic') nn.add_layer(100,", "train_test_split np.random.seed(0) data = load_breast_cancer() X_train, X_test, 
y_train, y_test =", "100, activation='logistic') nn.add_layer(100, 1 + np.max(y_train), activation='linear') nn.train( X_train, y_train,", "np.random.seed(0) data = load_breast_cancer() X_train, X_test, y_train, y_test = train_test_split(", "numpy as np from pyad.nn import NeuralNet from sklearn.datasets import", "import load_breast_cancer from sklearn.model_selection import train_test_split np.random.seed(0) data = load_breast_cancer()", "activation='linear') nn.add_layer(100, 100, activation='logistic') nn.add_layer(100, 1 + np.max(y_train), activation='linear') nn.train(", "as np from pyad.nn import NeuralNet from sklearn.datasets import load_breast_cancer", "nn.add_layer(X_train.shape[1], 100, activation='linear') nn.add_layer(100, 100, activation='logistic') nn.add_layer(100, 1 + np.max(y_train),", "np from pyad.nn import NeuralNet from sklearn.datasets import load_breast_cancer from", ") nn = NeuralNet(loss_fn='cross_entropy') nn.add_layer(X_train.shape[1], 100, activation='linear') nn.add_layer(100, 100, activation='logistic')", "train_size=0.8, random_state=0 ) nn = NeuralNet(loss_fn='cross_entropy') nn.add_layer(X_train.shape[1], 100, activation='linear') nn.add_layer(100,", "load_breast_cancer() X_train, X_test, y_train, y_test = train_test_split( data.data, data.target, train_size=0.8,", "random_state=0 ) nn = NeuralNet(loss_fn='cross_entropy') nn.add_layer(X_train.shape[1], 100, activation='linear') nn.add_layer(100, 100,", "load_breast_cancer from sklearn.model_selection import train_test_split np.random.seed(0) data = load_breast_cancer() X_train,", "data.target, train_size=0.8, random_state=0 ) nn = NeuralNet(loss_fn='cross_entropy') nn.add_layer(X_train.shape[1], 100, activation='linear')", "X_test, y_train, y_test = train_test_split( data.data, data.target, train_size=0.8, random_state=0 )", "X_train, X_test, y_train, y_test = train_test_split( data.data, data.target, train_size=0.8, random_state=0", "pyad.nn import NeuralNet from sklearn.datasets 
import load_breast_cancer from sklearn.model_selection import", "NeuralNet from sklearn.datasets import load_breast_cancer from sklearn.model_selection import train_test_split np.random.seed(0)", "= train_test_split( data.data, data.target, train_size=0.8, random_state=0 ) nn = NeuralNet(loss_fn='cross_entropy')", "NeuralNet(loss_fn='cross_entropy') nn.add_layer(X_train.shape[1], 100, activation='linear') nn.add_layer(100, 100, activation='logistic') nn.add_layer(100, 1 +", "+ np.max(y_train), activation='linear') nn.train( X_train, y_train, X_test, y_test, batch_size=1, learning_rate=1e-3,", "1 + np.max(y_train), activation='linear') nn.train( X_train, y_train, X_test, y_test, batch_size=1,", "import train_test_split np.random.seed(0) data = load_breast_cancer() X_train, X_test, y_train, y_test", "np.max(y_train), activation='linear') nn.train( X_train, y_train, X_test, y_test, batch_size=1, learning_rate=1e-3, epochs=20", "from sklearn.datasets import load_breast_cancer from sklearn.model_selection import train_test_split np.random.seed(0) data", "X_train, y_train, X_test, y_test, batch_size=1, learning_rate=1e-3, epochs=20 ) print('Predictions:', nn.predict(X_test))", "sklearn.model_selection import train_test_split np.random.seed(0) data = load_breast_cancer() X_train, X_test, y_train,", "data.data, data.target, train_size=0.8, random_state=0 ) nn = NeuralNet(loss_fn='cross_entropy') nn.add_layer(X_train.shape[1], 100,", "import NeuralNet from sklearn.datasets import load_breast_cancer from sklearn.model_selection import train_test_split", "y_train, y_test = train_test_split( data.data, data.target, train_size=0.8, random_state=0 ) nn" ]
[ "import * I2CADR = 0x0E DIE_TEMP = 0x0F while True:", "0x0E DIE_TEMP = 0x0F while True: i2c.write(I2CADR, bytearray([DIE_TEMP])) d =", "DIE_TEMP = 0x0F while True: i2c.write(I2CADR, bytearray([DIE_TEMP])) d = i2c.read(I2CADR,", "i2c.read(I2CADR, 1) x = d[0] if x >=128: x -=", "microbit import * I2CADR = 0x0E DIE_TEMP = 0x0F while", "if x >=128: x -= 256 x += 10 print(x)", "* I2CADR = 0x0E DIE_TEMP = 0x0F while True: i2c.write(I2CADR,", "x >=128: x -= 256 x += 10 print(x) sleep(500)", "0x0F while True: i2c.write(I2CADR, bytearray([DIE_TEMP])) d = i2c.read(I2CADR, 1) x", "from microbit import * I2CADR = 0x0E DIE_TEMP = 0x0F", "= i2c.read(I2CADR, 1) x = d[0] if x >=128: x", "= 0x0F while True: i2c.write(I2CADR, bytearray([DIE_TEMP])) d = i2c.read(I2CADR, 1)", "while True: i2c.write(I2CADR, bytearray([DIE_TEMP])) d = i2c.read(I2CADR, 1) x =", "I2CADR = 0x0E DIE_TEMP = 0x0F while True: i2c.write(I2CADR, bytearray([DIE_TEMP]))", "True: i2c.write(I2CADR, bytearray([DIE_TEMP])) d = i2c.read(I2CADR, 1) x = d[0]", "x = d[0] if x >=128: x -= 256 x", "1) x = d[0] if x >=128: x -= 256", "i2c.write(I2CADR, bytearray([DIE_TEMP])) d = i2c.read(I2CADR, 1) x = d[0] if", "= d[0] if x >=128: x -= 256 x +=", "= 0x0E DIE_TEMP = 0x0F while True: i2c.write(I2CADR, bytearray([DIE_TEMP])) d", "d[0] if x >=128: x -= 256 x += 10", "bytearray([DIE_TEMP])) d = i2c.read(I2CADR, 1) x = d[0] if x", "d = i2c.read(I2CADR, 1) x = d[0] if x >=128:" ]