id stringlengths 2 8 | text stringlengths 16 264k | dataset_id stringclasses 1 value |
|---|---|---|
6683007 | <filename>web/app/models.py<gh_stars>1-10
from datetime import datetime
from app import db
class Users(db.Model):
    """SQLAlchemy model describing a registered user account."""

    # Auto-incrementing integer primary key.
    user = db.Column(db.Integer, primary_key=True)
    # Optional date of birth.
    birth_date = db.Column(db.DateTime, nullable=True)
    first_name = db.Column(db.String(25), nullable=False)
    last_name = db.Column(db.String(25), nullable=False)
    # Login handle and e-mail address must both be unique across accounts.
    username = db.Column(db.String(16), unique=True, nullable=False)
    email = db.Column(db.String(255), unique=True, nullable=False)
    # Single-character gender code.
    gender = db.Column(db.String(1), nullable=False)

    def __repr__(self):
        """Debug representation showing the unique username."""
        return '<User {!r}>'.format(self.username)
| StarcoderdataPython |
9698658 | """
CryptoAPIs
Crypto APIs 2.0 is a complex and innovative infrastructure layer that radically simplifies the development of any Blockchain and Crypto related applications. Organized around REST, Crypto APIs 2.0 can assist both novice Bitcoin/Ethereum enthusiasts and crypto experts with the development of their blockchain applications. Crypto APIs 2.0 provides unified endpoints and data, raw data, automatic tokens and coins forwardings, callback functionalities, and much more. # noqa: E501
The version of the OpenAPI document: 2.0.0
Contact: <EMAIL>
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import cryptoapis
from cryptoapis.model.get_transaction_details_by_transaction_id_response_item_blockchain_specific import GetTransactionDetailsByTransactionIDResponseItemBlockchainSpecific
from cryptoapis.model.get_transaction_details_by_transaction_id_response_item_fee import GetTransactionDetailsByTransactionIDResponseItemFee
from cryptoapis.model.get_transaction_details_by_transaction_id_response_item_recipients import GetTransactionDetailsByTransactionIDResponseItemRecipients
from cryptoapis.model.get_transaction_details_by_transaction_id_response_item_senders import GetTransactionDetailsByTransactionIDResponseItemSenders
# Register the imported model classes in this module's globals() under their
# public names; the generated (openapi-generator) model code resolves nested
# model references by name at runtime, so these entries must exist.
globals()['GetTransactionDetailsByTransactionIDResponseItemBlockchainSpecific'] = GetTransactionDetailsByTransactionIDResponseItemBlockchainSpecific
globals()['GetTransactionDetailsByTransactionIDResponseItemFee'] = GetTransactionDetailsByTransactionIDResponseItemFee
globals()['GetTransactionDetailsByTransactionIDResponseItemRecipients'] = GetTransactionDetailsByTransactionIDResponseItemRecipients
globals()['GetTransactionDetailsByTransactionIDResponseItemSenders'] = GetTransactionDetailsByTransactionIDResponseItemSenders
from cryptoapis.model.get_transaction_details_by_transaction_id_response_item import GetTransactionDetailsByTransactionIDResponseItem
class TestGetTransactionDetailsByTransactionIDResponseItem(unittest.TestCase):
    """Unit test stubs for GetTransactionDetailsByTransactionIDResponseItem."""

    def setUp(self):
        """No fixtures are required by these stubs."""
        pass

    def tearDown(self):
        """Nothing to clean up."""
        pass

    def testGetTransactionDetailsByTransactionIDResponseItem(self):
        """Test GetTransactionDetailsByTransactionIDResponseItem"""
        # FIXME: construct object with mandatory attributes with example values
        # model = GetTransactionDetailsByTransactionIDResponseItem() # noqa: E501
        pass
if __name__ == '__main__':
unittest.main()
| StarcoderdataPython |
9699633 | <reponame>Mr-Umidjon/Variables_and_types
# Assign 5 to variable a;
# assign 4 to variable b;
# assign 8 to variable c;
# then print the result of the expression: a*b / c
a, b, c = 5, 4, 8
print(a * b / c)
| StarcoderdataPython |
4828821 | <gh_stars>1000+
# Module-level value; shadowed (not modified) by the local X inside test().
X = "OK"


def test():
    """Bind a local X and print it; the module-level X is untouched."""
    X = 4
    print(X)


test()
# Fixed: original used the Python 2 statement `print X`, a SyntaxError
# under Python 3; replaced with the print() function call.
print(X)
| StarcoderdataPython |
3201394 | """
FILTERED ELEMENT COLLECTOR
"""
__author__ = '<NAME> - <EMAIL>'
__twitter__ = '@solamour'
__version__ = '1.0.0'
# Importing Reference Modules
import clr # CLR ( Common Language Runtime Module )
clr.AddReference("RevitServices") # Adding the RevitServices.dll special Dynamo module to deal
# with Revit
import RevitServices # Importing RevitServices
from RevitServices.Persistence import DocumentManager # From RevitServices import the Document Manager
clr.AddReference("RevitAPI") # Adding the RevitAPI.dll module to access the Revit API
import Autodesk # Here we import the Autodesk namespace
# From the Autodesk namespace - derived down to the Revit Database, we import only the Filtered
# Element Collector and BuiltInCategory classes
from Autodesk.Revit.DB import FilteredElementCollector
# Here we give the Revit Document a nickname of 'doc' which allows us to simply call 'doc' later
# without having to type the long namespace name
doc = DocumentManager.Instance.CurrentDBDocument
# To create a Filtered Element Collector, we simply type the PINK part of RevitAPIDocs
# ( FilteredElementCollector ), wrap it inside of Parenthesis and then call the ORANGE part
# of RevitAPIDocs ( Document, View ). We are running multiple filters here: The 'OfClass',
# 'OfCategory' and 'Where Element Is Not Element Type'. We then cast it to Elements so we can use
# it in Dynamo.
elementCollector = FilteredElementCollector( doc ).WhereElementIsNotElementType().ToElements()
# To get our results back inside of Dynamo, we need to append a list to the OUT port
OUT = elementCollector
| StarcoderdataPython |
# Demonstrate numpy's element-wise hyperbolic sine (np.sinh).
import numpy as np
import math

# Sample angles: 0, pi/2, pi/3 and pi (radians).
in_array = [0, math.pi / 2, np.pi / 3, np.pi]
print("Input array : \n", in_array)

# np.sinh applies sinh to every element of the input sequence.
Sinh_Values = np.sinh(in_array)
print ("\nSine Hyperbolic values : \n", Sinh_Values) | StarcoderdataPython |
5026086 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Visualization routines using matplotlib
"""
import copy
import logging
import numpy as np
from astropy import units as u
from matplotlib import pyplot as plt
from matplotlib.collections import PatchCollection
from matplotlib.colors import Normalize, LogNorm, SymLogNorm
from matplotlib.patches import Ellipse, RegularPolygon, Rectangle
from numpy import sqrt
__all__ = ['CameraDisplay']
logger = logging.getLogger(__name__)
PIXEL_EPSILON = 0.0005 # a bit of extra size to pixels to avoid aliasing
def polar_to_cart(rho, phi):
    """Convert polar coordinates to cartesian.

    Fixed docstring: the original (malformed, quadruple-quoted) docstring
    claimed this "returns r, theta(degrees)", but the function converts the
    polar pair to cartesian coordinates.

    Parameters
    ----------
    rho : float or array_like
        Radial distance.
    phi : float or array_like
        Angle in radians.

    Returns
    -------
    (x, y) : tuple
        Cartesian coordinates.
    """
    x = rho * np.cos(phi)
    y = rho * np.sin(phi)
    return x, y
class CameraDisplay:
    """
    Camera Display using matplotlib.

    Parameters
    ----------
    geometry : `~ctapipe.instrument.CameraGeometry`
        Definition of the Camera/Image
    image: array_like
        array of values corresponding to the pixels in the CameraGeometry.
    ax : `matplotlib.axes.Axes`
        A matplotlib axes object to plot on, or None to create a new one
    title : str (default "Camera")
        Title to put on camera plot
    norm : str or `matplotlib.color.Normalize` instance (default 'lin')
        Normalization for the color scale.
        Supported str arguments are
        - 'lin': linear scale
        - 'log': logarithmic scale (base 10)
    cmap : str or `matplotlib.colors.Colormap` (default 'hot')
        Color map to use (see `matplotlib.cm`)
    allow_pick : bool (default False)
        if True, allow user to click and select a pixel
    autoupdate : bool (default True)
        redraw automatically (otherwise need to call plt.draw())
    autoscale : bool (default True)
        rescale the vmin/vmax values when the image changes.
        This is set to False if `set_limits_*` is called to explicity
        set data limits.

    Notes
    -----
    Speed:
        CameraDisplay is not intended to be very fast (matplotlib
        is not a very speed performant graphics library, it is
        intended for nice output plots). However, most of the
        slowness of CameraDisplay is in the constructor. Once one is
        displayed, changing the image that is displayed is relatively
        fast and efficient. Therefore it is best to initialize an
        instance, and change the data, rather than generating new
        CameraDisplays.

    Pixel Implementation:
        Pixels are rendered as a
        `matplotlib.collections.PatchCollection` of Polygons (either 6
        or 4 sided). You can access the PatchCollection directly (to
        e.g. change low-level style parameters) via
        `CameraDisplay.pixels`

    Output:
        Since CameraDisplay uses matplotlib, any display can be
        saved to any output file supported via
        plt.savefig(filename). This includes `.pdf` and `.png`.
    """

    def __init__(
            self,
            geometry,
            image=None,
            ax=None,
            title=None,
            norm="lin",
            cmap=None,
            allow_pick=False,
            autoupdate=True,
            autoscale=True
    ):
        self.axes = ax if ax is not None else plt.gca()
        self.geom = geometry
        self.pixels = None
        self.colorbar = None
        self.autoupdate = autoupdate
        self.autoscale = autoscale
        self._active_pixel = None
        self._active_pixel_label = None
        self._axes_overlays = []

        if title is None:
            title = geometry.cam_id

        # initialize the plot and generate the pixels as a
        # RegularPolyCollection
        patches = []

        # Ensure a boolean pixel mask exists; default to all pixels enabled.
        if not hasattr(self.geom, "mask"):
            self.geom.mask = np.ones_like(self.geom.pix_x.value, dtype=bool)

        for xx, yy, aa in zip(
                u.Quantity(self.geom.pix_x[self.geom.mask]).value,
                u.Quantity(self.geom.pix_y[self.geom.mask]).value,
                u.Quantity(np.array(self.geom.pix_area)[self.geom.mask]).value):
            if self.geom.pix_type.startswith("hex"):
                # Hexagon circumradius from the pixel area, padded by
                # PIXEL_EPSILON to avoid aliasing gaps between pixels.
                rr = sqrt(aa * 2 / 3 / sqrt(3)) + 2 * PIXEL_EPSILON
                poly = RegularPolygon(
                    (xx, yy), 6, radius=rr,
                    orientation=self.geom.pix_rotation.rad,
                    fill=True,
                )
            else:
                # Square pixel: side length from the area, slightly padded.
                rr = sqrt(aa) + PIXEL_EPSILON
                poly = Rectangle(
                    (xx - rr / 2., yy - rr / 2.),
                    width=rr,
                    height=rr,
                    angle=self.geom.pix_rotation.deg,
                    fill=True,
                )

            patches.append(poly)

        self.pixels = PatchCollection(patches, cmap=cmap, linewidth=0)
        self.axes.add_collection(self.pixels)

        # A second, initially invisible collection used by highlight_pixels().
        self.pixel_highlighting = copy.copy(self.pixels)
        self.pixel_highlighting.set_facecolor('none')
        self.pixel_highlighting.set_linewidth(0)
        self.axes.add_collection(self.pixel_highlighting)

        # Set up some nice plot defaults
        self.axes.set_aspect('equal', 'datalim')
        self.axes.set_title(title)
        self.axes.set_xlabel("X position ({})".format(self.geom.pix_x.unit))
        self.axes.set_ylabel("Y position ({})".format(self.geom.pix_y.unit))
        self.axes.autoscale_view()

        # set up a patch to display when a pixel is clicked (and
        # pixel_picker is enabled):
        self._active_pixel = copy.copy(patches[0])
        self._active_pixel.set_facecolor('r')
        self._active_pixel.set_alpha(0.5)
        self._active_pixel.set_linewidth(2.0)
        self._active_pixel.set_visible(False)
        self.axes.add_patch(self._active_pixel)

        self._active_pixel_label = self.axes.text(self._active_pixel.xy[0],
                                                  self._active_pixel.xy[1],
                                                  "0",
                                                  horizontalalignment='center',
                                                  verticalalignment='center')
        self._active_pixel_label.set_visible(False)

        # enable ability to click on pixel and do something (can be
        # enabled on-the-fly later as well:
        if allow_pick:
            self.enable_pixel_picker()

        if image is not None:
            self.image = image
        else:
            # FIX: `np.float` was deprecated in NumPy 1.20 and removed in 1.24;
            # the builtin `float` is the documented replacement.
            self.image = np.zeros_like(self.geom.pix_id, dtype=float)

        self.norm = norm

    def highlight_pixels(self, pixels, color='g', linewidth=1, alpha=0.75):
        """
        Highlight the given pixels with a colored line around them

        Parameters
        ----------
        pixels : index-like
            The pixels to highlight.
            Can either be a list or array of integers or a
            boolean mask of length number of pixels
        color: a matplotlib conform color
            the color for the pixel highlighting
        linewidth: float
            linewidth of the highlighting in points
        alpha: 0 <= alpha <= 1
            The transparency
        """
        # Per-pixel linewidths: zero everywhere except the selected pixels.
        # (Local renamed from `l` to avoid the ambiguous single-letter name.)
        linewidths = np.zeros_like(self.image)
        linewidths[pixels] = linewidth
        self.pixel_highlighting.set_linewidth(linewidths)
        self.pixel_highlighting.set_alpha(alpha)
        self.pixel_highlighting.set_edgecolor(color)
        self._update()

    def enable_pixel_picker(self):
        """ enable ability to click on pixels """
        self.pixels.set_picker(True)  # enable click
        # Pick radius roughly the pixel radius, derived from the pixel area.
        self.pixels.set_pickradius(sqrt(u.Quantity(self.geom.pix_area[0])
                                        .value) / np.pi)
        self.pixels.set_snap(True)  # snap cursor to pixel center
        self.axes.figure.canvas.mpl_connect('pick_event', self._on_pick)

    def set_limits_minmax(self, zmin, zmax):
        """ set the color scale limits from min to max """
        self.pixels.set_clim(zmin, zmax)
        # Explicit limits disable automatic rescaling on image updates.
        self.autoscale = False
        self._update()

    def set_limits_percent(self, percent=95):
        """ auto-scale the color range to percent of maximum """
        zmin = self.pixels.get_array().min()
        zmax = self.pixels.get_array().max()
        dz = zmax - zmin
        frac = percent / 100.0
        self.autoscale = False
        self.set_limits_minmax(zmin, zmax - (1.0 - frac) * dz)

    @property
    def norm(self):
        """
        The norm instance of the Display

        Possible values:

        - "lin": linear scale
        - "log": log scale (cannot have negative values)
        - "symlog": symmetric log scale (negative values are ok)
        -  any matplotlib.colors.Normalize instance, e. g. PowerNorm(gamma=-2)
        """
        return self.pixels.norm

    @norm.setter
    def norm(self, norm):
        if norm == 'lin':
            self.pixels.norm = Normalize()
        elif norm == 'log':
            self.pixels.norm = LogNorm()
            self.pixels.autoscale()  # this is to handle matplotlib bug #5424
        elif norm == 'symlog':
            self.pixels.norm = SymLogNorm(linthresh=1.0)
            self.pixels.autoscale()
        elif isinstance(norm, Normalize):
            self.pixels.norm = norm
        else:
            raise ValueError("Unsupported norm: '{}', options are 'lin',"
                             "'log','symlog', or a matplotlib Normalize object"
                             .format(norm))

        self.update(force=True)
        self.pixels.autoscale()

    @property
    def cmap(self):
        """
        Color map to use. Either a name or `matplotlib.colors.ColorMap`
        instance, e.g. from `matplotlib.pyplot.cm`
        """
        return self.pixels.get_cmap()

    @cmap.setter
    def cmap(self, cmap):
        self.pixels.set_cmap(cmap)
        self._update()

    @property
    def image(self):
        """The image displayed on the camera (1D array of pixel values)"""
        return self.pixels.get_array()

    @image.setter
    def image(self, image):
        """
        Change the image displayed on the Camera.

        Parameters
        ----------
        image: array_like
            array of values corresponding to the pixels in the CameraGeometry.
        """
        image = np.asanyarray(image)
        if image.shape != self.geom.pix_x.shape:
            raise ValueError(
                "Image has a different shape {} than the "
                "given CameraGeometry {}"
                .format(image.shape, self.geom.pix_x.shape)
            )

        # Only masked-in pixels are drawn.
        self.pixels.set_array(image[self.geom.mask])
        self.pixels.changed()
        if self.autoscale:
            self.pixels.autoscale()
        self._update()

    def _update(self, force=False):
        """ signal a redraw if autoupdate is turned on """
        if self.autoupdate:
            self.update(force)

    def update(self, force=False):
        """ redraw the display now """
        self.axes.figure.canvas.draw()
        if self.colorbar is not None:
            if force is True:
                # NOTE(review): Colorbar.update_bruteforce and draw_all are
                # deprecated/removed in recent matplotlib releases — confirm
                # the pinned matplotlib version supports them.
                self.colorbar.update_bruteforce(self.pixels)
            else:
                self.colorbar.update_normal(self.pixels)
            self.colorbar.draw_all()

    def add_colorbar(self, **kwargs):
        """
        add a colorbar to the camera plot
        kwargs are passed to `figure.colorbar(self.pixels, **kwargs)`
        See matplotlib documentation for the supported kwargs:
        http://matplotlib.org/api/figure_api.html#matplotlib.figure.Figure.colorbar
        """
        if self.colorbar is not None:
            raise ValueError(
                'There is already a colorbar attached to this CameraDisplay'
            )
        else:
            self.colorbar = self.axes.figure.colorbar(self.pixels, **kwargs)
        self.update()

    def add_ellipse(self, centroid, length, width, angle, asymmetry=0.0,
                    **kwargs):
        """
        plot an ellipse on top of the camera

        Parameters
        ----------
        centroid: (float, float)
            position of centroid
        length: float
            major axis
        width: float
            minor axis
        angle: float
            rotation angle wrt x-axis about the centroid, anticlockwise, in radians
        asymmetry: float
            3rd-order moment for directionality if known
        kwargs:
            any MatPlotLib style arguments to pass to the Ellipse patch
        """
        ellipse = Ellipse(xy=centroid, width=length, height=width,
                          angle=np.degrees(angle), fill=False, **kwargs)
        self.axes.add_patch(ellipse)
        self.update()
        return ellipse

    def overlay_moments(self, hillas_parameters, with_label=True, keep_old=False,
                        **kwargs):
        """helper to overlay ellipse from a `HillasParametersContainer` structure

        Parameters
        ----------
        hillas_parameters: `HillasParametersContainer`
            structuring containing Hillas-style parameterization
        with_label: bool
            If True, show coordinates of centroid and width and length
        keep_old: bool
            If True, to not remove old overlays
        kwargs: key=value
            any style keywords to pass to matplotlib (e.g. color='red'
            or linewidth=6)
        """
        if not keep_old:
            self.clear_overlays()

        # strip off any units
        cen_x = u.Quantity(hillas_parameters.x).value
        cen_y = u.Quantity(hillas_parameters.y).value
        length = u.Quantity(hillas_parameters.length).value
        width = u.Quantity(hillas_parameters.width).value

        el = self.add_ellipse(
            centroid=(cen_x, cen_y),
            length=length * 2,
            width=width * 2,
            angle=hillas_parameters.psi.rad,
            **kwargs
        )

        self._axes_overlays.append(el)

        if with_label:
            text = self.axes.text(
                cen_x,
                cen_y,
                "({:.02f},{:.02f})\n[w={:.02f},l={:.02f}]".format(
                    hillas_parameters.x,
                    hillas_parameters.y,
                    hillas_parameters.width,
                    hillas_parameters.length,
                ),
                color=el.get_edgecolor()
            )

            self._axes_overlays.append(text)

    def clear_overlays(self):
        """ Remove added overlays from the axes """
        while self._axes_overlays:
            overlay = self._axes_overlays.pop()
            overlay.remove()

    def _on_pick(self, event):
        """ handler for when a pixel is clicked """
        pix_id = event.ind[-1]
        xx, yy, aa = u.Quantity(self.geom.pix_x[pix_id]).value, \
                     u.Quantity(self.geom.pix_y[pix_id]).value, \
                     u.Quantity(np.array(self.geom.pix_area)[pix_id])
        if self.geom.pix_type.startswith("hex"):
            self._active_pixel.xy = (xx, yy)
        else:
            rr = sqrt(aa)
            self._active_pixel.xy = (xx - rr / 2., yy - rr / 2.)
        self._active_pixel.set_visible(True)
        self._active_pixel_label.set_x(xx)
        self._active_pixel_label.set_y(yy)
        self._active_pixel_label.set_text("{:003d}".format(pix_id))
        self._active_pixel_label.set_visible(True)
        self._update()
        self.on_pixel_clicked(pix_id)  # call user-function

    def on_pixel_clicked(self, pix_id):
        """virtual function to overide in sub-classes to do something special
        when a pixel is clicked
        """
        print("Clicked pixel_id {}".format(pix_id))

    def show(self):
        self.axes.figure.show()
| StarcoderdataPython |
1634081 | # -*- coding: utf-8 -*-
from .coin_meta import *
from .stock_meta import *
| StarcoderdataPython |
1822871 | from unittest.mock import patch
import mongomock
from snews import decider
@mongomock.patch(servers=(('localhost', 27017),))
def test_decider(mongodb):
    # Build a Decider pointed at localhost:27017, which mongomock intercepts
    # so no real MongoDB server is needed.
    decide = decider.Decider(
        coinc_threshold=10,
        msg_expiration=120,
        datetime_format="%y/%m/%d %H:%M:%S",
        mongo_server="mongodb://localhost:27017/",
        drop_db=False,
    )
    # Swap the decider's cache collection for the mongomock fixture so both
    # sides read the same pre-seeded documents.
    with patch.object(decide.db, 'cache', mongodb.cache):
        # check messages in cache
        messages = list(decide.getCacheMessages())
        assert mongodb.cache.count() == len(messages)
        # check deciding functionality, should determine coincidence
        assert decide.deciding()
| StarcoderdataPython |
11301965 | <reponame>GitExl/Turrican2Editor<gh_stars>1-10
# Copyright (c) 2016, <NAME>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from ctypes import *
from renderlib.dll import dll
from renderlib.palette import Palette
from renderlib.stream_read import StreamRead
from renderlib.surface import Surface
__all__ = ['Bitplane', 'MaskMode', 'BitplaneType']

# ctypes bindings for the native render DLL.
# bitplaneCreateFromStream(stream, type, width, height, planes) -> bitplane ptr
# (argument names inferred from Bitplane.from_stream below — TODO confirm
# against the DLL's header.)
bitplaneCreateFromStream = dll.bitplaneCreateFromStream
bitplaneCreateFromStream.argtypes = [c_void_p, c_uint, c_uint, c_uint, c_uint]
bitplaneCreateFromStream.restype = c_void_p

# bitplaneDestroy(bitplane) -> None: frees the native object.
bitplaneDestroy = dll.bitplaneDestroy
bitplaneDestroy.argtypes = [c_void_p]
bitplaneDestroy.restype = None

# bitplaneToSurface(bitplane, mask, palette, mask_color, shift, mask_mode)
# -> surface ptr (argument names inferred from Bitplane.create_surface).
bitplaneToSurface = dll.bitplaneToSurface
bitplaneToSurface.argtypes = [c_void_p, c_void_p, c_void_p, c_uint, c_int, c_uint]
bitplaneToSurface.restype = c_void_p
class BitplaneType:
    """Pixel-data layout constants passed to bitplaneCreateFromStream."""
    CHUNKY: int = 0
    PLANAR: int = 1
    AMIGA_SPRITE: int = 2
class MaskMode:
    """Masking-mode constants passed to bitplaneToSurface."""
    NONE: int = 0
    INDEX: int = 1
    BITPLANE: int = 2
class Bitplane:
    """
    A bitplane holds a number of 8 bit pixels, without a palette. It can read a bitmap from a series of bitplanes.
    """

    def __init__(self, ptr: int):
        # Opaque pointer to the native bitplane object owned by this wrapper.
        self._bitplane: int = ptr

    def __del__(self):
        # Free the native object when the Python wrapper is garbage-collected.
        bitplaneDestroy(self._bitplane)

    @classmethod
    def from_stream(cls, stream: StreamRead, bitplane_type: BitplaneType, width: int, height: int, planes: int):
        """
        Creates a new bitplane by reading it from a stream.

        :param stream: the stream to read from.
        :param bitplane_type: a BitplaneType value.
        :param width: the width of the bitmap.
        :param height: the height of the bitmap.
        :param planes: the number of bitplanes in the bitmap.

        :return: a new Bitplane object.
        :raises Exception: if the native loader returned a null pointer.
        """
        ptr = bitplaneCreateFromStream(stream.pointer, bitplane_type, width, height, planes)
        if not ptr:
            raise Exception('Could not create Bitplane object from stream.')

        return cls(ptr)

    def create_surface(self, mask, palette: Palette, mask_color: int, shift: int, mask_mode: MaskMode):
        """
        Creates a Surface from this Bitplane.

        :param mask: the bitplane to use as the mask.
        :param palette: the palette object to use for color conversion.
        :param mask_color: the color in the mask bitplane that is transparent.
        :param shift: the amount of bits to shift the bitplane's colors with.
        :param mask_mode: the masking mode from MaskMode.

        :return: a new Surface object wrapping the converted pixels.
        :raises Exception: if the native conversion returned a null pointer.
        """
        if mask is None:
            mask_pointer = 0
        else:
            mask_pointer = mask.pointer

        ptr = bitplaneToSurface(self._bitplane, mask_pointer, palette.pointer, mask_color, shift, mask_mode)
        if not ptr:
            raise Exception('Could not create Surface from Bitplane object.')

        return Surface(ptr)

    @property
    def pointer(self) -> int:
        # Raw native pointer, for passing to other DLL calls.
        return self._bitplane
| StarcoderdataPython |
1817876 | import hashlib
from base64 import b64encode, b64decode
from config import Config
from wallet import Wallet
from .exceptions import ValidationError, InsufficientBalanceError, DuplicateNonceError
class Transaction:
    """A value transfer between two wallet addresses.

    A transaction is hashable (SHA-256 over its canonical string form) and is
    signed by the sender's wallet; ``signature`` stays None until signing.
    """

    def __init__(
        self, sender, recipient, amount, nonce: int, fee=None, signature=None, **kwargs
    ):
        # Default fee is the minimum of 1 when not explicitly provided.
        if fee is None:
            fee = 1
        self.sender = sender
        self.recipient = recipient
        self.amount = amount
        self.fee = fee
        self.nonce = nonce
        self.signature = signature

    def to_dict(self):
        """Serialize the transaction to a JSON-friendly dict.

        The signature is base64-encoded, or None for an unsigned transaction.
        """
        return {
            "sender": self.sender,
            "recipient": self.recipient,
            "amount": self.amount,
            "fee": self.fee,
            "nonce": self.nonce,
            "signature": self.base64_signature,
        }

    def is_signature_verified(self):
        """Return True if the stored signature matches this transaction's hash."""
        return Wallet.verify_signature(self.sender, self.signature, self.hash())

    def validate(self, blockchain_state):
        """
        Check validation of transaction
        1. check sender key (is valid ECDSA key)
        2. check sender wallet balance
        3. check amount is integer > 0
        4. check fee is integer > 0
        5. check nonce is used only once
        6. check sender signature
        :raises ValidationError
        :return: None
        """
        if self.signature is None:
            raise ValidationError("Transaction isn't singed")
        sender_wallet = blockchain_state.wallets.get(self.sender, None)
        # Balance check is skipped on the test net so faucet-less testing works.
        if sender_wallet is None or sender_wallet.balance < (self.amount + self.fee):
            if not Config.IS_TEST_NET:
                raise InsufficientBalanceError()
        # Nonces must strictly increase per wallet to prevent replay.
        if sender_wallet is not None and sender_wallet.nonce_counter >= self.nonce:
            raise DuplicateNonceError("Wallet nonce is grater then transaction nonce")
        # Exact type check (not isinstance) also rejects bools.
        if type(self.amount) not in (int, float) or self.amount <= 0:
            raise ValidationError("amount must be number grater then 0")
        if type(self.fee) not in (int, float) or self.fee <= 0:
            raise ValidationError("fee must be number grater then 0")
        if not self.is_signature_verified():
            raise ValidationError("transaction signature is not valid")

    def _raw_transaction(self):
        # Canonical string form used as the signing/hashing pre-image.
        return f"{self.sender}:{self.recipient}:{self.amount}:{self.fee}:{self.nonce}"

    def hash(self):
        """Return the SHA-256 hex digest of the canonical transaction string."""
        transaction_string = self._raw_transaction().encode()
        return hashlib.sha256(transaction_string).hexdigest()

    @property
    def base64_signature(self):
        """Base64-encoded signature, or None when the transaction is unsigned.

        Fix: previously b64encode(None) raised TypeError for unsigned
        transactions, which made to_dict() unusable before signing.
        """
        if self.signature is None:
            return None
        return b64encode(self.signature).decode()

    @classmethod
    def from_dict(cls, sender, recipient, signature, **kwargs):
        """Rebuild a Transaction from a dict produced by to_dict()."""
        signature = b64decode(signature.encode())
        return Transaction(
            sender=sender, recipient=recipient, signature=signature, **kwargs
        )
| StarcoderdataPython |
8127285 | <filename>heatmap.py
###########################
### Written by <NAME>
### 16 Jan 2017
### Python 3.5
###########################

#########################
### PART 1: DATA HANDLING
#########################
# Data source:
# https://www.kaggle.com/us-drought-monitor/united-states-droughts-by-county/data#
import pandas as pd
import numpy as np

### Read in county data and drought info data
county = pd.read_csv('county_info_2016.csv', encoding = "ISO-8859-1")
county.columns = ['USPS','GEOID','ANSICODE','NAME','ALAND','AWATER','ALAND_SQMI','AWATER_SQMI','INTPTLAT','INTPTLONG' ]
county = county[['GEOID','ALAND_SQMI','INTPTLAT','INTPTLONG']]
dr = pd.read_csv('us-droughts.csv')

### Check if there are NaNs and all entries are recorded week's internal
#dr.isnull().sum() ## To check if there are NaNs
#dr.validStart = pd.to_datetime(dr.validStart, format='%Y-%m-%d')
#dr.validEnd = pd.to_datetime(dr.validEnd, format='%Y-%m-%d')
#((dr.validEnd - dr.validStart)!="6 day").sum()
#dr.releaseDate = pd.to_datetime(dr.releaseDate, format='%Y-%m-%d')
#(dr.releaseDate != dr.validStart).sum()

### Data cleansing: parse release dates, drop unused columns
dr.releaseDate = pd.to_datetime(dr.releaseDate, format='%Y-%m-%d')
num_m = len(dr.releaseDate.unique())
dr = dr.drop(dr[['county', 'state', 'validStart', 'validEnd', 'domStatisticFormatID']], axis=1)

### Resample data on monthly basis (mean of weekly records per county)
dr_m = dr.set_index('releaseDate').groupby(['FIPS']).resample('M').mean()

### Calculate drought level (when NONE is 100% => 0, if D4 is 100% => 5, and linearly between 0 and 5)
dr_m['LEVEL'] = (dr_m.D4*5 + (dr_m.D3-dr_m.D4)*4 + (dr_m.D2-dr_m.D3)*3 + (dr_m.D1-dr_m.D2)*2 + (dr_m.D0-dr_m.D1))/100

### Merge drought data and county info (coordination, size) by FIPS/GEOID
# NOTE(review): sort='False' passes a truthy *string*, not the boolean False —
# probably meant sort=False; confirm intended behavior before changing.
dr_m = dr_m.reset_index(level=1)
dr_final = pd.merge(dr_m, county, left_on='FIPS', right_on='GEOID', how='inner', sort='False')
dr_final = dr_final[['FIPS', 'releaseDate', 'LEVEL', 'ALAND_SQMI', 'INTPTLAT', 'INTPTLONG']]
dr_final = dr_final.groupby('FIPS')

#####################################
### PART 2: HEATMAP ANIMATION DISPLAY
#####################################
import matplotlib.pyplot as plt
from matplotlib.cm import ScalarMappable
from mpl_toolkits.basemap import Basemap
from matplotlib.animation import FuncAnimation
#get_ipython().magic('matplotlib nbagg') ## Only for Jupyter iPython display

### Create a figure and draw a basemap (Lambert conformal over the lower 48)
fig = plt.figure(figsize=(16,8))
ax = fig.add_subplot(111)
m = Basemap(llcrnrlon=-119,llcrnrlat=22,urcrnrlon=-64,urcrnrlat=49,projection='lcc',lat_1=33,lat_2=45,lon_0=-95)
m.drawcoastlines() #zorder=3
m.drawmapboundary(zorder=0) #fill_color='#9fdbff'
m.fillcontinents(color='#ffffff',zorder=1) #,lake_color='#9fdbff',alpha=1
m.drawcountries(linewidth=1.5) #color='darkblue'
m.drawstates() #zorder=3

### Set county location values, drough level values, marker sizes (according to county size), colormap and title
# .nth(0) takes the first monthly record of each county group (initial frame).
x, y = m(dr_final.nth(0).INTPTLONG.tolist(), dr_final.nth(0).INTPTLAT.tolist())
colors = (dr_final.nth(0).LEVEL).tolist()
sizes = (dr_final.nth(0).ALAND_SQMI/7.5).tolist()
cmap = plt.cm.YlOrRd
sm = ScalarMappable(cmap=cmap)
plt.title('US Drought Level (Year-Month): '+dr_final.nth(0).releaseDate.iloc[0].strftime('%Y-%m'))

### Display the scatter plot and its colorbar (0-5)
scatter = ax.scatter(x,y,s=sizes,c=colors,cmap=cmap,alpha=1,edgecolors='face',marker='H',vmax=5,vmin=0,zorder=1.5)
plt.colorbar(scatter)

## Update function for animation: recolor the scatter for month ii and
## refresh the title; positions and sizes stay fixed.
def update(ii):
    colors = (dr_final.nth(ii).LEVEL).tolist()
    scatter.set_color(sm.to_rgba(colors))
    plt.title('US Drought Level (Year-Month): '+dr_final.nth(ii).releaseDate.iloc[0].strftime('%Y-%m'))
    return scatter,

anim = FuncAnimation(plt.gcf(),update,interval=300,repeat=False,frames=203,blit=True) #blit=True
anim.save('Heatmap_animation_US_Drought.gif', writer='imagemagick')
#plt.show()
###################################
### PART 3: ANIMATION SAVING AS MP4
###################################
### Save functions for animation
#from matplotlib import rc, animation
#mywriter = animation.FFMpegWriter()
#anim.save('Heatmap_animation_US_Drought.mp4',writer=mywriter) | StarcoderdataPython |
12863793 | # Generated by Django 3.2.8 on 2021-10-12 15:54
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial, auto-generated schema snapshot (Django 3.2.8).

    Every model is declared with ``managed: False``: Django records the
    models for ORM access but never creates or alters the underlying
    tables itself.
    """
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='AspectActlog',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('activitycode', models.CharField(max_length=16)),
                ('comment', models.CharField(max_length=256)),
                ('updated', models.DateTimeField()),
            ],
            options={
                'db_table': 'aspect_actlog',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='AspectErrors',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('errorcode', models.CharField(max_length=16)),
                ('comment', models.CharField(max_length=256)),
                ('updated', models.DateTimeField()),
            ],
            options={
                'db_table': 'aspect_errors',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='AspectFiles',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('file', models.TextField()),
                ('type', models.CharField(max_length=32)),
                ('version', models.IntegerField()),
                ('updated', models.DateTimeField()),
            ],
            options={
                'db_table': 'aspect_files',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='AspectLookup',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('uniqueid', models.CharField(max_length=128)),
                ('title', models.CharField(max_length=256)),
                ('type', models.CharField(max_length=16)),
                ('graphname', models.CharField(max_length=256)),
                ('currentversion', models.IntegerField()),
                ('auth_user_id', models.PositiveIntegerField()),
                ('updated', models.DateTimeField()),
            ],
            options={
                'db_table': 'aspect_lookup',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='Datasets',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(default='', max_length=64)),
                ('sourcecode', models.CharField(max_length=16, null=True)),
                ('source', models.CharField(default='', max_length=64)),
                ('sourceurl', models.CharField(default='', max_length=256)),
                ('datasetname', models.CharField(max_length=16, null=True)),
                ('uniqueidformat', models.CharField(max_length=128, null=True)),
                ('protected', models.CharField(choices=[('yes', 'Yes'), ('no', 'No')], default='no', max_length=16)),
                ('count', models.IntegerField(default=0)),
            ],
            options={
                'db_table': 'datasets',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='FacetActlog',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('activitycode', models.CharField(max_length=16)),
                ('comment', models.CharField(max_length=256)),
                ('updated', models.DateTimeField()),
            ],
            options={
                'db_table': 'facet_actlog',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='FacetErrors',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('errorcode', models.CharField(max_length=16)),
                ('comment', models.CharField(max_length=256)),
                ('updated', models.DateTimeField()),
            ],
            options={
                'db_table': 'facet_errors',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='FacetFiles',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('file', models.TextField()),
                ('type', models.CharField(max_length=32)),
                ('version', models.IntegerField()),
                ('updated', models.DateTimeField()),
            ],
            options={
                'db_table': 'facet_files',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='FacetLookup',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('uniqueid', models.CharField(max_length=128)),
                ('title', models.CharField(max_length=256)),
                ('type', models.CharField(max_length=16)),
                ('graphname', models.CharField(max_length=256)),
                ('currentversion', models.IntegerField()),
                ('auth_user_id', models.PositiveIntegerField()),
                ('updated', models.DateTimeField()),
            ],
            options={
                'db_table': 'facet_lookup',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='JsonActlog',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('session', models.CharField(default=None, max_length=24)),
                ('activitylog', models.CharField(default='', max_length=2048)),
                ('comment', models.CharField(default=None, max_length=256)),
                ('updated', models.DateTimeField(auto_now=True)),
            ],
            options={
                'db_table': 'json_actlog',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='JsonAspects',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('updated', models.DateTimeField(auto_now=True)),
            ],
            options={
                'db_table': 'json_aspects',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='JsonErrors',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('session', models.CharField(default=None, max_length=24)),
                ('errorcode', models.CharField(default='', max_length=128)),
                ('comment', models.CharField(default=None, max_length=256)),
                ('updated', models.DateTimeField(auto_now=True)),
            ],
            options={
                'db_table': 'json_errors',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='JsonFacets',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('updated', models.DateTimeField(auto_now=True)),
            ],
            options={
                'db_table': 'json_facets',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='JsonFiles',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('file', models.TextField(default='')),
                ('type', models.CharField(default='', max_length=32)),
                # NOTE(review): default='' on an IntegerField cannot be coerced
                # to int on save; confirm against the model definition.
                ('version', models.IntegerField(default='')),
                ('jhash', models.CharField(blank=True, max_length=52, null=True)),
                ('comments', models.CharField(blank=True, max_length=32, null=True)),
                ('updated', models.DateTimeField(auto_now=True)),
            ],
            options={
                'db_table': 'json_files',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='JsonLookup',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('uniqueid', models.CharField(default='', max_length=128, unique=True)),
                ('title', models.CharField(default='', max_length=256)),
                ('graphname', models.CharField(default='', max_length=256)),
                ('currentversion', models.IntegerField(default=0)),
                # NOTE(review): same issue as JsonFiles.version -- an empty
                # string default on an IntegerField; confirm intended default.
                ('auth_user_id', models.IntegerField(default='')),
                ('updated', models.DateTimeField(auto_now=True)),
            ],
            options={
                'db_table': 'json_lookup',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='References',
            fields=[
                ('id', models.SmallAutoField(primary_key=True, serialize=False)),
                ('journal', models.CharField(blank=True, max_length=256, null=True)),
                ('authors', models.CharField(blank=True, max_length=2048, null=True)),
                ('aulist', models.CharField(blank=True, max_length=1024, null=True)),
                ('year', models.PositiveSmallIntegerField(blank=True, null=True)),
                ('volume', models.CharField(blank=True, max_length=12, null=True)),
                ('issue', models.CharField(blank=True, max_length=16, null=True)),
                ('startpage', models.CharField(blank=True, max_length=16, null=True)),
                ('endpage', models.CharField(blank=True, max_length=16, null=True)),
                ('title', models.CharField(blank=True, max_length=512, null=True)),
                ('url', models.CharField(blank=True, max_length=256, null=True)),
                ('doi', models.CharField(max_length=256)),
                ('count', models.SmallIntegerField(blank=True, null=True)),
                ('updated', models.DateTimeField()),
            ],
            options={
                'db_table': 'references',
                'managed': False,
            },
        ),
    ]
| StarcoderdataPython |
3231114 | <gh_stars>1-10
"""
Investigate the title of an Actor's active browser window.
"""
from screenpy import Actor
from screenpy.pacing import beat
from ..abilities import BrowseTheWeb
class BrowserTitle:
    """A Question about the title of the Actor's active browser window.

    Abilities Required:
        :class:`~screenpy_selenium.abilities.BrowseTheWeb`

    Examples::

        the_actor.should(See.the(BrowserTitle(), ReadsExactly("Welcome!")))
    """
    def describe(self) -> str:
        """Summarize this Question for logging."""
        return "The current page's title."
    @beat("{} reads the title of the page from their browser.")
    def answered_by(self, the_actor: Actor) -> str:
        """Fetch the title through the Actor's browsing ability."""
        return the_actor.ability_to(BrowseTheWeb).browser.title
| StarcoderdataPython |
11245378 | import logging, weakref
import striga.core.exception
import striga.server.application, striga.server.service
import striga.service.socketserver
###
L = logging.getLogger('Frontend')
L.setLevel(logging.NOTSET)
###
class FrontendService(striga.server.service.Service):
    '''
    Base class for frontend service
    '''
    def __init__(self, parent, name, startstoppriority):
        striga.server.service.Service.__init__(self, parent, name, startstoppriority)
        # Weak references avoid keeping the application/site bus alive past shutdown.
        self.ApplicationRef = weakref.ref(striga.server.application.GetInstance())
        self.SiteBusRef = None
        self.SiteBusName = 'SiteBus'
    def _configure(self, conffilename):
        # Map configuration directives to handlers; '!' fires when parsing finishes.
        return {
            'tcpipv4' : self.__configure_tcpipv4,
            'sitebus' : self.__configure_sitebus,
            '!' : self._configure_finished,
        }
    def __configure_tcpipv4(self, conffilename, host = '0.0.0.0', port = 4000):
        #Here we are creating 'child' service ...
        # (the constructor registers the server with `self`; the instance is not kept here)
        striga.service.socketserver.TCPIPv4TCPServerService(
            host, port,
            self._socketReady,
            self, 'TCPIPv4Server', 201)
    def __configure_sitebus(self, conffilename, name):
        # Remember which named sitebus service to bind to at start time.
        self.SiteBusName = str(name)
    def _configure_finished(self):
        #Obtain thread pool ...
        app = striga.server.application.GetInstance()
        #This actually creates dependency on thread pool service ...
        #TODO: Service dependency handling
        self.ThreadPool = weakref.ref(app.Services.ThreadPool)
        self._ChangeServiceStateToConfigured()
    def _DoStart(self):
        # Resolve the configured sitebus lazily, failing fast with a clear error.
        app = striga.server.application.GetInstance()
        if not hasattr(app.Services, self.SiteBusName):
            raise striga.core.exception.StrigaConfigurationError("Cannot start frontend service '%s' as there is no sitebus '%s'" % (self.ServiceName, self.SiteBusName))
        self.SiteBusRef = weakref.ref(getattr(app.Services, self.SiteBusName))
        L.debug("Frontend service '%s' connected to '%s'" % (self.ServiceName, self.SiteBusRef().ServiceName))
    def _DoStop(self):
        self.SiteBusRef = None
    def _socketReady(self, socket):
        '''
        Override this in implementation
        '''
        pass
| StarcoderdataPython |
4894635 | <filename>views/__init__.py<gh_stars>0
from PIL import Image
from flask import Blueprint, render_template, request, jsonify
import numpy as np
import tensorflow as tf
from util import model, lb
# Capture the default TF graph at import time; predict() re-enters it with
# graph.as_default() when running the model.
graph = tf.get_default_graph()
base = Blueprint('base', __name__)
# NOTE(review): THRESHOLD appears unused in this module -- confirm before removal.
THRESHOLD = 1.5
@base.route('/')
def index():
    """Serve the upload form."""
    return render_template('index.html')
@base.route('/predict', methods=['post'])
def predict():
    """Classify an uploaded image.

    Expects a multipart form upload with the image under the key
    ``imgLeft``.  The image is resized to the 256x256 input resolution the
    CNN was trained on, run through the model, and the predicted class
    label plus its confidence (as a percentage) are returned as JSON.

    Fixes: removed per-request debug leftovers -- a ``tes.jpg`` file was
    written to the working directory on every request, the input shape and
    the full model summary were printed, and dead commented-out code
    shadowed the real prediction.
    """
    files = request.files
    img_left = Image.open(files.get('imgLeft'))
    # Resize to the network's expected input resolution.
    img_cnn = img_left.resize((256, 256))
    img_cnn = np.array(img_cnn)
    # Add the batch dimension: (H, W, C) -> (1, H, W, C).
    img_cnn = np.expand_dims(img_cnn, axis=0)
    # Run the prediction inside the graph captured at import time, then map
    # the argmax class index back to its label via the label binarizer.
    with graph.as_default():
        pred = model.predict(img_cnn)
        i = pred.argmax(axis=1)[0]
        label_class = lb.classes_[i]
    return jsonify(klasifikasi=label_class,
                   score=pred.max().item() * 100)
| StarcoderdataPython |
3243319 | <gh_stars>0
from jpfund.__morningstar import Morningstar
from jpfund.__emaxis import EMaxis
from jpfund.__nikko import Nikko

__copyright__ = 'Copyright (C) 2018 kunigaku'
__version__ = '0.0.1'
__license__ = 'MIT'
__author__ = 'kunigaku'
__author_email__ = '<EMAIL>'
__url__ = 'https://github.com/kunigaku/jpfund'

# Declare the re-exported data-source classes as the package's public API so
# ``from jpfund import *`` picks them up; the previous ``__all__ = []`` hid
# the three imports above from star-imports.
__all__ = ['Morningstar', 'EMaxis', 'Nikko']
| StarcoderdataPython |
4880648 |
# exports
from .helper import DatasetHelper
from .dataset import Dataset
from .full import FullDataset
from .sub import SubDataset
from .all import AllFull
from .rs import RoughSmoothFull
from .rs import RoughSmoothSub
from .gabor import GaborRoughSmoothFull
from .gabor import GaborRoughSmoothSub
| StarcoderdataPython |
12844149 | <reponame>Helveg/BlenderNEURON<filename>blenderneuron/blender/blendernode.py<gh_stars>10-100
import bpy
from blenderneuron.blender.blenderroot import BlenderRoot
from blenderneuron.blender.blenderrootgroup import *
from blenderneuron.commnode import CommNode
class BlenderNode(CommNode):
    """Blender-side communication node.

    Tracks NEURON root sections, cell groups and synapse sets, and mirrors
    them in the Blender UI via the scene's BlenderNEURON property group.
    """
    def __init__(self, *args, **kwargs):
        super(BlenderNode, self).__init__("Blender", *args, **kwargs)
    @property
    def ui_properties(self):
        # Scene-level property group that holds the addon's UI state.
        return bpy.data.scenes[0].BlenderNEURON
    def add_group(self, name=None, include_groupless_roots=True):
        """Create a new root group (auto-named when no name is given) and
        register it with this node and the UI list."""
        self.update_root_index()
        if name is None:
            name = self.find_unique_group_name()
        group = BlenderRootGroup(name, self)
        # Attach group to node
        self.groups[name] = group
        # Add group to the UI list
        group.add_to_UI()
        if include_groupless_roots:
            group.add_groupless_roots()
        return group
    def update_root_index(self):
        """Synchronize self.root_index with the root sections NEURON reports."""
        # Keep track which roots have been removed from NRN
        roots_to_delete = set(self.root_index.keys())
        # Get the list of root sections from NEURON
        try:
            root_data = self.client.get_roots()
            # Update new or existing root entries
            for i, root_info in enumerate(root_data):
                name = root_info["name"]
                existing_root = self.root_index.get(name)
                # Update existing root
                if existing_root is not None:
                    existing_root.index = root_info["index"]
                    existing_root.name = root_info["name"]
                    # Don't remove roots that previously existed and are present
                    roots_to_delete.remove(name)
                # Add a new root
                else:
                    new_root = self.root_index[name] = BlenderRoot(
                        root_info["index"],
                        root_info["name"]
                    )
                    # Make sure it's listed as selectable in all groups
                    for group in self.groups.values():
                        new_root.add_to_UI_group(group.ui_group)
        except ConnectionRefusedError:
            # NOTE(review): when NEURON is unreachable, roots_to_delete still
            # contains every known root, so the finally-block below removes
            # them all -- confirm this wholesale cleanup is intended.
            root_data = []
        finally:
            # Delete removed roots
            for name_to_delete in roots_to_delete:
                self.root_index[name_to_delete].remove(node=self)
    def find_unique_group_name(self):
        """Return the first unused 'Group.NNN' name."""
        i_name = len(self.groups.values())
        while True:
            name = "Group." + str(i_name).zfill(3)
            if name in self.groups:
                i_name += 1
            else:
                break
        return name
    def get_group_data_from_neuron(self, group_list):
        """Ask NEURON for the full (compressed) state of the given groups
        and return the decompressed result."""
        # Convert blender groups to skeletal dicts (needed for XML rcp with NRN)
        # These dicts contain basic information (e.g. no 3D data, activity)
        blender_groups = self.get_group_dicts(group_list)
        # Send a request to NRN for the selected groups
        compressed = self.client.initialize_groups(blender_groups)
        # Decompress the result
        nrn_groups = self.decompress(compressed)
        return nrn_groups
    def import_groups_from_neuron(self, group_list):
        """Replace each group's contents (dropping any stale cell views)
        with fresh data pulled from NEURON."""
        nrn_groups = self.get_group_data_from_neuron(group_list)
        # Update each blender node group with the data received from NRN
        for nrn_group in nrn_groups:
            node_group = self.groups[nrn_group["name"]]
            print('Importing group: ' + node_group.name + ' from NEURON...')
            # Remove any views of the cells
            if node_group.view is not None:
                node_group.view.remove()
                node_group.view = None
            # Update blender node group with the data received from NRN
            node_group.from_full_NEURON_group(nrn_group)
    def get_selected_groups(self):
        # Groups the user ticked in the UI.
        return [group for group in self.groups.values() if group.selected]
    def get_group_dicts(self, group_list):
        # Skeletal dict form used for the NEURON round-trip.
        return [group.to_dict() for group in group_list]
    @property
    def synapse_sets(self):
        # Synapse sets live on the scene's property group, not on this node.
        return bpy.context.scene.BlenderNEURON.synapse_sets
    def add_synapse_set(self, name=None):
        """Append a new synapse set, auto-naming it 'SynapseSet.NNN'
        (same scheme as find_unique_group_name) when no name is given."""
        new_set = self.synapse_sets.add()
        if name is None:
            i_name = len(self.synapse_sets.values())
            while True:
                name = "SynapseSet." + str(i_name).zfill(3)
                if name in self.synapse_sets.keys():
                    i_name += 1
                else:
                    break
        new_set.name = name
        return new_set
    def display_groups(self):
        """Show the selected groups in Blender and remove views of the rest."""
        for group in self.groups.values():
            if group.selected:
                print('Showing group ' + group.name + ' in Blender')
                group.show()
            else:
                group.remove_view()
    def add_neon_effect(self):
        """
        Adds glare filter to the compositing node tree
        :return:
        """
        scene = bpy.context.scene
        scene.use_nodes = True
        links = scene.node_tree.links
        nodes = scene.node_tree.nodes
        # Reuse existing Render Layers / Composite nodes when present so the
        # effect can be applied repeatedly without duplicating nodes.
        layers = nodes.get('Render Layers')
        if layers is None:
            layers = nodes.new('CompositorNodeRLayers')
        glare = nodes.new('CompositorNodeGlare')
        composite = nodes.get('Composite')
        if composite is None:
            composite = nodes.new('CompositorNodeComposite')
        # Wire: render output -> glare -> final composite.
        links.new(layers.outputs['Image'], glare.inputs['Image'])
        links.new(glare.outputs['Image'], composite.inputs['Image'])
        glare.quality = 'MEDIUM'
        glare.iterations = 3
        glare.color_modulation = 0.2
        glare.threshold = 0.1
        glare.streaks = 7
        glare.fade = 0.75
| StarcoderdataPython |
11345483 | # Meme Generator 1
# Demonstrates how to draw text on images using PIL
# (Python Imaging Library)
#
# The script loads an image from the clipboard (or uses
# a default one if the clipboard is empty) and asks for
# two captions (top and bottom) that are then drawn onto
# the image.
import Image
import ImageDraw
import ImageFont
import clipboard
def draw_caption(img, text, top=False):
    """Draw an outlined meme caption across the top or bottom of img.

    The font size starts at 100 and shrinks until the rendered text fits
    inside the image width (with a 20px margin) or hits a 12pt floor.
    The outline is produced by stamping the text in black at every offset
    in a 7x7 neighborhood, then drawing it once more in white on top.
    """
    draw = ImageDraw.Draw(img)
    # Shrink the font until the caption fits the image width:
    text_width = img.size[0]
    size = 100
    while text_width >= (img.size[0] - 20):
        font = ImageFont.truetype('HelveticaNeue-CondensedBlack', size)
        text_width, text_height = draw.textsize(text, font=font)
        size -= 1
        if size <= 12: break
    base_y = 0 if top else img.size[1] - text_height
    # Black copies at every nearby offset form the outline:
    for dx in xrange(-3, 4):
        for dy in xrange(-3, 4):
            draw.text((10 + dx, base_y + dy), text, font=font, fill='black')
    # The white fill goes on last, centered over the outline:
    draw.text((10, base_y), text, font=font, fill='white')
def main():
    # NOTE: Python 2 (Pythonista) script -- print statements, raw_input and
    # unicode() are intentional; do not run under Python 3 as-is.
    print 'Loading image from clipboard...'
    img = clipboard.get_image()
    if img is None:
        print 'No image in clipboard, using default image instead...'
        img = Image.open('Test_Mandrill')
    img.show()
    # Captions are upper-cased for the classic meme look; an empty reply
    # skips that caption entirely.
    print 'Enter the top caption (press return for none):'
    caption_top = unicode(raw_input(), 'utf-8')
    caption_top = caption_top.upper()
    if caption_top != '':
        draw_caption(img, caption_top, top=True)
    print 'Enter the bottom caption (press return for none):'
    caption_btm = unicode(raw_input(), 'utf-8')
    caption_btm = caption_btm.upper()
    if caption_btm != '':
        draw_caption(img, caption_btm, top=False)
    img.show()
    # If you want to copy the result to the clipboard automatically,
    # uncomment the following line:
    #clipboard.set_image(img.convert('RGBA'))
    # You can also copy an image from the console output or save it
    # to your camera roll by touching and holding it.
if __name__ == '__main__':
    main()
| StarcoderdataPython |
3355408 | <gh_stars>1-10
# -*- coding: utf-8 -*-
from grow import extensions
# Monkey patch grow.url before doing anything else
from grow.common import urls
urls._Url = urls.Url
class AmpDevUrl(urls._Url):
    """Url subclass that strips ``/index.html`` and ``.html`` suffixes so
    generated links follow amp.dev's clean-URL scheme."""
    def __init__(self, path, host=None, port=None, scheme=None):
        # Bug fix: the original forwarded hard-coded ``None`` for host, port
        # and scheme, silently discarding any values callers passed in.
        super(AmpDevUrl, self).__init__(path, host=host, port=port, scheme=scheme)
        self.path = self.path.replace('/index.html', '/').replace('.html', '')
# Install the subclass globally so every URL grow renders is cleaned.
urls.Url = AmpDevUrl
from grow import extensions
from grow.documents import document, document_format, static_document
from grow.extensions import hooks
from .markdown_extras import block_filter as BlockFilter
from .markdown_extras import block_tip as BlockTip
from .markdown_extras import block_video as BlockVideo
from .markdown_extras import inline_tip as InlineTip
class AmpDevPreRenderHook(hooks.PreRenderHook):
    """Run the amp.dev markdown transformations before a document renders."""
    def should_trigger(self, previous_result, doc, original_body, *_args,
                       **_kwargs):
        # Trigger only for markdown documents that actually have content.
        body = previous_result if previous_result else original_body
        return (body is not None
                and isinstance(doc.format, document_format.MarkdownDocumentFormat))
    def trigger(self, previous_result, doc, original_body, *_args, **_kwargs):
        # Prefer the output of an earlier hook, falling back to the raw body.
        body = previous_result if previous_result else original_body
        return self.extension.transform_markdown(original_body, body)
class AmpDevExtension(extensions.BaseExtension):
    """Grow extension wiring amp.dev's markdown extras into the pod."""
    def __init__(self, pod, config):
        super(AmpDevExtension, self).__init__(pod, config)
        # Object cache used for template partials.
        self.template_cache = pod.podcache.get_object_cache('amp_dev_template')
        # Expose the extension directly on the pod for use in templates.
        pod.amp_dev = self
    def transform_markdown(self, original_body, content):
        # Apply each markdown extra in order; later passes see the output of
        # earlier ones.
        for extra in (InlineTip, BlockTip, BlockVideo, BlockFilter):
            content = extra.trigger(original_body, content)
        return content
    @property
    def available_hooks(self):
        return [AmpDevPreRenderHook]
| StarcoderdataPython |
3438027 | <filename>docs/src/ming_odm_properties.py<gh_stars>10-100
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Clear the class names in case MappedClasses are declared in another example
import re
from ming.odm import Mapper
Mapper._mapper_by_classname.clear()
from ming import create_datastore
from ming.odm import ThreadLocalODMSession
session = ThreadLocalODMSession(bind=create_datastore('mim:///odm_tutorial'))
from ming import schema
from ming.odm import MappedClass
from ming.odm import FieldProperty
import hashlib
class PasswordProperty(FieldProperty):
    """FieldProperty that hashes the password on write and masks it on read.

    NOTE(review): the stored digest is unsalted MD5, which is not an
    acceptable password hash for production use -- this is a documentation
    example only.  ``hashlib.md5(value)`` also requires ``bytes`` under
    Python 3, so the snippet is Python 2 specific.
    """
    def __init__(self):
        # Password is always a required string.
        super(PasswordProperty, self).__init__(schema.String(required=True))
    def __get__(self, instance, cls=None):
        # Class-level access returns the descriptor itself (descriptor protocol).
        if instance is None: return self
        class Password(str):
            def __new__(cls, content):
                # The visible value is always the mask; the stored (hashed)
                # value rides along as ``.raw_value``.
                self = str.__new__(cls, '******')
                self.raw_value = content
                return self
        # As we don't want to leak passwords we return an asterisked string
        # but the real value of the password will always be available as .raw_value
        # so we can check passwords when logging in.
        return Password(super(PasswordProperty, self).__get__(instance, cls))
    def __set__(self, instance, value):
        # Store only the digest, never the plaintext.
        pwd = hashlib.md5(value).hexdigest()
        super(PasswordProperty, self).__set__(instance, pwd)
class User(MappedClass):
    """Example mapped document demonstrating the hashed ``password`` field."""
    class __mongometa__:
        # Collection binding: which ODM session and collection name to use.
        session = session
        name = 'user'
    _id = FieldProperty(schema.ObjectId)
    name = FieldProperty(schema.String(required=True))
    password = PasswordProperty()
# Start each docs run from an empty collection, then compile the mappers.
# The ``#{compileall`` / ``#}`` markers delimit the snippet extracted by the
# documentation build.
User.query.remove({})
#{compileall
from ming.odm import Mapper
Mapper.compile_all()
#}
def snippet1_1():
    # Create a user; PasswordProperty.__set__ stores the MD5 digest.
    user = User(name='User 1',
                password='<PASSWORD>')
    session.flush()
    # Reading the raw mongo document shows the hashed value, not the input.
    # The bare expressions below are doc-example statements whose results are
    # displayed by the documentation build.
    user = session.db.user.find_one()
    user['password']
    user['password'] == hashlib.md5('<PASSWORD>').hexdigest()
def snippet1_2():
session.clear()
user = User.query.find().first()
user.password
user.password.raw_value | StarcoderdataPython |
268899 | <reponame>notnamed/social-graph<gh_stars>0
import social_graph
# NOTE(review): runs at import time -- presumably bootstraps the database
# schema; confirm init_db() is idempotent before this module is imported twice.
social_graph.init_db()
| StarcoderdataPython |
9794008 | from django.shortcuts import render
from rest_framework import viewsets
from rest_framework import filters
from rest_framework import permissions
from .serializer import FilerSerializer, FilerFolderSerializer
from filer.models import Image, File, Folder
class FilerViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for filer ``File`` objects, listed alphabetically.

    ``SearchFilter`` only takes effect when the view defines
    ``search_fields``; without it the ``?search=`` query parameter was
    silently ignored, so the obvious ``name`` column is now searchable.
    """
    queryset = File.objects.all().order_by('name')
    serializer_class = FilerSerializer
    filter_backends = [filters.SearchFilter]
    # Fix: previously missing, which made SearchFilter a no-op.
    search_fields = ['name']
    permission_classes = [permissions.IsAuthenticatedOrReadOnly]
class FilerFolderViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for filer ``Folder`` objects, listed alphabetically.

    Mirrors ``FilerViewSet``: ``search_fields`` is required for
    ``SearchFilter`` to honor the ``?search=`` query parameter.
    """
    queryset = Folder.objects.all().order_by('name')
    serializer_class = FilerFolderSerializer
    filter_backends = [filters.SearchFilter]
    # Fix: previously missing, which made SearchFilter a no-op.
    search_fields = ['name']
    permission_classes = [permissions.IsAuthenticatedOrReadOnly]
| StarcoderdataPython |
11234693 | <filename>mobilabs/py_lib/__init__.py
"""."""
from . main import get_name
| StarcoderdataPython |
6522945 | from vehicles.vehicles import Car
from vehicles.package import Package
def main():
    """Exercise the Car/Package API: build cars, load packages, report totals."""
    trike = Car(wheels=3, max_weight=100)
    sports_car = Car(wheels=4, max_weight=120)
    # Compare the two vehicles' basic attributes.
    print(sports_car.speed, trike.speed)
    print(sports_car.wheels, trike.wheels)
    print(sports_car.speed)
    print(sports_car.max_weight)
    light, heavy, tiny = (
        Package(weight=20, volume=50),
        Package(weight=150, volume=100),
        Package(weight=10, volume=20),
    )
    # Load a batch, then attempt to add one of its packages a second time.
    trike.add_package([heavy, tiny])
    print(trike.get_packages())
    trike.add_package(heavy)
    print(trike.get_packages())
    print(f"packet count is: {trike.get_package_count()}")
    print(f"total weight is: {trike.get_total_weight()}")
    print(f"total volume is: {trike.get_total_volume()}")
    # 'light' was never added; remove_package is expected to tolerate that.
    trike.remove_package(light)
if __name__ == "__main__":
    main()
| StarcoderdataPython |
1374 | # Author: <NAME>
# Copyright (c) 2019, <NAME>
# All rights reserved.
# based on github.com/ClementPinard/SfMLearner-Pytorch
from __future__ import division
import torch
from torch.autograd import Variable
pixel_coords = None
def set_id_grid(depth):
    """(Re)build the cached homogeneous pixel grid for images sized like
    ``depth``.

    The grid is stored in the module-level ``pixel_coords`` cache used by
    ``pixel2cam`` and -- as an improvement over the original, which
    returned ``None`` -- is also returned so callers can use it directly.

    Args:
        depth: any tensor of shape [B, H, W]; only H, W and dtype are used.
    Returns:
        Tensor of shape [1, 3, H, W] holding (u, v, 1) per pixel.
    """
    global pixel_coords
    b, h, w = depth.size()
    i_range = Variable(torch.arange(0, h).view(1, h, 1).expand(1,h,w)).type_as(depth)  # [1, H, W]
    j_range = Variable(torch.arange(0, w).view(1, 1, w).expand(1,h,w)).type_as(depth)  # [1, H, W]
    ones = Variable(torch.ones(1,h,w)).type_as(depth)
    pixel_coords = torch.stack((j_range, i_range, ones), dim=1)  # [1, 3, H, W]
    return pixel_coords
def check_sizes(input, input_name, expected):
    """Assert that *input* matches the shape spec *expected*.

    *expected* is a string such as 'B3HW': its length fixes the required
    number of dimensions, and every digit character pins the exact size of
    the corresponding axis (letters act as wildcards).
    """
    checks = [input.ndimension() == len(expected)]
    checks.extend(
        input.size(axis) == int(token)
        for axis, token in enumerate(expected)
        if token.isdigit()
    )
    assert all(checks), "wrong size for {}, expected {}, got {}".format(input_name, 'x'.join(expected), list(input.size()))
def pixel2cam(depth, intrinsics_inv):
    # NOTE(review): the triple-quoted string below sits after the ``global``
    # statement, so it is a no-op expression rather than a real docstring;
    # kept in place to avoid reordering statements.
    global pixel_coords
    """Transform coordinates in the pixel frame to the camera frame.
    Args:
        depth: depth maps -- [B, H, W]
        intrinsics_inv: intrinsics_inv matrix for each element of batch -- [B, 3, 3]
    Returns:
        array of (u,v,1) cam coordinates -- [B, 3, H, W]
    """
    b, h, w = depth.size()
    # Lazily (re)build the cached homogeneous pixel grid whenever the spatial
    # size changes; set_id_grid stores it in the module-level pixel_coords.
    if (pixel_coords is None) or pixel_coords.size(2) != h or pixel_coords.size(3) != w:
        set_id_grid(depth)
    current_pixel_coords = pixel_coords[:,:,:h,:w].expand(b,3,h,w).contiguous().view(b, 3, -1)  # [B, 3, H*W]
    # Back-project: K^-1 * (u, v, 1) gives the viewing ray per pixel; scaling
    # each ray by its depth yields the 3D point in camera coordinates.
    cam_coords = intrinsics_inv.bmm(current_pixel_coords).view(b, 3, h, w)
    return cam_coords * depth.unsqueeze(1)
def cam2pixel(cam_coords, proj_c2p_rot, proj_c2p_tr, padding_mode):
    """Transform coordinates in the camera frame to the pixel frame.
    Args:
        cam_coords: pixel coordinates defined in the first camera coordinates system -- [B, 3, H, W]
        proj_c2p_rot: rotation matrix of cameras -- [B, 3, 4]
        proj_c2p_tr: translation vectors of cameras -- [B, 3, 1]
    Returns:
        array of [-1,1] coordinates -- [B, 2, H, W]
    """
    b, _, h, w = cam_coords.size()
    cam_coords_flat = cam_coords.view(b, 3, -1)  # [B, 3, H*W]
    # Rotation and translation are each optional so callers can pass the
    # identity transform implicitly by handing in None.
    if proj_c2p_rot is not None:
        pcoords = proj_c2p_rot.bmm(cam_coords_flat)
    else:
        pcoords = cam_coords_flat
    if proj_c2p_tr is not None:
        pcoords = pcoords + proj_c2p_tr  # [B, 3, H*W]
    X = pcoords[:, 0]
    Y = pcoords[:, 1]
    # Clamp depth away from zero to avoid division blow-ups behind the camera.
    Z = pcoords[:, 2].clamp(min=1e-3)
    X_norm = 2*(X / Z)/(w-1) - 1  # Normalized, -1 if on extreme left, 1 if on extreme right (x = w-1) [B, H*W]
    Y_norm = 2*(Y / Z)/(h-1) - 1  # Idem [B, H*W]
    if padding_mode == 'zeros':
        # Push every out-of-frame coordinate to 2 (far outside [-1, 1]) so
        # grid_sample's zero padding is not blended with valid pixels.
        # NOTE(review): ``+`` on two comparison masks is the old ByteTensor
        # idiom; modern PyTorch expects logical OR on bool tensors -- confirm
        # against the torch version in use.
        X_mask = ((X_norm > 1)+(X_norm < -1)).detach()
        X_norm[X_mask] = 2  # make sure that no point in warped image is a combinaison of im and gray
        Y_mask = ((Y_norm > 1)+(Y_norm < -1)).detach()
        Y_norm[Y_mask] = 2
    pixel_coords = torch.stack([X_norm, Y_norm], dim=2)  # [B, H*W, 2]
    return pixel_coords.view(b,h,w,2)
def euler2mat(angle):
    """Convert Euler angles to a batch of rotation matrices.

    Reference: https://github.com/pulkitag/pycaffe-utils/blob/master/rot_utils.py#L174

    Args:
        angle: rotation angles about the x, y and z axes (radians) -- [B, 3]
    Returns:
        Rotation matrices R = Rx @ Ry @ Rz -- [B, 3, 3]
    """
    B = angle.size(0)
    rx, ry, rz = angle[:, 0], angle[:, 1], angle[:, 2]

    # Constant columns built from the input so dtype/device/grad-graph match.
    zeros = rz.detach() * 0
    ones = zeros.detach() + 1

    cz, sz = torch.cos(rz), torch.sin(rz)
    rot_z = torch.stack([cz, -sz, zeros,
                         sz, cz, zeros,
                         zeros, zeros, ones], dim=1).view(B, 3, 3)

    cy, sy = torch.cos(ry), torch.sin(ry)
    rot_y = torch.stack([cy, zeros, sy,
                         zeros, ones, zeros,
                         -sy, zeros, cy], dim=1).view(B, 3, 3)

    cx, sx = torch.cos(rx), torch.sin(rx)
    rot_x = torch.stack([ones, zeros, zeros,
                         zeros, cx, -sx,
                         zeros, sx, cx], dim=1).view(B, 3, 3)

    return rot_x.bmm(rot_y).bmm(rot_z)
def quat2mat(quat):
    """Convert quaternion coefficients to rotation matrices.

    Only the (x, y, z) coefficients are given; w is fixed to 1 before the
    quaternion is normalized, so the input parameterizes a unit quaternion
    with three numbers.

    Args:
        quat: last three quaternion coefficients -- [B, 3]
    Returns:
        Rotation matrices -- [B, 3, 3]
    """
    B = quat.size(0)
    # Prepend w = 1 (built from the input to keep dtype/device/grad-graph),
    # then normalize to a unit quaternion.
    full_quat = torch.cat([quat[:, :1].detach() * 0 + 1, quat], dim=1)
    full_quat = full_quat / full_quat.norm(p=2, dim=1, keepdim=True)
    w, x, y, z = full_quat[:, 0], full_quat[:, 1], full_quat[:, 2], full_quat[:, 3]

    ww, xx, yy, zz = w.pow(2), x.pow(2), y.pow(2), z.pow(2)
    wx, wy, wz = w * x, w * y, w * z
    xy, xz, yz = x * y, x * z, y * z

    return torch.stack([
        ww + xx - yy - zz, 2 * xy - 2 * wz, 2 * wy + 2 * xz,
        2 * wz + 2 * xy, ww - xx + yy - zz, 2 * yz - 2 * wx,
        2 * xz - 2 * wy, 2 * wx + 2 * yz, ww - xx - yy + zz,
    ], dim=1).view(B, 3, 3)
def pose_vec2mat(vec, rotation_mode='euler'):
    """Convert 6DoF pose parameters to a transformation matrix.

    Args:
        vec: pose parameters in the order tx, ty, tz, rx, ry, rz -- [B, 6]
        rotation_mode: 'euler' or 'quat', selecting how the last three
            components are interpreted.
    Returns:
        Transformation matrices [R | t] -- [B, 3, 4]
    """
    translation = vec[:, :3].unsqueeze(-1)  # [B, 3, 1]
    rot_params = vec[:, 3:]
    if rotation_mode == 'euler':
        rot_mat = euler2mat(rot_params)  # [B, 3, 3]
    elif rotation_mode == 'quat':
        rot_mat = quat2mat(rot_params)  # [B, 3, 3]
    return torch.cat([rot_mat, translation], dim=2)  # [B, 3, 4]
def flow_warp(img, flow, padding_mode='zeros'):
    """
    Inverse warp a source image to the target image plane.
    Args:
        img: the source image (where to sample pixels) -- [B, 3, H, W]
        flow: flow map of the target image -- [B, 2, H, W]
    Returns:
        Source image warped to the target image plane
    """
    check_sizes(img, 'img', 'BCHW')
    check_sizes(flow, 'flow', 'B2HW')
    bs, _, h, w = flow.size()
    # Horizontal (u) and vertical (v) flow components.
    u = flow[:,0,:,:]
    v = flow[:,1,:,:]
    # Base pixel grid in absolute coordinates, matching flow's dtype/device.
    grid_x = Variable(torch.arange(0, w).view(1, 1, w).expand(1,h,w), requires_grad=False).type_as(u).expand_as(u)  # [bs, H, W]
    grid_y = Variable(torch.arange(0, h).view(1, h, 1).expand(1,h,w), requires_grad=False).type_as(v).expand_as(v)  # [bs, H, W]
    # Displace each pixel by its flow vector...
    X = grid_x + u
    Y = grid_y + v
    # ...and normalize to grid_sample's [-1, 1] coordinate convention.
    # NOTE(review): this mapping matches align_corners=True semantics; recent
    # torch versions default grid_sample to align_corners=False -- confirm.
    X = 2*(X/(w-1.0) - 0.5)
    Y = 2*(Y/(h-1.0) - 0.5)
    grid_tf = torch.stack((X,Y), dim=3)
    img_tf = torch.nn.functional.grid_sample(img, grid_tf, padding_mode=padding_mode)
    return img_tf
def pose2flow(depth, pose, intrinsics, intrinsics_inv, rotation_mode='euler', padding_mode=None):
    """
    Converts pose parameters to rigid optical flow

    Args:
        depth: depth map of the target image -- [B, H, W]
        pose: 6DoF pose parameters from target to source -- [B, 6]
        intrinsics / intrinsics_inv: camera matrix and its inverse -- [B, 3, 3]
    Returns:
        Rigid flow field -- [B, 2, H, W]
    """
    check_sizes(depth, 'depth', 'BHW')
    check_sizes(pose, 'pose', 'B6')
    check_sizes(intrinsics, 'intrinsics', 'B33')
    check_sizes(intrinsics_inv, 'intrinsics', 'B33')
    assert(intrinsics_inv.size() == intrinsics.size())
    bs, h, w = depth.size()
    # Base pixel grid, later subtracted to turn positions into displacements.
    grid_x = Variable(torch.arange(0, w).view(1, 1, w).expand(1,h,w), requires_grad=False).type_as(depth).expand_as(depth)  # [bs, H, W]
    grid_y = Variable(torch.arange(0, h).view(1, h, 1).expand(1,h,w), requires_grad=False).type_as(depth).expand_as(depth)  # [bs, H, W]
    # Back-project pixels to 3D, transform by the pose, and re-project.
    cam_coords = pixel2cam(depth, intrinsics_inv)  # [B,3,H,W]
    pose_mat = pose_vec2mat(pose, rotation_mode)  # [B,3,4]
    # Get projection matrix for tgt camera frame to source pixel frame
    proj_cam_to_src_pixel = intrinsics.bmm(pose_mat)  # [B, 3, 4]
    src_pixel_coords = cam2pixel(cam_coords, proj_cam_to_src_pixel[:,:,:3], proj_cam_to_src_pixel[:,:,-1:], padding_mode)  # [B,H,W,2]
    # Denormalize from grid_sample's [-1, 1] range back to pixels, then
    # subtract the base grid to obtain the flow vectors.
    X = (w-1)*(src_pixel_coords[:,:,:,0]/2.0 + 0.5) - grid_x
    Y = (h-1)*(src_pixel_coords[:,:,:,1]/2.0 + 0.5) - grid_y
    return torch.stack((X,Y), dim=1)
def flow2oob(flow):
    """Return a boolean mask of flow vectors that point out of bounds.

    Args:
        flow: flow map -- [B, 2, H, W]
    Returns:
        [B, H, W] mask, True where the displaced pixel falls outside the
        normalized [-1, 1] image range.
    """
    check_sizes(flow, 'flow', 'B2HW')
    bs, _, h, w = flow.size()
    u = flow[:,0,:,:]
    v = flow[:,1,:,:]
    # Base pixel grid matching flow's dtype/device.
    grid_x = Variable(torch.arange(0, w).view(1, 1, w).expand(1,h,w), requires_grad=False).type_as(u).expand_as(u)  # [bs, H, W]
    grid_y = Variable(torch.arange(0, h).view(1, h, 1).expand(1,h,w), requires_grad=False).type_as(v).expand_as(v)  # [bs, H, W]
    X = grid_x + u
    Y = grid_y + v
    # Normalize displaced positions to the [-1, 1] range used by grid_sample.
    X = 2*(X/(w-1.0) - 0.5)
    Y = 2*(Y/(h-1.0) - 0.5)
    # Bug fix: the old ByteTensor idiom ``a.add(b) > 0`` is rejected for bool
    # tensors by modern PyTorch; use logical OR instead.
    oob = (X.abs() > 1) | (Y.abs() > 1)
    return oob
def occlusion_mask(grid, depth):
    """Placeholder for an occlusion mask derived from a sampling grid.

    Args:
        grid: sampling grid -- [B, H, W, 2]
        depth: depth map of the target image -- [B, H, W]
    Returns:
        The (currently unprocessed) grid; real occlusion reasoning is TODO.
    """
    # Bug fix: the original validated the undefined name ``img`` (a
    # guaranteed NameError on every call); the argument here is ``grid``.
    check_sizes(grid, 'grid', 'BHW2')
    check_sizes(depth, 'depth', 'BHW')
    mask = grid
    return mask
def inverse_warp(img, depth, pose, intrinsics, intrinsics_inv, rotation_mode='euler', padding_mode='zeros'):
    """
    Inverse warp a source image to the target image plane.

    Args:
        img: the source image (where to sample pixels) -- [B, 3, H, W]
        depth: depth map of the target image -- [B, H, W]
        pose: 6DoF pose parameters from target to source -- [B, 6]
        intrinsics: camera intrinsic matrix -- [B, 3, 3]
        intrinsics_inv: inverse of the intrinsic matrix -- [B, 3, 3]
    Returns:
        Source image warped to the target image plane
    """
    check_sizes(img, 'img', 'B3HW')
    check_sizes(depth, 'depth', 'BHW')
    check_sizes(pose, 'pose', 'B6')
    check_sizes(intrinsics, 'intrinsics', 'B33')
    # NOTE(review): label says 'intrinsics' but the tensor checked is
    # intrinsics_inv; only the error message wording is affected.
    check_sizes(intrinsics_inv, 'intrinsics', 'B33')
    assert(intrinsics_inv.size() == intrinsics.size())

    batch_size, _, img_height, img_width = img.size()

    # Lift target pixels to 3-D camera coordinates using the depth map.
    cam_coords = pixel2cam(depth, intrinsics_inv)  # [B,3,H,W]

    pose_mat = pose_vec2mat(pose, rotation_mode)  # [B,3,4]

    # Get projection matrix for tgt camera frame to source pixel frame
    proj_cam_to_src_pixel = intrinsics.bmm(pose_mat)  # [B, 3, 4]

    # Split the projection into its rotation (3x3) and translation (3x1)
    # parts before projecting camera coordinates to source pixel coordinates.
    src_pixel_coords = cam2pixel(cam_coords, proj_cam_to_src_pixel[:,:,:3], proj_cam_to_src_pixel[:,:,-1:], padding_mode)  # [B,H,W,2]
    projected_img = torch.nn.functional.grid_sample(img, src_pixel_coords, padding_mode=padding_mode)

    return projected_img
| StarcoderdataPython |
1902166 | import shodan
from errbot import BotPlugin, arg_botcmd
# URL for Shodan Host permalinks
_HOST_URL = 'https://www.shodan.io/host/'
class Shodan(BotPlugin):
    '''Use the Shodan.io API.
    '''

    def get_configuration_template(self):
        """Declare the plugin's configurable settings (the Shodan API key)."""
        return dict(
            apikey='Shodan API Key'
        )

    @arg_botcmd('ip', type=str, template='lookup_host',
                help='IP of the host to lookup.')
    def shodan_lookup_host(self, message, ip=None):
        '''Lookup an IP address in Shodan.
        '''
        api = shodan.Shodan(self.config.get('apikey'))
        host = api.host(ip)
        # Attach a permalink back to the Shodan web UI for this host.
        host['permalink'] = '{0}{1}'.format(_HOST_URL, ip)
        # Shodan may omit ip_str; fall back to the queried IP so the
        # render template always has something to show.
        if not host.get('ip_str'):
            host['ip_str'] = ip
        # BUG FIX: the original final line had a dataset-separator token
        # ("| StarcoderdataPython |") fused onto it, which is a syntax error.
        return host
11339105 | <filename>src/ZEO/scripts/zeoqueue.py
#!/usr/bin/env python2.3
"""Report on the number of currently waiting clients in the ZEO queue.
Usage: %(PROGRAM)s [options] logfile
Options:
-h / --help
Print this help text and exit.
-v / --verbose
Verbose output
-f file
--file file
Use the specified file to store the incremental state as a pickle. If
not given, %(STATEFILE)s is used.
-r / --reset
Reset the state of the tool. This blows away any existing state
pickle file and then exits -- it does not parse the file. Use this
when you rotate log files so that the next run will parse from the
beginning of the file.
"""
from __future__ import print_function
import os
import re
import sys
import time
import errno
import getopt
from ZEO._compat import load, dump
COMMASPACE = ', '
STATEFILE = 'zeoqueue.pck'
PROGRAM = sys.argv[0]
tcre = re.compile(r"""
(?P<ymd>
\d{4}- # year
\d{2}- # month
\d{2}) # day
T # separator
(?P<hms>
\d{2}: # hour
\d{2}: # minute
\d{2}) # second
""", re.VERBOSE)
ccre = re.compile(r"""
zrpc-conn:(?P<addr>\d+.\d+.\d+.\d+:\d+)\s+
calling\s+
(?P<method>
\w+) # the method
\( # args open paren
\' # string quote start
(?P<tid>
\S+) # first argument -- usually the tid
\' # end of string
(?P<rest>
.*) # rest of line
""", re.VERBOSE)
wcre = re.compile(r'Clients waiting: (?P<num>\d+)')
def parse_time(line):
    """Return the time portion of a zLOG line in seconds since the epoch,
    or None if the line carries no timestamp."""
    mo = tcre.match(line)
    if mo is None:
        return None
    date, time_ = mo.group('ymd', 'hms')
    date_l = [int(elt) for elt in date.split('-')]
    time_l = [int(elt) for elt in time_.split(':')]
    # BUG FIX: time.mktime() requires a tuple (or struct_time); passing a
    # plain list raises "TypeError: Tuple or struct_time argument required".
    # The trailing (0, 0, 0) supplies weekday, yearday and isdst.
    return int(time.mktime(tuple(date_l + time_l + [0, 0, 0])))
class Txn(object):
    """Track the lifecycle of a single ZEO transaction."""

    def __init__(self, tid):
        self.tid = tid
        # All timestamps/markers start unset; voters collects client
        # addresses as vote() calls are observed.
        self.hint = self.begin = self.vote = self.abort = self.finish = None
        self.voters = []

    def isactive(self):
        """Return True while the transaction has begun but not ended."""
        return bool(self.begin and not (self.abort or self.finish))
class Status(object):
    """Track status of ZEO server by replaying log records.

    We want to keep track of several events:
    - The last committed transaction.
    - The last committed or aborted transaction.
    - The last transaction that got the lock but didn't finish.
    - The client address doing the first vote of a transaction.
    - The number of currently active transactions.
    - The number of reported queued transactions.
    - Client restarts.
    - Number of current connections (but this might not be useful).

    We can observe these events by reading the following sorts of log
    entries:

    2002-12-16T06:16:05 BLATHER(-100) zrpc:12649 calling
    tpc_begin('\x03I\x90((\xdbp\xd5', '', 'QueueCatal...

    2002-12-16T06:16:06 BLATHER(-100) zrpc:12649 calling
    vote('\x03I\x90((\xdbp\xd5')

    2002-12-16T06:16:06 BLATHER(-100) zrpc:12649 calling
    tpc_finish('\x03I\x90((\xdbp\xd5')

    2002-12-16T10:46:10 INFO(0) ZSS:12649:1 Transaction blocked waiting
    for storage. Clients waiting: 1.

    2002-12-16T06:15:57 BLATHER(-100) zrpc:12649 connect from
    ('10.0.26.54', 48983): <ManagedServerConnection ('10.0.26.54', 48983)>

    2002-12-16T10:30:09 INFO(0) ZSS:12649:1 disconnected
    """

    def __init__(self):
        # lineno/pos survive reset() so incremental parsing can resume.
        self.lineno = 0
        self.pos = 0
        self.reset()

    def reset(self):
        # Forget all transaction state (but keep our position in the file).
        self.commit = None
        self.commit_or_abort = None
        self.last_unfinished = None
        self.n_active = 0
        self.n_blocked = 0
        self.n_conns = 0
        self.t_restart = None
        self.txns = {}

    def iscomplete(self):
        # The status report will always be complete if we encounter an
        # explicit restart.
        if self.t_restart is not None:
            return True
        # If we haven't seen a restart, assume that seeing a finished
        # transaction is good enough.
        return self.commit is not None

    def process_file(self, fp):
        # Resume from where the previous run stopped, if anywhere.
        if self.pos:
            if VERBOSE:
                print('seeking to file position', self.pos)
            fp.seek(self.pos)
        while True:
            line = fp.readline()
            if not line:
                break
            self.lineno += 1
            self.process(line)
        self.pos = fp.tell()

    def process(self, line):
        # Cheap substring tests route each line to the right handler.
        if line.find("calling") != -1:
            self.process_call(line)
        elif line.find("connect") != -1:
            self.process_connect(line)
        # test for "locked" because word may start with "B" or "b"
        elif line.find("locked") != -1:
            self.process_block(line)
        elif line.find("Starting") != -1:
            self.process_start(line)

    def process_call(self, line):
        mo = ccre.search(line)
        if mo is None:
            return
        called_method = mo.group('method')
        # Exit early if we've got zeoLoad, because it's the most
        # frequently called method and we don't use it.
        if called_method == "zeoLoad":
            return
        t = parse_time(line)
        # Dispatch to a call_<method>() handler, if one is defined below.
        meth = getattr(self, "call_%s" % called_method, None)
        if meth is None:
            return
        client = mo.group('addr')
        tid = mo.group('tid')
        rest = mo.group('rest')
        meth(t, client, tid, rest)

    def process_connect(self, line):
        # Connections are currently not tracked.
        pass

    def process_block(self, line):
        mo = wcre.search(line)
        if mo is None:
            # assume that this was a restart message for the last blocked
            # transaction.
            self.n_blocked = 0
        else:
            self.n_blocked = int(mo.group('num'))

    def process_start(self, line):
        if line.find("Starting ZEO server") != -1:
            # Server restart: all in-flight transaction state is stale.
            self.reset()
            self.t_restart = parse_time(line)

    def call_tpc_begin(self, t, client, tid, rest):
        txn = Txn(tid)
        txn.begin = t
        # Strip a leading ", " so the remaining args can serve as a hint.
        if rest[0] == ',':
            i = 1
            while rest[i].isspace():
                i += 1
            rest = rest[i:]
        txn.hint = rest
        self.txns[tid] = txn
        self.n_active += 1
        self.last_unfinished = txn

    def call_vote(self, t, client, tid, rest):
        txn = self.txns.get(tid)
        if txn is None:
            # vote without a preceding tpc_begin (log truncated?).
            print("Oops!")
            txn = self.txns[tid] = Txn(tid)
        txn.vote = t
        txn.voters.append(client)

    def call_tpc_abort(self, t, client, tid, rest):
        txn = self.txns.get(tid)
        if txn is None:
            print("Oops!")
            txn = self.txns[tid] = Txn(tid)
        txn.abort = t
        txn.voters = []
        self.n_active -= 1
        if self.commit_or_abort:
            # delete the old transaction
            try:
                del self.txns[self.commit_or_abort.tid]
            except KeyError:
                pass
        self.commit_or_abort = txn

    def call_tpc_finish(self, t, client, tid, rest):
        txn = self.txns.get(tid)
        if txn is None:
            print("Oops!")
            txn = self.txns[tid] = Txn(tid)
        txn.finish = t
        txn.voters = []
        self.n_active -= 1
        if self.commit:
            # delete the old transaction
            try:
                del self.txns[self.commit.tid]
            except KeyError:
                pass
        if self.commit_or_abort:
            # delete the old transaction
            try:
                del self.txns[self.commit_or_abort.tid]
            except KeyError:
                pass
        self.commit = self.commit_or_abort = txn

    def report(self):
        # VERBOSE is a module global set by main() from command-line flags.
        print("Blocked transactions:", self.n_blocked)
        if not VERBOSE:
            return
        if self.t_restart:
            print("Server started:", time.ctime(self.t_restart))
        if self.commit is not None:
            t = self.commit_or_abort.finish
            if t is None:
                t = self.commit_or_abort.abort
            print("Last finished transaction:", time.ctime(t))
        # the blocked transaction should be the first one that calls vote
        L = [(txn.begin, txn) for txn in self.txns.values()]
        L.sort()
        for x, txn in L:
            if txn.isactive():
                began = txn.begin
                if txn.voters:
                    print("Blocked client (first vote):", txn.voters[0])
                print("Blocked transaction began at:", time.ctime(began))
                print("Hint:", txn.hint)
                print("Idle time: %d sec" % int(time.time() - began))
                break
def usage(code, msg=''):
    """Print the module help text (and an optional message) to stderr,
    then exit with the given status code."""
    pieces = [__doc__ % globals()]
    if msg:
        pieces.append(msg)
    for text in pieces:
        print(text, file=sys.stderr)
    sys.exit(code)
def main():
    """Parse command-line options, replay the log into the pickled Status,
    and exit with the number of blocked clients as the status code."""
    global VERBOSE

    VERBOSE = 0
    # NOTE(review): `file` shadows the builtin of the same name (harmless
    # here, Python 3 has no `file` builtin, but worth knowing).
    file = STATEFILE
    reset = False
    # -0 is a secret option used for testing purposes only
    seek = True

    try:
        opts, args = getopt.getopt(sys.argv[1:], 'vhf:r0',
                                   ['help', 'verbose', 'file=', 'reset'])
    except getopt.error as msg:
        usage(1, msg)

    for opt, arg in opts:
        if opt in ('-h', '--help'):
            usage(0)
        elif opt in ('-v', '--verbose'):
            VERBOSE += 1
        elif opt in ('-f', '--file'):
            file = arg
        elif opt in ('-r', '--reset'):
            reset = True
        elif opt == '-0':
            seek = False

    if reset:
        # Blow away the existing state file and exit
        try:
            os.unlink(file)
            if VERBOSE:
                print('removing pickle state file', file)
        except OSError as e:
            # A missing state file is fine; anything else is a real error.
            if e.errno != errno.ENOENT:
                raise
        return

    if not args:
        usage(1, 'logfile is required')
    if len(args) > 1:
        usage(1, 'too many arguments: %s' % COMMASPACE.join(args))
    path = args[0]

    # Get the previous status object from the pickle file, if it is available
    # and if the --reset flag wasn't given.
    status = None
    try:
        statefp = open(file, 'rb')
        try:
            status = load(statefp)
            if VERBOSE:
                print('reading status from file', file)
        finally:
            statefp.close()
    except IOError as e:
        if e.errno != errno.ENOENT:
            raise
    if status is None:
        status = Status()
        if VERBOSE:
            print('using new status')

    if not seek:
        status.pos = 0

    fp = open(path, 'rb')
    try:
        status.process_file(fp)
    finally:
        fp.close()
    # Save state
    statefp = open(file, 'wb')
    dump(status, statefp, 1)
    statefp.close()
    # Print the report and return the number of blocked clients in the exit
    # status code.
    status.report()
    sys.exit(status.n_blocked)
# Script entry point; main() terminates via sys.exit(status.n_blocked),
# so the process exit code is the number of blocked clients.
if __name__ == "__main__":
    main()
| StarcoderdataPython |
76940 | from manim import *
from manim_ml.neural_network.layers import TripletLayer, triplet
from manim_ml.neural_network.layers.feed_forward import FeedForwardLayer
from manim_ml.neural_network.neural_network import NeuralNetwork
# Render settings for this scene: a 1280x720 pixel canvas mapped onto a
# 6.0 x 6.0 frame in scene coordinates.
config.pixel_height = 720
config.pixel_width = 1280
config.frame_height = 6.0
config.frame_width = 6.0
class TripletScene(Scene):
    """Animate construction and one forward pass of a small neural network
    whose input is an (anchor, positive, negative) image triplet."""

    def construct(self):
        # Relative paths to the anchor/positive/negative example images.
        anchor_path = "../assets/triplet/anchor.jpg"
        positive_path = "../assets/triplet/positive.jpg"
        negative_path = "../assets/triplet/negative.jpg"
        triplet_layer = TripletLayer.from_paths(anchor_path, positive_path, negative_path, grayscale=False)
        triplet_layer.scale(0.08)
        # Triplet input feeding two small dense layers.
        neural_network = NeuralNetwork([
            triplet_layer,
            FeedForwardLayer(5),
            FeedForwardLayer(3)
        ])
        neural_network.scale(1)
        self.play(Create(neural_network), run_time=3)
        # BUG FIX: the original final line had a dataset-separator token
        # ("| StarcoderdataPython |") fused onto it, which is a syntax error.
        self.play(neural_network.make_forward_pass_animation(), run_time=10)
3253277 | from __future__ import division
import json
import os
import nltk
import commonfunctions as cf
wnl = nltk.WordNetLemmatizer()
from string import punctuation
from string import digits
import urllib
directory = cf.working_directory

# The dispersion plot is currently only working for a single debate - we need to think about
# whether this is something we want
# This code creates a graph that shows the frequency of different words over time
# In this case ive chosen as an example the words: racist, immigration, latino and america
# Things to be changed: the X axis should represent years

# List all the files in the directory
filesList = os.listdir(directory)

# Create a list for all the objects imported to JSON to be added to
transcripts = []

# import positive and negative lists
# create empty lists for both
# NOTE(review): urllib.urlretrieve is the Python 2 API (Python 3 moved it
# to urllib.request.urlretrieve), so this script appears to target Python 2.
files = ['negative.txt', 'positive.txt']
path = 'http://www.unc.edu/~ncaren/haphazard/'
for file_name in files:
    urllib.urlretrieve(path + file_name, file_name)

pos_words = open("positive.txt").read()
positive_words = pos_words.split('\n')
positive_counts = []
neg_words = open('negative.txt').read()
negative_words = neg_words.split('\n')
negative_counts = []
# NOTE(review): the sentiment word lists and *_counts lists above are
# downloaded/built but never used below.

# Go through each file, open it, and add its content to the list
for myFile in filesList:
    with open(os.path.join(directory, myFile), 'r') as f:
        # Here, the JSON is converted back to a Python object
        transcript = json.load(f)
        transcripts.append(transcript)

# Create lists for the years and the sentiment for each year.
years = []
length = []

# Go through each transcript
for transcript in transcripts:
    # Get the date - converting the ISO date back into a datetime.date object
    date = cf.iso_to_datetime(transcript['date'])
    year = date.year
    years.append(year)
    # Create a string for all of the text in the debate
    allText = ""
    # Add all the text spoken by speakers to that string
    for speaker in transcript['text_by_speakers']:
        allText += (" " + speaker['text'])
    # Removing punctuation, digits
    # Splitting text into words
    # Removing short words and suffixes
    for p in list(punctuation):
        allText = allText.replace(p, '')
    for k in list(digits):
        allText = allText.replace(k, '')
    words = allText.split()
    long_words = [w for w in words if len(w) > 3]
    listofwords = [wnl.lemmatize(t) for t in long_words]
    text = nltk.Text(listofwords)
    # This creates the graph with the words in the parentheses
    text.dispersion_plot(["immigration", "latino", "america", "racist"])
| StarcoderdataPython |
1790144 | # Imports: standard library
import os
import logging
# Imports: third party
import pandas as pd
def save_mrns_and_csns_csv(
    staging_dir: str,
    hd5_dir: str,
    adt: str,
    first_mrn_index: int,
    last_mrn_index: int,
    overwrite_hd5: bool,
):
    """
    Get unique MRNs and CSNs from ADT and save to patients.csv.

    :param staging_dir: <str> Path to temporary staging directory.
    :param hd5_dir: <str> Path to directory where hd5 files are stored.
    :param adt: <str> Path to CSV containing ADT table.
    :param first_mrn_index: <int> First index of desired MRNs.
    :param last_mrn_index: <int> Last index of desired MRNs.
    :param overwrite_hd5: <bool> Overwrite existing hd5 files.
    """
    adt_table = pd.read_csv(adt).sort_values(by=["MRN"], ascending=True)

    # Unique, non-null (MRN, encounter) pairs from the ADT table.
    encounters = adt_table[["MRN", "PatientEncounterID"]].drop_duplicates().dropna()

    # Slice the requested window of distinct MRNs, then keep every
    # encounter belonging to one of those MRNs.
    selected_mrns = encounters["MRN"].drop_duplicates()[first_mrn_index:last_mrn_index]
    selected = encounters[encounters["MRN"].isin(selected_mrns)]

    # Unless overwriting, drop MRNs that already have a tensorized hd5 file.
    if not overwrite_hd5 and os.path.isdir(hd5_dir):
        tensorized_mrns = [
            int(fname.split(".")[0])
            for fname in os.listdir(hd5_dir)
            if fname.endswith(".hd5")
        ]
        selected = selected[~selected["MRN"].isin(tensorized_mrns)]

    output_path = os.path.join(staging_dir, "patients.csv")
    selected.to_csv(output_path, index=False)
    logging.info(f"Saved {output_path}")
def get_files_in_directory(
    directory: str,
    file_extension: str,
    departments_short_names: set = None,
) -> tuple:
    """
    Given a path to a directory and a file extension, returns a list of full paths
    to all files ending in the file extension, and a list of full paths to all files
    that do not end in the file extension.

    Optionally, limit search to a subset of departments (subdirectory names).
    """
    matched = []
    unmatched = []
    for root, dirs, files in os.walk(directory, topdown=True):
        # Prune the walk in place so only whitelisted departments are visited.
        if departments_short_names is not None:
            dirs[:] = [name for name in dirs if name in departments_short_names]
        for fname in files:
            full_path = os.path.join(root, fname)
            target = matched if fname.endswith(file_extension) else unmatched
            target.append(full_path)
    return matched, unmatched
| StarcoderdataPython |
8078467 | <filename>api/rest/serializers/video_seriazer.py<gh_stars>0
'''
@author: gconstantino
'''
from rest_framework import serializers
from api import models
class VideoSerializer(serializers.HyperlinkedModelSerializer):
    """
    Basic REST representation of the Video model.
    """

    class Meta:
        # Expose the model's identity, category, media reference and the
        # thumbs-up/down vote counters.
        model = models.Video
        fields = ('url', 'id', 'category', 'name', 'video', 'thumbs_up', 'thumbs_down')
| StarcoderdataPython |
11276621 | from erde import autocli, utils, read_stream, write_stream
def _nullify(geoseries, nullify_irrelevant):
    """Return geometry areas; optionally blank out non-polygonal geometries.

    With nullify_irrelevant=True, geometries whose type is not
    (Multi)Polygon get NaN instead of their (meaningless) area.
    """
    import numpy as np
    areas = geoseries.area
    if not nullify_irrelevant:
        return areas
    # geom_type is 'Polygon'/'MultiPolygon' for area-bearing geometries.
    is_polygonal = geoseries.geom_type.str.endswith('Polygon')
    return np.where(is_polygonal, areas, [np.nan] * len(geoseries))
@autocli
def main(input_data: read_stream, column_name='area', skip_transform:bool=False, nullify_irrelevant:bool=False, default_crs=None) -> write_stream:
    """Calculates areas of geometries in metres (or in CRS units if skip_transform==True), sanitizes the input: checks and transforms CRS, may set area of irrelevant geometries to null.

    Parameters
    ----------
    input_data : GeoDataFrame
        GeoDataFrame to take and write to.
    column_name : string, default 'area'
        How to call the new column with area values. Existing column will be overridden.
    skip_transform : bool, default False
        If False, geometries are converted to Pseudo-Mercator (epsg:3857) and area is in metres. If True, areas are calculated in current units.
    nullify_irrelevant : bool, default False
        If True, for geometries other than (Multi)Polygon, area value will be nan.
    default_crs : int or dict or pyproj object, optional
        If input_data will have no CRS, set it to default_crs.

    Returns
    -------
    GeoDataFrame with new column added.

    The function/script does not assume any CRS of input_data, because otherwise it will either crash from infinite coordinates or return nonsense, and the reason will be hard to find. You have to provide `default_crs` argument if you're assured it will work correctly.
    """
    # Refuse to guess a CRS: transforming unknown coordinates would produce
    # garbage or crash far from the real cause.
    if default_crs is None and input_data.crs is None and not skip_transform:
        raise ValueError('Input data has no CRS to transform from. Set input_data CRS, or provide default_crs, or set skip_transform')
    elif default_crs is not None:
        # Copy before mutating so the caller's frame keeps its original CRS.
        input_data = input_data.copy()
        input_data.crs = default_crs

    if skip_transform:
        input_data[column_name] = _nullify(input_data.geometry, nullify_irrelevant)
        return input_data

    # Pseudo-Mercator inflates area away from the equator; multiplying by
    # cos(latitude)^2 appears to compensate for that -- see utils.coslat.
    input_data[column_name] = _nullify(input_data.geometry.to_crs(3857), nullify_irrelevant) * utils.coslat(input_data.geometry) ** 2
    return input_data
| StarcoderdataPython |
4848909 | <gh_stars>0
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def add_language_relation(apps, schema_editor):
    """Backfill a main SiteLanguage from the first live LanguagePage and
    create a LanguageRelation for every page under it.

    Only runs when no SiteLanguage is marked as the main language yet.
    """
    # NOTE(review): this imports the current model code rather than using
    # apps.get_model(), so the migration depends on present-day models.
    from molo.core.models import SiteLanguage, LanguageRelation
    from wagtail.wagtailcore.models import Page
    if not (SiteLanguage.objects.filter(is_main_language=True)).exists():
        from molo.core.models import LanguagePage
        current_language = LanguagePage.objects.live().first()
        if current_language:
            main_lang = SiteLanguage.objects.create(
                locale=current_language.code)
            # Link every descendant page to the newly created main language.
            for p in Page.objects.all().descendant_of(current_language):
                LanguageRelation.objects.create(page=p, language=main_lang)
class Migration(migrations.Migration):
    """Data migration: run add_language_relation() to backfill language
    relations after the tuneme/yourwords schema is in place."""

    dependencies = [
        ('tuneme', '0001_initial'),
        ('yourwords', '0006_create_your_words_index_pages'),
    ]

    operations = [
        # Forward-only data migration (no reverse function supplied).
        migrations.RunPython(add_language_relation),
    ]
| StarcoderdataPython |
1847851 | #!/usr/bin/env python
import csv
class SequenceFileInfo:
    """Holds the input read files and output name for one hybrid-assembly sample."""

    def __init__(self, minion_reads, illumina_r1, illumina_r2, outname):
        self.minion_reads = minion_reads  # path to MinION long reads
        self.illumina_r1 = illumina_r1    # path to Illumina forward reads
        self.illumina_r2 = illumina_r2    # path to Illumina reverse reads
        self.outname = outname            # sample name used for outputs

    def __repr__(self):
        # Debug-friendly representation showing all four fields.
        return ('SequenceFileInfo(minion_reads={!r}, illumina_r1={!r}, '
                'illumina_r2={!r}, outname={!r})'.format(
                    self.minion_reads, self.illumina_r1,
                    self.illumina_r2, self.outname))
def parse_hybrid_csv(csvfile):
    """
    Parses an input csv file so that the pipeline can know where each illumina file/minion file is and what to call
    each sample.

    :param csvfile: Full path to CSV file with headers MinION, Illumina_R1, Illumina_R2, and OutName
    :return: list of SequenceFileInfo objects
    :raises ValueError: if the CSV is missing any of the required header columns.
    """
    required_headers = {'MinION', 'Illumina_R1', 'Illumina_R2', 'OutName'}
    sequence_file_info = list()
    with open(csvfile) as input_csv:
        reader = csv.DictReader(input_csv)
        # Implements the former TODO: validate headers up front so a
        # malformed sheet fails with a clear message instead of a KeyError
        # partway through parsing.
        missing = required_headers - set(reader.fieldnames or [])
        if missing:
            raise ValueError('CSV file {} is missing required header(s): {}'.format(
                csvfile, ', '.join(sorted(missing))))
        for row in reader:
            sequence_file_info.append(SequenceFileInfo(minion_reads=row['MinION'],
                                                       illumina_r1=row['Illumina_R1'],
                                                       illumina_r2=row['Illumina_R2'],
                                                       outname=row['OutName']))
    return sequence_file_info
| StarcoderdataPython |
1918901 | <filename>rtGradientBoostTest.py
## Code for regression tree gradient boosting model
## Import packages needed for analysis
import csv
import pandas as pd
import numpy as np
from matplotlib import pyplot as plt
from sklearn.model_selection import train_test_split
from sksurv.ensemble import GradientBoostingSurvivalAnalysis
from sksurv.preprocessing import OneHotEncoder
## Download data and remove columns
## Adult_dataedit is datafile used for loss = ipcwls for time to event values
## Add absolute path file
adult_data = pd.read_csv('allimputed.csv')
#adult_data = adult_data.drop(columns=['Unnamed: 0'])
adult_data = adult_data.drop(columns = ['Number'])
print('got data and removed unnecessary columns...so far so good...')

## Get x and y datasets- separate predictors to y outcome variables
X = adult_data[['BMI', 'Systolic', 'Diastolic', 'regularity', 'Chol2', 'Ethnicity', 'Gender', 'Age', 'heart_attack', 'relative_ha', 'liver_problem', 'cancer', 'stroke','days_active', 'smoking_status']]
y = adult_data[['mortstat', 'permth_int']]
mort = list(y['mortstat'])
time = list(y['permth_int'])

## Change to binary value rather than number
for n,i in enumerate(mort):
    if i == 0:
        mort[n] = False
    else:
        mort[n] = True
# (notebook residue: bare expression below has no effect in a script)
mort

## Zip lists together to get list of tuples
survival = zip(mort, time)
Y = list(survival)

## Need to turn list of tuples into structured array
## Have to tell it what type of data you have in the struct. array
## Get this from the toy data imported above
# '?' = boolean event indicator, '<f8' = little-endian float64 follow-up time,
# the structured-array layout sksurv expects for its y argument.
dt = np.dtype([('fstat', '?'),('lenfol', '<f8')])
Y = np.array(Y,dtype=dt)

## Get test and train data values and then split X data
train_vals, test_vals = train_test_split(range(len(adult_data)), test_size = 0.2, random_state=1)
x_train = X.loc[train_vals].reset_index(drop = True)
x_test = X.loc[test_vals].reset_index(drop = True)

## Get Y outcome data as test and train
y_train = []
for val in train_vals:
    y_train.append(Y[val])
y_train = np.asarray(y_train)
#print(y_train)
y_test = []
for val in test_vals:
    y_test.append(Y[val])
y_test = np.asarray(y_test)
#print(y_test)

# One-hot encode categorical predictors for the survival model.
x_train1 = OneHotEncoder().fit_transform(x_train)
x_test1 = OneHotEncoder().fit_transform(x_test)

# In[ ]:

## Instantiate the GB method
#estimator_gb = GradientBoostingSurvivalAnalysis(n_estimators = 100, learning_rate = 0.1, random_state = 0)

## Import data and removing variables
adult_data2 = pd.read_csv('adult_datatest2.csv')
#adult_data2 = adult_data2.drop(columns=['Unnamed: 0'])
adult_data2 = adult_data2.drop(columns = ['Number'])
# (notebook residue: bare expression below has no effect in a script)
adult_data2

## Get x and y datasets- separate predictors to y outcome variables
X2 = adult_data2[['BMI', 'Systolic', 'Diastolic', 'regularity', 'Chol2', 'Ethnicity', 'Gender', 'Age', 'heart_attack', 'relative_ha', 'liver_problem', 'cancer', 'stroke', 'days_active', 'smoking_status']]
y2 = adult_data2[['mortstat', 'permth_int']]
X2 = OneHotEncoder().fit_transform(X2)
mort2 = list(y2['mortstat'])
time2 = list(y2['permth_int'])

## Change to binary value rather than number
for n,i in enumerate(mort2):
    if i == 0:
        mort2[n] = False
    else:
        mort2[n] = True
# (notebook residue: bare expression below has no effect in a script)
mort2

## Zip lists together to get list of tuples
survival2 = zip(mort2, time2)
Y2 = list(survival2)

## Need to turn list of tuples into structured array
## Have to tell it what type of data you have in the struct. array
## Get this from the toy data imported above
dt2 = np.dtype([('fstat', '?'),('lenfol', '<f8')])
Y2 = np.array(Y2,dtype=dt2)

## Doing predictions and loops and writing to csv files
## Change headers for whichever datafile is being made
## Loss = ipclws for the time to event values
with open ('rttimetoevent.csv', 'w', newline = '') as outfile1:
    writer = csv.writer(outfile1)
    headers = ['index', 'timetoevent']
    first = headers
    writer.writerow(first)
    res = []
    estimator_gb = GradientBoostingSurvivalAnalysis(n_estimators = 150, learning_rate = 0.1, max_depth=1, random_state = 0, loss = 'ipcwls')
    estimator_gb.fit(x_train1, y_train)
    #get c-stat prediction
    #estimator_gb = GradientBoostingSurvivalAnalysis(n_estimators = 150, learning_rate = 0.1, max_depth=1, random_state = 0)
    #estimator_gb.fit(x_train1, y_train)
    #test_score = estimator_gb.score(x_test1, y_test)
    #print('testing score = ', test_score)
    ## Calibration values for the test set to make plots later
    #calibrateres = estimator_gb.predict_survival_function(x_test1)
    #calibrateres = pd.DataFrame(data=calibrateres)
    #calibrateres.to_csv('rtgb_calibrate.csv')
    ### Get the training score for C-statistic
    #train_score=estimator_gb.score(x_train1, y_train)
    #print('train score for rt gb is: ', train_score)
    ## Append values from risk scores to open csv code from further up
    # NOTE(review): all predictions are written as one single CSV row, so the
    # ['index', 'timetoevent'] header does not describe the data layout.
    results = estimator_gb.predict(x_test1)
    for i in results:
        res.append(i)
    writer.writerow(res)
    res = []

## Using new data to produce risk scores and append to csv
#with open ('rtgradboost_newdatariskscores.csv', 'w', newline = '') as outfile1:
# writer = csv.writer(outfile1)
# headers = ['index', 'riskscore']
# first = headers
# writer.writerow(first)
# res = []
# estimator_gb = GradientBoostingSurvivalAnalysis(n_estimators = 150, learning_rate = 0.1, max_depth=1, random_state = 0)
# estimator_gb.fit(x_train1, y_train)
# risks = estimator_gb.predict(X)
# for i in risks:
# res.append(i)
# writer.writerow(res)
# res = []

## Get the survival function values to make a plot for the new data
#estimator_gb = GradientBoostingSurvivalAnalysis(n_estimators = 150, learning_rate = 0.1, max_depth=1, random_state = 0)
#survfuncs = estimator_gb.predict_survival_function(X2)

## Loop through survival functions and add to a graph
#for p in survfuncs:
#plt.step(p.x, p(p.x), where = 'post')
#print(1)
#plt.ylim(0,1)
#plt.ylabel('Survival probability P(T>t) ')
#plt.xlabel('Time (months)')
#plt.title('RTGB model estimate of survival for 6 test individuals')
#plt.legend()
#plt.grid(True)
#plt.show()
#plt.savefig('rtgradboosttest.png')
| StarcoderdataPython |
11263959 | <filename>__init__.py
from __future__ import annotations
import re
from typing import Optional
from consts import Binyan, Present, Pronoun, Paradigm
# Every consonant symbol used by this module's transliteration scheme.
CONSONANTS = tuple("QbvgGdDhwzj7ykxlmnsRpfZqrcStT")
# Guttural letters: used below to block gemination (add_dagesh_forte) and to
# substitute a hataf vowel for a plain schwa (add_schwa).
GRONIYOT = tuple("hjQR")
# Gutturals plus resh, which also resists gemination.
GRONIYOT_RESH = GRONIYOT + ("r",)

# Future-tense personal prefix letter for each pronoun.
PRONOUN_FUTURE_PREFIX = {
    Pronoun.ANI: "Q",
    Pronoun.ATA: "T",
    Pronoun.AT: "T",
    Pronoun.HU: "y",
    Pronoun.HI: "T",
    Pronoun.ANACNU: "n",
    Pronoun.ATEM: "T",
    Pronoun.ATEN: "T",
    Pronoun.HEM: "y",
    Pronoun.HEN: "T",
}

# Fricative -> plosive (dagesh lene) consonant pairs...
DAGESH_LENE = {
    "x": "k",
    "g": "G",
    "t": "T",
    "d": "D",
    "v": "b",
    "f": "p",
}
# ...and the reverse (plosive -> fricative) mapping.
DAGESH_LENE_REV = {
    "k": "x",
    "G": "g",
    "T": "t",
    "D": "d",
    "b": "v",
    "p": "f",
}

# Past-tense personal suffix for each pronoun ("!" appears to mark the
# stressed syllable -- TODO confirm against the module's scheme docs).
PRONOUN_PAST_SUFFIX = {
    Pronoun.ANI: "Ti",
    Pronoun.ATA: "Ta",
    Pronoun.AT: "T",
    Pronoun.HU: "",
    Pronoun.HI: "a!H",
    Pronoun.ANACNU: "nu",
    Pronoun.ATEM: "TE!m",
    Pronoun.ATEN: "TE!n",
    Pronoun.HEM: "u!",
    Pronoun.HEN: "u!",
}

# Present-tense (participle) gender/number suffixes; masculine singular is
# the unmarked base form and so has no entry.
PRESENT_SUFFIX = {
    Present.FEMALE_SINGULAR: "a!H",
    Present.MALE_PLURAL: "i!m",
    Present.FEMALE_PLURAL: "W!t",
}
def add_dagesh_lene(c: str) -> str:
    """Map a fricative consonant to its plosive (dagesh lene) counterpart,
    leaving consonants without one unchanged."""
    return DAGESH_LENE[c] if c in DAGESH_LENE else c
def remove_dagesh_lene(c: str) -> str:
    """Map a plosive consonant back to its fricative counterpart,
    leaving consonants without one unchanged."""
    return DAGESH_LENE_REV[c] if c in DAGESH_LENE_REV else c
def add_dagesh_forte(c: str) -> str:
    """Geminate a consonant ("_" prefix plus its plosive form); gutturals
    and resh cannot be geminated and are returned unchanged."""
    return c if c in GRONIYOT_RESH else "_" + add_dagesh_lene(c)
def add_schwa(c: str, hataf: str = "á") -> str:
    """Append a schwa marker ("3") to a single consonant; gutturals take
    the given hataf vowel instead."""
    assert len(c) == 1
    vowel = hataf if c in GRONIYOT else "3"
    return c + vowel
def patah_gnuva(c: str) -> str:
    """Return the furtive-patah marker for word-final ayin/he/het
    ("R", "h", "j"); empty string for any other consonant."""
    if c in "Rhj":
        return "Á"
    return ""
def fixup(word: str) -> str:
    """Insert a schwa marker ("3") between each pair of identical
    adjacent characters (non-overlapping, left to right)."""
    return re.sub(r"(.)\1", r"\g<1>3\g<1>", word)
def inflect_future(
    base: str, binyan: Binyan, pronoun: Pronoun, paradigm: Paradigm
) -> str:
    """Inflect a future-tense base form for the given pronoun.

    ``base`` is the 3rd-person masculine singular future form in this
    module's transliteration scheme ("!" appears to mark stress, "_" a
    dagesh, "Á" a furtive patah -- TODO confirm); it is returned unchanged
    for Pronoun.HU.  Each binyan gets its own arm; ``paradigm.is_kfulim()``
    selects the geminate-root special cases.  AssertionError is raised for
    inputs that do not match the expected shape.
    """
    if pronoun == Pronoun.HU:
        return base
    if binyan in (Binyan.HITPAEL, Binyan.PIEL, Binyan.PUAL):
        # These binyanim begin "yI" (hitpael) or "y3" (piel/pual).
        assert (
            base.startswith("yI")
            and binyan == Binyan.HITPAEL
            or base.startswith("y3")
            and binyan != Binyan.HITPAEL
        )
        base = base[1:]
        # base_at: stem variant used before vowel-initial suffixes (-i, -u).
        base_at = re.sub(r"[Á]", "", base)
        if base_at.endswith("E!H"):
            base_at = base_at[:-3]
        else:
            assert base_at.endswith(CONSONANTS)
            if re.search("á..!", base_at):
                base_at = re.sub("á(.).!", r"A\1", base_at)
            else:
                # Reduce the stressed vowel to a schwa/hataf.
                base_at = base_at[:-4] + add_schwa(base_at[-4]) + base_at[-1]
        if pronoun == Pronoun.ANI:
            return "Q" + ("E" if base.startswith("I") else "á") + base[1:]
        if pronoun in (Pronoun.ATA, Pronoun.HI):
            return "T" + base
        if pronoun == Pronoun.AT:
            return "T" + base_at + "i!"
        if pronoun == Pronoun.ANACNU:
            return "n" + base
        if pronoun == Pronoun.ATEM:
            return "T" + base_at + "u!"
        if pronoun in (Pronoun.ATEN, Pronoun.HEN):
            base = "T" + base.replace("Á", "")
            if base.endswith("n"):
                return base[:-1] + "_naH"
            elif base.endswith("E!H"):
                return base[:-3] + "E!YnaH"
            elif re.search(r"e![jhR]$", base):
                return base.replace("e!", "A!") + "naH"
            elif re.search(r".!Q$", base):
                return re.sub(".!", "E!", base) + "naH"
            return base + ("3" if base[-3] in "aiueoWY" else "") + "naH"
        if pronoun == Pronoun.HEM:
            return "y" + base_at + "u!"
    elif binyan == Binyan.HIFIL:
        base = PRONOUN_FUTURE_PREFIX[pronoun] + base[1:]
        # base_at: stem variant used before vowel-initial suffixes (-i, -u).
        base_at = base.replace("Á", "")
        if base_at.endswith("E!H"):
            base_at = base_at[:-3]
        elif (
            paradigm.is_kfulim()
            and base_at[-1] not in "jhQRr"
            and not re.search(
                r"(.).!\1$",
                re.sub("[fvxdgt]$", lambda m: add_dagesh_lene(m[0]), base_at),
            )
        ):
            # Geminate root: mark the doubled final consonant with a dagesh.
            base_at = base_at[:-1] + "_" + add_dagesh_lene(base_at[-1])
        else:
            assert base_at.endswith(CONSONANTS)
        if pronoun in (
            Pronoun.ANI,
            Pronoun.ATA,
            Pronoun.HU,
            Pronoun.HI,
            Pronoun.ANACNU,
        ):
            return base
        if pronoun == Pronoun.AT:
            return base_at + "i" + ("!" if base.endswith("E!H") else "")
        if pronoun in (Pronoun.ATEM, Pronoun.HEM):
            return base_at + "u" + ("!" if base.endswith("E!H") else "")
        if pronoun in (Pronoun.ATEN, Pronoun.HEN):
            base = base.replace("Á", "")
            if base.endswith("E!H"):
                return base[:-3] + "E!YnaH"
            base = base.replace("i!", "e!")
            if base.endswith("n"):
                return base[:-1] + "_naH"
            if re.search(r"e![jhR]$", base):
                return base.replace("e!", "A!") + "naH"
            if re.search(r"e!Q$", base):
                return base.replace("e!", "E!") + "naH"
            return (
                base.replace("Á", "") + ("3" if base[-3] in "aiueoWY" else "") + "naH"
            )
    elif binyan == Binyan.NIFAL:
        assert base[:2] in ("ye", "yI")
        if pronoun == Pronoun.ANI:
            if re.fullmatch(r"yI_.A!.", base):
                return "Q" + base[1:]
            return "Q" + ("E" if base[1] == "I" else "e") + base[2:]
        base = base[1:]
        base = PRONOUN_FUTURE_PREFIX[pronoun] + base
        # base_at: stem variant used before vowel-initial suffixes; the flag
        # records whether the suffix (rather than the stem) carries stress.
        base_at = base.replace("Á", "")
        stressed_suffix_at = True
        if base_at.endswith("E!H"):
            base_at = base_at[:-3]
        elif paradigm.is_kfulim() and not re.search(r"(.).!\1", base_at):
            if base_at[-1] in "r":
                # Resh cannot geminate; lengthen the vowel instead.
                base_at = base_at.replace("A!", "a!")
            else:
                base_at = base_at[:-1] + add_dagesh_forte(base_at[-1])
            stressed_suffix_at = False
        else:
            assert base_at.endswith(CONSONANTS)
            base_at = re.sub("..!", add_schwa(base_at[-4]), base_at)
        if pronoun in (Pronoun.ATA, Pronoun.HI, Pronoun.ANACNU):
            return base
        if pronoun == Pronoun.AT:
            return base_at + "i" + ("!" if stressed_suffix_at else "")
        if pronoun in (Pronoun.ATEM, Pronoun.HEM):
            return base_at + "u" + ("!" if stressed_suffix_at else "")
        if pronoun in (Pronoun.ATEN, Pronoun.HEN):
            if base.endswith("E!H"):
                return base[:-3] + "E!YnaH"
            if base.endswith("n"):
                return base[:-1] + "_naH"
            if re.search(r".!Q$", base):
                return re.sub(".!", "E!", base) + "naH"
            return (
                base.replace("Á", "") + ("3" if base[-3] in "aiueoWY" else "") + "naH"
            )
    elif binyan == Binyan.HUFAL:
        assert base[:2] in ("yU", "yu")
        base = PRONOUN_FUTURE_PREFIX[pronoun] + base[1:]
        # base_at / stressed_suffix_at: see the NIFAL arm above.
        base_at = base.replace("Á", "")
        stressed_suffix_at = True
        if base_at.endswith("E!H"):
            base_at = base_at[:-3]
        elif paradigm.is_kfulim() and len(base_at) == 6 and base_at[-4] == "j":
            base_at = base_at[:-1] + add_dagesh_forte(base_at[-1])
            stressed_suffix_at = False
        else:
            assert base_at.endswith(CONSONANTS)
            base_at = re.sub(".[aA]!", add_schwa(base_at[-4]), base_at)
        if pronoun in (Pronoun.ANI, Pronoun.ATA, Pronoun.HI, Pronoun.ANACNU):
            return base
        if pronoun == Pronoun.AT:
            return base_at + "i" + ("!" if stressed_suffix_at else "")
        if pronoun in (Pronoun.ATEM, Pronoun.HEM):
            return base_at + "u" + ("!" if stressed_suffix_at else "")
        if pronoun in (Pronoun.ATEN, Pronoun.HEN):
            if base.endswith("E!H"):
                return base[:-3] + "E!YnaH"
            if base.endswith("n"):
                return base[:-1] + "_naH"
            if re.search(r".!Q$", base):
                return re.sub(".!", "E!", base) + "naH"
            return (
                base.replace("Á", "") + ("3" if base[-3] in "aiueoWY" else "") + "naH"
            )
    elif binyan == Binyan.PAAL:
        base = base[1:]
        if pronoun == Pronoun.ANI:
            if paradigm in (Paradigm.PAAL_1, Paradigm.PAAL_4) and re.fullmatch(
                r"i.(A!.|u!.Á?)", base
            ):
                return "Q" + base
            if base[:2] == "oQ":
                return "Qo" + base[2:]
            if base[0].islower():
                return "Q" + ("e" if base[0] in "ie" else "a") + base[1:]
            if base[1] in GRONIYOT:
                return "QE" + base[1:].replace("á", "é")
            return "Q" + ("E" if base[0] in "IE" else "A") + base[1:]
        base = PRONOUN_FUTURE_PREFIX[pronoun] + base
        # base_at: stem variant used before vowel-initial suffixes (-i, -u).
        base_at = base.replace("Á", "")
        if base_at.endswith("E!H"):
            base_at = base_at[:-3]
        # FIXME
        elif paradigm.is_kfulim() and len(base_at) == 6:
            base_at = base_at[:-1] + add_dagesh_forte(base_at[-1])
        else:
            assert base_at.endswith(CONSONANTS), (base_at, base)
            if re.fullmatch(r"....!.", base_at) and not (
                paradigm in (Paradigm.PAAL_1, Paradigm.PAAL_4)
                and re.fullmatch(r"i.(A!.|u!.Á?)", base[1:])
                or paradigm == Paradigm.PAAL_2
                and re.fullmatch(rf"e.(e!.|A![{GRONIYOT}])", base[1:])
            ):
                pass  # do nothing
            elif "á" in base_at:  # FIXME
                base_at = re.sub(f".!", "", base_at.replace("á", "A"))
            elif "é" in base_at:  # FIXME
                base_at = re.sub(f".!", "", base_at.replace("é", "E"))
            else:
                base_at = re.sub(f"..!", add_schwa(base_at[-4]), base_at)
        if pronoun in (Pronoun.ATA, Pronoun.HI, Pronoun.ANACNU):
            return base
        if pronoun == Pronoun.AT:
            return base_at + "i" + ("" if "!" in base_at else "!")
        if pronoun in (Pronoun.ATEM, Pronoun.HEM):
            return base_at + "u" + ("" if "!" in base_at else "!")
        if pronoun in (Pronoun.ATEN, Pronoun.HEN):
            base = base.replace("Á", "")
            if base.endswith("E!H"):
                return base[:-3] + "E!YnaH"
            if re.search(r".!Q$", base):
                return (re.sub(".!", "E!", base) + "naH").replace("nn", "_n")
            if "i!" in base:
                return re.sub(
                    "n3?n",
                    "_n",
                    base.replace("i!", "e!")
                    + ("3" if base[-3] in "aiueoWY" else "")
                    + "naH",
                )
            if "u!" in base:
                return re.sub(
                    "n3?n",
                    "_n",
                    base.replace("u!", "o!")
                    + ("3" if base[-3] in "aiueoWY" else "")
                    + "naH",
                )
            return re.sub(
                "n3?n", "_n", base + ("3" if base[-3] in "aiueoWY" else "") + "naH"
            )
    # No arm matched (or a pronoun fell through): the inputs are outside the
    # supported shape.
    assert False, (base, binyan, pronoun)
def inflect_past(
    base: str, binyan: Binyan, pronoun: Pronoun, paradigm: Paradigm
) -> str:
    """Inflect a past-tense base form for the given pronoun.

    Parameters
    ----------
    base : str
        Transliterated 3rd-person masculine singular past form ("HU" form).
        "!" appears to mark the stressed vowel and "Á" an auxiliary vowel
        marker -- TODO confirm against the module's transliteration scheme.
    binyan : Binyan
        Verb template (binyan) of the base form.
    pronoun : Pronoun
        Target person/number/gender.
    paradigm : Paradigm
        Weak-root paradigm classification used to pick special branches.

    Returns
    -------
    str
        The inflected, transliterated past-tense form.
    """
    # The base itself *is* the 3rd masc. sg. form.
    if pronoun == Pronoun.HU:
        return base
    # exception
    # Special-cased irregular base "natA!n": its final letter fuses with
    # t-initial suffixes ("_" presumably marks that fusion -- TODO confirm).
    if base == "natA!n":
        if pronoun in (Pronoun.ANI, Pronoun.ATA, Pronoun.AT):
            return base[:-1] + "_" + PRONOUN_PAST_SUFFIX[pronoun]
        if pronoun in (Pronoun.ATEM, Pronoun.ATEN):
            return "n3tA_" + PRONOUN_PAST_SUFFIX[pronoun]
    base = base.replace("Á", "")
    # Bases ending in "a!H" swap in a different linking sequence before any
    # suffix; the choice depends on the binyan.
    if base.endswith("a!H"):
        if binyan in (
            Binyan.NIFAL,
            Binyan.PUAL,
            Binyan.HITPAEL,
            Binyan.HUFAL,
            Binyan.HIFIL,
        ):
            base = base.replace("a!H", "e!Y")
        else:
            base = base.replace("a!H", "i!")
    # --- 1st/2nd person singular + 1st plural (suffixes keep the stress) ---
    if pronoun in (Pronoun.ANI, Pronoun.ATA, Pronoun.AT, Pronoun.ANACNU):
        if (
            binyan == Binyan.PAAL
            and paradigm.is_kfulim()
            and re.fullmatch(r"..!.", base)
        ):
            # Geminate (kfulim) PAAL: insert "W" before the suffix; the last
            # radical is doubled unless it is a guttural/resh.
            if base[-1] in GRONIYOT_RESH:
                return (
                    re.sub(".!", "a" if base[-1] not in "j" else "A", base)
                    + "W"
                    + ("!" if "!" not in PRONOUN_PAST_SUFFIX[pronoun] else "")
                    + PRONOUN_PAST_SUFFIX[pronoun].replace("T", "t")
                )
            return (
                base.replace("!", "")[:-1]
                + "_"
                + add_dagesh_lene(base[-1])
                + "W"
                + ("!" if "!" not in PRONOUN_PAST_SUFFIX[pronoun] else "")
                + PRONOUN_PAST_SUFFIX[pronoun].replace("T", "t")
            )
        if (
            binyan == Binyan.NIFAL
            and paradigm.is_kfulim()
            and re.fullmatch(r"na.A!.", base)
        ):
            return (
                "n3"
                + base[2]
                + "A"
                + add_dagesh_forte(base[-1])
                + "W"
                + ("!" if "!" not in PRONOUN_PAST_SUFFIX[pronoun] else "")
                + PRONOUN_PAST_SUFFIX[pronoun].replace("T", "t")
            )
        # Vowel-final bases attach the suffix directly (with "T" softened).
        if re.search(r"[aiueoAIUEOW]!?Y?$", base):
            res = base + PRONOUN_PAST_SUFFIX[pronoun].replace("T", "t")
        elif base.endswith("Q") and len(base) != 3 and binyan != Binyan.PAAL:
            res = re.sub(r"[aieA]!", "e!", base) + PRONOUN_PAST_SUFFIX[pronoun].replace(
                "T", "t"
            )
        elif base.endswith("Q") and binyan == Binyan.PAAL and base[-3] in "ae":
            res = base + PRONOUN_PAST_SUFFIX[pronoun].replace("T", "t")
        else:
            res = re.sub(r"[aieA]!", "A!", base) + PRONOUN_PAST_SUFFIX[pronoun]
        # Collapse doubled letters at the base/suffix seam.
        res = res.replace("nn", "_n").replace("tT", "_T")
        return res
    # --- 2nd person plural (suffixes attract the stress) ---
    if pronoun in (Pronoun.ATEM, Pronoun.ATEN):
        if (
            binyan == Binyan.PAAL
            and paradigm.is_kfulim()
            and re.fullmatch(r"..!.", base)
        ):
            if base[-1] in GRONIYOT_RESH:
                return (
                    re.sub(".!", "a" if base[-1] not in "j" else "A", base)
                    + "W"
                    + ("!" if "!" not in PRONOUN_PAST_SUFFIX[pronoun] else "")
                    + PRONOUN_PAST_SUFFIX[pronoun].replace("T", "t")
                )
            return (
                base.replace("!", "")[:-1]
                + add_dagesh_forte(base[-1])
                + "W"
                + PRONOUN_PAST_SUFFIX[pronoun].replace("T", "t")
            )
        # Same branch structure as above, but the base loses its stress mark
        # because the suffix is stressed.
        if re.search(r"[aiueoAIUEOW]!?Y?$", base):
            res = base.replace("!", "") + PRONOUN_PAST_SUFFIX[pronoun].replace("T", "t")
        elif base.endswith("Q") and len(base) != 3 and binyan != Binyan.PAAL:
            res = re.sub(r"[aieA]!", "e", base) + PRONOUN_PAST_SUFFIX[pronoun].replace(
                "T", "t"
            )
        elif base.endswith("Q") and binyan == Binyan.PAAL and base[-3] in "ae":
            res = base.replace("!", "") + PRONOUN_PAST_SUFFIX[pronoun].replace("T", "t")
        else:
            res = re.sub(r"[aieA]!", "A", base) + PRONOUN_PAST_SUFFIX[pronoun]
        # PAAL long stems reduce their first vowel to schwa.
        if binyan == Binyan.PAAL and len(res) > 3 + len(PRONOUN_PAST_SUFFIX[pronoun]):
            res = add_schwa(res[0]) + res[2:]
        elif binyan == Binyan.HIFIL and re.fullmatch(r"he[QRjh][ie]!.", base):
            res = "hA" + base[2] + "A" + base[-1] + PRONOUN_PAST_SUFFIX[pronoun]
        elif binyan == Binyan.HIFIL and re.match(r"he.A..{4}", res):
            res = res[0] + "á" + res[2:]
        res = res.replace("tT", "_T")
        return res
    # --- 3rd person feminine singular and 3rd plural ---
    assert pronoun in (Pronoun.HI, Pronoun.HEM, Pronoun.HEN)
    if re.search(r"[aiueoAIUEOW]!?Y?$", base):
        if pronoun == Pronoun.HI:
            # Strip the final vowel and rebuild the HI ending ("...ta!H").
            res = re.sub(r"[aiueoAIUEOW]!?Y?$", "", base)
            if re.search(r"é.$", res):
                return re.sub(r"é(?=.$)", "E", res) + "ta!H"
            if re.search(r"á.$", res):
                return re.sub(r"á(?=.$)", "A", res) + "ta!H"
            return res[:-1] + add_schwa(res[-1]) + "ta!H"
        return re.sub(r"[aiueoAIUEOW]!?Y?$", "", base) + PRONOUN_PAST_SUFFIX[pronoun]
    if (
        paradigm.is_kfulim()
        and binyan in (Binyan.PAAL, Binyan.NIFAL, Binyan.HIFIL)
        and not (
            add_dagesh_lene(base[-1]) == add_dagesh_lene(base[-4])
            and re.search(r"..!.$", base)
        )
    ):  # not re.search(r"(.).!\1$", re.sub("(.).![fvxdgt]$", lambda m: add_dagesh_lene(m[0]), base)):
        return (
            base[:-1]
            + add_dagesh_forte(base[-1])
            + PRONOUN_PAST_SUFFIX[pronoun].replace("!", "")
        )
    if binyan == Binyan.HIFIL:
        return base + PRONOUN_PAST_SUFFIX[pronoun].replace("!", "")
    else:
        if binyan == Binyan.PAAL and len(base) == 4:
            return base + PRONOUN_PAST_SUFFIX[pronoun].replace("!", "")
        if re.search(r"é..!", base):
            return re.sub(r"é(.).!", r"E\1", base) + PRONOUN_PAST_SUFFIX[pronoun]
        if re.search("á..!", base):
            return re.sub(r"á(.).!", r"A\1", base) + PRONOUN_PAST_SUFFIX[pronoun]
        if binyan == Binyan.NIFAL and re.search("W!.$", base):
            return base + PRONOUN_PAST_SUFFIX[pronoun].replace("!", "")
        return (
            re.sub(r".[Aae]!", add_schwa(base[-4]), base) + PRONOUN_PAST_SUFFIX[pronoun]
        )
def inflect_present(
    base: str, binyan: Binyan, param: Present, paradigm: Paradigm
) -> str:
    """Inflect a present-tense base form for gender/number.

    Parameters
    ----------
    base : str
        Transliterated masculine-singular present form; "!" appears to mark
        the stressed vowel -- TODO confirm transliteration scheme.
    binyan : Binyan
        Verb template of the base form.
    param : Present
        Target gender/number combination.
    paradigm : Paradigm
        Weak-root paradigm classification.

    Returns
    -------
    str
        The inflected form; may be "" for a special-cased base (see below).
    """
    # The base itself is the masculine singular form.
    if param == Present.MALE_SINGULAR:
        return base
    # Branch 1: HITPAEL/PIEL, plus PAAL bases shaped like them.
    if (
        binyan in (Binyan.HITPAEL, Binyan.PIEL)
        or binyan == Binyan.PAAL
        and re.fullmatch(".W.e!.Á?|.[aW].E!H", base)
    ):
        if base.endswith("E!H"):
            return base.replace("E!H", PRESENT_SUFFIX[param])
        if param == Present.FEMALE_SINGULAR:
            if base.endswith("e!Q") and (
                paradigm == Paradigm.PAAL_5 or binyan in (Binyan.HITPAEL, Binyan.PIEL)
            ):
                return base + "t"
            # "E!.Et" pattern, or "A!.At" when the last radical is guttural.
            if re.search("e!.Á?$", base):
                return re.sub(
                    r"e!(.)Á?$",
                    lambda m: ("A!" + m[1] + "At")
                    if m[1] in GRONIYOT
                    else ("E!" + m[1] + "Et"),
                    base,
                )
        if param in (Present.MALE_PLURAL, Present.FEMALE_PLURAL):
            if re.search("e!.Á?$", base):
                if re.search(r"á.e!.Á?$", base):
                    return (
                        re.sub(r"á(.)e!", r"A\1", base.replace("Á", ""))
                        + PRESENT_SUFFIX[param]
                    )
                # The stem vowel reduces to schwa before the plural suffix.
                return (
                    re.sub(r"(.)e!(.)Á?$", lambda m: add_schwa(m[1]) + m[2], base)
                    + PRESENT_SUFFIX[param]
                )
    # Branch 2: passive/NIFAL templates built on "a!".
    if binyan in (Binyan.PUAL, Binyan.HUFAL, Binyan.NIFAL):
        if base.endswith("E!H"):
            if (
                binyan in (Binyan.NIFAL, Binyan.HUFAL)
                and param == Present.FEMALE_SINGULAR
            ):
                return base.replace("E!H", "e!Yt")
            return base.replace("E!H", PRESENT_SUFFIX[param])
        if param == Present.FEMALE_SINGULAR:
            if base.endswith("a!Q"):
                return base[:-3] + "e!Qt"
            if re.search("a!.$", base):
                return re.sub(
                    r"a!(.)",
                    lambda m: ("A!" + m[1] + "At")
                    if m[1] in GRONIYOT
                    else ("E!" + m[1] + "Et"),
                    base,
                )
        if param in (Present.MALE_PLURAL, Present.FEMALE_PLURAL):
            if paradigm == Paradigm.KFULIM_2:
                # Geminate roots double the last radical in the plural.
                return (
                    base[:-3] + "A" + add_dagesh_forte(base[-1]) + PRESENT_SUFFIX[param]
                )
            if re.search("a!.$", base):
                return base.replace("a!", "a") + PRESENT_SUFFIX[param]
    # Branch 3: HIFIL.
    if binyan == Binyan.HIFIL:
        if re.fullmatch("me.e![^Rr]Á?", base):
            return (
                "m3" + base[2] + "I" + add_dagesh_forte(base[5]) + PRESENT_SUFFIX[param]
            )
        if re.match("me[^Y]", base):
            base = "m3" + base[2:]
        if re.search("[" + "".join(CONSONANTS) + "]Á?$", base):
            return base.replace("!", "").replace("Á", "") + PRESENT_SUFFIX[param]
        if base.endswith("E!H"):
            return base[:-3] + PRESENT_SUFFIX[param]
    # Branch 4: remaining PAAL shapes.
    if binyan == Binyan.PAAL:
        if re.fullmatch(".a!.", base):
            return base.replace("!", "") + PRESENT_SUFFIX[param]
        if re.fullmatch(".A!.", base):
            if base.endswith(GRONIYOT_RESH):
                base = base.replace("A!", "a")
            else:
                base = base[:-2] + add_dagesh_forte(base[-1])
            return base + PRESENT_SUFFIX[param]
        if re.fullmatch(".a.e!.Á?", base):
            return (
                add_schwa(base[0])
                + base[2:].replace("!", "").replace("Á", "")
                + PRESENT_SUFFIX[param]
            )
        if base in ("nejA!n", "nejA!m"):
            return ""  # FIXME
    # assert base == "me!t", (base, binyan, param, paradigm)
    # Fallback: drop the stress mark and append the suffix.
    return base.replace("!", "") + PRESENT_SUFFIX[param]
def future2infinitive(base: str, binyan: Binyan, paradigm: Paradigm) -> Optional[str]:
    """Derive the infinitive ("shem poal") from a future-tense base form.

    Parameters
    ----------
    base : str
        Transliterated 3rd masc. sg. future form (starts with a "y" prefix).
    binyan : Binyan
        Verb template; PUAL/HUFAL have no infinitive and are rejected.
    paradigm : Paradigm
        Weak-root paradigm classification.

    Returns
    -------
    Optional[str]
        The transliterated infinitive (with "l-" prefix).
    """
    assert binyan not in (Binyan.PUAL, Binyan.HUFAL)
    # exceptions:
    # Hard-coded irregular infinitives.
    if base == "yoQmA!r":
        return "lWmA!r"
    if base == "yI_Te!n":
        return "late!t"
    if base == "yirA!c":
        return "larE!cEt"
    # Pe-alef roots: rebuild with "lEQé" and an "o!" stem vowel.
    if paradigm == Paradigm.PE_ALEF:
        return "lEQé" + re.sub(
            "[^iu]!(.)Á?$", lambda m: "o!" + m[1] + patah_gnuva(m[1]), base[3:]
        )
    # exceptions:
    if base == "yehanE!H":
        assert binyan == Binyan.NIFAL and paradigm == Paradigm.NONE
        return "lehanW!t"
    if base == "yIckA!v":
        return "lIckA!v"
    # Prefixed binyanim keep an "h" after the "l-" prefix.
    if binyan in (Binyan.HIFIL, Binyan.NIFAL, Binyan.HITPAEL):
        return "l3h" + base[1:].replace("E!H", "W!t")
    if binyan == Binyan.PIEL:
        return "l" + base[1:].replace("E!H", "W!t")
    if binyan == Binyan.PAAL:
        # Guttural-initial roots reshape their prefix vowel first.
        if re.match(r"yE[hjR]é", base) and paradigm in (
            Paradigm.PAAL_4,
            Paradigm.PAAL_1,
        ):
            base = "yA" + base[2] + "á" + base[4:]
        elif base.startswith("yI_") and paradigm in (
            Paradigm.PAAL_1,
            Paradigm.PAAL_5,
            Paradigm.PAAL_2,
        ):
            # exceptions
            if base == "yI_po!l":
                return "lI_po!l"
            if base == "yI_qA!j":
                return "laqA!jAt"
            if base == "yI_Sa!Q":
                return "laSe!Qt"
            # The assimilated first radical ("_") is restored as "n".
            base = "lIn" + base[3:]
        if base.endswith("E!H"):
            return "l" + base[1:].replace("E!H", "W!t")
        if paradigm == Paradigm.PAAL_2 and re.fullmatch(
            rf"e.(e!.|A![{GRONIYOT}])", base[1:]
        ):
            x = base[-1]
            v = "A" if x in GRONIYOT else "E"
            return "la" + base[2] + v + "!" + x + v + "t"
        if paradigm == Paradigm.KFULIM and re.fullmatch("ye.A!.", base):
            return "la" + base[2] + "o!" + base[-1]
        if paradigm == Paradigm.PAAL_3 and base.startswith("yEj"):
            base = "yA" + base[2:]
    # Default: swap the "y" prefix for "l" and use an "o!" stem vowel.
    return "l" + re.sub(
        "[^iu]!(.)Á?$", lambda m: "o!" + m[1] + patah_gnuva(m[1]), base[1:]
    )
def past2future(base: str, binyan: Binyan, paradigm: Paradigm) -> Optional[str]:
    """Derive the 3rd masc. sg. future form from the past base form.

    Parameters
    ----------
    base : str
        Transliterated 3rd masc. sg. past form.
    binyan : Binyan
        Verb template of the base.
    paradigm : Paradigm
        Weak-root paradigm classification.

    Returns
    -------
    Optional[str]
        The transliterated future form (with a "y" prefix), or None when the
        NIFAL fallback cannot handle the base (see FIXME below).
    """
    # exceptions
    # Hard-coded irregular futures.
    if base == "me!t":
        return "yamu!t"
    if base == "natA!n":
        return "yI_Te!n"
    if base == "haya!H":
        return "yIhyE!H"
    if base == "yaZa!Q":
        return "yeZe!Q"
    if binyan == Binyan.HITPAEL:
        return "y" + base[1:].replace("a!H", "E!H")
    if binyan in (Binyan.PIEL, Binyan.PUAL):
        # Replace the prefix consonant with "y3" and adjust the first vowel.
        base = (
            "y3"
            + remove_dagesh_lene(base[0])
            + re.sub("^[Ie]", "a" if re.search("e[rQ].!", base) else "A", base[1:])
        )
        return base.replace("a!H", "E!H")
    if binyan == Binyan.HIFIL:
        base = base.replace("a!H", "E!H")
        if re.match("^hE.é", base):
            return "y" + "A" + base[2] + "á" + base[4:]
        # Choose the prefix vowel from the shape of the past stem.
        vowel = "A"
        if base[1:3] == "eY":
            vowel = "e"
        elif base[1] in "aiueo":
            vowel = "a"
        elif base[1] == "W":
            vowel = "W"
        return "y" + vowel + base[2:]
    if binyan == Binyan.HUFAL:
        return "y" + base[1:].replace("a!H", "E!H")
    if binyan == Binyan.NIFAL:
        # Shapes with an assimilated or vav first radical.
        if m := re.fullmatch(r"n(?:I_?|W)(.)A!(.)", base):
            return (
                "yI_"
                + ("w" if base[1] == "W" else "n")
                + "a"
                + remove_dagesh_lene(m[1])
                + ("A" if m[2] in "Rjh" else "e")
                + "!"
                + m[2]
            )
        if m := re.fullmatch(r"n(?:I_?|W)(.)a!H", base):
            return (
                "yI_"
                + ("w" if base[1] == "W" else "n")
                + "a"
                + remove_dagesh_lene(m[1])
                + "E!H"
            )
        if m := re.fullmatch(r"n(?:I_?|W)(.)a!Q", base):
            return (
                "yI_"
                + ("w" if base[1] == "W" else "n")
                + "a"
                + remove_dagesh_lene(m[1])
                + "e!Q"
            )
        if m := re.fullmatch(r"na(.)A!.", base):
            return "yI_" + add_dagesh_lene(m[1]) + base[3:]
        if m := re.fullmatch(r"ne([QRhjr])A!(.)", base):
            return "ye" + m[1] + "A!" + m[2]
        if m := re.fullmatch(r"na(.)W!.", base):
            return "yI_" + add_dagesh_lene(m[1]) + base[3:]
        # Generic NIFAL fallback: strip the "n" prefix and rebuild.
        base = base[2:].replace("!", "")
        # FIXME
        if not base.endswith("aH") and len(base) < 4:
            return None
        if re.match("^.é", base):
            base = re.sub(r"(?<=^.)é", "", base)
        return (
            "y"
            + ("e" if base[0] in GRONIYOT_RESH else "I_")
            + add_dagesh_lene(base[0])
            + "a"
            + remove_dagesh_lene(base[1])
            + (
                "E!H"
                if base.endswith("aH")
                else ("A" if base[3] in "hjR" else "e") + "!" + base[3]
            )
        )
    if binyan == Binyan.PAAL:
        # exceptions:
        if base == "halA!x":
            return "yele!x"
        if base == "laqA!j":
            return "yI_qA!j"
        # Regular triliteral past shape: pick prefix/stem vowels from the
        # root's guttural/nun/yod pattern and the paradigm.
        if m := re.fullmatch(rf"(.)a(.)[Aae]!([{CONSONANTS}])", base):
            vowel = "o"
            if base.endswith("a!Q"):
                vowel = "a"
            if Paradigm.PE_ALEF == paradigm:
                return "yoQ" + m[2] + "A!" + m[3]
            if m[1] in GRONIYOT:
                if paradigm == Paradigm.PAAL_4:
                    return "yE" + m[1] + "é" + m[2] + "A!" + m[3]
                elif paradigm == Paradigm.PAAL_3:
                    return (
                        "yE"
                        + m[1]
                        + ("é" + m[2] if m[1] in "QRh" else add_dagesh_lene(m[2]))
                        + "A!"
                        + m[3]
                    )
            if paradigm in (Paradigm.PAAL_1, Paradigm.PAAL_5, Paradigm.PAAL_4):
                if m[1] in GRONIYOT:
                    return (
                        "yE"
                        + m[1]
                        + ("é" if m[1] in "QRh" else "")
                        + (m[2] if m[1] in "QRh" else add_dagesh_lene(m[2]))
                        + vowel
                        + "!"
                        + m[3]
                    )
                if m[1] == "n":
                    # nun assimilates into the second radical when possible.
                    return (
                        "yI"
                        + (
                            add_dagesh_forte(m[2])
                            if m[2] not in GRONIYOT_RESH
                            else "n" + m[2]
                        )
                        + (
                            "a"
                            if m[3] == "Q"
                            else ("A" if m[2] in "hjR" or m[3] in "QRjh" else "o")
                        )
                        # + ("A" if m[2] in "hjR" or m[3] in "QRjh" else "o")
                        + "!"
                        + m[3]
                    )
                if m[1] == "y":
                    return "yi" + m[2] + "A!" + m[3]
                if m[2] == m[3]:
                    return "ya" + remove_dagesh_lene(m[1]) + vowel + "!" + m[3]
                return (
                    "yI"
                    + remove_dagesh_lene(m[1])
                    + add_dagesh_lene(m[2])
                    + ("A" if m[3] != "Q" else "a")
                    + "!"
                    + m[3]
                )
            if Paradigm.PAAL_2 == paradigm:
                if m[1] in GRONIYOT:
                    return (
                        "yA"
                        + m[1]
                        + ("á" + m[2] if m[1] == "R" else add_dagesh_lene(m[2]))
                        + "o!"
                        + m[3]
                    )
                if m[1] == "n":
                    return (
                        "yI"
                        + (
                            add_dagesh_forte(m[2])
                            if m[2] not in GRONIYOT_RESH
                            else "n" + m[2]
                        )
                        + "A!"
                        + m[3]
                    )
                if m[1] == "y":
                    return "ye" + m[2] + ("A!" if m[3] in "Rjh" else "e!") + m[3]
            # if m[1] in "Qh":
            #     return "yE" + m[1] + add_dagesh_lene(m[2]) + "o!" + m[3]
            if m[1] in GRONIYOT:
                return "yA" + m[1] + "á" + m[2] + "o!" + m[3]
            if m[2] in GRONIYOT or m[3] in GRONIYOT:
                return (
                    "yI"
                    + remove_dagesh_lene(m[1])
                    + add_dagesh_lene(m[2])
                    + "A!"
                    + m[3]
                )
            return "yI" + remove_dagesh_lene(m[1]) + add_dagesh_lene(m[2]) + "o!" + m[3]
        # Biliteral (hollow) past shapes.
        if re.fullmatch(r".a!.", base):
            if paradigm in (Paradigm.PAAL_1, Paradigm.PAAL_5):
                return (
                    "ya"
                    + remove_dagesh_lene(base[0])
                    + "i!"
                    + base[-1]
                    + patah_gnuva(base[-1])
                )
            return (
                "ya"
                + remove_dagesh_lene(base[0])
                + "u!"
                + base[-1]
                + patah_gnuva(base[-1])
            )
        if re.fullmatch(r".A!.", base):
            if paradigm in (Paradigm.PAAL_1, Paradigm.PAAL_5):
                return (
                    "ya"
                    + remove_dagesh_lene(base[0])
                    + "o!"
                    + base[-1]
                    + patah_gnuva(base[-1])
                )
            if Paradigm.PAAL_2 == paradigm:
                return (
                    "yI"
                    + add_dagesh_forte(base[0])
                    + "o!"
                    + base[-1]
                    + patah_gnuva(base[-1])
                )
            return "ye" + remove_dagesh_lene(base[0]) + "A!" + base[-1]
        # Lamed-he past shapes (".a.a!H").
        if re.fullmatch(r".a.a!H", base):
            if paradigm in (Paradigm.PAAL_1, Paradigm.PAAL_5):
                if base[0] in GRONIYOT:
                    return "yE" + base[0] + "é" + base[2] + "E!H"
                # return "yE" + base[0] + ("é" if base[0] in "QRh" else "") + (base[2] if base[0] in "QRh" else add_dagesh_lene(base[2])) + "E!H"
                if base[0] == "n":
                    return (
                        "yI"
                        + (
                            add_dagesh_forte(base[2])
                            if base[2] not in GRONIYOT_RESH
                            else "n" + base[2]
                        )
                        + "E!H"
                    )
                if base[0] == "y":
                    return "yi" + base[2] + "E!H"
                assert False
            if Paradigm.PAAL_2 == paradigm:
                if base[0] in GRONIYOT:
                    return "yA" + base[0] + add_dagesh_lene(base[2]) + "E!H"
            if base[0] in GRONIYOT:
                return "yA" + base[0] + "á" + base[2] + "E!H"
            return "yI" + remove_dagesh_lene(base[0]) + add_dagesh_lene(base[2]) + "E!H"
def past2present(base: str, binyan: Binyan, paradigm: Paradigm) -> Optional[str]:
    """Derive the masc. sg. present form from the past base form.

    Parameters
    ----------
    base : str
        Transliterated 3rd masc. sg. past form.
    binyan : Binyan
        Verb template of the base.
    paradigm : Paradigm
        Weak-root paradigm classification.

    Returns
    -------
    Optional[str]
        The transliterated present form.  Every visible branch returns a
        string; the Optional annotation matches the sibling derivation
        functions' signatures.
    """
    # Hard-coded irregular present.
    if base == "haya!H":
        return "hWwE!H"
    if binyan == Binyan.HITPAEL:
        return "m" + base[1:].replace("a!H", "E!H")
    if binyan in (Binyan.PIEL, Binyan.PUAL):
        base = re.sub("A!", "a!", base)
        # Swap the first consonant after an "m3" prefix, adjusting the vowel.
        base = (
            "m3"
            + remove_dagesh_lene(base[0])
            + re.sub("^[Ie]", "a" if re.search("e[rQ].!", base) else "A", base[1:])
        )
        return base.replace("a!H", "E!H")
    if binyan == Binyan.HIFIL:
        base = base.replace("a!H", "E!H")
        if re.match("^hE.é", base):
            return "m" + "A" + base[2] + "á" + base[4:]
        if re.fullmatch("he..!.Á?", base):
            return "m" + base[1:]
        # Replace the "h" prefix with "m", choosing the prefix vowel from
        # the past stem's first vowel.
        if base[1:3] == "eY" or base[1] in "aW":
            return "m" + base[1:]
        if base[1] in "iueo":
            return "ma" + base[2:]
        return "mA" + base[2:]
    if binyan == Binyan.HUFAL:
        base = re.sub("A!", "a!", base)
        return "m" + base[1:].replace("a!H", "E!H")
    if binyan == Binyan.NIFAL:
        # NIFAL keeps the "n" prefix; only the stressed vowel changes.
        if re.fullmatch(r"n(?:I_?|W)(.)A!(.)", base) or re.fullmatch(r"na(.)A!.", base):
            return base.replace("A!", "a!")
        if re.match(r"n[IE].é?.A!.", base):
            return base.replace("A!", "a!")
        if base.endswith("a!H"):
            return base[:-3] + "E!H"
        return base.replace("A!", "a!")
    if binyan == Binyan.PAAL:
        # PAAL present is the "qotel" pattern: first vowel becomes "W".
        if re.fullmatch(r".a.A!.", base):
            if paradigm == Paradigm.PAAL_4:
                return base[0] + "a" + base[2] + "e!" + base[-1] + patah_gnuva(base[-1])
            return base[0] + "W" + base[2] + "e!" + base[-1] + patah_gnuva(base[-1])
        if re.fullmatch(r".a.a!H", base):
            if paradigm == Paradigm.PAAL_4:
                return base[0] + "a" + base[2] + "E!H"
            return base[0] + "W" + base[2] + "E!H"
        if re.fullmatch(r".[aA]!.", base):
            return base
        if re.fullmatch(r".a.a!Q", base):
            if paradigm in (Paradigm.PAAL_1, Paradigm.PAAL_5):
                return base[0] + "W" + base[2] + "e!Q"
            return base[0] + "W" + base[2] + "a!Q"
        if re.fullmatch(r".a.e!Q", base):
            return base
    return base
def past2binyan(verb: str, paradigm: Paradigm) -> Optional[Binyan]:
    """Classify a past-tense base form into its binyan (verb template).

    Matching is purely orthographic: the transliterated *verb* is tested
    against template regexes, with *paradigm* used to disambiguate weak
    roots.  The order of the tests matters -- several templates overlap
    (see the inline "must be before" notes).

    Parameters
    ----------
    verb : str
        Transliterated 3rd masc. sg. past form.
    paradigm : Paradigm
        Weak-root paradigm classification.

    Returns
    -------
    Optional[Binyan]
        The detected binyan, or None when no template matches.
    """
    # Geminate NIFAL must be recognised before the generic PAAL patterns.
    if paradigm != Paradigm.NO_PREFIX and re.fullmatch("n(a.|ej)A!.", verb) and paradigm.is_kfulim():
        return Binyan.NIFAL
    if paradigm.is_paal():
        return Binyan.PAAL
    if re.fullmatch(".a.(A!.|a!H|a!Q)", verb):
        return Binyan.PAAL
    if re.fullmatch(".[aA]!.", verb):
        return Binyan.PAAL
    if paradigm != Paradigm.NO_PREFIX and re.fullmatch("hI..(i!.Á?|a!H)", verb):  # must be before PIEL and HITPAEL
        return Binyan.HIFIL
    if paradigm != Paradigm.NO_PREFIX and re.fullmatch("h(eY?.|E[rjhQR]é?.|W.)(i!.Á?|a!H)|he.e!.Á?", verb):
        return Binyan.HIFIL
    # NOTE: `and` binds tighter than `or` here, so the second fullmatch is
    # tested regardless of the paradigm (metathesised HITPAEL forms).
    if paradigm != Paradigm.NO_PREFIX and re.match("hI(t.|[csS]T|Z7|zD)", verb) or re.fullmatch(
        "hI_[7TD](A(.á?.|[rhjRQ])|a[rQ])(e!.Á?|a!H|a!Q)", verb
    ):
        return Binyan.HITPAEL
    if paradigm != Paradigm.NO_PREFIX and (
        re.fullmatch("nI..(A!.|a!Q)", verb)
        or re.fullmatch("n(I.|E[jRhQ]é?).a!H", verb)
        or re.fullmatch("nE[jRhQ]é?.(A!.|a!Q)", verb)
        or re.fullmatch("nW.(A!.|a!Q|a!H)", verb)
        or re.fullmatch("nI[rjhRQ]A!.", verb)
        or re.fullmatch("nA[Rjh]á.a!H", verb)
    ):
        return Binyan.NIFAL
    if (
        re.fullmatch(".(I.á?.|I..3.|[Ie][jRQhr])(e!.Á?|a!H)", verb)
        or re.fullmatch(r".W(.)e!.Á?", verb)
        or re.fullmatch(rf".I[{CONSONANTS}]{{3,}}e!.Á?", verb)
    ):
        return Binyan.PIEL
    # must be before PUAL
    # (The original repeated the NO_PREFIX check inside the condition;
    # the duplicate was redundant and has been removed.)
    if paradigm != Paradigm.NO_PREFIX and re.fullmatch(
        "h(U.|u).(A!.|a!H|a!Q)", verb
    ):
        return Binyan.HUFAL
    if (
        re.fullmatch(".(U.á?.|[Uo][jRQhr])(A!.|a!H|a!Q)", verb)
        or re.fullmatch(r".W(.)A!\1", verb)
        or re.fullmatch(rf".U[{CONSONANTS}]{{3,}}A!.", verb)
    ):
        return Binyan.PUAL
    return None
def __inflect(
    base: str, binyan: Binyan, paradigm: Paradigm
) -> Optional[dict[str, str]]:
    """Assemble the full inflection table for *base*.

    Keys are "past_<PRONOUN>", "future_<PRONOUN>", "present_<FORM>" and,
    for binyanim that have an infinitive, "shem_poal".  Returns None when
    the future or present stem cannot be derived.
    """
    future_base = past2future(base, binyan, paradigm)
    if future_base is None:
        return None
    present_base = past2present(base, binyan, paradigm)
    if present_base is None:
        return None
    table: dict[str, str] = {}
    for pronoun in Pronoun:
        table[f"past_{pronoun.name}"] = fixup(
            inflect_past(base, binyan, pronoun, paradigm)
        )
    for pronoun in Pronoun:
        table[f"future_{pronoun.name}"] = fixup(
            inflect_future(future_base, binyan, pronoun, paradigm)
        )
    for form in Present:
        table[f"present_{form.name}"] = fixup(
            inflect_present(present_base, binyan, form, paradigm)
        )
    # Passive binyanim (PUAL/HUFAL) have no infinitive.
    if binyan not in (Binyan.PUAL, Binyan.HUFAL):
        infinitive = future2infinitive(future_base, binyan, paradigm)
        table["shem_poal"] = fixup(infinitive or "")
    return table
def inflect(verb: str, paradigm: Paradigm) -> Optional[dict[str, str]]:
    """Return the full inflection table for *verb*, or None if its binyan
    cannot be detected."""
    binyan = past2binyan(verb, paradigm)
    return None if binyan is None else __inflect(verb, binyan, paradigm)
| StarcoderdataPython |
6438411 | import nltk.data
import pandas as pd
import argparse
import os
def section_start(lines, section=' IMPRESSION'):
    """Return the index of the first line starting with *section*, or -1."""
    matches = (idx for idx, text in enumerate(lines) if text.startswith(section))
    return next(matches, -1)
def generate_whole_report_impression_csv(df, split, dir):
    """Write mimic_<split>_impressions.csv where each 'report' holds only
    the impression section of the original report (empty if absent)."""
    out_df = df.copy()
    for idx, row in out_df.iterrows():
        lines = row['report'].splitlines()
        imp_idx = section_start(lines)
        combined_idx = section_start(lines, section=' FINDINGS AND IMPRESSION:')
        if imp_idx != -1:
            text = ''.join(lines[imp_idx:]).replace('IMPRESSION:', '')
        elif combined_idx != -1:
            text = ''.join(lines[combined_idx:]).replace('FINDINGS AND IMPRESSION:', '')
        else:
            text = ''
        out_df.at[idx, 'report'] = text.replace('\n', '').strip()
    out_df.to_csv(os.path.join(dir, f'mimic_{split}_impressions.csv'), index=False)
def generate_sentence_level_impression_csv(df, split, dir, tokenizer):
    """Write mimic_<split>_sentence_impressions.csv with one row per
    impression sentence (dicom_id, study_id, subject_id, sentence_id, report)."""
    rows = []
    for _, record in df.iterrows():
        lines = record['report'].splitlines()
        imp_idx = section_start(lines)
        combined_idx = section_start(lines, section=' FINDINGS AND IMPRESSION:')
        if imp_idx != -1:
            impression = ''.join(lines[imp_idx:]).replace('IMPRESSION:', '')
        elif combined_idx != -1:
            impression = ''.join(lines[combined_idx:]).replace('FINDINGS AND IMPRESSION:', '')
        else:
            impression = ''
        impression = impression.replace('\n', '').strip()
        for pos, sentence in enumerate(split_sentences(impression, tokenizer)):
            rows.append([record['dicom_id'], record['study_id'],
                         record['subject_id'], pos, sentence])
    out = pd.DataFrame(
        rows, columns=['dicom_id', 'study_id', 'subject_id', 'sentence_id', 'report'])
    out.to_csv(os.path.join(dir, f'mimic_{split}_sentence_impressions.csv'),
               index=False)
def split_sentences(report, tokenizer):
    """Split *report* into sentences, dropping bare numbering and NaN.

    Parameters
    ----------
    report : str or float
        Impression text; may be NaN when the report had no impression.
    tokenizer : object
        Sentence tokenizer exposing ``tokenize(text) -> list[str]``
        (e.g. nltk's punkt tokenizer).

    Returns
    -------
    list of str
        Sentences that are not pure numbers (tokens such as "1." from
        numbered lists are discarded).
    """
    sentences = []
    # pd.isna guards against NaN/None reports.  The original tested
    # `isinstance(report, float) and math.isnan(report)` but never imported
    # math, so it raised NameError on exactly the NaN inputs it meant to skip.
    if not pd.isna(report):
        for sentence in tokenizer.tokenize(report):
            try:
                float(sentence)  # drop tokens that are bare numbering, e.g. "1."
            except ValueError:
                sentences.append(sentence)
    return sentences
# Command-line entry point: reads the full train/test report csvs from --dir
# and writes the report-level and sentence-level impression csvs back there.
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Extract the impression section and generate csvs for report level and sentence level.')
    parser.add_argument('--dir', type=str, required=True, help='directory where train and test report reports are stored and where impression sections will be stored')
    args = parser.parse_args()
    # Input csvs are expected to already exist in --dir.
    train_path = os.path.join(args.dir, 'mimic_train_full.csv')
    test_path = os.path.join(args.dir, 'mimic_test_full.csv')
    train_df = pd.read_csv(train_path)
    test_df = pd.read_csv(test_path)
    # whole reports
    generate_whole_report_impression_csv(train_df, 'train', args.dir)
    generate_whole_report_impression_csv(test_df, 'test', args.dir)
    # Download the punkt models only if they are not already installed.
    try:
        nltk.data.find('tokenizers/punkt')
    except LookupError:
        nltk.download('punkt')
    # sentences
    tokenizer = nltk.data.load('tokenizers/punkt/english.pickle')
    # NOTE(review): only the train split is processed at sentence level --
    # confirm whether the test split was omitted intentionally.
    generate_sentence_level_impression_csv(train_df, 'train', args.dir, tokenizer)
| StarcoderdataPython |
11213701 | <filename>text_extraction.py
"""Program is used to extract the text from image"""
import string
import cv2
import pytesseract
def image_to_text(image_path, path_to_tesseract):
    """Load an image, print the text Tesseract finds in it, and return the
    RGB image for further annotation."""
    pytesseract.pytesseract.tesseract_cmd = path_to_tesseract
    image = cv2.cvtColor(cv2.imread(image_path), cv2.COLOR_BGR2RGB)
    print(pytesseract.image_to_string(image))
    return image
def draw_boxes_on_character(img, path_to_tesseract):
    """Draw a green box and red label over every letter detected in *img*.

    Only ASCII letters are whitelisted for recognition.  Note Tesseract's
    box origin is the bottom-left corner, hence the height subtraction.
    """
    height = img.shape[0]
    pytesseract.pytesseract.tesseract_cmd = path_to_tesseract
    whitelist = r'-c tessedit_char_whitelist=' + string.ascii_letters
    for entry in pytesseract.image_to_boxes(img, config=whitelist).splitlines():
        fields = entry.split(" ")
        character = fields[0]
        x1, y1, x2, y2 = (int(value) for value in fields[1:5])
        cv2.rectangle(img, (x1, height - y1), (x2, height - y2), (0, 255, 0), 1)
        cv2.putText(img, character, (x1, height - y2),
                    cv2.FONT_HERSHEY_COMPLEX, 0.75, (0, 0, 255), 1)
    return img
def draw_boxes_on_text(img, path_to_tesseract):
    """Draw a green box and red label over every word detected in *img*.

    Parameters
    ----------
    img : numpy.ndarray
        Image to annotate in place.
    path_to_tesseract : str
        Filesystem path to the tesseract executable.

    Returns
    -------
    numpy.ndarray
        The annotated image (same object as *img*).
    """
    # Point pytesseract at the executable BEFORE the OCR call.  The original
    # ran image_to_data first, which fails whenever tesseract is not already
    # on PATH -- the other functions in this file set the command first.
    pytesseract.pytesseract.tesseract_cmd = path_to_tesseract
    raw_data = pytesseract.image_to_data(img)
    for count, data in enumerate(raw_data.splitlines()):
        if count > 0:  # skip the TSV header row
            data = data.split()
            # Rows with 12 fields carry an actual recognized word.
            if len(data) == 12:
                x, y, w, h, content = int(data[6]), int(data[7]), int(data[8]), int(data[9]), data[11]
                cv2.rectangle(img, (x, y), (w + x, h + y), (0, 255, 0), 1)
                cv2.putText(img, content, (x, y), cv2.FONT_HERSHEY_COMPLEX, 0.5, (0, 0, 255), 1)
    return img
def displaying_image(img):
    """Show *img* in an "Output" window and block until a key is pressed."""
    cv2.imshow("Output", img)
    cv2.waitKey(0)
def selection(number,img, path_to_tesseract):
"""selecting the options"""
match number:
case 1:
first = draw_boxes_on_character(img, path_to_tesseract)
displaying_image(first)
case 2:
second = draw_boxes_on_text(img, path_to_tesseract)
displaying_image(second)
case default:
print("selected number is out of range")
# Interactive entry point: OCR a fixed image, show it, then let the user pick
# a box-drawing mode.
if __name__ == "__main__":
    """function to run all functions"""
    # NOTE(review): hard-coded Windows paths (and a <NAME> placeholder) --
    # adjust for the local machine before running.
    path_to_tesseract = r"C:\Program Files\Tesseract-OCR\tesseract.exe"
    image_path = r"C:\Users\<NAME>\Documents\project_python\second.png"
    image_first_face = image_to_text(image_path, path_to_tesseract)
    displaying_image(image_first_face)
    print("Now you have two options")
    printing = """1. drawing boxes on character in image (1),
    2. drawing boxes on text in image (2),
    ****************************************"""
    print(printing)
    number = int(input("Enter a number: "))
    selection(number ,image_first_face, path_to_tesseract)
| StarcoderdataPython |
3577456 | # ============================================================================
# Copyright (c) 2018 Diamond Light Source Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
# Author: <NAME>
# E-mail: <EMAIL>
# Description: Python implementation of the author's methods of
# distortion correction, <NAME> et al "Radial lens distortion
# correction with sub-pixel accuracy for X-ray micro-tomography"
# Optics Express 23, 32859-32868 (2015), https://doi.org/10.1364/OE.23.032859
# Publication date: 10th July 2018
# ============================================================================
# Contributors:
# ============================================================================
"""
Module of processing methods:
- Fit lines of dots to parabolas, find the center of distortion.
- Calculate undistorted intercepts of gridlines.
- Calculate distortion coefficients of the backward model, the forward model,
and the backward-from-forward model.
- Correct perspective distortion affecting curved lines.
- Generate non-perspective points or lines from perspective points or lines.
- Calculate perspective coefficients.
"""
import numpy as np
from scipy import optimize
def _para_fit_hor(list_lines, xcenter, ycenter):
"""
Fit horizontal lines of dots to parabolas.
Parameters
----------
list_lines : list of 2D arrays
List of the (y,x)-coordinates of dot-centroids on each line.
xcenter : float
Center of distortion in x-direction.
ycenter : float
Center of distortion in y-direction.
Returns
-------
list_coef : list of 1D arrays
List of the coefficients of each parabola (y=ax**2+bx+c).
list_slines : list of 2D arrays
List of the shifted (y,x)-coordinates of dot-centroids on each line.
"""
num_line = len(list_lines)
list_coef = np.zeros((num_line, 3), dtype=np.float32)
list_slines = []
for i, iline in enumerate(list_lines):
line = np.asarray(iline)
list_coef[i] = np.asarray(np.polyfit(line[:, 1] - xcenter,
line[:, 0] - ycenter, 2))
list_temp = np.asarray(
[(dot[0] - ycenter, dot[1] - xcenter) for dot in line])
list_slines.append(list_temp)
return list_coef, list_slines
def _para_fit_ver(list_lines, xcenter, ycenter):
"""
Fit vertical lines of dots to parabolas.
Parameters
----------
list_lines : list of 2D arrays
List of the (y,x)-coordinates of dot-centroids on each line.
xcenter : float
Center of distortion in x-direction.
ycenter : float
Center of distortion in y-direction.
Returns
-------
list_coef : list of 1D arrays
List of the coefficients of each parabola (x=ay**2+by+c).
list_slines : list of 2D arrays
List of the shifted (y,x)-coordinates of dot-centroids on each line.
"""
num_line = len(list_lines)
list_coef = np.zeros((num_line, 3), dtype=np.float32)
list_slines = []
for i, iline in enumerate(list_lines):
line = np.asarray(iline)
list_coef[i] = np.asarray(
np.polyfit(line[:, 0] - ycenter, line[:, 1] - xcenter, 2))
list_temp = np.asarray(
[(dot[0] - ycenter, dot[1] - xcenter) for dot in line])
list_slines.append(list_temp)
return list_coef, list_slines
def find_cod_coarse(list_hor_lines, list_ver_lines):
    """
    Coarse estimation of the center of distortion (CoD).

    The CoD lies between the pair of adjacent fitted parabolas whose
    leading coefficient changes sign; their averaged intercepts and slopes
    give a first estimate, corrected for the mutual tilt of the grids.

    Parameters
    ----------
    list_hor_lines : list of 2D arrays
        List of the (y,x)-coordinates of dot-centroids on each horizontal line.
    list_ver_lines : list of 2D arrays
        List of the (y,x)-coordinates of dot-centroids on each vertical line.

    Returns
    -------
    xcenter : float
        Center of distortion in x-direction.
    ycenter : float
        Center of distortion in y-direction.
    """
    coef_hor, _ = _para_fit_hor(list_hor_lines, 0.0, 0.0)
    coef_ver, _ = _para_fit_ver(list_ver_lines, 0.0, 0.0)
    # Index of the first parabola after the sign flip of the a-coefficient.
    flip_h = np.argmax(np.abs(np.diff(np.sign(coef_hor[:, 0])))) + 1
    flip_v = np.argmax(np.abs(np.diff(np.sign(coef_ver[:, 0])))) + 1
    ycenter0 = (coef_hor[flip_h - 1, 2] + coef_hor[flip_h, 2]) * 0.5
    xcenter0 = (coef_ver[flip_v - 1, 2] + coef_ver[flip_v, 2]) * 0.5
    slope_hor = (coef_hor[flip_h - 1, 1] + coef_hor[flip_h, 1]) * 0.5
    slope_ver = (coef_ver[flip_v - 1, 1] + coef_ver[flip_v, 1]) * 0.5
    denom = 1.0 - slope_hor * slope_ver
    ycenter = (ycenter0 + xcenter0 * slope_hor) / denom
    xcenter = (xcenter0 + ycenter0 * slope_ver) / denom
    return xcenter, ycenter
def _func_dist(x, a, b, c):
"""
Function for finding the minimum distance.
"""
return x ** 2 + (a * x ** 2 + b * x + c) ** 2
def _calc_error(list_coef_hor, list_coef_ver):
"""
Calculate a metric of measuring how close fitted lines to the coordinate
origin by: locating points on each parabola having the minimum distance
to the origin, applying linear fits to these points, adding intercepts of
the fits.
Parameters
----------
list_coef_hor : list of 1D arrays
Coefficients of parabolic fits of horizontal lines.
list_coef_ver : list of 1D arrays
Coefficients of parabolic fits of vertical lines.
Returns
-------
float
"""
num_hline = len(list_coef_hor)
num_vline = len(list_coef_ver)
list_hpoint = np.zeros((num_hline, 2), dtype=np.float32)
for i, coefs in enumerate(list_coef_hor):
minimum = optimize.minimize(_func_dist, 0.0, args=tuple(coefs))
xm = minimum.x[0]
ym = coefs[0] * xm ** 2 + coefs[1] * xm + coefs[2]
list_hpoint[i, 0] = xm
list_hpoint[i, 1] = ym
list_vpoint = np.zeros((num_vline, 2), dtype=np.float32)
for i, coefs in enumerate(list_coef_ver):
minimum = optimize.minimize(_func_dist, 0.0, args=tuple(coefs))
ym = minimum.x[0]
xm = coefs[0] * ym ** 2 + coefs[1] * ym + coefs[2]
list_vpoint[i, 0] = ym
list_vpoint[i, 1] = xm
error_h = np.polyfit(list_hpoint[:, 0], list_hpoint[:, 1], 1)[-1]
error_v = np.polyfit(list_vpoint[:, 0], list_vpoint[:, 1], 1)[-1]
return np.abs(error_h) + np.abs(error_v)
def _calc_metric(list_hor_lines, list_ver_lines, xcenter, ycenter,
                 list_xshift, list_yshift):
    """
    Calculate a metric for determining the best center of distortion by
    grid-searching offsets around (xcenter, ycenter).

    Parameters
    ----------
    list_hor_lines : list of 2D arrays
        List of the (y,x)-coordinates of dot-centroids on each horizontal line.
    list_ver_lines : list of 2D arrays
        List of the (y,x)-coordinates of dot-centroids on each vertical line.
    xcenter : float
        Center of distortion in x-direction.
    ycenter : float
        Center of distortion in y-direction.
    list_xshift : list of float
        List of x-offsets from the x-center.
    list_yshift : list of float
        List of y-offsets from the y-center.

    Returns
    -------
    xshift : float
        Shift in x-direction from the x-center minimizing the metric.
    yshift : float
        Shift in y-direction from the y-center minimizing the metric.
    """
    (list_coef_hor, list_hor_lines) = _para_fit_hor(
        list_hor_lines, xcenter, ycenter)
    (list_coef_ver, list_ver_lines) = _para_fit_ver(
        list_ver_lines, xcenter, ycenter)
    # Lines passing closest to the current center (smallest |intercept|).
    pos_hor = np.argmin(np.abs(list_coef_hor[:, 2]))
    pos_ver = np.argmin(np.abs(list_coef_ver[:, 2]))
    # Rows index y-shifts and columns index x-shifts.  The original
    # allocated the transposed shape (len_x, len_y), which only worked
    # because both shift lists always had the same length.
    mat_metric = np.zeros(
        (len(list_yshift), len(list_xshift)), dtype=np.float32)
    num_hline = len(list_hor_lines)
    num_vline = len(list_ver_lines)
    # Use at most 5 lines on each side of the central line.
    numuse = min(5, num_hline // 2 - 1, num_vline // 2 - 1)
    (posh1, posh2) = (
        max(0, pos_hor - numuse), min(num_hline, pos_hor + numuse + 1))
    (posv1, posv2) = (
        max(0, pos_ver - numuse), min(num_vline, pos_ver + numuse + 1))
    for j, pos_x in enumerate(list_xshift):
        for i, pos_y in enumerate(list_yshift):
            (list_coef_hor, _) = _para_fit_hor(
                list_hor_lines[posh1:posh2], pos_x, pos_y)
            (list_coef_ver, _) = _para_fit_ver(
                list_ver_lines[posv1:posv2], pos_x, pos_y)
            mat_metric[i, j] = _calc_error(list_coef_hor, list_coef_ver)
    min_pos = (np.unravel_index(mat_metric.argmin(), mat_metric.shape))
    xshift = list_xshift[min_pos[1]]
    yshift = list_yshift[min_pos[0]]
    return xshift, yshift
def find_cod_fine(list_hor_lines, list_ver_lines, xcenter, ycenter, dot_dist):
    """
    Find the best center of distortion (CoD) by a two-stage grid search
    around a coarse estimate of the CoD.

    Parameters
    ----------
    list_hor_lines : list of 2D arrays
        List of the (y,x)-coordinates of dot-centroids on each horizontal line.
    list_ver_lines : list of 2D arrays
        List of the (y,x)-coordinates of dot-centroids on each vertical line.
    xcenter : float
        Coarse estimation of the CoD in x-direction.
    ycenter : float
        Coarse estimation of the CoD in y-direction.
    dot_dist : float
        Median distance of two nearest dots; defines the search radius.

    Returns
    -------
    xcenter : float
        Refined center of distortion in x-direction.
    ycenter : float
        Refined center of distortion in y-direction.
    """
    # Stage 1: coarse search, +/- dot_dist around the estimate, 2-pixel step.
    coarse_step = 2.0
    offsets = np.arange(-dot_dist, dot_dist + coarse_step, coarse_step)
    xshift, yshift = _calc_metric(list_hor_lines, list_ver_lines, xcenter,
                                  ycenter, offsets, offsets)
    xcod = xcenter + xshift
    ycod = ycenter + yshift
    # Stage 2: fine search, +/- one coarse step, 0.5-pixel step.
    fine_step = 0.5
    offsets = np.arange(-coarse_step, coarse_step + fine_step, fine_step)
    xshift, yshift = _calc_metric(list_hor_lines, list_ver_lines, xcod,
                                  ycod, offsets, offsets)
    return xcod + xshift, ycod + yshift
def _check_missing_lines(list_coef_hor, list_coef_ver):
"""
Check if there are missing lines
Parameters
----------
list_coef_hor : list of 1D arrays
Coefficients of parabolic fits of horizontal lines.
list_coef_ver : list of 1D arrays
Coefficients of parabolic fits of vertical lines.
Returns
-------
bool
"""
check = False
list_dist_hor = np.abs(np.diff(list_coef_hor[:, 2]))
list_dist_ver = np.abs(np.diff(list_coef_ver[:, 2]))
list_hindex = np.arange(len(list_dist_hor))
list_vindex = np.arange(len(list_dist_ver))
hfact = np.polyfit(list_hindex, list_dist_hor, 2)
vfact = np.polyfit(list_vindex, list_dist_ver, 2)
list_fit_hor = hfact[0] * list_hindex ** 2 + \
hfact[1] * list_hindex + hfact[2]
list_fit_ver = vfact[0] * list_vindex ** 2 + \
vfact[1] * list_vindex + vfact[2]
herror = np.max(np.abs((list_dist_hor - list_fit_hor) / list_fit_hor))
verror = np.max(np.abs((list_dist_ver - list_fit_ver) / list_fit_ver))
if (herror > 0.3) or (verror > 0.3):
check = True
return check
def _func_opt(d0, c0, indexc0, *list_inter):
"""
Function for finding the optimum undistorted distance for radial
distortion correction.
"""
return np.sum(
np.asarray([(np.sign(c) * np.abs(i - indexc0) * d0 + c0 - c) ** 2
for i, c in enumerate(list_inter)]))
def _optimize_intercept(dist_hv, pos_hv, list_inter):
    """
    Refine the undistorted line-spacing for radial-distortion correction by
    minimizing :func:`_func_opt`, starting from the measured spacing.

    Parameters
    ----------
    dist_hv : float
        Initial estimate of the line spacing.
    pos_hv : int
        Index of the line closest to the center.
    list_inter : array_like
        Intercepts of all fitted lines.
    """
    args = (list_inter[pos_hv], pos_hv) + tuple(list_inter)
    result = optimize.minimize(_func_opt, dist_hv, args=args)
    return result.x[0]
def _calc_undistor_intercept(list_hor_lines, list_ver_lines, xcenter, ycenter,
                             optimizing=False):
    """
    Calculate the intercepts that the grid lines would have if the image
    were undistorted, assuming the undistorted lines are equidistant.

    Parameters
    ----------
    list_hor_lines : list of 2D arrays
        List of the (y,x)-coordinates of dot-centroids on each horizontal line.
    list_ver_lines : list of 2D arrays
        List of the (y,x)-coordinates of dot-centroids on each vertical line.
    xcenter : float
        Center of distortion in x-direction.
    ycenter : float
        Center of distortion in y-direction.
    optimizing : bool, optional
        Refine the estimated line-spacing by optimization if True.

    Returns
    -------
    list_hor_uc : list of floats
        Intercepts of undistorted horizontal lines.
    list_ver_uc : list of floats
        Intercepts of undistorted vertical lines.

    Raises
    ------
    ValueError
        If the spacing check suggests that some lines were not detected.
    """
    # Parabolic fits in the coordinate system centered on the CoD.
    (list_coef_hor, list_hor_lines) = _para_fit_hor(
        list_hor_lines, xcenter, ycenter)
    (list_coef_ver, list_ver_lines) = _para_fit_ver(
        list_ver_lines, xcenter, ycenter)
    check = _check_missing_lines(list_coef_hor, list_coef_ver)
    if check:
        print("!!! ERROR !!!")
        print("Parameters of the methods of grouping dots need to be adjusted")
        raise ValueError("There're missing lines, algorithm will not work!!!")
    # Lines closest to the center (intercept closest to zero).
    pos_hor = np.argmin(np.abs(list_coef_hor[:, 2]))
    pos_ver = np.argmin(np.abs(list_coef_ver[:, 2]))
    num_hline = len(list_hor_lines)
    num_vline = len(list_ver_lines)
    # Estimate the spacing from a few lines around the center, where the
    # distortion is smallest.
    num_use = min(3, num_hline // 2 - 1, num_vline // 2 - 1)
    (posh1, posh2) = (
        max(0, pos_hor - num_use), min(num_hline, pos_hor + num_use + 1))
    (posv1, posv2) = (
        max(0, pos_ver - num_use), min(num_vline, pos_ver + num_use + 1))
    dist_hor = np.mean(np.abs(np.diff(list_coef_hor[posh1: posh2, 2])))
    dist_ver = np.mean(np.abs(np.diff(list_coef_ver[posv1: posv2, 2])))
    if optimizing is True:
        dist_hor = _optimize_intercept(dist_hor, pos_hor, list_coef_hor[:, 2])
        dist_ver = _optimize_intercept(dist_ver, pos_ver, list_coef_ver[:, 2])
    # Undistorted intercepts: equidistant from the central line, keeping the
    # sign (side) of each measured intercept.
    list_hor_uc = np.zeros(num_hline, dtype=np.float32)
    list_ver_uc = np.zeros(num_vline, dtype=np.float32)
    for i in range(num_hline):
        dist = np.abs(i - pos_hor) * dist_hor
        list_hor_uc[i] = np.sign(list_coef_hor[i, 2]) * dist + list_coef_hor[
            pos_hor, 2]
    for i in range(num_vline):
        dist = np.abs(i - pos_ver) * dist_ver
        list_ver_uc[i] = np.sign(list_coef_ver[i, 2]) * dist + list_coef_ver[
            pos_ver, 2]
    return list_hor_uc, list_ver_uc
def calc_coef_backward(list_hor_lines, list_ver_lines, xcenter, ycenter,
                       num_fact):
    """
    Calculate the distortion coefficients of a backward mode (mapping
    distorted radii back to undistorted radii) via linear least squares.

    Parameters
    ----------
    list_hor_lines : list of 2D arrays
        List of the (y,x)-coordinates of dot-centroids on each horizontal line.
    list_ver_lines : list of 2D arrays
        List of the (y,x)-coordinates of dot-centroids on each vertical line.
    xcenter : float
        Center of distortion in x-direction.
    ycenter : float
        Center of distortion in y-direction.
    num_fact : int
        Number of the factors of polynomial (clipped to >= 1).

    Returns
    -------
    list_fact : list of float
        Coefficients of the polynomial.
    """
    num_fact = np.int16(np.clip(num_fact, 1, None))
    (list_hor_uc, list_ver_uc) = _calc_undistor_intercept(
        list_hor_lines, list_ver_lines, xcenter, ycenter)
    (list_coef_hor, list_hor_lines) = _para_fit_hor(
        list_hor_lines, xcenter, ycenter)
    (list_coef_ver, list_ver_lines) = _para_fit_ver(
        list_ver_lines, xcenter, ycenter)
    Amatrix = []
    Bmatrix = []
    list_expo = np.arange(num_fact, dtype=np.int16)
    for i, line in enumerate(list_hor_lines):
        (a_coef, _, c_coef) = np.float64(list_coef_hor[i])
        uc_coef = np.float64(list_hor_uc[i])
        # Guard against degenerate lines through the center (uc_coef == 0)
        # and zero scale factors, which would inject inf/nan rows into the
        # least-squares system. Consistent with calc_coef_forward.
        if uc_coef != 0.0:
            for point in line:
                xd = np.float64(point[1])
                yd = np.float64(point[0])
                rd = np.sqrt(xd * xd + yd * yd)
                Fb = (a_coef * xd * xd + c_coef) / uc_coef
                if Fb != 0.0:
                    Amatrix.append(np.power(rd / Fb, list_expo))
                    Bmatrix.append(Fb)
    for i, line in enumerate(list_ver_lines):
        (a_coef, _, c_coef) = np.float64(list_coef_ver[i])
        uc_coef = np.float64(list_ver_uc[i])
        # Same guards for vertical lines.
        if uc_coef != 0.0:
            for point in line:
                xd = np.float64(point[1])
                yd = np.float64(point[0])
                rd = np.sqrt(xd * xd + yd * yd)
                Fb = (a_coef * yd * yd + c_coef) / uc_coef
                if Fb != 0.0:
                    Amatrix.append(np.power(rd / Fb, list_expo))
                    Bmatrix.append(Fb)
    Amatrix = np.asarray(Amatrix, dtype=np.float64)
    Bmatrix = np.asarray(Bmatrix, dtype=np.float64)
    list_fact = np.linalg.lstsq(Amatrix, Bmatrix, rcond=1e-64)[0]
    return list_fact
def calc_coef_forward(list_hor_lines, list_ver_lines, xcenter, ycenter,
                      num_fact):
    """
    Calculate the distortion coefficients of a forward mode (mapping
    undistorted radii to distorted radii) via linear least squares.

    Parameters
    ----------
    list_hor_lines : list of 2D arrays
        List of the (y,x)-coordinates of dot-centroids on each horizontal line.
    list_ver_lines : list of 2D arrays
        List of the (y,x)-coordinates of dot-centroids on each vertical line.
    xcenter : float
        Center of distortion in x-direction.
    ycenter : float
        Center of distortion in y-direction.
    num_fact : int
        Number of the factors of polynomial (clipped to >= 1).

    Returns
    -------
    list_fact : list of float
        Coefficients of the polynomial.
    """
    num_fact = np.int16(np.clip(num_fact, 1, None))
    (list_hor_uc, list_ver_uc) = _calc_undistor_intercept(
        list_hor_lines, list_ver_lines, xcenter, ycenter)
    (list_coef_hor, list_hor_lines) = _para_fit_hor(
        list_hor_lines, xcenter, ycenter)
    (list_coef_ver, list_ver_lines) = _para_fit_ver(
        list_ver_lines, xcenter, ycenter)
    list_expo = np.arange(num_fact, dtype=np.int16)
    Amatrix = []
    Bmatrix = []
    for i, line in enumerate(list_hor_lines):
        (a_coef, _, c_coef) = np.float64(list_coef_hor[i])
        uc_coef = np.float64(list_hor_uc[i])
        # Skip degenerate lines through the center to avoid division by zero.
        if uc_coef != 0.0:
            for _, point in enumerate(line):
                xd = np.float64(point[1])
                yd = np.float64(point[0])
                # Distorted radius of the dot-centroid.
                rd = np.sqrt(xd * xd + yd * yd)
                # Scale factor between undistorted and distorted intercepts.
                Fb = uc_coef / (a_coef * xd * xd + c_coef)
                if Fb != 0.0:
                    Amatrix.append(np.power(rd, list_expo))
                    Bmatrix.append(Fb)
    for i, line in enumerate(list_ver_lines):
        (a_coef, _, c_coef) = np.float64(list_coef_ver[i])
        uc_coef = np.float64(list_ver_uc[i])
        # Same handling for vertical lines (roles of x and y swapped).
        if uc_coef != 0.0:
            for _, point in enumerate(line):
                xd = np.float64(point[1])
                yd = np.float64(point[0])
                rd = np.sqrt(xd * xd + yd * yd)
                Fb = uc_coef / (a_coef * yd * yd + c_coef)
                if Fb != 0.0:
                    Amatrix.append(np.power(rd, list_expo))
                    Bmatrix.append(Fb)
    Amatrix = np.asarray(Amatrix, dtype=np.float64)
    Bmatrix = np.asarray(Bmatrix, dtype=np.float64)
    list_fact = np.linalg.lstsq(Amatrix, Bmatrix, rcond=1e-64)[0]
    return list_fact
def calc_coef_backward_from_forward(list_hor_lines, list_ver_lines, xcenter,
                                    ycenter, num_fact):
    """
    Calculate the distortion coefficients of a backward mode from a forward
    model: first fit the forward model, then sample it at the measured dot
    positions and solve a least-squares problem for the inverse mapping.

    Parameters
    ----------
    list_hor_lines : list of 2D arrays
        List of the (y,x)-coordinates of dot-centroids on each horizontal line.
    list_ver_lines : list of 2D arrays
        List of the (y,x)-coordinates of dot-centroids on each vertical line.
    xcenter : float
        Center of distortion in x-direction.
    ycenter : float
        Center of distortion in y-direction.
    num_fact : int
        Number of the factors of polynomial (clipped to >= 1).

    Returns
    -------
    list_ffact : list of floats
        Polynomial coefficients of the forward model.
    list_bfact : list of floats
        Polynomial coefficients of the backward model.
    """
    num_fact = np.int16(np.clip(num_fact, 1, None))
    list_ffact = np.float64(
        calc_coef_forward(list_hor_lines, list_ver_lines, xcenter, ycenter,
                          num_fact))
    # Re-center the measured points on the CoD.
    (_, list_hor_lines) = _para_fit_hor(list_hor_lines, xcenter, ycenter)
    (_, list_ver_lines) = _para_fit_ver(list_ver_lines, xcenter, ycenter)
    list_expo = np.arange(num_fact, dtype=np.int16)
    Amatrix = []
    Bmatrix = []
    for _, line in enumerate(list_hor_lines):
        for _, point in enumerate(line):
            xd = np.float64(point[1])
            yd = np.float64(point[0])
            rd = np.sqrt(xd * xd + yd * yd)
            # Forward scale factor at this distorted radius.
            ffactor = np.float64(np.sum(list_ffact * np.power(rd, list_expo)))
            if ffactor != 0.0:
                # The backward factor is the reciprocal, evaluated at the
                # undistorted radius ru.
                Fb = 1 / ffactor
                ru = ffactor * rd
                Amatrix.append(np.power(ru, list_expo))
                Bmatrix.append(Fb)
    for _, line in enumerate(list_ver_lines):
        for _, point in enumerate(line):
            xd = np.float64(point[1])
            yd = np.float64(point[0])
            rd = np.sqrt(xd * xd + yd * yd)
            ffactor = np.float64(np.sum(list_ffact * np.power(rd, list_expo)))
            if ffactor != 0.0:
                Fb = 1 / ffactor
                ru = ffactor * rd
                Amatrix.append(np.power(ru, list_expo))
                Bmatrix.append(Fb)
    Amatrix = np.asarray(Amatrix, dtype=np.float64)
    Bmatrix = np.asarray(Bmatrix, dtype=np.float64)
    list_bfact = np.linalg.lstsq(Amatrix, Bmatrix, rcond=1e-64)[0]
    return list_ffact, list_bfact
def transform_coef_backward_and_forward(list_fact, mapping="backward",
                                        ref_points=None):
    """
    Transform polynomial coefficients of a radial distortion model between
    forward mapping and backward mapping.

    Parameters
    ----------
    list_fact : list of floats
        Polynomial coefficients of the radial distortion model.
    mapping : {'backward', 'forward'}
        Transformation direction.
    ref_points : list of 1D-arrays, optional
        List of the (y,x)-coordinates of points used for the transformation.
        A regular grid is generated if None given.

    Returns
    -------
    list of floats
        Polynomial coefficients of the reversed model.
    """
    if ref_points is None:
        grid = np.arange(-1000, 1000, 50)
        ref_points = [[u, v] for u in grid for v in grid]
    elif len(ref_points) < len(list_fact):
        raise ValueError("Number of reference-points must be equal or "
                         "larger than the number of coefficients!!!")
    list_expo = np.arange(len(list_fact), dtype=np.int16)
    Amatrix = []
    Bmatrix = []
    # Both directions use the same construction: sample the given model at
    # each reference radius, map the radius through the model, and solve a
    # least-squares system for the reciprocal factor at the mapped radius.
    for point in ref_points:
        x_in = np.float64(point[1])
        y_in = np.float64(point[0])
        r_in = np.sqrt(x_in * x_in + y_in * y_in)
        factor = np.float64(
            np.sum(list_fact * np.power(r_in, list_expo)))
        if factor != 0.0:
            r_out = factor * r_in
            Amatrix.append(np.power(r_out, list_expo))
            Bmatrix.append(1 / factor)
    Amatrix = np.asarray(Amatrix, dtype=np.float64)
    Bmatrix = np.asarray(Bmatrix, dtype=np.float64)
    trans_fact = np.linalg.lstsq(Amatrix, Bmatrix, rcond=1e-64)[0]
    return trans_fact
def find_cod_bailey(list_hor_lines, list_ver_lines, iteration=2):
    """
    Find the center of distortion (COD) using the Bailey's approach (Ref. [1]).

    Parameters
    ----------
    list_hor_lines : list of 2D-arrays
        List of the (y,x)-coordinates of points on each horizontal line.
    list_ver_lines : list of 2D-arrays
        List of the (y,x)-coordinates of points on each vertical line.
    iteration : int, optional
        Number of refinement passes applied after the initial update.

    Returns
    -------
    xcenter : float
        Center of distortion in x-direction.
    ycenter : float
        Center of distortion in y-direction.

    References
    ----------
    [1].. https://www-ist.massey.ac.nz/dbailey/sprg/pdfs/2002_IVCNZ_59.pdf
    """
    (xcenter, ycenter) = find_cod_coarse(list_hor_lines, list_ver_lines)
    # One initial update plus `iteration` refinements (the original code
    # duplicated the loop body before the loop; behavior is identical).
    # Each pass re-fits parabolas around the current center and moves the
    # center using the linear trend of the a-coefficients vs intercepts.
    for _ in range(iteration + 1):
        list_coef_hor = _para_fit_hor(list_hor_lines, xcenter, ycenter)[0]
        list_coef_ver = _para_fit_ver(list_ver_lines, xcenter, ycenter)[0]
        a1, b1 = np.polyfit(list_coef_hor[:, 2], list_coef_hor[:, 0], 1)[0:2]
        a2, b2 = np.polyfit(list_coef_ver[:, 2], list_coef_ver[:, 0], 1)[0:2]
        xcenter = xcenter - b2 / a2
        ycenter = ycenter - b1 / a1
    return xcenter, ycenter
def _generate_non_perspective_parabola_coef(list_hor_lines, list_ver_lines):
    """
    Correct the deviation of fitted parabola coefficients of each line caused
    by perspective distortion. Note that the resulting coefficients are
    referred to a different origin-coordinate instead of (0, 0).

    Parameters
    ----------
    list_hor_lines : list of 2D-arrays
        List of the (y,x)-coordinates of points on each horizontal line.
    list_ver_lines : list of 2D-arrays
        List of the (y,x)-coordinates of points on each vertical line.

    Returns
    -------
    list_coef_hor : list of 1D-arrays
        List of the corrected coefficients for horizontal lines.
    list_coef_ver : list of 1D-arrays
        List of the corrected coefficients for vertical lines.
    xcenter : float
        Center of distortion in x-direction.
    ycenter : float
        Center of distortion in y-direction.
    """
    num_hline, num_vline = len(list_hor_lines), len(list_ver_lines)
    xcenter, ycenter = find_cod_bailey(list_hor_lines, list_ver_lines)
    list_coef_hor = _para_fit_hor(list_hor_lines, xcenter, ycenter)[0]
    list_coef_ver = _para_fit_ver(list_ver_lines, xcenter, ycenter)[0]
    # Linear trends of the b-coefficients vs intercepts give the common
    # slope b0 that all lines share once perspective is removed.
    ah, bh = np.polyfit(list_coef_hor[:, 2], list_coef_hor[:, 1], 1)[0:2]
    av, bv = np.polyfit(list_coef_ver[:, 2], -list_coef_ver[:, 1], 1)[0:2]
    if np.abs(ah - av) >= 0.001:
        # Intersection of the two trends.
        b0 = (ah * bv - av * bh) / (ah - av)
    else:
        # Nearly parallel trends: fall back to the average.
        b0 = (bh + bv) * 0.5
    list_coef_hor[:, 1] = b0 * np.ones(num_hline)
    list_coef_ver[:, 1] = -b0 * np.ones(num_vline)
    # Index where the a-coefficient changes sign, i.e. the line closest to
    # the center of distortion.
    pos_hor = np.argmax(np.abs(np.diff(np.sign(list_coef_hor[:, 0])))) + 1
    pos_ver = np.argmax(np.abs(np.diff(np.sign(list_coef_ver[:, 0])))) + 1
    num_use = min(3, num_hline // 2 - 1, num_vline // 2 - 1)
    (posh1, posh2) = (
        max(0, pos_hor - num_use), min(num_hline, pos_hor + num_use + 1))
    (posv1, posv2) = (
        max(0, pos_ver - num_use), min(num_vline, pos_ver + num_use + 1))
    # Average line spacings near the center; rescale the sparser direction
    # so horizontal and vertical spacings match.
    dist_hor = np.mean(np.abs(np.diff(list_coef_hor[posh1: posh2, 2])))
    dist_ver = np.mean(np.abs(np.diff(list_coef_ver[posv1: posv2, 2])))
    if dist_hor > dist_ver:
        list_coef_ver[:, 2] = list_coef_ver[:, 2] * dist_hor / dist_ver
        list_coef_ver[:, 0] = list_coef_ver[:, 0] * dist_hor / dist_ver
    else:
        list_coef_hor[:, 2] = list_coef_hor[:, 2] * dist_ver / dist_hor
        list_coef_hor[:, 0] = list_coef_hor[:, 0] * dist_ver / dist_hor
    return list_coef_hor, list_coef_ver, xcenter, ycenter
def _find_cross_point_between_parabolas(para_coef_hor, para_coef_ver):
"""
Find a cross point between two parabolas.
Parameters
----------
para_coef_hor : array_like
Coefficients of a horizontal parabola (y=ax**2+bx+c).
para_coef_ver : array_like
Coefficients of a vertical parabola (x=ay**2+by+c).
Returns
-------
x, y : floats
Coordinate of the cross point.
"""
a1, b1, c1 = para_coef_hor[0:3]
a2, b2, c2 = para_coef_ver[0:3]
xvals = np.float32(np.real(
np.roots([a1 ** 2 * a2, 2 * a1 * a2 * b1,
a2 * b1 ** 2 + a1 * b2 + 2 * a1 * a2 * c1,
-1 + b1 * b2 + 2 * a2 * b1 * c1,
b2 * c1 + a2 * c1 ** 2 + c2])))
if len(xvals) == 0:
raise ValueError("Can't find a cross point between two parabolas")
if len(xvals) > 1:
x = xvals[np.argmin(np.abs(xvals - c2))]
else:
x = xvals[0]
y = a1 * x ** 2 + b1 * x + c1
return x, y
def regenerate_grid_points_parabola(list_hor_lines, list_ver_lines,
                                    perspective=True):
    """
    Regenerating grid points by finding cross points between horizontal lines
    and vertical lines using their parabola coefficients.

    Parameters
    ----------
    list_hor_lines : list of 2D-arrays
        List of the (y,x)-coordinates of points on each horizontal line.
    list_ver_lines : list of 2D-arrays
        List of the (y,x)-coordinates of points on each vertical line.
    perspective : bool, optional
        Apply perspective correction if True.

    Returns
    -------
    new_hor_lines : list of 2D-arrays
        List of the updated (y,x)-coordinates of points on each horizontal
        line.
    new_ver_lines : list of 2D-arrays
        List of the updated (y,x)-coordinates of points on each vertical line.
    """
    if perspective is True:
        # Coefficients corrected for perspective (referred to the CoD).
        results = _generate_non_perspective_parabola_coef(list_hor_lines,
                                                          list_ver_lines)
        list_coef_hor, list_coef_ver, xcenter, ycenter = results
    else:
        xcenter, ycenter = find_cod_bailey(list_hor_lines, list_ver_lines)
        list_coef_hor = _para_fit_hor(list_hor_lines, xcenter, ycenter)[0]
        list_coef_ver = _para_fit_ver(list_ver_lines, xcenter, ycenter)[0]
    num_hline, num_vline = len(list_coef_hor), len(list_coef_ver)
    new_hor_lines = np.zeros((num_hline, num_vline, 2), dtype=np.float32)
    new_ver_lines = np.zeros((num_vline, num_hline, 2), dtype=np.float32)
    # Every (horizontal, vertical) pair contributes one grid point; shift
    # back from CoD-centered coordinates to image coordinates.
    for i in range(num_hline):
        for j in range(num_vline):
            x, y = _find_cross_point_between_parabolas(list_coef_hor[i],
                                                       list_coef_ver[j])
            new_hor_lines[i, j] = np.asarray([y + ycenter, x + xcenter])
            new_ver_lines[j, i] = np.asarray([y + ycenter, x + xcenter])
    return new_hor_lines, new_ver_lines
def _generate_linear_coef(list_hor_lines, list_ver_lines, xcenter=0.0,
ycenter=0.0):
"""
Get linear coefficients of horizontal and vertical lines from linear fit.
Parameters
----------
list_hor_lines : list of 2D-arrays
List of the (y,x)-coordinates of points on each horizontal line.
list_ver_lines : list of 2D-arrays
List of the (y,x)-coordinates of points on each vertical line.
xcenter : float
X-origin of the coordinate system.
ycenter : float
Y-origin of the coordinate system.
Returns
-------
list_coef_hor : list of 1D-arrays
List of the linear coefficients for horizontal lines.
list_coef_ver : list of 1D-arrays
List of the linear coefficients for vertical lines.
"""
num_hline, num_vline = len(list_hor_lines), len(list_ver_lines)
list_coef_hor = np.zeros((num_hline, 2), dtype=np.float32)
list_coef_ver = np.zeros((num_vline, 2), dtype=np.float32)
for i in range(num_hline):
list_coef_hor[i] = np.polyfit(list_hor_lines[i][:, 1] - xcenter,
list_hor_lines[i][:, 0] - ycenter, 1)
for i in range(num_vline):
list_coef_ver[i] = np.polyfit(list_ver_lines[i][:, 0] - ycenter,
list_ver_lines[i][:, 1] - xcenter, 1)
return list_coef_hor, list_coef_ver
def _find_cross_point_between_lines(line_coef_hor, line_coef_ver):
"""
Find a cross point between two lines.
Parameters
----------
line_coef_hor : array_like
Coefficients of a horizontal line (y=ax+b).
line_coef_ver : array_like
Coefficients of a vertical line (x=ay+b).
Returns
-------
x, y : floats
Coordinate of the cross point.
"""
a1, b1 = line_coef_hor
a2, b2 = line_coef_ver
y = (a1 * b2 + b1) / (1.0 - a1 * a2)
x = a2 * y + b2
return x, y
def _func_opt_pers(d0, c0, indexc0, *list_inter):
"""
Function for finding the optimum undistorted distance for
perspective-distortion correction.
"""
return np.sum(
np.asarray([((i - indexc0) * d0 + c0 - c) ** 2
for i, c in enumerate(list_inter)]))
def _optimize_intercept_perspective(dist_hv, pos_hv, list_inter):
    """
    Find the optimum undistorted distance for perspective-distortion
    correction by minimizing :func:`_func_opt_pers`, starting from the
    measured spacing ``dist_hv``.

    Parameters
    ----------
    dist_hv : float
        Initial estimate of the line spacing.
    pos_hv : int
        Index of the central (reference) line.
    list_inter : array_like
        Intercepts of all fitted lines.
    """
    # Arguments: (central intercept, central index, *all intercepts).
    list_arg = [list_inter[pos_hv], pos_hv]
    list_arg.extend(list_inter)
    minimum = optimize.minimize(_func_opt_pers, dist_hv, args=tuple(list_arg))
    return minimum.x[0]
def _calc_undistor_intercept_perspective(list_hor_lines, list_ver_lines,
                                         equal_dist=True, scale="mean",
                                         optimizing=True):
    """
    Calculate the intercepts of undistorted lines from perspective distortion,
    assuming the undistorted lines are equidistant in each direction.

    Parameters
    ----------
    list_hor_lines : list of 2D-arrays
        List of the (y,x)-coordinates of points on each horizontal line.
    list_ver_lines : list of 2D-arrays
        List of the (y,x)-coordinates of points on each vertical line.
    equal_dist : bool
        Use the condition that lines are equidistant if True.
    scale : {'mean', 'median', 'min', 'max'}
        Scale option for the undistorted grid.
    optimizing : bool
        Apply optimization for finding line-distance if True.

    Returns
    -------
    u_intercept_hor : array_like
        1D array. List of undistorted intercepts of the horizontal lines.
    u_intercept_ver : array_like
        1D array. List of undistorted intercepts of the vertical lines.
    """
    list_coef_hor, list_coef_ver = _generate_linear_coef(list_hor_lines,
                                                         list_ver_lines)
    num_hline, num_vline = len(list_hor_lines), len(list_ver_lines)
    # Central lines serve as the fixed reference for the undistorted grid.
    pos_hor, pos_ver = num_hline // 2, num_vline // 2
    num_use = min(num_hline // 2 - 1, num_vline // 2 - 1)
    (posh1, posh2) = (max(0, pos_hor - num_use),
                      min(num_hline, pos_hor + num_use + 1))
    (posv1, posv2) = (max(0, pos_ver - num_use),
                      min(num_vline, pos_ver + num_use + 1))
    # Estimate the line spacing from the intercept differences of the
    # selected central lines, using the requested statistic.
    if scale == "max":
        dist_hor = np.max(np.abs(np.diff(list_coef_hor[posh1: posh2, 1])))
        dist_ver = np.max(np.abs(np.diff(list_coef_ver[posv1: posv2, 1])))
    elif scale == "min":
        dist_hor = np.min(np.abs(np.diff(list_coef_hor[posh1: posh2, 1])))
        dist_ver = np.min(np.abs(np.diff(list_coef_ver[posv1: posv2, 1])))
    elif scale == "median":
        dist_hor = np.median(np.abs(np.diff(list_coef_hor[posh1: posh2, 1])))
        dist_ver = np.median(np.abs(np.diff(list_coef_ver[posv1: posv2, 1])))
    else:
        dist_hor = np.mean(np.abs(np.diff(list_coef_hor[posh1: posh2, 1])))
        dist_ver = np.mean(np.abs(np.diff(list_coef_ver[posv1: posv2, 1])))
    if optimizing is True:
        dist_hor = _optimize_intercept_perspective(dist_hor, pos_hor,
                                                   list_coef_hor[:, 1])
        dist_ver = _optimize_intercept_perspective(dist_ver, pos_ver,
                                                   list_coef_ver[:, 1])
    if equal_dist is True:
        # Force the same spacing in both directions.
        if scale == "max":
            dist = max(dist_hor, dist_ver)
        elif scale == "min":
            dist = min(dist_hor, dist_ver)
        else:
            dist = (dist_hor + dist_ver) * 0.5
        dist_hor = dist_ver = dist
    # Undistorted intercepts: equidistant around the central line.
    u_intercept_hor = np.zeros(num_hline, dtype=np.float32)
    u_intercept_ver = np.zeros(num_vline, dtype=np.float32)
    for i in range(num_hline):
        dist = (i - pos_hor) * dist_hor
        u_intercept_hor[i] = dist + list_coef_hor[pos_hor, 1]
    for i in range(num_vline):
        dist = (i - pos_ver) * dist_ver
        u_intercept_ver[i] = dist + list_coef_ver[pos_ver, 1]
    return u_intercept_hor, u_intercept_ver
def regenerate_grid_points_linear(list_hor_lines, list_ver_lines):
    """
    Regenerating grid points by finding cross points between horizontal lines
    and vertical lines using their linear coefficients.

    Parameters
    ----------
    list_hor_lines : list of 2D-arrays
        List of the (y,x)-coordinates of points on each horizontal line.
    list_ver_lines : list of 2D-arrays
        List of the (y,x)-coordinates of points on each vertical line.

    Returns
    -------
    new_hor_lines : list of 2D-arrays
        List of the updated (y,x)-coordinates of points on each horizontal
        line.
    new_ver_lines : list of 2D-arrays
        List of the updated (y,x)-coordinates of points on each vertical line.
    """
    list_coef_hor, list_coef_ver = _generate_linear_coef(list_hor_lines,
                                                         list_ver_lines)
    num_hline, num_vline = len(list_hor_lines), len(list_ver_lines)
    new_hor_lines = np.zeros((num_hline, num_vline, 2), dtype=np.float32)
    new_ver_lines = np.zeros((num_vline, num_hline, 2), dtype=np.float32)
    # Each (horizontal, vertical) pair contributes one regenerated point,
    # stored in both output grids (transposed layouts).
    for h_idx in range(num_hline):
        for v_idx in range(num_vline):
            x, y = _find_cross_point_between_lines(list_coef_hor[h_idx],
                                                   list_coef_ver[v_idx])
            point = np.asarray([y, x])
            new_hor_lines[h_idx, v_idx] = point
            new_ver_lines[v_idx, h_idx] = point
    return new_hor_lines, new_ver_lines
def generate_undistorted_perspective_lines(list_hor_lines, list_ver_lines,
                                           equal_dist=True, scale="mean",
                                           optimizing=True):
    """
    Generate undistorted lines from perspective lines.

    Parameters
    ----------
    list_hor_lines : list of 2D-arrays
        List of the (y,x)-coordinates of points on each horizontal line.
    list_ver_lines : list of 2D-arrays
        List of the (y,x)-coordinates of points on each vertical line.
    equal_dist : bool
        Use the condition that lines are equidistant if True.
    scale : {'mean', 'median', 'min', 'max'}
        Scale option for the undistorted grid.
    optimizing : bool
        Apply optimization for finding line-distance if True.

    Returns
    -------
    list_uhor_lines : list of 2D-arrays
        List of the (y,x)-coordinates of points on undistorted horizontal
        lines.
    list_uver_lines : list of 2D-arrays
        List of the (y,x)-coordinates of points on undistorted vertical lines.
    """
    num_hline, num_vline = len(list_hor_lines), len(list_ver_lines)
    list_coef_hor, list_coef_ver = _generate_linear_coef(list_hor_lines,
                                                         list_ver_lines)
    # Linear trends of the slopes vs intercepts give the common slope a0
    # that all undistorted lines share.
    ah, bh = np.polyfit(list_coef_hor[:, 1], list_coef_hor[:, 0], 1)[0:2]
    av, bv = np.polyfit(list_coef_ver[:, 1], -list_coef_ver[:, 0], 1)[0:2]
    if np.abs(ah - av) >= 0.0001:
        # Intersection of the two trends.
        a0 = (ah * bv - av * bh) / (ah - av)
    else:
        # Nearly parallel trends: fall back to the average.
        a0 = (bh + bv) * 0.5
    list_coef_uhor = np.copy(list_coef_hor)
    list_coef_uver = np.copy(list_coef_ver)
    list_coef_uhor[:, 0] = a0 * np.ones(num_hline)
    list_coef_uver[:, 0] = -a0 * np.ones(num_vline)
    # Equidistant undistorted intercepts for both directions.
    results = _calc_undistor_intercept_perspective(list_hor_lines,
                                                   list_ver_lines, equal_dist,
                                                   scale, optimizing)
    list_coef_uhor[:, 1] = results[0]
    list_coef_uver[:, 1] = results[1]
    list_uhor_lines = np.zeros((num_hline, num_vline, 2), dtype=np.float32)
    list_uver_lines = np.zeros((num_vline, num_hline, 2), dtype=np.float32)
    # Undistorted grid points are the intersections of the corrected lines.
    for i in range(num_hline):
        for j in range(num_vline):
            x, y = _find_cross_point_between_lines(list_coef_uhor[i],
                                                   list_coef_uver[j])
            list_uhor_lines[i, j] = np.asarray([y, x])
            list_uver_lines[j, i] = np.asarray([y, x])
    return list_uhor_lines, list_uver_lines
def generate_source_target_perspective_points(list_hor_lines, list_ver_lines,
                                              equal_dist=True, scale="mean",
                                              optimizing=True):
    """
    Generate source points (distorted) and target points (undistorted) for
    perspective correction.

    Parameters
    ----------
    list_hor_lines : list of 2D-arrays
        List of the (y,x)-coordinates of points on each horizontal line.
    list_ver_lines : list of 2D-arrays
        List of the (y,x)-coordinates of points on each vertical line.
    equal_dist : bool
        Use the condition that lines are equidistant if True.
    scale : {'mean', 'median', 'min', 'max'}
        Scale option for the undistorted grid.
    optimizing : bool
        Apply optimization for finding line-distance if True.

    Returns
    -------
    source_points : list of 1D-arrays
        List of the (y,x)-coordinates of distorted points.
    target_points : list of 1D-arrays
        List of the (y,x)-coordinates of undistorted points.
    """
    # Source grid: intersections of the fitted (distorted) lines.
    list_hor_slines, list_ver_slines = regenerate_grid_points_linear(
        list_hor_lines, list_ver_lines)
    # Target grid: intersections of the corresponding undistorted lines.
    list_hor_tlines, _ = generate_undistorted_perspective_lines(
        list_hor_slines, list_ver_slines, equal_dist, scale, optimizing)
    source_points = []
    target_points = []
    for i in range(len(list_hor_slines)):
        for j in range(len(list_ver_slines)):
            p1 = list_hor_slines[i, j]
            p2 = list_hor_tlines[i, j]
            # Keep only point pairs with positive coordinates, i.e. inside
            # the image area.
            if p1[0] > 0 and p1[1] > 0 and p2[0] > 0 and p2[1] > 0:
                source_points.append(list_hor_slines[i, j])
                target_points.append(list_hor_tlines[i, j])
    return np.asarray(source_points), np.asarray(target_points)
def generate_4_source_target_perspective_points(points, input_order="yx",
                                                equal_dist=False,
                                                scale="mean"):
    """
    Generate 4 rectangular points corresponding to 4 perspective-distorted
    points.

    Parameters
    ----------
    points : list of 1D-arrays
        List of the coordinates of 4 perspective-distorted points. No two
        points may share the same x (or y) within a top/bottom (left/right)
        pair, otherwise the slope computation divides by zero.
    input_order : {'yx', 'xy'}
        Order of the coordinates of input-points.
    equal_dist : bool
        Use the condition that the rectangular making of 4-points is square if
        True.
    scale : {'mean', 'min', 'max', float}
        Scale option for the undistorted points. A number scales the mean
        distances by that factor.

    Returns
    -------
    source_points : list of 1D-arrays
        List of the (y,x)-coordinates of distorted points.
    target_points : list of 1D-arrays
        List of the (y,x)-coordinates of undistorted points.
    """
    points = np.asarray(points, dtype=np.float32)
    if input_order == "xy":
        points = np.fliplr(points)
    if len(points) != 4:
        raise ValueError("Input must be a list of 4 points!!!")
    # Sort into (top-left, top-right) and (bottom-left, bottom-right) pairs.
    list_sort = points[points[:, 0].argsort()]
    p12 = list_sort[0:2]
    p12 = p12[p12[:, 1].argsort()]
    ((y1, x1), (y2, x2)) = p12
    p34 = list_sort[-2:]
    p34 = p34[p34[:, 1].argsort()]
    ((y3, x3), (y4, x4)) = p34
    source_points = np.asarray([[y1, x1], [y2, x2], [y3, x3], [y4, x4]])
    # Average slope/intercept of the two "horizontal" edges (y = a*x + b).
    a12 = (y1 - y2) / (x1 - x2)
    b12 = y1 - a12 * x1
    a34 = (y3 - y4) / (x3 - x4)
    b34 = y3 - a34 * x3
    ah, bh = (a12 + a34) * 0.5, (b12 + b34) * 0.5
    # Average slope/intercept of the two "vertical" edges (x = a*y + b).
    a13 = (x1 - x3) / (y1 - y3)
    b13 = x1 - a13 * y1
    a24 = (x2 - x4) / (y2 - y4)
    b24 = x2 - a24 * y2
    av, bv = (a13 + a24) * 0.5, (b13 + b24) * 0.5
    # Common slope of the undistorted rectangle.
    a0 = np.sign(ah) * (np.abs(ah) + np.abs(av)) * 0.5
    # Edge lengths of the distorted quadrilateral.
    dist12 = np.sqrt((x1 - x2) ** 2 + (y1 - y2) ** 2)
    dist13 = np.sqrt((x1 - x3) ** 2 + (y1 - y3) ** 2)
    dist24 = np.sqrt((x2 - x4) ** 2 + (y2 - y4) ** 2)
    dist34 = np.sqrt((x3 - x4) ** 2 + (y3 - y4) ** 2)
    if scale == "max":
        dist_h = max(dist12, dist34)
        dist_v = max(dist13, dist24)
        if equal_dist is True:
            dist_h = dist_v = max(dist_v, dist_h)
    elif scale == "min":
        dist_h = min(dist12, dist34)
        dist_v = min(dist13, dist24)
        if equal_dist is True:
            dist_h = dist_v = min(dist_v, dist_h)
    else:
        dist_h = (dist12 + dist34) * 0.5
        dist_v = (dist13 + dist24) * 0.5
        # Bug fix: accept int as well as float scale factors (excluding
        # bool); integers used to be silently treated as 'mean'.
        if isinstance(scale, (int, float)) and not isinstance(scale, bool):
            dist_h = dist_h * scale
            dist_v = dist_v * scale
        # Bug fix: honor equal_dist for scale='mean' too; it used to be
        # applied only for numeric scale factors in this branch.
        if equal_dist is True:
            dist_h = dist_v = (dist_v + dist_h) * 0.5
    # Build the target rectangle: two horizontal and two vertical lines at
    # +/- half the distances around the averaged center lines.
    dist_h, dist_v = dist_h * 0.5, dist_v * 0.5
    b1 = bh - np.abs(dist_v / np.cos(np.arctan(a0)))
    b2 = bh + np.abs(dist_v / np.cos(np.arctan(a0)))
    b3 = bv - np.abs(dist_h / np.cos(np.arctan(a0)))
    b4 = bv + np.abs(dist_h / np.cos(np.arctan(a0)))
    # Intersections of the rectangle's edge lines.
    y1 = (a0 * b3 + b1) / (1.0 + a0 ** 2)
    x1 = -a0 * y1 + b3
    y2 = (a0 * b4 + b1) / (1.0 + a0 ** 2)
    x2 = -a0 * y2 + b4
    y3 = (a0 * b3 + b2) / (1.0 + a0 ** 2)
    x3 = -a0 * y3 + b3
    y4 = (a0 * b4 + b2) / (1.0 + a0 ** 2)
    x4 = -a0 * y4 + b4
    target_points = np.asarray([[y1, x1], [y2, x2], [y3, x3], [y4, x4]])
    return source_points, target_points
def calc_perspective_coefficients(source_points, target_points,
                                  mapping="backward"):
    """
    Calculate perspective coefficients of a matrix to map from source points
    to target points (Ref. [1]). Note that the coordinates of a point are in
    (y,x)-order, consistent with other functions in the module.

    Parameters
    ----------
    source_points : array_like
        List of the (y,x)-coordinates of distorted points.
    target_points : array_like
        List of the (y,x)-coordinates of undistorted points.
    mapping : {'backward', 'forward'}
        To select mapping direction.

    Returns
    -------
    array_like
        1D array of 8 coefficients.

    References
    ----------
    [1].. https://doi.org/10.1016/S0262-8856(98)00183-8
    """
    # Convert to (x, y) order; backward mapping solves target -> source.
    if mapping == "forward":
        from_points = np.fliplr(np.asarray(source_points))
        to_points = np.fliplr(np.asarray(target_points))
    else:
        from_points = np.fliplr(np.asarray(target_points))
        to_points = np.fliplr(np.asarray(source_points))
    # Each point pair contributes two rows of the standard homography
    # design matrix (8 unknowns, h33 fixed to 1).
    Amatrix = []
    for (xf, yf), (xt, yt) in zip(from_points, to_points):
        Amatrix.append([xf, yf, 1, 0, 0, 0, -xt * xf, -xt * yf])
        Amatrix.append([0, 0, 0, xf, yf, 1, -yt * xf, -yt * yf])
    Amatrix = np.asarray(Amatrix, dtype=np.float64)
    Bmatrix = np.ndarray.flatten(np.asarray(to_points, dtype=np.float64))
    list_coef = np.linalg.lstsq(Amatrix, Bmatrix, rcond=1e-64)[0]
    return list_coef
def update_center(list_lines, xcenter, ycenter):
    """
    Update the coordinate-center of points on lines.

    Parameters
    ----------
    list_lines : list of 2D-arrays
        List of the (y,x)-coordinates of points on lines.
    xcenter : float
        X-origin of the coordinate system.
    ycenter : float
        Y-origin of the coordinate system.

    Returns
    -------
    list of 2D-arrays.
    """
    # (ycenter, xcenter) is added to every (y, x) point at once; the
    # broadcast replaces the previous per-point Python loop (which also
    # carried an unused enumerate index).
    offset = np.asarray([ycenter, xcenter])
    updated_lines = []
    for iline in list_lines:
        line = np.asarray(iline)
        # An empty line cannot broadcast against the 2-element offset,
        # so it is passed through unchanged (matching the old behaviour).
        updated_lines.append(line + offset if line.size else line)
    return updated_lines
| StarcoderdataPython |
4955667 | <reponame>cbaziotis/fast_align
#!/usr/bin/env python
import os
import subprocess
import sys
import threading
# Simplified, non-threadsafe version for force_align.py
# Use the version in realtime for development
class Aligner:
    """Drives fast_align/atools subprocesses to symmetrize word alignments.

    Two long-lived fast_align processes (forward and reverse) are fed
    sentence pairs on stdin, and an atools process combines their outputs
    with the chosen symmetrization heuristic.
    """

    def __init__(self, fwd_params, fwd_err, rev_params, rev_err, heuristic='grow-diag-final-and'):
        # The fast_align and atools binaries are expected to live next to
        # this script.
        build_root = os.path.dirname(os.path.abspath(__file__))
        fast_align = os.path.join(build_root, 'fast_align')
        atools = os.path.join(build_root, 'atools')

        # Recover the tension (T) and mean length ratio (m) that training
        # reported on stderr, so decoding uses the same model settings.
        (fwd_T, fwd_m) = self.read_err(fwd_err)
        (rev_T, rev_m) = self.read_err(rev_err)

        fwd_cmd = [fast_align, '-i', '-', '-d', '-T', fwd_T, '-m', fwd_m, '-f', fwd_params]
        rev_cmd = [fast_align, '-i', '-', '-d', '-T', rev_T, '-m', rev_m, '-f', rev_params, '-r']
        tools_cmd = [atools, '-i', '-', '-j', '-', '-c', heuristic]

        self.fwd_align = popen_io(fwd_cmd)
        self.rev_align = popen_io(rev_cmd)
        self.tools = popen_io(tools_cmd)

    def align(self, line):
        """Align one 'source ||| target' pair; return the symmetrized links."""
        self.fwd_align.stdin.write('{}\n'.format(line))
        self.rev_align.stdin.write('{}\n'.format(line))
        # fast_align output format: f words ||| e words ||| links ||| score
        fwd_line = self.fwd_align.stdout.readline().split('|||')[2].strip()
        rev_line = self.rev_align.stdout.readline().split('|||')[2].strip()
        self.tools.stdin.write('{}\n'.format(fwd_line))
        self.tools.stdin.write('{}\n'.format(rev_line))
        al_line = self.tools.stdout.readline().strip()
        return al_line

    def close(self):
        """Close every child's stdin and wait for all three to exit."""
        self.fwd_align.stdin.close()
        self.fwd_align.wait()
        self.rev_align.stdin.close()
        self.rev_align.wait()
        self.tools.stdin.close()
        self.tools.wait()

    def read_err(self, err):
        """Parse (tension T, length ratio m) from a fast_align stderr log."""
        (T, m) = ('', '')
        # BUGFIX: open the log through a context manager; the old
        # `for line in open(err)` leaked the file handle.
        with open(err) as f:
            for line in f:
                # expected target length = source length * N
                if 'expected target length' in line:
                    m = line.split()[-1]
                # final tension: N
                elif 'final tension' in line:
                    T = line.split()[-1]
        return (T, m)
def popen_io(cmd):
    """Spawn *cmd* with piped text-mode stdio; a background thread drains
    stderr so the child never blocks on a full stderr pipe."""
    proc = subprocess.Popen(
        cmd,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        bufsize=1,
        universal_newlines=True,
    )

    def drain(stream):
        for _ in stream:
            pass

    threading.Thread(target=drain, args=(proc.stderr,)).start()
    return proc
def main():
    """Force-align 'f ||| e' sentence pairs read from stdin.

    Expects four positional arguments (forward params/err, reverse
    params/err from prior fast_align training runs) plus an optional
    symmetrization heuristic; writes one alignment per input line.
    """
    # Fewer than the four required arguments: print usage and bail out.
    if len(sys.argv[1:]) < 4:
        sys.stderr.write('run:\n')
        sys.stderr.write(' fast_align -i corpus.f-e -d -v -o -p fwd_params >fwd_align 2>fwd_err\n')
        sys.stderr.write(' fast_align -i corpus.f-e -r -d -v -o -p rev_params >rev_align 2>rev_err\n')
        sys.stderr.write('\n')
        sys.stderr.write('then run:\n')
        sys.stderr.write(' {} fwd_params fwd_err rev_params rev_err [heuristic] <in.f-e >out.f-e.gdfa\n'.format(sys.argv[0]))
        sys.stderr.write('\n')
        sys.stderr.write('where heuristic is one of: (intersect union grow-diag grow-diag-final grow-diag-final-and) default=grow-diag-final-and\n')
        sys.exit(2)
    # All CLI arguments map 1:1 onto Aligner's constructor parameters.
    aligner = Aligner(*sys.argv[1:])
    # Stream stdin line-by-line until EOF; flush after every alignment so
    # the output can be consumed by a downstream pipe in real time.
    while True:
        line = sys.stdin.readline()
        if not line:
            break
        sys.stdout.write('{}\n'.format(aligner.align(line.strip())))
        sys.stdout.flush()
    aligner.close()
if __name__ == '__main__':
    main()
| StarcoderdataPython |
9776479 | import os.path
import jinja2
from . import TEMPLATES_DIR
from .utils import drop_extension, get_number_of_digits_to_name
def textfiles_to_xhtml_files(input_dir, output_dir, fragment_type, include_heading=False):
    """
    Converts plain text files in `input_dir` to a list of XHTML files
    and saves them to `output_dir`.

    Each XHTML file consists of fragments – <span> elements with id='f[0-9]+' grouped by <p></p>.
    """
    os.makedirs(output_dir, exist_ok=True)
    # Process the .txt files in a stable, sorted order.
    input_filenames = sorted(name for name in os.listdir(input_dir) if name.endswith('.txt'))
    texts_contents = []
    for name in input_filenames:
        with open(os.path.join(input_dir, name), 'r') as handle:
            texts_contents.append(handle.read())
    xhtmls = _text_contents_to_xhtmls(texts_contents, fragment_type, include_heading)
    # Write each rendered document next to its source name, .txt -> .xhtml.
    for name, markup in zip(input_filenames, xhtmls):
        target_path = os.path.join(output_dir, f'{drop_extension(name)}.xhtml')
        with open(target_path, 'w') as handle:
            handle.write(markup)
    print(f'\n✔ {len(texts_contents)} plain text files have been converted to XHTML.\n')
def _text_contents_to_xhtmls(texts_contents, fragment_type, include_heading):
    """Render each text's paragraphs into an XHTML document string.

    Fragment ids are globally unique and zero-padded ('f001', ...), so the
    padding width is derived from the total fragment count across ALL texts.
    When `include_heading` is True the first paragraph of each text becomes
    a heading (reusing its first fragment's id) instead of a body paragraph.
    """
    texts = [_get_paragraphs(texts_content, fragment_type) for texts_content in texts_contents]
    # calculate total number of fragments to give fragments proper ids
    fragments_num = sum(sum(len(p) for p in t) for t in texts)
    n = get_number_of_digits_to_name(fragments_num)
    # PERF: the template environment is loop-invariant — build it once
    # instead of once per text (it used to be recreated inside the loop).
    env = jinja2.Environment(
        loader=jinja2.FileSystemLoader(TEMPLATES_DIR),
        autoescape=True
    )
    template = env.get_template('text.xhtml')
    # render xhtmls
    xhtmls = []
    fragment_id = 1
    for t in texts:
        paragraphs = []
        for p in t:
            fragments = []
            for f in p:
                fragments.append({'id': f'f{fragment_id:0>{n}}', 'text': f})
                fragment_id += 1
            paragraphs.append(fragments)
        heading = None
        if include_heading:
            # Heading text is the concatenation of the first paragraph's
            # fragments; that paragraph is then removed from the body.
            heading = {
                'id': paragraphs[0][0]['id'],
                'text': ''.join(f['text'] for f in paragraphs[0])
            }
            paragraphs = paragraphs[1:]
        xhtmls.append(template.render(heading=heading, paragraphs=paragraphs))
    return xhtmls
def _get_paragraphs(texts_content, fragment_type):
    """
    Returns a list of paragraphs in a text where
    each paragraph is a list of fragments.
    """
    return [
        _get_fragments(paragraph_content, fragment_type)
        for paragraph_content in _get_paragraphs_contents(texts_content)
    ]
def _get_paragraphs_contents(texts_content):
return [p.strip().replace('\n', ' ') for p in texts_content.split('\n\n') if p.strip()]
def _get_fragments(paragraphs_content, fragment_type):
if fragment_type == 'sentence':
return _get_sentences(paragraphs_content)
elif fragment_type == 'paragraph':
return [paragraphs_content]
else:
raise ValueError(f'\n❌ Unknown fragment_type: {fragment_type}\n')
def _get_sentences(text):
"""
Fragment by "{sentence_ending}{space}"
"""
sentence_endings = {'.', '!', '?'}
fragments = []
sentence_start_idx = 0
sentence_ended = False
for i, c in enumerate(text):
if i == len(text) - 1:
fragments.append(text[sentence_start_idx:i+1])
if c in sentence_endings:
sentence_ended = True
continue
if sentence_ended and c == ' ':
fragments.append(text[sentence_start_idx:i+1])
sentence_start_idx = i+1
sentence_ended = False
return fragments | StarcoderdataPython |
11213029 | <filename>_solutions/basics/types/type_float_f.py
# Unit-conversion exercise snippet. `psi`, `hPa` and `kPa` are assumed to be
# conversion constants defined earlier in the exercise scaffolding (not shown
# here) — TODO confirm. The results appear to express two pressures in hPa.
emu = round(4.3*psi/hPa, 1)
orlan = round(40*kPa/hPa, 1)
| StarcoderdataPython |
6455272 | import autocomplete_light
from django import forms
from models import Taggable
class TaggableForm(forms.ModelForm):
    """ModelForm for Taggable that renders the tags field with an
    autocomplete-enabled text input."""

    class Meta:
        model = Taggable
        widgets = {'tags': autocomplete_light.TextWidget('TagAutocomplete')}
| StarcoderdataPython |
3334701 | <filename>models_all_solvable2/syn40m03hfsg.py
# MINLP written by GAMS Convert at 05/07/21 17:13:06
#
# Equation counts
# Total E G L N X C B
# 1998 756 162 1080 0 0 0 0
#
# Variable counts
# x b i s1s s2s sc si
# Total cont binary integer sos1 sos2 scont sint
# 1146 906 240 0 0 0 0 0
# FX 0
#
# Nonzero counts
# Total const NL
# 4332 4080 252
#
# Reformulation has removed 1 variable and 1 equation
from pyomo.environ import *
model = m = ConcreteModel()
m.x1 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x2 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x3 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x4 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x5 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x6 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x7 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x8 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x9 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x10 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x11 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x12 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x13 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x14 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x15 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x16 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x17 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x18 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x19 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x20 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x21 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x22 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x23 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x24 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x25 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x26 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x27 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x28 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x29 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x30 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x31 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x32 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x33 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x34 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x35 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x36 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x37 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x38 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x39 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x40 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x41 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x42 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x43 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x44 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x45 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x46 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x47 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x48 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x49 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x50 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x51 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x52 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x53 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x54 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x55 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x56 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x57 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x58 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x59 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x60 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x61 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x62 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x63 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x64 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x65 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x66 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x67 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x68 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x69 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x70 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x71 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x72 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x73 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x74 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x75 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x76 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x77 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x78 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x79 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x80 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x81 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x82 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x83 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x84 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x85 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x86 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x87 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x88 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x89 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x90 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x91 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x92 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x93 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x94 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x95 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x96 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x97 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x98 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x99 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x100 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x101 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x102 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x103 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x104 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x105 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x106 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x107 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x108 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x109 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x110 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x111 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x112 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x113 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x114 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x115 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x116 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x117 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x118 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x119 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x120 = Var(within=Reals, bounds=(None,None), initialize=0)
m.x121 = Var(within=Reals, bounds=(0,40), initialize=0)
m.x122 = Var(within=Reals, bounds=(0,40), initialize=0)
m.x123 = Var(within=Reals, bounds=(0,40), initialize=0)
m.x124 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x125 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x126 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x127 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x128 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x129 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x130 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x131 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x132 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x133 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x134 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x135 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x136 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x137 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x138 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x139 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x140 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x141 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x142 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x143 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x144 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x145 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x146 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x147 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x148 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x149 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x150 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x151 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x152 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x153 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x154 = Var(within=Reals, bounds=(0,30), initialize=0)
m.x155 = Var(within=Reals, bounds=(0,30), initialize=0)
m.x156 = Var(within=Reals, bounds=(0,30), initialize=0)
m.x157 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x158 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x159 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x160 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x161 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x162 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x163 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x164 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x165 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x166 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x167 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x168 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x169 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x170 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x171 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x172 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x173 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x174 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x175 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x176 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x177 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x178 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x179 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x180 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x181 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x182 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x183 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x184 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x185 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x186 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x187 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x188 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x189 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x190 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x191 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x192 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x193 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x194 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x195 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x196 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x197 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x198 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x199 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x200 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x201 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x202 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x203 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x204 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x205 = Var(within=Reals, bounds=(0,20), initialize=0)
m.x206 = Var(within=Reals, bounds=(0,20), initialize=0)
m.x207 = Var(within=Reals, bounds=(0,20), initialize=0)
m.x208 = Var(within=Reals, bounds=(0,20), initialize=0)
m.x209 = Var(within=Reals, bounds=(0,20), initialize=0)
m.x210 = Var(within=Reals, bounds=(0,20), initialize=0)
m.x211 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x212 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x213 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x214 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x215 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x216 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x217 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x218 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x219 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x220 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x221 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x222 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x223 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x224 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x225 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x226 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x227 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x228 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x229 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x230 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x231 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x232 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x233 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x234 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x235 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x236 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x237 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x238 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x239 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x240 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x241 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x242 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x243 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x244 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x245 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x246 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x247 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x248 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x249 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x250 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x251 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x252 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x253 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x254 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x255 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x256 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x257 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x258 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x259 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x260 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x261 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x262 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x263 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x264 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x265 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x266 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x267 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x268 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x269 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x270 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x271 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x272 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x273 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x274 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x275 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x276 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x277 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x278 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x279 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x280 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x281 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x282 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x283 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x284 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x285 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x286 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x287 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x288 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x289 = Var(within=Reals, bounds=(0,30), initialize=0)
m.x290 = Var(within=Reals, bounds=(0,30), initialize=0)
m.x291 = Var(within=Reals, bounds=(0,30), initialize=0)
m.x292 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x293 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x294 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x295 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x296 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x297 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x298 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x299 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x300 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x301 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x302 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x303 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x304 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x305 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x306 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x307 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x308 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x309 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x310 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x311 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x312 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x313 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x314 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x315 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x316 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x317 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x318 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x319 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x320 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x321 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x322 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x323 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x324 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x325 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x326 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x327 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x328 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x329 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x330 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x331 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x332 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x333 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x334 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x335 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x336 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x337 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x338 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x339 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x340 = Var(within=Reals, bounds=(0,25), initialize=0)
m.x341 = Var(within=Reals, bounds=(0,25), initialize=0)
m.x342 = Var(within=Reals, bounds=(0,25), initialize=0)
m.x343 = Var(within=Reals, bounds=(0,25), initialize=0)
m.x344 = Var(within=Reals, bounds=(0,25), initialize=0)
m.x345 = Var(within=Reals, bounds=(0,25), initialize=0)
m.x346 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x347 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x348 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x349 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x350 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x351 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x352 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x353 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x354 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x355 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x356 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x357 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x358 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x359 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x360 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x361 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x362 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x363 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x364 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x365 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x366 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x367 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x368 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x369 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x370 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x371 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x372 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x373 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x374 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x375 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x376 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x377 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x378 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x379 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x380 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x381 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x382 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x383 = Var(within=Reals, bounds=(0,None), initialize=0)
m.x384 = Var(within=Reals, bounds=(0,None), initialize=0)
# Continuous decision variables x385 .. x906: nonnegative reals, started at 0.
# Generated originally as one assignment per variable; collapsed into a loop
# that attaches each Var to the model under its original attribute name.
for _i in range(385, 907):
    setattr(m, 'x%d' % _i, Var(within=Reals, bounds=(0, None), initialize=0))
# Binary decision variables b907 .. b1010: 0/1 domain, started at 0.
# Equivalent to the original one-line-per-variable declarations; each Var is
# attached to the model under its original attribute name.
for _j in range(907, 1011):
    setattr(m, 'b%d' % _j, Var(within=Binary, bounds=(0, 1), initialize=0))
m.b1011 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1012 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1013 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1014 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1015 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1016 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1017 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1018 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1019 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1020 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1021 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1022 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1023 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1024 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1025 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1026 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1027 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1028 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1029 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1030 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1031 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1032 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1033 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1034 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1035 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1036 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1037 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1038 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1039 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1040 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1041 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1042 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1043 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1044 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1045 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1046 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1047 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1048 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1049 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1050 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1051 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1052 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1053 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1054 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1055 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1056 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1057 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1058 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1059 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1060 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1061 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1062 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1063 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1064 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1065 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1066 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1067 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1068 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1069 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1070 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1071 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1072 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1073 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1074 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1075 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1076 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1077 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1078 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1079 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1080 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1081 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1082 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1083 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1084 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1085 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1086 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1087 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1088 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1089 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1090 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1091 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1092 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1093 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1094 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1095 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1096 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1097 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1098 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1099 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1100 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1101 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1102 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1103 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1104 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1105 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1106 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1107 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1108 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1109 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1110 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1111 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1112 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1113 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1114 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1115 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1116 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1117 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1118 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1119 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1120 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1121 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1122 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1123 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1124 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1125 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1126 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1127 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1128 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1129 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1130 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1131 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1132 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1133 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1134 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1135 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1136 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1137 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1138 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1139 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1140 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1141 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1142 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1143 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1144 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1145 = Var(within=Binary, bounds=(0,1), initialize=0)
m.b1146 = Var(within=Binary, bounds=(0,1), initialize=0)
# Objective: maximize net profit.  Positive coefficients on m.x* terms read
# as revenues on product/outlet streams, negative m.x* terms as raw-material
# or operating costs, and the trailing negative terms on m.b1027..m.b1146
# as fixed charges incurred when the corresponding binary is switched on.
# NOTE(review): the revenue/cost interpretation is inferred from the sign
# pattern typical of process-synthesis profit objectives — confirm against
# the originating model's documentation.  Coefficients are machine-generated;
# do not hand-edit.
m.obj = Objective(sense=maximize, expr= -m.x121 - m.x122 - m.x123 + 5 * m.x139
+ 10 * m.x140 + 5 * m.x141 - 2 * m.x154 - m.x155 - 2 * m.x156 - 10 *
m.x205 - 5 * m.x206 - 5 * m.x207 - 5 * m.x208 - 5 * m.x209 - 5 * m.x210 +
40 * m.x229 + 30 * m.x230 + 15 * m.x231 + 15 * m.x232 + 20 * m.x233 + 25 *
m.x234 + 10 * m.x235 + 30 * m.x236 + 40 * m.x237 + 30 * m.x238 + 20 *
m.x239 + 20 * m.x240 + 35 * m.x241 + 50 * m.x242 + 20 * m.x243 + 20 *
m.x244 + 30 * m.x245 + 35 * m.x246 + 25 * m.x247 + 50 * m.x248 + 10 *
m.x249 + 15 * m.x250 + 20 * m.x251 + 20 * m.x252 + 30 * m.x274 + 40 *
m.x275 + 40 * m.x276 - m.x289 - m.x290 - m.x291 - 5 * m.x340 - 3 * m.x341
- 4 * m.x342 - m.x343 - m.x344 - m.x345 + 120 * m.x364 + 110 * m.x365 +
150 * m.x366 + 140 * m.x367 + 120 * m.x368 + 100 * m.x369 + 90 * m.x370 +
60 * m.x371 + 150 * m.x372 + 80 * m.x373 + 90 * m.x374 + 120 * m.x375 + 285
* m.x376 + 390 * m.x377 + 350 * m.x378 + 290 * m.x379 + 405 * m.x380 + 190
* m.x381 + 280 * m.x382 + 400 * m.x383 + 430 * m.x384 + 290 * m.x385 + 300
* m.x386 + 240 * m.x387 + 350 * m.x388 + 250 * m.x389 + 300 * m.x390 - 5 *
m.b1027 - 4 * m.b1028 - 6 * m.b1029 - 8 * m.b1030 - 7 * m.b1031 - 6 *
m.b1032 - 6 * m.b1033 - 9 * m.b1034 - 4 * m.b1035 - 10 * m.b1036 - 9 *
m.b1037 - 5 * m.b1038 - 6 * m.b1039 - 10 * m.b1040 - 6 * m.b1041 - 7 *
m.b1042 - 7 * m.b1043 - 4 * m.b1044 - 4 * m.b1045 - 3 * m.b1046 - 2 *
m.b1047 - 5 * m.b1048 - 6 * m.b1049 - 7 * m.b1050 - 2 * m.b1051 - 5 *
m.b1052 - 2 * m.b1053 - 4 * m.b1054 - 7 * m.b1055 - 4 * m.b1056 - 3 *
m.b1057 - 9 * m.b1058 - 3 * m.b1059 - 7 * m.b1060 - 2 * m.b1061 - 9 *
m.b1062 - 3 * m.b1063 - m.b1064 - 9 * m.b1065 - 2 * m.b1066 - 6 * m.b1067
- 3 * m.b1068 - 4 * m.b1069 - 8 * m.b1070 - m.b1071 - 2 * m.b1072 - 5 *
m.b1073 - 2 * m.b1074 - 3 * m.b1075 - 4 * m.b1076 - 3 * m.b1077 - 5 *
m.b1078 - 7 * m.b1079 - 6 * m.b1080 - 2 * m.b1081 - 8 * m.b1082 - 4 *
m.b1083 - m.b1084 - 4 * m.b1085 - m.b1086 - 2 * m.b1087 - 5 * m.b1088 - 2 *
m.b1089 - 9 * m.b1090 - 2 * m.b1091 - 9 * m.b1092 - 5 * m.b1093 - 8 *
m.b1094 - 4 * m.b1095 - 2 * m.b1096 - 3 * m.b1097 - 8 * m.b1098 - 10 *
m.b1099 - 6 * m.b1100 - 3 * m.b1101 - 4 * m.b1102 - 8 * m.b1103 - 7 *
m.b1104 - 7 * m.b1105 - 3 * m.b1106 - 9 * m.b1107 - 4 * m.b1108 - 8 *
m.b1109 - 6 * m.b1110 - 2 * m.b1111 - m.b1112 - 3 * m.b1113 - 8 * m.b1114
- 3 * m.b1115 - 4 * m.b1116 - 9 * m.b1117 - 5 * m.b1118 - m.b1119 - 3 *
m.b1120 - 9 * m.b1121 - 5 * m.b1122 - 5 * m.b1123 - 3 * m.b1124 - 3 *
m.b1125 - 5 * m.b1126 - 3 * m.b1127 - 2 * m.b1128 - 6 * m.b1129 - 4 *
m.b1130 - 6 * m.b1131 - 2 * m.b1132 - 6 * m.b1133 - 6 * m.b1134 - 6 *
m.b1135 - 4 * m.b1136 - 3 * m.b1137 - 3 * m.b1138 - 2 * m.b1139 - m.b1140
- 5 * m.b1141 - 8 * m.b1142 - 6 * m.b1143 - 9 * m.b1144 - 5 * m.b1145 - 2
* m.b1146)
# e1-e63: linear equality constraints in triplets (one per scenario/period —
# each group of three repeats the same structure on consecutive variable
# indices).  Each equation sets one variable equal to a sum of others,
# which reads as stream splitting/mixing balances in a process
# superstructure.  NOTE(review): the "splitter/mixer" interpretation is
# inferred from the algebraic shape only — confirm against the source model.
m.e1 = Constraint(expr= m.x121 - m.x124 - m.x127 == 0)
m.e2 = Constraint(expr= m.x122 - m.x125 - m.x128 == 0)
m.e3 = Constraint(expr= m.x123 - m.x126 - m.x129 == 0)
# e4-e6: a variable collects (mixes) two inflows rather than splitting.
m.e4 = Constraint(expr= -m.x130 - m.x133 + m.x136 == 0)
m.e5 = Constraint(expr= -m.x131 - m.x134 + m.x137 == 0)
m.e6 = Constraint(expr= -m.x132 - m.x135 + m.x138 == 0)
m.e7 = Constraint(expr= m.x136 - m.x139 - m.x142 == 0)
m.e8 = Constraint(expr= m.x137 - m.x140 - m.x143 == 0)
m.e9 = Constraint(expr= m.x138 - m.x141 - m.x144 == 0)
# e10-e12: three-way splits.
m.e10 = Constraint(expr= m.x142 - m.x145 - m.x148 - m.x151 == 0)
m.e11 = Constraint(expr= m.x143 - m.x146 - m.x149 - m.x152 == 0)
m.e12 = Constraint(expr= m.x144 - m.x147 - m.x150 - m.x153 == 0)
m.e13 = Constraint(expr= m.x157 - m.x166 - m.x169 == 0)
m.e14 = Constraint(expr= m.x158 - m.x167 - m.x170 == 0)
m.e15 = Constraint(expr= m.x159 - m.x168 - m.x171 == 0)
m.e16 = Constraint(expr= m.x163 - m.x172 - m.x175 - m.x178 == 0)
m.e17 = Constraint(expr= m.x164 - m.x173 - m.x176 - m.x179 == 0)
m.e18 = Constraint(expr= m.x165 - m.x174 - m.x177 - m.x180 == 0)
m.e19 = Constraint(expr= m.x187 - m.x199 - m.x202 == 0)
m.e20 = Constraint(expr= m.x188 - m.x200 - m.x203 == 0)
m.e21 = Constraint(expr= m.x189 - m.x201 - m.x204 == 0)
m.e22 = Constraint(expr= -m.x190 - m.x208 + m.x211 == 0)
m.e23 = Constraint(expr= -m.x191 - m.x209 + m.x212 == 0)
m.e24 = Constraint(expr= -m.x192 - m.x210 + m.x213 == 0)
m.e25 = Constraint(expr= m.x193 - m.x214 - m.x217 == 0)
m.e26 = Constraint(expr= m.x194 - m.x215 - m.x218 == 0)
m.e27 = Constraint(expr= m.x195 - m.x216 - m.x219 == 0)
m.e28 = Constraint(expr= m.x196 - m.x220 - m.x223 - m.x226 == 0)
m.e29 = Constraint(expr= m.x197 - m.x221 - m.x224 - m.x227 == 0)
m.e30 = Constraint(expr= m.x198 - m.x222 - m.x225 - m.x228 == 0)
# e31-e33: simple pass-through (x253..255 equal x256..258).
m.e31 = Constraint(expr= m.x253 - m.x256 == 0)
m.e32 = Constraint(expr= m.x254 - m.x257 == 0)
m.e33 = Constraint(expr= m.x255 - m.x258 == 0)
m.e34 = Constraint(expr= m.x256 - m.x259 - m.x262 == 0)
m.e35 = Constraint(expr= m.x257 - m.x260 - m.x263 == 0)
m.e36 = Constraint(expr= m.x258 - m.x261 - m.x264 == 0)
m.e37 = Constraint(expr= -m.x265 - m.x268 + m.x271 == 0)
m.e38 = Constraint(expr= -m.x266 - m.x269 + m.x272 == 0)
m.e39 = Constraint(expr= -m.x267 - m.x270 + m.x273 == 0)
m.e40 = Constraint(expr= m.x271 - m.x274 - m.x277 == 0)
m.e41 = Constraint(expr= m.x272 - m.x275 - m.x278 == 0)
m.e42 = Constraint(expr= m.x273 - m.x276 - m.x279 == 0)
m.e43 = Constraint(expr= m.x277 - m.x280 - m.x283 - m.x286 == 0)
m.e44 = Constraint(expr= m.x278 - m.x281 - m.x284 - m.x287 == 0)
m.e45 = Constraint(expr= m.x279 - m.x282 - m.x285 - m.x288 == 0)
m.e46 = Constraint(expr= m.x292 - m.x301 - m.x304 == 0)
m.e47 = Constraint(expr= m.x293 - m.x302 - m.x305 == 0)
m.e48 = Constraint(expr= m.x294 - m.x303 - m.x306 == 0)
m.e49 = Constraint(expr= m.x298 - m.x307 - m.x310 - m.x313 == 0)
m.e50 = Constraint(expr= m.x299 - m.x308 - m.x311 - m.x314 == 0)
m.e51 = Constraint(expr= m.x300 - m.x309 - m.x312 - m.x315 == 0)
m.e52 = Constraint(expr= m.x322 - m.x334 - m.x337 == 0)
m.e53 = Constraint(expr= m.x323 - m.x335 - m.x338 == 0)
m.e54 = Constraint(expr= m.x324 - m.x336 - m.x339 == 0)
m.e55 = Constraint(expr= -m.x325 - m.x343 + m.x346 == 0)
m.e56 = Constraint(expr= -m.x326 - m.x344 + m.x347 == 0)
m.e57 = Constraint(expr= -m.x327 - m.x345 + m.x348 == 0)
m.e58 = Constraint(expr= m.x328 - m.x349 - m.x352 == 0)
m.e59 = Constraint(expr= m.x329 - m.x350 - m.x353 == 0)
m.e60 = Constraint(expr= m.x330 - m.x351 - m.x354 == 0)
m.e61 = Constraint(expr= m.x331 - m.x355 - m.x358 - m.x361 == 0)
m.e62 = Constraint(expr= m.x332 - m.x356 - m.x359 - m.x362 == 0)
m.e63 = Constraint(expr= m.x333 - m.x357 - m.x360 - m.x363 == 0)
# e64-e213: disjunct constraint groups.  Each unit/disjunction contributes a
# repeating pattern (again in triplets over consecutive indices):
#   * a nonlinear performance relation active when the binary is 1, written
#     as (y/(0.001 + 0.999*b) - f(x/(0.001 + 0.999*b))) * (0.001 + 0.999*b)
#     <= 0 — the epsilon-perturbed convex-hull form, which degenerates
#     gracefully to 0 <= 0 as b -> 0 instead of dividing by zero;
#   * "== 0" constraints fixing the inactive-branch copies of the variables;
#   * linear balances tying the disaggregated copies back to the parent
#     stream variables;
#   * paired bounds x - U*b <= 0 and x' + U*b <= U, which force the active
#     copy to 0 when b = 0 and the inactive copy to 0 when b = 1.
# NOTE(review): "convex hull reformulation" is inferred from the algebraic
# pattern (it matches the standard epsilon formulation) — confirm against
# the originating GDP model.  All numeric bounds are machine-generated.
m.e64 = Constraint(expr= (m.x403 / (0.001 + 0.999 * m.b907) - log(m.x391 / (
0.001 + 0.999 * m.b907) + 1)) * (0.001 + 0.999 * m.b907) <= 0)
m.e65 = Constraint(expr= (m.x404 / (0.001 + 0.999 * m.b908) - log(m.x392 / (
0.001 + 0.999 * m.b908) + 1)) * (0.001 + 0.999 * m.b908) <= 0)
m.e66 = Constraint(expr= (m.x405 / (0.001 + 0.999 * m.b909) - log(m.x393 / (
0.001 + 0.999 * m.b909) + 1)) * (0.001 + 0.999 * m.b909) <= 0)
m.e67 = Constraint(expr= m.x394 == 0)
m.e68 = Constraint(expr= m.x395 == 0)
m.e69 = Constraint(expr= m.x396 == 0)
m.e70 = Constraint(expr= m.x406 == 0)
m.e71 = Constraint(expr= m.x407 == 0)
m.e72 = Constraint(expr= m.x408 == 0)
m.e73 = Constraint(expr= m.x124 - m.x391 - m.x394 == 0)
m.e74 = Constraint(expr= m.x125 - m.x392 - m.x395 == 0)
m.e75 = Constraint(expr= m.x126 - m.x393 - m.x396 == 0)
m.e76 = Constraint(expr= m.x130 - m.x403 - m.x406 == 0)
m.e77 = Constraint(expr= m.x131 - m.x404 - m.x407 == 0)
m.e78 = Constraint(expr= m.x132 - m.x405 - m.x408 == 0)
m.e79 = Constraint(expr= m.x391 - 40 * m.b907 <= 0)
m.e80 = Constraint(expr= m.x392 - 40 * m.b908 <= 0)
m.e81 = Constraint(expr= m.x393 - 40 * m.b909 <= 0)
m.e82 = Constraint(expr= m.x394 + 40 * m.b907 <= 40)
m.e83 = Constraint(expr= m.x395 + 40 * m.b908 <= 40)
m.e84 = Constraint(expr= m.x396 + 40 * m.b909 <= 40)
m.e85 = Constraint(expr= m.x403 - 3.71357206670431 * m.b907 <= 0)
m.e86 = Constraint(expr= m.x404 - 3.71357206670431 * m.b908 <= 0)
m.e87 = Constraint(expr= m.x405 - 3.71357206670431 * m.b909 <= 0)
m.e88 = Constraint(expr= m.x406 + 3.71357206670431 * m.b907
<= 3.71357206670431)
m.e89 = Constraint(expr= m.x407 + 3.71357206670431 * m.b908
<= 3.71357206670431)
m.e90 = Constraint(expr= m.x408 + 3.71357206670431 * m.b909
<= 3.71357206670431)
# Next unit: same pattern with gain 1.2 on the log term (binaries b910-b912).
m.e91 = Constraint(expr= (m.x409 / (0.001 + 0.999 * m.b910) - 1.2 * log(m.x397
/ (0.001 + 0.999 * m.b910) + 1)) * (0.001 + 0.999 * m.b910) <= 0)
m.e92 = Constraint(expr= (m.x410 / (0.001 + 0.999 * m.b911) - 1.2 * log(m.x398
/ (0.001 + 0.999 * m.b911) + 1)) * (0.001 + 0.999 * m.b911) <= 0)
m.e93 = Constraint(expr= (m.x411 / (0.001 + 0.999 * m.b912) - 1.2 * log(m.x399
/ (0.001 + 0.999 * m.b912) + 1)) * (0.001 + 0.999 * m.b912) <= 0)
m.e94 = Constraint(expr= m.x400 == 0)
m.e95 = Constraint(expr= m.x401 == 0)
m.e96 = Constraint(expr= m.x402 == 0)
m.e97 = Constraint(expr= m.x412 == 0)
m.e98 = Constraint(expr= m.x413 == 0)
m.e99 = Constraint(expr= m.x414 == 0)
m.e100 = Constraint(expr= m.x127 - m.x397 - m.x400 == 0)
m.e101 = Constraint(expr= m.x128 - m.x398 - m.x401 == 0)
m.e102 = Constraint(expr= m.x129 - m.x399 - m.x402 == 0)
m.e103 = Constraint(expr= m.x133 - m.x409 - m.x412 == 0)
m.e104 = Constraint(expr= m.x134 - m.x410 - m.x413 == 0)
m.e105 = Constraint(expr= m.x135 - m.x411 - m.x414 == 0)
m.e106 = Constraint(expr= m.x397 - 40 * m.b910 <= 0)
m.e107 = Constraint(expr= m.x398 - 40 * m.b911 <= 0)
m.e108 = Constraint(expr= m.x399 - 40 * m.b912 <= 0)
m.e109 = Constraint(expr= m.x400 + 40 * m.b910 <= 40)
m.e110 = Constraint(expr= m.x401 + 40 * m.b911 <= 40)
m.e111 = Constraint(expr= m.x402 + 40 * m.b912 <= 40)
m.e112 = Constraint(expr= m.x409 - 4.45628648004517 * m.b910 <= 0)
m.e113 = Constraint(expr= m.x410 - 4.45628648004517 * m.b911 <= 0)
m.e114 = Constraint(expr= m.x411 - 4.45628648004517 * m.b912 <= 0)
m.e115 = Constraint(expr= m.x412 + 4.45628648004517 * m.b910
<= 4.45628648004517)
m.e116 = Constraint(expr= m.x413 + 4.45628648004517 * m.b911
<= 4.45628648004517)
m.e117 = Constraint(expr= m.x414 + 4.45628648004517 * m.b912
<= 4.45628648004517)
# Next unit: linear disjunct with fixed yield 0.75 (binaries b913-b915).
m.e118 = Constraint(expr= -0.75 * m.x415 + m.x439 == 0)
m.e119 = Constraint(expr= -0.75 * m.x416 + m.x440 == 0)
m.e120 = Constraint(expr= -0.75 * m.x417 + m.x441 == 0)
m.e121 = Constraint(expr= m.x418 == 0)
m.e122 = Constraint(expr= m.x419 == 0)
m.e123 = Constraint(expr= m.x420 == 0)
m.e124 = Constraint(expr= m.x442 == 0)
m.e125 = Constraint(expr= m.x443 == 0)
m.e126 = Constraint(expr= m.x444 == 0)
m.e127 = Constraint(expr= m.x145 - m.x415 - m.x418 == 0)
m.e128 = Constraint(expr= m.x146 - m.x416 - m.x419 == 0)
m.e129 = Constraint(expr= m.x147 - m.x417 - m.x420 == 0)
m.e130 = Constraint(expr= m.x157 - m.x439 - m.x442 == 0)
m.e131 = Constraint(expr= m.x158 - m.x440 - m.x443 == 0)
m.e132 = Constraint(expr= m.x159 - m.x441 - m.x444 == 0)
m.e133 = Constraint(expr= m.x415 - 4.45628648004517 * m.b913 <= 0)
m.e134 = Constraint(expr= m.x416 - 4.45628648004517 * m.b914 <= 0)
m.e135 = Constraint(expr= m.x417 - 4.45628648004517 * m.b915 <= 0)
m.e136 = Constraint(expr= m.x418 + 4.45628648004517 * m.b913
<= 4.45628648004517)
m.e137 = Constraint(expr= m.x419 + 4.45628648004517 * m.b914
<= 4.45628648004517)
m.e138 = Constraint(expr= m.x420 + 4.45628648004517 * m.b915
<= 4.45628648004517)
m.e139 = Constraint(expr= m.x439 - 3.34221486003388 * m.b913 <= 0)
m.e140 = Constraint(expr= m.x440 - 3.34221486003388 * m.b914 <= 0)
m.e141 = Constraint(expr= m.x441 - 3.34221486003388 * m.b915 <= 0)
m.e142 = Constraint(expr= m.x442 + 3.34221486003388 * m.b913
<= 3.34221486003388)
m.e143 = Constraint(expr= m.x443 + 3.34221486003388 * m.b914
<= 3.34221486003388)
m.e144 = Constraint(expr= m.x444 + 3.34221486003388 * m.b915
<= 3.34221486003388)
# Next unit: log disjunct with gain 1.5 (binaries b916-b918).
m.e145 = Constraint(expr= (m.x445 / (0.001 + 0.999 * m.b916) - 1.5 * log(m.x421
/ (0.001 + 0.999 * m.b916) + 1)) * (0.001 + 0.999 * m.b916) <= 0)
m.e146 = Constraint(expr= (m.x446 / (0.001 + 0.999 * m.b917) - 1.5 * log(m.x422
/ (0.001 + 0.999 * m.b917) + 1)) * (0.001 + 0.999 * m.b917) <= 0)
m.e147 = Constraint(expr= (m.x447 / (0.001 + 0.999 * m.b918) - 1.5 * log(m.x423
/ (0.001 + 0.999 * m.b918) + 1)) * (0.001 + 0.999 * m.b918) <= 0)
m.e148 = Constraint(expr= m.x424 == 0)
m.e149 = Constraint(expr= m.x425 == 0)
m.e150 = Constraint(expr= m.x426 == 0)
m.e151 = Constraint(expr= m.x451 == 0)
m.e152 = Constraint(expr= m.x452 == 0)
m.e153 = Constraint(expr= m.x453 == 0)
m.e154 = Constraint(expr= m.x148 - m.x421 - m.x424 == 0)
m.e155 = Constraint(expr= m.x149 - m.x422 - m.x425 == 0)
m.e156 = Constraint(expr= m.x150 - m.x423 - m.x426 == 0)
m.e157 = Constraint(expr= m.x160 - m.x445 - m.x451 == 0)
m.e158 = Constraint(expr= m.x161 - m.x446 - m.x452 == 0)
m.e159 = Constraint(expr= m.x162 - m.x447 - m.x453 == 0)
m.e160 = Constraint(expr= m.x421 - 4.45628648004517 * m.b916 <= 0)
m.e161 = Constraint(expr= m.x422 - 4.45628648004517 * m.b917 <= 0)
m.e162 = Constraint(expr= m.x423 - 4.45628648004517 * m.b918 <= 0)
m.e163 = Constraint(expr= m.x424 + 4.45628648004517 * m.b916
<= 4.45628648004517)
m.e164 = Constraint(expr= m.x425 + 4.45628648004517 * m.b917
<= 4.45628648004517)
m.e165 = Constraint(expr= m.x426 + 4.45628648004517 * m.b918
<= 4.45628648004517)
m.e166 = Constraint(expr= m.x445 - 2.54515263975353 * m.b916 <= 0)
m.e167 = Constraint(expr= m.x446 - 2.54515263975353 * m.b917 <= 0)
m.e168 = Constraint(expr= m.x447 - 2.54515263975353 * m.b918 <= 0)
m.e169 = Constraint(expr= m.x451 + 2.54515263975353 * m.b916
<= 2.54515263975353)
m.e170 = Constraint(expr= m.x452 + 2.54515263975353 * m.b917
<= 2.54515263975353)
m.e171 = Constraint(expr= m.x453 + 2.54515263975353 * m.b918
<= 2.54515263975353)
# Next unit: two-feed linear disjunct (yields 1 and 0.5 into the same
# output copy x457-x459; binaries b919-b921).
m.e172 = Constraint(expr= -m.x427 + m.x457 == 0)
m.e173 = Constraint(expr= -m.x428 + m.x458 == 0)
m.e174 = Constraint(expr= -m.x429 + m.x459 == 0)
m.e175 = Constraint(expr= -0.5 * m.x433 + m.x457 == 0)
m.e176 = Constraint(expr= -0.5 * m.x434 + m.x458 == 0)
m.e177 = Constraint(expr= -0.5 * m.x435 + m.x459 == 0)
m.e178 = Constraint(expr= m.x430 == 0)
m.e179 = Constraint(expr= m.x431 == 0)
m.e180 = Constraint(expr= m.x432 == 0)
m.e181 = Constraint(expr= m.x436 == 0)
m.e182 = Constraint(expr= m.x437 == 0)
m.e183 = Constraint(expr= m.x438 == 0)
m.e184 = Constraint(expr= m.x460 == 0)
m.e185 = Constraint(expr= m.x461 == 0)
m.e186 = Constraint(expr= m.x462 == 0)
m.e187 = Constraint(expr= m.x151 - m.x427 - m.x430 == 0)
m.e188 = Constraint(expr= m.x152 - m.x428 - m.x431 == 0)
m.e189 = Constraint(expr= m.x153 - m.x429 - m.x432 == 0)
m.e190 = Constraint(expr= m.x154 - m.x433 - m.x436 == 0)
m.e191 = Constraint(expr= m.x155 - m.x434 - m.x437 == 0)
m.e192 = Constraint(expr= m.x156 - m.x435 - m.x438 == 0)
m.e193 = Constraint(expr= m.x163 - m.x457 - m.x460 == 0)
m.e194 = Constraint(expr= m.x164 - m.x458 - m.x461 == 0)
m.e195 = Constraint(expr= m.x165 - m.x459 - m.x462 == 0)
m.e196 = Constraint(expr= m.x427 - 4.45628648004517 * m.b919 <= 0)
m.e197 = Constraint(expr= m.x428 - 4.45628648004517 * m.b920 <= 0)
m.e198 = Constraint(expr= m.x429 - 4.45628648004517 * m.b921 <= 0)
m.e199 = Constraint(expr= m.x430 + 4.45628648004517 * m.b919
<= 4.45628648004517)
m.e200 = Constraint(expr= m.x431 + 4.45628648004517 * m.b920
<= 4.45628648004517)
m.e201 = Constraint(expr= m.x432 + 4.45628648004517 * m.b921
<= 4.45628648004517)
m.e202 = Constraint(expr= m.x433 - 30 * m.b919 <= 0)
m.e203 = Constraint(expr= m.x434 - 30 * m.b920 <= 0)
m.e204 = Constraint(expr= m.x435 - 30 * m.b921 <= 0)
m.e205 = Constraint(expr= m.x436 + 30 * m.b919 <= 30)
m.e206 = Constraint(expr= m.x437 + 30 * m.b920 <= 30)
m.e207 = Constraint(expr= m.x438 + 30 * m.b921 <= 30)
m.e208 = Constraint(expr= m.x457 - 15 * m.b919 <= 0)
m.e209 = Constraint(expr= m.x458 - 15 * m.b920 <= 0)
m.e210 = Constraint(expr= m.x459 - 15 * m.b921 <= 0)
m.e211 = Constraint(expr= m.x460 + 15 * m.b919 <= 15)
m.e212 = Constraint(expr= m.x461 + 15 * m.b920 <= 15)
m.e213 = Constraint(expr= m.x462 + 15 * m.b921 <= 15)
# e214-e348: further disjunct groups with the same structure as e64-e213
# (epsilon-perturbed nonlinear relation when the binary is active, zeroed
# inactive-branch copies, disaggregation balances, and paired on/off upper
# bounds).  See the comment block above e64 for the pattern; per-group
# differences are only the log gain / linear yield and the bound constants.
m.e214 = Constraint(expr= (m.x493 / (0.001 + 0.999 * m.b922) - 1.25 * log(
m.x463 / (0.001 + 0.999 * m.b922) + 1)) * (0.001 + 0.999 * m.b922) <= 0)
m.e215 = Constraint(expr= (m.x494 / (0.001 + 0.999 * m.b923) - 1.25 * log(
m.x464 / (0.001 + 0.999 * m.b923) + 1)) * (0.001 + 0.999 * m.b923) <= 0)
m.e216 = Constraint(expr= (m.x495 / (0.001 + 0.999 * m.b924) - 1.25 * log(
m.x465 / (0.001 + 0.999 * m.b924) + 1)) * (0.001 + 0.999 * m.b924) <= 0)
m.e217 = Constraint(expr= m.x466 == 0)
m.e218 = Constraint(expr= m.x467 == 0)
m.e219 = Constraint(expr= m.x468 == 0)
m.e220 = Constraint(expr= m.x499 == 0)
m.e221 = Constraint(expr= m.x500 == 0)
m.e222 = Constraint(expr= m.x501 == 0)
m.e223 = Constraint(expr= m.x166 - m.x463 - m.x466 == 0)
m.e224 = Constraint(expr= m.x167 - m.x464 - m.x467 == 0)
m.e225 = Constraint(expr= m.x168 - m.x465 - m.x468 == 0)
m.e226 = Constraint(expr= m.x181 - m.x493 - m.x499 == 0)
m.e227 = Constraint(expr= m.x182 - m.x494 - m.x500 == 0)
m.e228 = Constraint(expr= m.x183 - m.x495 - m.x501 == 0)
m.e229 = Constraint(expr= m.x463 - 3.34221486003388 * m.b922 <= 0)
m.e230 = Constraint(expr= m.x464 - 3.34221486003388 * m.b923 <= 0)
m.e231 = Constraint(expr= m.x465 - 3.34221486003388 * m.b924 <= 0)
m.e232 = Constraint(expr= m.x466 + 3.34221486003388 * m.b922
<= 3.34221486003388)
m.e233 = Constraint(expr= m.x467 + 3.34221486003388 * m.b923
<= 3.34221486003388)
m.e234 = Constraint(expr= m.x468 + 3.34221486003388 * m.b924
<= 3.34221486003388)
m.e235 = Constraint(expr= m.x493 - 1.83548069293539 * m.b922 <= 0)
m.e236 = Constraint(expr= m.x494 - 1.83548069293539 * m.b923 <= 0)
m.e237 = Constraint(expr= m.x495 - 1.83548069293539 * m.b924 <= 0)
m.e238 = Constraint(expr= m.x499 + 1.83548069293539 * m.b922
<= 1.83548069293539)
m.e239 = Constraint(expr= m.x500 + 1.83548069293539 * m.b923
<= 1.83548069293539)
m.e240 = Constraint(expr= m.x501 + 1.83548069293539 * m.b924
<= 1.83548069293539)
# Log disjunct, gain 0.9 (binaries b925-b927).
m.e241 = Constraint(expr= (m.x505 / (0.001 + 0.999 * m.b925) - 0.9 * log(m.x469
/ (0.001 + 0.999 * m.b925) + 1)) * (0.001 + 0.999 * m.b925) <= 0)
m.e242 = Constraint(expr= (m.x506 / (0.001 + 0.999 * m.b926) - 0.9 * log(m.x470
/ (0.001 + 0.999 * m.b926) + 1)) * (0.001 + 0.999 * m.b926) <= 0)
m.e243 = Constraint(expr= (m.x507 / (0.001 + 0.999 * m.b927) - 0.9 * log(m.x471
/ (0.001 + 0.999 * m.b927) + 1)) * (0.001 + 0.999 * m.b927) <= 0)
m.e244 = Constraint(expr= m.x472 == 0)
m.e245 = Constraint(expr= m.x473 == 0)
m.e246 = Constraint(expr= m.x474 == 0)
m.e247 = Constraint(expr= m.x511 == 0)
m.e248 = Constraint(expr= m.x512 == 0)
m.e249 = Constraint(expr= m.x513 == 0)
m.e250 = Constraint(expr= m.x169 - m.x469 - m.x472 == 0)
m.e251 = Constraint(expr= m.x170 - m.x470 - m.x473 == 0)
m.e252 = Constraint(expr= m.x171 - m.x471 - m.x474 == 0)
m.e253 = Constraint(expr= m.x184 - m.x505 - m.x511 == 0)
m.e254 = Constraint(expr= m.x185 - m.x506 - m.x512 == 0)
m.e255 = Constraint(expr= m.x186 - m.x507 - m.x513 == 0)
m.e256 = Constraint(expr= m.x469 - 3.34221486003388 * m.b925 <= 0)
m.e257 = Constraint(expr= m.x470 - 3.34221486003388 * m.b926 <= 0)
m.e258 = Constraint(expr= m.x471 - 3.34221486003388 * m.b927 <= 0)
m.e259 = Constraint(expr= m.x472 + 3.34221486003388 * m.b925
<= 3.34221486003388)
m.e260 = Constraint(expr= m.x473 + 3.34221486003388 * m.b926
<= 3.34221486003388)
m.e261 = Constraint(expr= m.x474 + 3.34221486003388 * m.b927
<= 3.34221486003388)
m.e262 = Constraint(expr= m.x505 - 1.32154609891348 * m.b925 <= 0)
m.e263 = Constraint(expr= m.x506 - 1.32154609891348 * m.b926 <= 0)
m.e264 = Constraint(expr= m.x507 - 1.32154609891348 * m.b927 <= 0)
m.e265 = Constraint(expr= m.x511 + 1.32154609891348 * m.b925
<= 1.32154609891348)
m.e266 = Constraint(expr= m.x512 + 1.32154609891348 * m.b926
<= 1.32154609891348)
m.e267 = Constraint(expr= m.x513 + 1.32154609891348 * m.b927
<= 1.32154609891348)
# Log disjunct, unit gain (binaries b928-b930).
m.e268 = Constraint(expr= (m.x517 / (0.001 + 0.999 * m.b928) - log(m.x448 / (
0.001 + 0.999 * m.b928) + 1)) * (0.001 + 0.999 * m.b928) <= 0)
m.e269 = Constraint(expr= (m.x518 / (0.001 + 0.999 * m.b929) - log(m.x449 / (
0.001 + 0.999 * m.b929) + 1)) * (0.001 + 0.999 * m.b929) <= 0)
m.e270 = Constraint(expr= (m.x519 / (0.001 + 0.999 * m.b930) - log(m.x450 / (
0.001 + 0.999 * m.b930) + 1)) * (0.001 + 0.999 * m.b930) <= 0)
m.e271 = Constraint(expr= m.x454 == 0)
m.e272 = Constraint(expr= m.x455 == 0)
m.e273 = Constraint(expr= m.x456 == 0)
m.e274 = Constraint(expr= m.x520 == 0)
m.e275 = Constraint(expr= m.x521 == 0)
m.e276 = Constraint(expr= m.x522 == 0)
m.e277 = Constraint(expr= m.x160 - m.x448 - m.x454 == 0)
m.e278 = Constraint(expr= m.x161 - m.x449 - m.x455 == 0)
m.e279 = Constraint(expr= m.x162 - m.x450 - m.x456 == 0)
m.e280 = Constraint(expr= m.x187 - m.x517 - m.x520 == 0)
m.e281 = Constraint(expr= m.x188 - m.x518 - m.x521 == 0)
m.e282 = Constraint(expr= m.x189 - m.x519 - m.x522 == 0)
m.e283 = Constraint(expr= m.x448 - 2.54515263975353 * m.b928 <= 0)
m.e284 = Constraint(expr= m.x449 - 2.54515263975353 * m.b929 <= 0)
m.e285 = Constraint(expr= m.x450 - 2.54515263975353 * m.b930 <= 0)
m.e286 = Constraint(expr= m.x454 + 2.54515263975353 * m.b928
<= 2.54515263975353)
m.e287 = Constraint(expr= m.x455 + 2.54515263975353 * m.b929
<= 2.54515263975353)
m.e288 = Constraint(expr= m.x456 + 2.54515263975353 * m.b930
<= 2.54515263975353)
m.e289 = Constraint(expr= m.x517 - 1.26558121681553 * m.b928 <= 0)
m.e290 = Constraint(expr= m.x518 - 1.26558121681553 * m.b929 <= 0)
m.e291 = Constraint(expr= m.x519 - 1.26558121681553 * m.b930 <= 0)
m.e292 = Constraint(expr= m.x520 + 1.26558121681553 * m.b928
<= 1.26558121681553)
m.e293 = Constraint(expr= m.x521 + 1.26558121681553 * m.b929
<= 1.26558121681553)
m.e294 = Constraint(expr= m.x522 + 1.26558121681553 * m.b930
<= 1.26558121681553)
# Linear disjunct, yield 0.9 (binaries b931-b933).
m.e295 = Constraint(expr= -0.9 * m.x475 + m.x523 == 0)
m.e296 = Constraint(expr= -0.9 * m.x476 + m.x524 == 0)
m.e297 = Constraint(expr= -0.9 * m.x477 + m.x525 == 0)
m.e298 = Constraint(expr= m.x478 == 0)
m.e299 = Constraint(expr= m.x479 == 0)
m.e300 = Constraint(expr= m.x480 == 0)
m.e301 = Constraint(expr= m.x526 == 0)
m.e302 = Constraint(expr= m.x527 == 0)
m.e303 = Constraint(expr= m.x528 == 0)
m.e304 = Constraint(expr= m.x172 - m.x475 - m.x478 == 0)
m.e305 = Constraint(expr= m.x173 - m.x476 - m.x479 == 0)
m.e306 = Constraint(expr= m.x174 - m.x477 - m.x480 == 0)
m.e307 = Constraint(expr= m.x190 - m.x523 - m.x526 == 0)
m.e308 = Constraint(expr= m.x191 - m.x524 - m.x527 == 0)
m.e309 = Constraint(expr= m.x192 - m.x525 - m.x528 == 0)
m.e310 = Constraint(expr= m.x475 - 15 * m.b931 <= 0)
m.e311 = Constraint(expr= m.x476 - 15 * m.b932 <= 0)
m.e312 = Constraint(expr= m.x477 - 15 * m.b933 <= 0)
m.e313 = Constraint(expr= m.x478 + 15 * m.b931 <= 15)
m.e314 = Constraint(expr= m.x479 + 15 * m.b932 <= 15)
m.e315 = Constraint(expr= m.x480 + 15 * m.b933 <= 15)
m.e316 = Constraint(expr= m.x523 - 13.5 * m.b931 <= 0)
m.e317 = Constraint(expr= m.x524 - 13.5 * m.b932 <= 0)
m.e318 = Constraint(expr= m.x525 - 13.5 * m.b933 <= 0)
m.e319 = Constraint(expr= m.x526 + 13.5 * m.b931 <= 13.5)
m.e320 = Constraint(expr= m.x527 + 13.5 * m.b932 <= 13.5)
m.e321 = Constraint(expr= m.x528 + 13.5 * m.b933 <= 13.5)
# Linear disjunct, yield 0.6 (binaries b934-b936).
m.e322 = Constraint(expr= -0.6 * m.x481 + m.x529 == 0)
m.e323 = Constraint(expr= -0.6 * m.x482 + m.x530 == 0)
m.e324 = Constraint(expr= -0.6 * m.x483 + m.x531 == 0)
m.e325 = Constraint(expr= m.x484 == 0)
m.e326 = Constraint(expr= m.x485 == 0)
m.e327 = Constraint(expr= m.x486 == 0)
m.e328 = Constraint(expr= m.x532 == 0)
m.e329 = Constraint(expr= m.x533 == 0)
m.e330 = Constraint(expr= m.x534 == 0)
m.e331 = Constraint(expr= m.x175 - m.x481 - m.x484 == 0)
m.e332 = Constraint(expr= m.x176 - m.x482 - m.x485 == 0)
m.e333 = Constraint(expr= m.x177 - m.x483 - m.x486 == 0)
m.e334 = Constraint(expr= m.x193 - m.x529 - m.x532 == 0)
m.e335 = Constraint(expr= m.x194 - m.x530 - m.x533 == 0)
m.e336 = Constraint(expr= m.x195 - m.x531 - m.x534 == 0)
m.e337 = Constraint(expr= m.x481 - 15 * m.b934 <= 0)
m.e338 = Constraint(expr= m.x482 - 15 * m.b935 <= 0)
m.e339 = Constraint(expr= m.x483 - 15 * m.b936 <= 0)
m.e340 = Constraint(expr= m.x484 + 15 * m.b934 <= 15)
m.e341 = Constraint(expr= m.x485 + 15 * m.b935 <= 15)
m.e342 = Constraint(expr= m.x486 + 15 * m.b936 <= 15)
m.e343 = Constraint(expr= m.x529 - 9 * m.b934 <= 0)
m.e344 = Constraint(expr= m.x530 - 9 * m.b935 <= 0)
m.e345 = Constraint(expr= m.x531 - 9 * m.b936 <= 0)
m.e346 = Constraint(expr= m.x532 + 9 * m.b934 <= 9)
m.e347 = Constraint(expr= m.x533 + 9 * m.b935 <= 9)
m.e348 = Constraint(expr= m.x534 + 9 * m.b936 <= 9)
m.e349 = Constraint(expr= (m.x535 / (0.001 + 0.999 * m.b937) - 1.1 * log(m.x487
/ (0.001 + 0.999 * m.b937) + 1)) * (0.001 + 0.999 * m.b937) <= 0)
m.e350 = Constraint(expr= (m.x536 / (0.001 + 0.999 * m.b938) - 1.1 * log(m.x488
/ (0.001 + 0.999 * m.b938) + 1)) * (0.001 + 0.999 * m.b938) <= 0)
m.e351 = Constraint(expr= (m.x537 / (0.001 + 0.999 * m.b939) - 1.1 * log(m.x489
/ (0.001 + 0.999 * m.b939) + 1)) * (0.001 + 0.999 * m.b939) <= 0)
m.e352 = Constraint(expr= m.x490 == 0)
m.e353 = Constraint(expr= m.x491 == 0)
m.e354 = Constraint(expr= m.x492 == 0)
m.e355 = Constraint(expr= m.x538 == 0)
m.e356 = Constraint(expr= m.x539 == 0)
m.e357 = Constraint(expr= m.x540 == 0)
m.e358 = Constraint(expr= m.x178 - m.x487 - m.x490 == 0)
m.e359 = Constraint(expr= m.x179 - m.x488 - m.x491 == 0)
m.e360 = Constraint(expr= m.x180 - m.x489 - m.x492 == 0)
m.e361 = Constraint(expr= m.x196 - m.x535 - m.x538 == 0)
m.e362 = Constraint(expr= m.x197 - m.x536 - m.x539 == 0)
m.e363 = Constraint(expr= m.x198 - m.x537 - m.x540 == 0)
m.e364 = Constraint(expr= m.x487 - 15 * m.b937 <= 0)
m.e365 = Constraint(expr= m.x488 - 15 * m.b938 <= 0)
m.e366 = Constraint(expr= m.x489 - 15 * m.b939 <= 0)
m.e367 = Constraint(expr= m.x490 + 15 * m.b937 <= 15)
m.e368 = Constraint(expr= m.x491 + 15 * m.b938 <= 15)
m.e369 = Constraint(expr= m.x492 + 15 * m.b939 <= 15)
m.e370 = Constraint(expr= m.x535 - 3.04984759446376 * m.b937 <= 0)
m.e371 = Constraint(expr= m.x536 - 3.04984759446376 * m.b938 <= 0)
m.e372 = Constraint(expr= m.x537 - 3.04984759446376 * m.b939 <= 0)
m.e373 = Constraint(expr= m.x538 + 3.04984759446376 * m.b937
<= 3.04984759446376)
m.e374 = Constraint(expr= m.x539 + 3.04984759446376 * m.b938
<= 3.04984759446376)
m.e375 = Constraint(expr= m.x540 + 3.04984759446376 * m.b939
<= 3.04984759446376)
m.e376 = Constraint(expr= -0.9 * m.x496 + m.x595 == 0)
m.e377 = Constraint(expr= -0.9 * m.x497 + m.x596 == 0)
m.e378 = Constraint(expr= -0.9 * m.x498 + m.x597 == 0)
m.e379 = Constraint(expr= -m.x553 + m.x595 == 0)
m.e380 = Constraint(expr= -m.x554 + m.x596 == 0)
m.e381 = Constraint(expr= -m.x555 + m.x597 == 0)
m.e382 = Constraint(expr= m.x502 == 0)
m.e383 = Constraint(expr= m.x503 == 0)
m.e384 = Constraint(expr= m.x504 == 0)
m.e385 = Constraint(expr= m.x556 == 0)
m.e386 = Constraint(expr= m.x557 == 0)
m.e387 = Constraint(expr= m.x558 == 0)
m.e388 = Constraint(expr= m.x598 == 0)
m.e389 = Constraint(expr= m.x599 == 0)
m.e390 = Constraint(expr= m.x600 == 0)
m.e391 = Constraint(expr= m.x181 - m.x496 - m.x502 == 0)
m.e392 = Constraint(expr= m.x182 - m.x497 - m.x503 == 0)
m.e393 = Constraint(expr= m.x183 - m.x498 - m.x504 == 0)
m.e394 = Constraint(expr= m.x205 - m.x553 - m.x556 == 0)
m.e395 = Constraint(expr= m.x206 - m.x554 - m.x557 == 0)
m.e396 = Constraint(expr= m.x207 - m.x555 - m.x558 == 0)
m.e397 = Constraint(expr= m.x229 - m.x595 - m.x598 == 0)
m.e398 = Constraint(expr= m.x230 - m.x596 - m.x599 == 0)
m.e399 = Constraint(expr= m.x231 - m.x597 - m.x600 == 0)
m.e400 = Constraint(expr= m.x496 - 1.83548069293539 * m.b940 <= 0)
m.e401 = Constraint(expr= m.x497 - 1.83548069293539 * m.b941 <= 0)
m.e402 = Constraint(expr= m.x498 - 1.83548069293539 * m.b942 <= 0)
m.e403 = Constraint(expr= m.x502 + 1.83548069293539 * m.b940
<= 1.83548069293539)
m.e404 = Constraint(expr= m.x503 + 1.83548069293539 * m.b941
<= 1.83548069293539)
m.e405 = Constraint(expr= m.x504 + 1.83548069293539 * m.b942
<= 1.83548069293539)
m.e406 = Constraint(expr= m.x553 - 20 * m.b940 <= 0)
m.e407 = Constraint(expr= m.x554 - 20 * m.b941 <= 0)
m.e408 = Constraint(expr= m.x555 - 20 * m.b942 <= 0)
m.e409 = Constraint(expr= m.x556 + 20 * m.b940 <= 20)
m.e410 = Constraint(expr= m.x557 + 20 * m.b941 <= 20)
m.e411 = Constraint(expr= m.x558 + 20 * m.b942 <= 20)
m.e412 = Constraint(expr= m.x595 - 20 * m.b940 <= 0)
m.e413 = Constraint(expr= m.x596 - 20 * m.b941 <= 0)
m.e414 = Constraint(expr= m.x597 - 20 * m.b942 <= 0)
m.e415 = Constraint(expr= m.x598 + 20 * m.b940 <= 20)
m.e416 = Constraint(expr= m.x599 + 20 * m.b941 <= 20)
m.e417 = Constraint(expr= m.x600 + 20 * m.b942 <= 20)
m.e418 = Constraint(expr= (m.x601 / (0.001 + 0.999 * m.b943) - log(m.x508 / (
0.001 + 0.999 * m.b943) + 1)) * (0.001 + 0.999 * m.b943) <= 0)
m.e419 = Constraint(expr= (m.x602 / (0.001 + 0.999 * m.b944) - log(m.x509 / (
0.001 + 0.999 * m.b944) + 1)) * (0.001 + 0.999 * m.b944) <= 0)
m.e420 = Constraint(expr= (m.x603 / (0.001 + 0.999 * m.b945) - log(m.x510 / (
0.001 + 0.999 * m.b945) + 1)) * (0.001 + 0.999 * m.b945) <= 0)
m.e421 = Constraint(expr= m.x514 == 0)
m.e422 = Constraint(expr= m.x515 == 0)
m.e423 = Constraint(expr= m.x516 == 0)
m.e424 = Constraint(expr= m.x604 == 0)
m.e425 = Constraint(expr= m.x605 == 0)
m.e426 = Constraint(expr= m.x606 == 0)
m.e427 = Constraint(expr= m.x184 - m.x508 - m.x514 == 0)
m.e428 = Constraint(expr= m.x185 - m.x509 - m.x515 == 0)
m.e429 = Constraint(expr= m.x186 - m.x510 - m.x516 == 0)
m.e430 = Constraint(expr= m.x232 - m.x601 - m.x604 == 0)
m.e431 = Constraint(expr= m.x233 - m.x602 - m.x605 == 0)
m.e432 = Constraint(expr= m.x234 - m.x603 - m.x606 == 0)
m.e433 = Constraint(expr= m.x508 - 1.32154609891348 * m.b943 <= 0)
m.e434 = Constraint(expr= m.x509 - 1.32154609891348 * m.b944 <= 0)
m.e435 = Constraint(expr= m.x510 - 1.32154609891348 * m.b945 <= 0)
m.e436 = Constraint(expr= m.x514 + 1.32154609891348 * m.b943
<= 1.32154609891348)
m.e437 = Constraint(expr= m.x515 + 1.32154609891348 * m.b944
<= 1.32154609891348)
m.e438 = Constraint(expr= m.x516 + 1.32154609891348 * m.b945
<= 1.32154609891348)
m.e439 = Constraint(expr= m.x601 - 0.842233385663186 * m.b943 <= 0)
m.e440 = Constraint(expr= m.x602 - 0.842233385663186 * m.b944 <= 0)
m.e441 = Constraint(expr= m.x603 - 0.842233385663186 * m.b945 <= 0)
m.e442 = Constraint(expr= m.x604 + 0.842233385663186 * m.b943
<= 0.842233385663186)
m.e443 = Constraint(expr= m.x605 + 0.842233385663186 * m.b944
<= 0.842233385663186)
m.e444 = Constraint(expr= m.x606 + 0.842233385663186 * m.b945
<= 0.842233385663186)
m.e445 = Constraint(expr= (m.x607 / (0.001 + 0.999 * m.b946) - 0.7 * log(m.x541
/ (0.001 + 0.999 * m.b946) + 1)) * (0.001 + 0.999 * m.b946) <= 0)
m.e446 = Constraint(expr= (m.x608 / (0.001 + 0.999 * m.b947) - 0.7 * log(m.x542
/ (0.001 + 0.999 * m.b947) + 1)) * (0.001 + 0.999 * m.b947) <= 0)
m.e447 = Constraint(expr= (m.x609 / (0.001 + 0.999 * m.b948) - 0.7 * log(m.x543
/ (0.001 + 0.999 * m.b948) + 1)) * (0.001 + 0.999 * m.b948) <= 0)
m.e448 = Constraint(expr= m.x544 == 0)
m.e449 = Constraint(expr= m.x545 == 0)
m.e450 = Constraint(expr= m.x546 == 0)
m.e451 = Constraint(expr= m.x610 == 0)
m.e452 = Constraint(expr= m.x611 == 0)
m.e453 = Constraint(expr= m.x612 == 0)
m.e454 = Constraint(expr= m.x199 - m.x541 - m.x544 == 0)
m.e455 = Constraint(expr= m.x200 - m.x542 - m.x545 == 0)
m.e456 = Constraint(expr= m.x201 - m.x543 - m.x546 == 0)
m.e457 = Constraint(expr= m.x235 - m.x607 - m.x610 == 0)
m.e458 = Constraint(expr= m.x236 - m.x608 - m.x611 == 0)
m.e459 = Constraint(expr= m.x237 - m.x609 - m.x612 == 0)
m.e460 = Constraint(expr= m.x541 - 1.26558121681553 * m.b946 <= 0)
m.e461 = Constraint(expr= m.x542 - 1.26558121681553 * m.b947 <= 0)
m.e462 = Constraint(expr= m.x543 - 1.26558121681553 * m.b948 <= 0)
m.e463 = Constraint(expr= m.x544 + 1.26558121681553 * m.b946
<= 1.26558121681553)
m.e464 = Constraint(expr= m.x545 + 1.26558121681553 * m.b947
<= 1.26558121681553)
m.e465 = Constraint(expr= m.x546 + 1.26558121681553 * m.b948
<= 1.26558121681553)
m.e466 = Constraint(expr= m.x607 - 0.572481933717686 * m.b946 <= 0)
m.e467 = Constraint(expr= m.x608 - 0.572481933717686 * m.b947 <= 0)
m.e468 = Constraint(expr= m.x609 - 0.572481933717686 * m.b948 <= 0)
m.e469 = Constraint(expr= m.x610 + 0.572481933717686 * m.b946
<= 0.572481933717686)
m.e470 = Constraint(expr= m.x611 + 0.572481933717686 * m.b947
<= 0.572481933717686)
m.e471 = Constraint(expr= m.x612 + 0.572481933717686 * m.b948
<= 0.572481933717686)
m.e472 = Constraint(expr= (m.x613 / (0.001 + 0.999 * m.b949) - 0.65 * log(
m.x547 / (0.001 + 0.999 * m.b949) + 1)) * (0.001 + 0.999 * m.b949) <= 0)
m.e473 = Constraint(expr= (m.x614 / (0.001 + 0.999 * m.b950) - 0.65 * log(
m.x548 / (0.001 + 0.999 * m.b950) + 1)) * (0.001 + 0.999 * m.b950) <= 0)
m.e474 = Constraint(expr= (m.x615 / (0.001 + 0.999 * m.b951) - 0.65 * log(
m.x549 / (0.001 + 0.999 * m.b951) + 1)) * (0.001 + 0.999 * m.b951) <= 0)
m.e475 = Constraint(expr= (m.x613 / (0.001 + 0.999 * m.b949) - 0.65 * log(
m.x559 / (0.001 + 0.999 * m.b949) + 1)) * (0.001 + 0.999 * m.b949) <= 0)
m.e476 = Constraint(expr= (m.x614 / (0.001 + 0.999 * m.b950) - 0.65 * log(
m.x560 / (0.001 + 0.999 * m.b950) + 1)) * (0.001 + 0.999 * m.b950) <= 0)
m.e477 = Constraint(expr= (m.x615 / (0.001 + 0.999 * m.b951) - 0.65 * log(
m.x561 / (0.001 + 0.999 * m.b951) + 1)) * (0.001 + 0.999 * m.b951) <= 0)
m.e478 = Constraint(expr= m.x550 == 0)
m.e479 = Constraint(expr= m.x551 == 0)
m.e480 = Constraint(expr= m.x552 == 0)
m.e481 = Constraint(expr= m.x562 == 0)
m.e482 = Constraint(expr= m.x563 == 0)
m.e483 = Constraint(expr= m.x564 == 0)
m.e484 = Constraint(expr= m.x616 == 0)
m.e485 = Constraint(expr= m.x617 == 0)
m.e486 = Constraint(expr= m.x618 == 0)
m.e487 = Constraint(expr= m.x202 - m.x547 - m.x550 == 0)
m.e488 = Constraint(expr= m.x203 - m.x548 - m.x551 == 0)
m.e489 = Constraint(expr= m.x204 - m.x549 - m.x552 == 0)
m.e490 = Constraint(expr= m.x211 - m.x559 - m.x562 == 0)
m.e491 = Constraint(expr= m.x212 - m.x560 - m.x563 == 0)
m.e492 = Constraint(expr= m.x213 - m.x561 - m.x564 == 0)
m.e493 = Constraint(expr= m.x238 - m.x613 - m.x616 == 0)
m.e494 = Constraint(expr= m.x239 - m.x614 - m.x617 == 0)
m.e495 = Constraint(expr= m.x240 - m.x615 - m.x618 == 0)
m.e496 = Constraint(expr= m.x547 - 1.26558121681553 * m.b949 <= 0)
m.e497 = Constraint(expr= m.x548 - 1.26558121681553 * m.b950 <= 0)
m.e498 = Constraint(expr= m.x549 - 1.26558121681553 * m.b951 <= 0)
m.e499 = Constraint(expr= m.x550 + 1.26558121681553 * m.b949
<= 1.26558121681553)
m.e500 = Constraint(expr= m.x551 + 1.26558121681553 * m.b950
<= 1.26558121681553)
m.e501 = Constraint(expr= m.x552 + 1.26558121681553 * m.b951
<= 1.26558121681553)
m.e502 = Constraint(expr= m.x559 - 33.5 * m.b949 <= 0)
m.e503 = Constraint(expr= m.x560 - 33.5 * m.b950 <= 0)
m.e504 = Constraint(expr= m.x561 - 33.5 * m.b951 <= 0)
m.e505 = Constraint(expr= m.x562 + 33.5 * m.b949 <= 33.5)
m.e506 = Constraint(expr= m.x563 + 33.5 * m.b950 <= 33.5)
m.e507 = Constraint(expr= m.x564 + 33.5 * m.b951 <= 33.5)
m.e508 = Constraint(expr= m.x613 - 2.30162356062425 * m.b949 <= 0)
m.e509 = Constraint(expr= m.x614 - 2.30162356062425 * m.b950 <= 0)
m.e510 = Constraint(expr= m.x615 - 2.30162356062425 * m.b951 <= 0)
m.e511 = Constraint(expr= m.x616 + 2.30162356062425 * m.b949
<= 2.30162356062425)
m.e512 = Constraint(expr= m.x617 + 2.30162356062425 * m.b950
<= 2.30162356062425)
m.e513 = Constraint(expr= m.x618 + 2.30162356062425 * m.b951
<= 2.30162356062425)
m.e514 = Constraint(expr= -m.x565 + m.x619 == 0)
m.e515 = Constraint(expr= -m.x566 + m.x620 == 0)
m.e516 = Constraint(expr= -m.x567 + m.x621 == 0)
m.e517 = Constraint(expr= m.x568 == 0)
m.e518 = Constraint(expr= m.x569 == 0)
m.e519 = Constraint(expr= m.x570 == 0)
m.e520 = Constraint(expr= m.x622 == 0)
m.e521 = Constraint(expr= m.x623 == 0)
m.e522 = Constraint(expr= m.x624 == 0)
m.e523 = Constraint(expr= m.x214 - m.x565 - m.x568 == 0)
m.e524 = Constraint(expr= m.x215 - m.x566 - m.x569 == 0)
m.e525 = Constraint(expr= m.x216 - m.x567 - m.x570 == 0)
m.e526 = Constraint(expr= m.x241 - m.x619 - m.x622 == 0)
m.e527 = Constraint(expr= m.x242 - m.x620 - m.x623 == 0)
m.e528 = Constraint(expr= m.x243 - m.x621 - m.x624 == 0)
m.e529 = Constraint(expr= m.x565 - 9 * m.b952 <= 0)
m.e530 = Constraint(expr= m.x566 - 9 * m.b953 <= 0)
m.e531 = Constraint(expr= m.x567 - 9 * m.b954 <= 0)
m.e532 = Constraint(expr= m.x568 + 9 * m.b952 <= 9)
m.e533 = Constraint(expr= m.x569 + 9 * m.b953 <= 9)
m.e534 = Constraint(expr= m.x570 + 9 * m.b954 <= 9)
m.e535 = Constraint(expr= m.x619 - 9 * m.b952 <= 0)
m.e536 = Constraint(expr= m.x620 - 9 * m.b953 <= 0)
m.e537 = Constraint(expr= m.x621 - 9 * m.b954 <= 0)
m.e538 = Constraint(expr= m.x622 + 9 * m.b952 <= 9)
m.e539 = Constraint(expr= m.x623 + 9 * m.b953 <= 9)
m.e540 = Constraint(expr= m.x624 + 9 * m.b954 <= 9)
m.e541 = Constraint(expr= -m.x571 + m.x625 == 0)
m.e542 = Constraint(expr= -m.x572 + m.x626 == 0)
m.e543 = Constraint(expr= -m.x573 + m.x627 == 0)
m.e544 = Constraint(expr= m.x574 == 0)
m.e545 = Constraint(expr= m.x575 == 0)
m.e546 = Constraint(expr= m.x576 == 0)
m.e547 = Constraint(expr= m.x628 == 0)
m.e548 = Constraint(expr= m.x629 == 0)
m.e549 = Constraint(expr= m.x630 == 0)
m.e550 = Constraint(expr= m.x217 - m.x571 - m.x574 == 0)
m.e551 = Constraint(expr= m.x218 - m.x572 - m.x575 == 0)
m.e552 = Constraint(expr= m.x219 - m.x573 - m.x576 == 0)
m.e553 = Constraint(expr= m.x244 - m.x625 - m.x628 == 0)
m.e554 = Constraint(expr= m.x245 - m.x626 - m.x629 == 0)
m.e555 = Constraint(expr= m.x246 - m.x627 - m.x630 == 0)
m.e556 = Constraint(expr= m.x571 - 9 * m.b955 <= 0)
m.e557 = Constraint(expr= m.x572 - 9 * m.b956 <= 0)
m.e558 = Constraint(expr= m.x573 - 9 * m.b957 <= 0)
m.e559 = Constraint(expr= m.x574 + 9 * m.b955 <= 9)
m.e560 = Constraint(expr= m.x575 + 9 * m.b956 <= 9)
m.e561 = Constraint(expr= m.x576 + 9 * m.b957 <= 9)
m.e562 = Constraint(expr= m.x625 - 9 * m.b955 <= 0)
m.e563 = Constraint(expr= m.x626 - 9 * m.b956 <= 0)
m.e564 = Constraint(expr= m.x627 - 9 * m.b957 <= 0)
m.e565 = Constraint(expr= m.x628 + 9 * m.b955 <= 9)
m.e566 = Constraint(expr= m.x629 + 9 * m.b956 <= 9)
m.e567 = Constraint(expr= m.x630 + 9 * m.b957 <= 9)
m.e568 = Constraint(expr= (m.x631 / (0.001 + 0.999 * m.b958) - 0.75 * log(
m.x577 / (0.001 + 0.999 * m.b958) + 1)) * (0.001 + 0.999 * m.b958) <= 0)
m.e569 = Constraint(expr= (m.x632 / (0.001 + 0.999 * m.b959) - 0.75 * log(
m.x578 / (0.001 + 0.999 * m.b959) + 1)) * (0.001 + 0.999 * m.b959) <= 0)
m.e570 = Constraint(expr= (m.x633 / (0.001 + 0.999 * m.b960) - 0.75 * log(
m.x579 / (0.001 + 0.999 * m.b960) + 1)) * (0.001 + 0.999 * m.b960) <= 0)
m.e571 = Constraint(expr= m.x580 == 0)
m.e572 = Constraint(expr= m.x581 == 0)
m.e573 = Constraint(expr= m.x582 == 0)
m.e574 = Constraint(expr= m.x634 == 0)
m.e575 = Constraint(expr= m.x635 == 0)
m.e576 = Constraint(expr= m.x636 == 0)
m.e577 = Constraint(expr= m.x220 - m.x577 - m.x580 == 0)
m.e578 = Constraint(expr= m.x221 - m.x578 - m.x581 == 0)
m.e579 = Constraint(expr= m.x222 - m.x579 - m.x582 == 0)
m.e580 = Constraint(expr= m.x247 - m.x631 - m.x634 == 0)
m.e581 = Constraint(expr= m.x248 - m.x632 - m.x635 == 0)
m.e582 = Constraint(expr= m.x249 - m.x633 - m.x636 == 0)
m.e583 = Constraint(expr= m.x577 - 3.04984759446376 * m.b958 <= 0)
m.e584 = Constraint(expr= m.x578 - 3.04984759446376 * m.b959 <= 0)
m.e585 = Constraint(expr= m.x579 - 3.04984759446376 * m.b960 <= 0)
m.e586 = Constraint(expr= m.x580 + 3.04984759446376 * m.b958
<= 3.04984759446376)
m.e587 = Constraint(expr= m.x581 + 3.04984759446376 * m.b959
<= 3.04984759446376)
m.e588 = Constraint(expr= m.x582 + 3.04984759446376 * m.b960
<= 3.04984759446376)
m.e589 = Constraint(expr= m.x631 - 1.04900943706034 * m.b958 <= 0)
m.e590 = Constraint(expr= m.x632 - 1.04900943706034 * m.b959 <= 0)
m.e591 = Constraint(expr= m.x633 - 1.04900943706034 * m.b960 <= 0)
m.e592 = Constraint(expr= m.x634 + 1.04900943706034 * m.b958
<= 1.04900943706034)
m.e593 = Constraint(expr= m.x635 + 1.04900943706034 * m.b959
<= 1.04900943706034)
m.e594 = Constraint(expr= m.x636 + 1.04900943706034 * m.b960
<= 1.04900943706034)
m.e595 = Constraint(expr= (m.x637 / (0.001 + 0.999 * m.b961) - 0.8 * log(m.x583
/ (0.001 + 0.999 * m.b961) + 1)) * (0.001 + 0.999 * m.b961) <= 0)
m.e596 = Constraint(expr= (m.x638 / (0.001 + 0.999 * m.b962) - 0.8 * log(m.x584
/ (0.001 + 0.999 * m.b962) + 1)) * (0.001 + 0.999 * m.b962) <= 0)
m.e597 = Constraint(expr= (m.x639 / (0.001 + 0.999 * m.b963) - 0.8 * log(m.x585
/ (0.001 + 0.999 * m.b963) + 1)) * (0.001 + 0.999 * m.b963) <= 0)
m.e598 = Constraint(expr= m.x586 == 0)
m.e599 = Constraint(expr= m.x587 == 0)
m.e600 = Constraint(expr= m.x588 == 0)
m.e601 = Constraint(expr= m.x640 == 0)
m.e602 = Constraint(expr= m.x641 == 0)
m.e603 = Constraint(expr= m.x642 == 0)
m.e604 = Constraint(expr= m.x223 - m.x583 - m.x586 == 0)
m.e605 = Constraint(expr= m.x224 - m.x584 - m.x587 == 0)
m.e606 = Constraint(expr= m.x225 - m.x585 - m.x588 == 0)
m.e607 = Constraint(expr= m.x250 - m.x637 - m.x640 == 0)
m.e608 = Constraint(expr= m.x251 - m.x638 - m.x641 == 0)
m.e609 = Constraint(expr= m.x252 - m.x639 - m.x642 == 0)
m.e610 = Constraint(expr= m.x583 - 3.04984759446376 * m.b961 <= 0)
m.e611 = Constraint(expr= m.x584 - 3.04984759446376 * m.b962 <= 0)
m.e612 = Constraint(expr= m.x585 - 3.04984759446376 * m.b963 <= 0)
m.e613 = Constraint(expr= m.x586 + 3.04984759446376 * m.b961
<= 3.04984759446376)
m.e614 = Constraint(expr= m.x587 + 3.04984759446376 * m.b962
<= 3.04984759446376)
m.e615 = Constraint(expr= m.x588 + 3.04984759446376 * m.b963
<= 3.04984759446376)
m.e616 = Constraint(expr= m.x637 - 1.11894339953103 * m.b961 <= 0)
m.e617 = Constraint(expr= m.x638 - 1.11894339953103 * m.b962 <= 0)
m.e618 = Constraint(expr= m.x639 - 1.11894339953103 * m.b963 <= 0)
m.e619 = Constraint(expr= m.x640 + 1.11894339953103 * m.b961
<= 1.11894339953103)
m.e620 = Constraint(expr= m.x641 + 1.11894339953103 * m.b962
<= 1.11894339953103)
m.e621 = Constraint(expr= m.x642 + 1.11894339953103 * m.b963
<= 1.11894339953103)
m.e622 = Constraint(expr= (m.x643 / (0.001 + 0.999 * m.b964) - 0.85 * log(
m.x589 / (0.001 + 0.999 * m.b964) + 1)) * (0.001 + 0.999 * m.b964) <= 0)
m.e623 = Constraint(expr= (m.x644 / (0.001 + 0.999 * m.b965) - 0.85 * log(
m.x590 / (0.001 + 0.999 * m.b965) + 1)) * (0.001 + 0.999 * m.b965) <= 0)
m.e624 = Constraint(expr= (m.x645 / (0.001 + 0.999 * m.b966) - 0.85 * log(
m.x591 / (0.001 + 0.999 * m.b966) + 1)) * (0.001 + 0.999 * m.b966) <= 0)
m.e625 = Constraint(expr= m.x592 == 0)
m.e626 = Constraint(expr= m.x593 == 0)
m.e627 = Constraint(expr= m.x594 == 0)
m.e628 = Constraint(expr= m.x646 == 0)
m.e629 = Constraint(expr= m.x647 == 0)
m.e630 = Constraint(expr= m.x648 == 0)
m.e631 = Constraint(expr= m.x226 - m.x589 - m.x592 == 0)
m.e632 = Constraint(expr= m.x227 - m.x590 - m.x593 == 0)
m.e633 = Constraint(expr= m.x228 - m.x591 - m.x594 == 0)
m.e634 = Constraint(expr= m.x253 - m.x643 - m.x646 == 0)
m.e635 = Constraint(expr= m.x254 - m.x644 - m.x647 == 0)
m.e636 = Constraint(expr= m.x255 - m.x645 - m.x648 == 0)
m.e637 = Constraint(expr= m.x589 - 3.04984759446376 * m.b964 <= 0)
m.e638 = Constraint(expr= m.x590 - 3.04984759446376 * m.b965 <= 0)
m.e639 = Constraint(expr= m.x591 - 3.04984759446376 * m.b966 <= 0)
m.e640 = Constraint(expr= m.x592 + 3.04984759446376 * m.b964
<= 3.04984759446376)
m.e641 = Constraint(expr= m.x593 + 3.04984759446376 * m.b965
<= 3.04984759446376)
m.e642 = Constraint(expr= m.x594 + 3.04984759446376 * m.b966
<= 3.04984759446376)
m.e643 = Constraint(expr= m.x643 - 1.18887736200171 * m.b964 <= 0)
m.e644 = Constraint(expr= m.x644 - 1.18887736200171 * m.b965 <= 0)
m.e645 = Constraint(expr= m.x645 - 1.18887736200171 * m.b966 <= 0)
m.e646 = Constraint(expr= m.x646 + 1.18887736200171 * m.b964
<= 1.18887736200171)
m.e647 = Constraint(expr= m.x647 + 1.18887736200171 * m.b965
<= 1.18887736200171)
m.e648 = Constraint(expr= m.x648 + 1.18887736200171 * m.b966
<= 1.18887736200171)
m.e649 = Constraint(expr= (m.x661 / (0.001 + 0.999 * m.b967) - log(m.x649 / (
0.001 + 0.999 * m.b967) + 1)) * (0.001 + 0.999 * m.b967) <= 0)
m.e650 = Constraint(expr= (m.x662 / (0.001 + 0.999 * m.b968) - log(m.x650 / (
0.001 + 0.999 * m.b968) + 1)) * (0.001 + 0.999 * m.b968) <= 0)
m.e651 = Constraint(expr= (m.x663 / (0.001 + 0.999 * m.b969) - log(m.x651 / (
0.001 + 0.999 * m.b969) + 1)) * (0.001 + 0.999 * m.b969) <= 0)
m.e652 = Constraint(expr= m.x652 == 0)
m.e653 = Constraint(expr= m.x653 == 0)
m.e654 = Constraint(expr= m.x654 == 0)
m.e655 = Constraint(expr= m.x664 == 0)
m.e656 = Constraint(expr= m.x665 == 0)
m.e657 = Constraint(expr= m.x666 == 0)
m.e658 = Constraint(expr= m.x259 - m.x649 - m.x652 == 0)
m.e659 = Constraint(expr= m.x260 - m.x650 - m.x653 == 0)
m.e660 = Constraint(expr= m.x261 - m.x651 - m.x654 == 0)
m.e661 = Constraint(expr= m.x265 - m.x661 - m.x664 == 0)
m.e662 = Constraint(expr= m.x266 - m.x662 - m.x665 == 0)
m.e663 = Constraint(expr= m.x267 - m.x663 - m.x666 == 0)
m.e664 = Constraint(expr= m.x649 - 1.18887736200171 * m.b967 <= 0)
m.e665 = Constraint(expr= m.x650 - 1.18887736200171 * m.b968 <= 0)
m.e666 = Constraint(expr= m.x651 - 1.18887736200171 * m.b969 <= 0)
m.e667 = Constraint(expr= m.x652 + 1.18887736200171 * m.b967
<= 1.18887736200171)
m.e668 = Constraint(expr= m.x653 + 1.18887736200171 * m.b968
<= 1.18887736200171)
m.e669 = Constraint(expr= m.x654 + 1.18887736200171 * m.b969
<= 1.18887736200171)
m.e670 = Constraint(expr= m.x661 - 0.78338879230327 * m.b967 <= 0)
m.e671 = Constraint(expr= m.x662 - 0.78338879230327 * m.b968 <= 0)
m.e672 = Constraint(expr= m.x663 - 0.78338879230327 * m.b969 <= 0)
m.e673 = Constraint(expr= m.x664 + 0.78338879230327 * m.b967
<= 0.78338879230327)
m.e674 = Constraint(expr= m.x665 + 0.78338879230327 * m.b968
<= 0.78338879230327)
m.e675 = Constraint(expr= m.x666 + 0.78338879230327 * m.b969
<= 0.78338879230327)
m.e676 = Constraint(expr= (m.x667 / (0.001 + 0.999 * m.b970) - 1.2 * log(m.x655
/ (0.001 + 0.999 * m.b970) + 1)) * (0.001 + 0.999 * m.b970) <= 0)
m.e677 = Constraint(expr= (m.x668 / (0.001 + 0.999 * m.b971) - 1.2 * log(m.x656
/ (0.001 + 0.999 * m.b971) + 1)) * (0.001 + 0.999 * m.b971) <= 0)
m.e678 = Constraint(expr= (m.x669 / (0.001 + 0.999 * m.b972) - 1.2 * log(m.x657
/ (0.001 + 0.999 * m.b972) + 1)) * (0.001 + 0.999 * m.b972) <= 0)
m.e679 = Constraint(expr= m.x658 == 0)
m.e680 = Constraint(expr= m.x659 == 0)
m.e681 = Constraint(expr= m.x660 == 0)
m.e682 = Constraint(expr= m.x670 == 0)
m.e683 = Constraint(expr= m.x671 == 0)
m.e684 = Constraint(expr= m.x672 == 0)
m.e685 = Constraint(expr= m.x262 - m.x655 - m.x658 == 0)
m.e686 = Constraint(expr= m.x263 - m.x656 - m.x659 == 0)
m.e687 = Constraint(expr= m.x264 - m.x657 - m.x660 == 0)
m.e688 = Constraint(expr= m.x268 - m.x667 - m.x670 == 0)
m.e689 = Constraint(expr= m.x269 - m.x668 - m.x671 == 0)
m.e690 = Constraint(expr= m.x270 - m.x669 - m.x672 == 0)
m.e691 = Constraint(expr= m.x655 - 1.18887736200171 * m.b970 <= 0)
m.e692 = Constraint(expr= m.x656 - 1.18887736200171 * m.b971 <= 0)
m.e693 = Constraint(expr= m.x657 - 1.18887736200171 * m.b972 <= 0)
m.e694 = Constraint(expr= m.x658 + 1.18887736200171 * m.b970
<= 1.18887736200171)
m.e695 = Constraint(expr= m.x659 + 1.18887736200171 * m.b971
<= 1.18887736200171)
m.e696 = Constraint(expr= m.x660 + 1.18887736200171 * m.b972
<= 1.18887736200171)
m.e697 = Constraint(expr= m.x667 - 0.940066550763924 * m.b970 <= 0)
m.e698 = Constraint(expr= m.x668 - 0.940066550763924 * m.b971 <= 0)
m.e699 = Constraint(expr= m.x669 - 0.940066550763924 * m.b972 <= 0)
m.e700 = Constraint(expr= m.x670 + 0.940066550763924 * m.b970
<= 0.940066550763924)
m.e701 = Constraint(expr= m.x671 + 0.940066550763924 * m.b971
<= 0.940066550763924)
m.e702 = Constraint(expr= m.x672 + 0.940066550763924 * m.b972
<= 0.940066550763924)
m.e703 = Constraint(expr= -0.75 * m.x673 + m.x697 == 0)
m.e704 = Constraint(expr= -0.75 * m.x674 + m.x698 == 0)
m.e705 = Constraint(expr= -0.75 * m.x675 + m.x699 == 0)
m.e706 = Constraint(expr= m.x676 == 0)
m.e707 = Constraint(expr= m.x677 == 0)
m.e708 = Constraint(expr= m.x678 == 0)
m.e709 = Constraint(expr= m.x700 == 0)
m.e710 = Constraint(expr= m.x701 == 0)
m.e711 = Constraint(expr= m.x702 == 0)
m.e712 = Constraint(expr= m.x280 - m.x673 - m.x676 == 0)
m.e713 = Constraint(expr= m.x281 - m.x674 - m.x677 == 0)
m.e714 = Constraint(expr= m.x282 - m.x675 - m.x678 == 0)
m.e715 = Constraint(expr= m.x292 - m.x697 - m.x700 == 0)
m.e716 = Constraint(expr= m.x293 - m.x698 - m.x701 == 0)
m.e717 = Constraint(expr= m.x294 - m.x699 - m.x702 == 0)
m.e718 = Constraint(expr= m.x673 - 0.940066550763924 * m.b973 <= 0)
m.e719 = Constraint(expr= m.x674 - 0.940066550763924 * m.b974 <= 0)
m.e720 = Constraint(expr= m.x675 - 0.940066550763924 * m.b975 <= 0)
m.e721 = Constraint(expr= m.x676 + 0.940066550763924 * m.b973
<= 0.940066550763924)
m.e722 = Constraint(expr= m.x677 + 0.940066550763924 * m.b974
<= 0.940066550763924)
m.e723 = Constraint(expr= m.x678 + 0.940066550763924 * m.b975
<= 0.940066550763924)
m.e724 = Constraint(expr= m.x697 - 0.705049913072943 * m.b973 <= 0)
m.e725 = Constraint(expr= m.x698 - 0.705049913072943 * m.b974 <= 0)
m.e726 = Constraint(expr= m.x699 - 0.705049913072943 * m.b975 <= 0)
m.e727 = Constraint(expr= m.x700 + 0.705049913072943 * m.b973
<= 0.705049913072943)
m.e728 = Constraint(expr= m.x701 + 0.705049913072943 * m.b974
<= 0.705049913072943)
m.e729 = Constraint(expr= m.x702 + 0.705049913072943 * m.b975
<= 0.705049913072943)
m.e730 = Constraint(expr= (m.x703 / (0.001 + 0.999 * m.b976) - 1.5 * log(m.x679
/ (0.001 + 0.999 * m.b976) + 1)) * (0.001 + 0.999 * m.b976) <= 0)
m.e731 = Constraint(expr= (m.x704 / (0.001 + 0.999 * m.b977) - 1.5 * log(m.x680
/ (0.001 + 0.999 * m.b977) + 1)) * (0.001 + 0.999 * m.b977) <= 0)
m.e732 = Constraint(expr= (m.x705 / (0.001 + 0.999 * m.b978) - 1.5 * log(m.x681
/ (0.001 + 0.999 * m.b978) + 1)) * (0.001 + 0.999 * m.b978) <= 0)
m.e733 = Constraint(expr= m.x682 == 0)
m.e734 = Constraint(expr= m.x683 == 0)
m.e735 = Constraint(expr= m.x684 == 0)
m.e736 = Constraint(expr= m.x709 == 0)
m.e737 = Constraint(expr= m.x710 == 0)
m.e738 = Constraint(expr= m.x711 == 0)
m.e739 = Constraint(expr= m.x283 - m.x679 - m.x682 == 0)
m.e740 = Constraint(expr= m.x284 - m.x680 - m.x683 == 0)
m.e741 = Constraint(expr= m.x285 - m.x681 - m.x684 == 0)
m.e742 = Constraint(expr= m.x295 - m.x703 - m.x709 == 0)
m.e743 = Constraint(expr= m.x296 - m.x704 - m.x710 == 0)
m.e744 = Constraint(expr= m.x297 - m.x705 - m.x711 == 0)
m.e745 = Constraint(expr= m.x679 - 0.940066550763924 * m.b976 <= 0)
m.e746 = Constraint(expr= m.x680 - 0.940066550763924 * m.b977 <= 0)
m.e747 = Constraint(expr= m.x681 - 0.940066550763924 * m.b978 <= 0)
m.e748 = Constraint(expr= m.x682 + 0.940066550763924 * m.b976
<= 0.940066550763924)
m.e749 = Constraint(expr= m.x683 + 0.940066550763924 * m.b977
<= 0.940066550763924)
m.e750 = Constraint(expr= m.x684 + 0.940066550763924 * m.b978
<= 0.940066550763924)
m.e751 = Constraint(expr= m.x703 - 0.994083415506506 * m.b976 <= 0)
m.e752 = Constraint(expr= m.x704 - 0.994083415506506 * m.b977 <= 0)
m.e753 = Constraint(expr= m.x705 - 0.994083415506506 * m.b978 <= 0)
m.e754 = Constraint(expr= m.x709 + 0.994083415506506 * m.b976
<= 0.994083415506506)
m.e755 = Constraint(expr= m.x710 + 0.994083415506506 * m.b977
<= 0.994083415506506)
m.e756 = Constraint(expr= m.x711 + 0.994083415506506 * m.b978
<= 0.994083415506506)
m.e757 = Constraint(expr= -m.x685 + m.x715 == 0)
m.e758 = Constraint(expr= -m.x686 + m.x716 == 0)
m.e759 = Constraint(expr= -m.x687 + m.x717 == 0)
m.e760 = Constraint(expr= -0.5 * m.x691 + m.x715 == 0)
m.e761 = Constraint(expr= -0.5 * m.x692 + m.x716 == 0)
m.e762 = Constraint(expr= -0.5 * m.x693 + m.x717 == 0)
m.e763 = Constraint(expr= m.x688 == 0)
m.e764 = Constraint(expr= m.x689 == 0)
m.e765 = Constraint(expr= m.x690 == 0)
m.e766 = Constraint(expr= m.x694 == 0)
m.e767 = Constraint(expr= m.x695 == 0)
m.e768 = Constraint(expr= m.x696 == 0)
m.e769 = Constraint(expr= m.x718 == 0)
m.e770 = Constraint(expr= m.x719 == 0)
m.e771 = Constraint(expr= m.x720 == 0)
m.e772 = Constraint(expr= m.x286 - m.x685 - m.x688 == 0)
m.e773 = Constraint(expr= m.x287 - m.x686 - m.x689 == 0)
m.e774 = Constraint(expr= m.x288 - m.x687 - m.x690 == 0)
m.e775 = Constraint(expr= m.x289 - m.x691 - m.x694 == 0)
m.e776 = Constraint(expr= m.x290 - m.x692 - m.x695 == 0)
m.e777 = Constraint(expr= m.x291 - m.x693 - m.x696 == 0)
m.e778 = Constraint(expr= m.x298 - m.x715 - m.x718 == 0)
m.e779 = Constraint(expr= m.x299 - m.x716 - m.x719 == 0)
m.e780 = Constraint(expr= m.x300 - m.x717 - m.x720 == 0)
m.e781 = Constraint(expr= m.x685 - 0.940066550763924 * m.b979 <= 0)
m.e782 = Constraint(expr= m.x686 - 0.940066550763924 * m.b980 <= 0)
m.e783 = Constraint(expr= m.x687 - 0.940066550763924 * m.b981 <= 0)
m.e784 = Constraint(expr= m.x688 + 0.940066550763924 * m.b979
<= 0.940066550763924)
m.e785 = Constraint(expr= m.x689 + 0.940066550763924 * m.b980
<= 0.940066550763924)
m.e786 = Constraint(expr= m.x690 + 0.940066550763924 * m.b981
<= 0.940066550763924)
m.e787 = Constraint(expr= m.x691 - 30 * m.b979 <= 0)
m.e788 = Constraint(expr= m.x692 - 30 * m.b980 <= 0)
m.e789 = Constraint(expr= m.x693 - 30 * m.b981 <= 0)
m.e790 = Constraint(expr= m.x694 + 30 * m.b979 <= 30)
m.e791 = Constraint(expr= m.x695 + 30 * m.b980 <= 30)
m.e792 = Constraint(expr= m.x696 + 30 * m.b981 <= 30)
m.e793 = Constraint(expr= m.x715 - 15 * m.b979 <= 0)
m.e794 = Constraint(expr= m.x716 - 15 * m.b980 <= 0)
m.e795 = Constraint(expr= m.x717 - 15 * m.b981 <= 0)
m.e796 = Constraint(expr= m.x718 + 15 * m.b979 <= 15)
m.e797 = Constraint(expr= m.x719 + 15 * m.b980 <= 15)
m.e798 = Constraint(expr= m.x720 + 15 * m.b981 <= 15)
# Disjunct guarded by binaries b982-b984 (auto-generated, one binary per
# parallel copy). Nonlinear relation: output <= 1.25*log(input + 1).
# The (0.001 + 0.999*b) factor keeps the divisions well-defined when b = 0
# and collapses the constraint to ~0 <= 0 in that case — a standard trick in
# generated GDP-to-MINLP reformulations (NOTE(review): confirm against the
# generator).
m.e799 = Constraint(expr= (m.x751 / (0.001 + 0.999 * m.b982) - 1.25 * log(
m.x721 / (0.001 + 0.999 * m.b982) + 1)) * (0.001 + 0.999 * m.b982) <= 0)
m.e800 = Constraint(expr= (m.x752 / (0.001 + 0.999 * m.b983) - 1.25 * log(
m.x722 / (0.001 + 0.999 * m.b983) + 1)) * (0.001 + 0.999 * m.b983) <= 0)
m.e801 = Constraint(expr= (m.x753 / (0.001 + 0.999 * m.b984) - 1.25 * log(
m.x723 / (0.001 + 0.999 * m.b984) + 1)) * (0.001 + 0.999 * m.b984) <= 0)
# The "unit not selected" copies are fixed to zero.
m.e802 = Constraint(expr= m.x724 == 0)
m.e803 = Constraint(expr= m.x725 == 0)
m.e804 = Constraint(expr= m.x726 == 0)
m.e805 = Constraint(expr= m.x757 == 0)
m.e806 = Constraint(expr= m.x758 == 0)
m.e807 = Constraint(expr= m.x759 == 0)
# Disaggregation balances: each original variable equals the sum of its
# on/off copies (e.g. x301 = x721 + x724).
m.e808 = Constraint(expr= m.x301 - m.x721 - m.x724 == 0)
m.e809 = Constraint(expr= m.x302 - m.x722 - m.x725 == 0)
m.e810 = Constraint(expr= m.x303 - m.x723 - m.x726 == 0)
m.e811 = Constraint(expr= m.x316 - m.x751 - m.x757 == 0)
m.e812 = Constraint(expr= m.x317 - m.x752 - m.x758 == 0)
m.e813 = Constraint(expr= m.x318 - m.x753 - m.x759 == 0)
# Big-M bounds tying each disaggregated copy to its binary.
m.e814 = Constraint(expr= m.x721 - 0.705049913072943 * m.b982 <= 0)
m.e815 = Constraint(expr= m.x722 - 0.705049913072943 * m.b983 <= 0)
m.e816 = Constraint(expr= m.x723 - 0.705049913072943 * m.b984 <= 0)
m.e817 = Constraint(expr= m.x724 + 0.705049913072943 * m.b982
<= 0.705049913072943)
m.e818 = Constraint(expr= m.x725 + 0.705049913072943 * m.b983
<= 0.705049913072943)
m.e819 = Constraint(expr= m.x726 + 0.705049913072943 * m.b984
<= 0.705049913072943)
m.e820 = Constraint(expr= m.x751 - 0.666992981045719 * m.b982 <= 0)
m.e821 = Constraint(expr= m.x752 - 0.666992981045719 * m.b983 <= 0)
m.e822 = Constraint(expr= m.x753 - 0.666992981045719 * m.b984 <= 0)
m.e823 = Constraint(expr= m.x757 + 0.666992981045719 * m.b982
<= 0.666992981045719)
m.e824 = Constraint(expr= m.x758 + 0.666992981045719 * m.b983
<= 0.666992981045719)
m.e825 = Constraint(expr= m.x759 + 0.666992981045719 * m.b984
<= 0.666992981045719)
# Disjunct guarded by b985-b987: output <= 0.9*log(input + 1), with the
# same (0.001 + 0.999*b) guard as the previous group (well-defined at b=0).
m.e826 = Constraint(expr= (m.x763 / (0.001 + 0.999 * m.b985) - 0.9 * log(m.x727
/ (0.001 + 0.999 * m.b985) + 1)) * (0.001 + 0.999 * m.b985) <= 0)
m.e827 = Constraint(expr= (m.x764 / (0.001 + 0.999 * m.b986) - 0.9 * log(m.x728
/ (0.001 + 0.999 * m.b986) + 1)) * (0.001 + 0.999 * m.b986) <= 0)
m.e828 = Constraint(expr= (m.x765 / (0.001 + 0.999 * m.b987) - 0.9 * log(m.x729
/ (0.001 + 0.999 * m.b987) + 1)) * (0.001 + 0.999 * m.b987) <= 0)
# "Off" copies fixed to zero.
m.e829 = Constraint(expr= m.x730 == 0)
m.e830 = Constraint(expr= m.x731 == 0)
m.e831 = Constraint(expr= m.x732 == 0)
m.e832 = Constraint(expr= m.x769 == 0)
m.e833 = Constraint(expr= m.x770 == 0)
m.e834 = Constraint(expr= m.x771 == 0)
# Disaggregation balances (original = on-copy + off-copy).
m.e835 = Constraint(expr= m.x304 - m.x727 - m.x730 == 0)
m.e836 = Constraint(expr= m.x305 - m.x728 - m.x731 == 0)
m.e837 = Constraint(expr= m.x306 - m.x729 - m.x732 == 0)
m.e838 = Constraint(expr= m.x319 - m.x763 - m.x769 == 0)
m.e839 = Constraint(expr= m.x320 - m.x764 - m.x770 == 0)
m.e840 = Constraint(expr= m.x321 - m.x765 - m.x771 == 0)
# Big-M bounds on the disaggregated copies.
m.e841 = Constraint(expr= m.x727 - 0.705049913072943 * m.b985 <= 0)
m.e842 = Constraint(expr= m.x728 - 0.705049913072943 * m.b986 <= 0)
m.e843 = Constraint(expr= m.x729 - 0.705049913072943 * m.b987 <= 0)
m.e844 = Constraint(expr= m.x730 + 0.705049913072943 * m.b985
<= 0.705049913072943)
m.e845 = Constraint(expr= m.x731 + 0.705049913072943 * m.b986
<= 0.705049913072943)
m.e846 = Constraint(expr= m.x732 + 0.705049913072943 * m.b987
<= 0.705049913072943)
m.e847 = Constraint(expr= m.x763 - 0.480234946352917 * m.b985 <= 0)
m.e848 = Constraint(expr= m.x764 - 0.480234946352917 * m.b986 <= 0)
m.e849 = Constraint(expr= m.x765 - 0.480234946352917 * m.b987 <= 0)
m.e850 = Constraint(expr= m.x769 + 0.480234946352917 * m.b985
<= 0.480234946352917)
m.e851 = Constraint(expr= m.x770 + 0.480234946352917 * m.b986
<= 0.480234946352917)
m.e852 = Constraint(expr= m.x771 + 0.480234946352917 * m.b987
<= 0.480234946352917)
# Disjunct guarded by b988-b990: output <= log(input + 1) (unit coefficient),
# same guarded-division pattern as the groups above.
m.e853 = Constraint(expr= (m.x775 / (0.001 + 0.999 * m.b988) - log(m.x706 / (
0.001 + 0.999 * m.b988) + 1)) * (0.001 + 0.999 * m.b988) <= 0)
m.e854 = Constraint(expr= (m.x776 / (0.001 + 0.999 * m.b989) - log(m.x707 / (
0.001 + 0.999 * m.b989) + 1)) * (0.001 + 0.999 * m.b989) <= 0)
m.e855 = Constraint(expr= (m.x777 / (0.001 + 0.999 * m.b990) - log(m.x708 / (
0.001 + 0.999 * m.b990) + 1)) * (0.001 + 0.999 * m.b990) <= 0)
# "Off" copies fixed to zero.
m.e856 = Constraint(expr= m.x712 == 0)
m.e857 = Constraint(expr= m.x713 == 0)
m.e858 = Constraint(expr= m.x714 == 0)
m.e859 = Constraint(expr= m.x778 == 0)
m.e860 = Constraint(expr= m.x779 == 0)
m.e861 = Constraint(expr= m.x780 == 0)
# Disaggregation balances.
m.e862 = Constraint(expr= m.x295 - m.x706 - m.x712 == 0)
m.e863 = Constraint(expr= m.x296 - m.x707 - m.x713 == 0)
m.e864 = Constraint(expr= m.x297 - m.x708 - m.x714 == 0)
m.e865 = Constraint(expr= m.x322 - m.x775 - m.x778 == 0)
m.e866 = Constraint(expr= m.x323 - m.x776 - m.x779 == 0)
m.e867 = Constraint(expr= m.x324 - m.x777 - m.x780 == 0)
# Big-M bounds on the disaggregated copies.
m.e868 = Constraint(expr= m.x706 - 0.994083415506506 * m.b988 <= 0)
m.e869 = Constraint(expr= m.x707 - 0.994083415506506 * m.b989 <= 0)
m.e870 = Constraint(expr= m.x708 - 0.994083415506506 * m.b990 <= 0)
m.e871 = Constraint(expr= m.x712 + 0.994083415506506 * m.b988
<= 0.994083415506506)
m.e872 = Constraint(expr= m.x713 + 0.994083415506506 * m.b989
<= 0.994083415506506)
m.e873 = Constraint(expr= m.x714 + 0.994083415506506 * m.b990
<= 0.994083415506506)
m.e874 = Constraint(expr= m.x775 - 0.690184503917672 * m.b988 <= 0)
m.e875 = Constraint(expr= m.x776 - 0.690184503917672 * m.b989 <= 0)
m.e876 = Constraint(expr= m.x777 - 0.690184503917672 * m.b990 <= 0)
m.e877 = Constraint(expr= m.x778 + 0.690184503917672 * m.b988
<= 0.690184503917672)
m.e878 = Constraint(expr= m.x779 + 0.690184503917672 * m.b989
<= 0.690184503917672)
m.e879 = Constraint(expr= m.x780 + 0.690184503917672 * m.b990
<= 0.690184503917672)
# Linear-yield disjunct guarded by b991-b993: output = 0.9 * input
# (x781 = 0.9*x733, etc.), plus zero-fix of the off copies, disaggregation
# balances, and big-M bounds (input M = 15, output M = 13.5 = 0.9*15).
m.e880 = Constraint(expr= -0.9 * m.x733 + m.x781 == 0)
m.e881 = Constraint(expr= -0.9 * m.x734 + m.x782 == 0)
m.e882 = Constraint(expr= -0.9 * m.x735 + m.x783 == 0)
m.e883 = Constraint(expr= m.x736 == 0)
m.e884 = Constraint(expr= m.x737 == 0)
m.e885 = Constraint(expr= m.x738 == 0)
m.e886 = Constraint(expr= m.x784 == 0)
m.e887 = Constraint(expr= m.x785 == 0)
m.e888 = Constraint(expr= m.x786 == 0)
m.e889 = Constraint(expr= m.x307 - m.x733 - m.x736 == 0)
m.e890 = Constraint(expr= m.x308 - m.x734 - m.x737 == 0)
m.e891 = Constraint(expr= m.x309 - m.x735 - m.x738 == 0)
m.e892 = Constraint(expr= m.x325 - m.x781 - m.x784 == 0)
m.e893 = Constraint(expr= m.x326 - m.x782 - m.x785 == 0)
m.e894 = Constraint(expr= m.x327 - m.x783 - m.x786 == 0)
m.e895 = Constraint(expr= m.x733 - 15 * m.b991 <= 0)
m.e896 = Constraint(expr= m.x734 - 15 * m.b992 <= 0)
m.e897 = Constraint(expr= m.x735 - 15 * m.b993 <= 0)
m.e898 = Constraint(expr= m.x736 + 15 * m.b991 <= 15)
m.e899 = Constraint(expr= m.x737 + 15 * m.b992 <= 15)
m.e900 = Constraint(expr= m.x738 + 15 * m.b993 <= 15)
m.e901 = Constraint(expr= m.x781 - 13.5 * m.b991 <= 0)
m.e902 = Constraint(expr= m.x782 - 13.5 * m.b992 <= 0)
m.e903 = Constraint(expr= m.x783 - 13.5 * m.b993 <= 0)
m.e904 = Constraint(expr= m.x784 + 13.5 * m.b991 <= 13.5)
m.e905 = Constraint(expr= m.x785 + 13.5 * m.b992 <= 13.5)
m.e906 = Constraint(expr= m.x786 + 13.5 * m.b993 <= 13.5)
# Linear-yield disjunct guarded by b994-b996: output = 0.6 * input
# (input M = 15, output M = 9 = 0.6*15).
m.e907 = Constraint(expr= -0.6 * m.x739 + m.x787 == 0)
m.e908 = Constraint(expr= -0.6 * m.x740 + m.x788 == 0)
m.e909 = Constraint(expr= -0.6 * m.x741 + m.x789 == 0)
m.e910 = Constraint(expr= m.x742 == 0)
m.e911 = Constraint(expr= m.x743 == 0)
m.e912 = Constraint(expr= m.x744 == 0)
m.e913 = Constraint(expr= m.x790 == 0)
m.e914 = Constraint(expr= m.x791 == 0)
m.e915 = Constraint(expr= m.x792 == 0)
m.e916 = Constraint(expr= m.x310 - m.x739 - m.x742 == 0)
m.e917 = Constraint(expr= m.x311 - m.x740 - m.x743 == 0)
m.e918 = Constraint(expr= m.x312 - m.x741 - m.x744 == 0)
m.e919 = Constraint(expr= m.x328 - m.x787 - m.x790 == 0)
m.e920 = Constraint(expr= m.x329 - m.x788 - m.x791 == 0)
m.e921 = Constraint(expr= m.x330 - m.x789 - m.x792 == 0)
m.e922 = Constraint(expr= m.x739 - 15 * m.b994 <= 0)
m.e923 = Constraint(expr= m.x740 - 15 * m.b995 <= 0)
m.e924 = Constraint(expr= m.x741 - 15 * m.b996 <= 0)
m.e925 = Constraint(expr= m.x742 + 15 * m.b994 <= 15)
m.e926 = Constraint(expr= m.x743 + 15 * m.b995 <= 15)
m.e927 = Constraint(expr= m.x744 + 15 * m.b996 <= 15)
m.e928 = Constraint(expr= m.x787 - 9 * m.b994 <= 0)
m.e929 = Constraint(expr= m.x788 - 9 * m.b995 <= 0)
m.e930 = Constraint(expr= m.x789 - 9 * m.b996 <= 0)
m.e931 = Constraint(expr= m.x790 + 9 * m.b994 <= 9)
m.e932 = Constraint(expr= m.x791 + 9 * m.b995 <= 9)
m.e933 = Constraint(expr= m.x792 + 9 * m.b996 <= 9)
# Disjunct guarded by b997-b999: output <= 1.1*log(input + 1), guarded
# division as above; off copies zeroed; balances; big-M bounds
# (input M = 15, output M = 3.04984759446376 ~= 1.1*log(16)).
m.e934 = Constraint(expr= (m.x793 / (0.001 + 0.999 * m.b997) - 1.1 * log(m.x745
/ (0.001 + 0.999 * m.b997) + 1)) * (0.001 + 0.999 * m.b997) <= 0)
m.e935 = Constraint(expr= (m.x794 / (0.001 + 0.999 * m.b998) - 1.1 * log(m.x746
/ (0.001 + 0.999 * m.b998) + 1)) * (0.001 + 0.999 * m.b998) <= 0)
m.e936 = Constraint(expr= (m.x795 / (0.001 + 0.999 * m.b999) - 1.1 * log(m.x747
/ (0.001 + 0.999 * m.b999) + 1)) * (0.001 + 0.999 * m.b999) <= 0)
m.e937 = Constraint(expr= m.x748 == 0)
m.e938 = Constraint(expr= m.x749 == 0)
m.e939 = Constraint(expr= m.x750 == 0)
m.e940 = Constraint(expr= m.x796 == 0)
m.e941 = Constraint(expr= m.x797 == 0)
m.e942 = Constraint(expr= m.x798 == 0)
m.e943 = Constraint(expr= m.x313 - m.x745 - m.x748 == 0)
m.e944 = Constraint(expr= m.x314 - m.x746 - m.x749 == 0)
m.e945 = Constraint(expr= m.x315 - m.x747 - m.x750 == 0)
m.e946 = Constraint(expr= m.x331 - m.x793 - m.x796 == 0)
m.e947 = Constraint(expr= m.x332 - m.x794 - m.x797 == 0)
m.e948 = Constraint(expr= m.x333 - m.x795 - m.x798 == 0)
m.e949 = Constraint(expr= m.x745 - 15 * m.b997 <= 0)
m.e950 = Constraint(expr= m.x746 - 15 * m.b998 <= 0)
m.e951 = Constraint(expr= m.x747 - 15 * m.b999 <= 0)
m.e952 = Constraint(expr= m.x748 + 15 * m.b997 <= 15)
m.e953 = Constraint(expr= m.x749 + 15 * m.b998 <= 15)
m.e954 = Constraint(expr= m.x750 + 15 * m.b999 <= 15)
m.e955 = Constraint(expr= m.x793 - 3.04984759446376 * m.b997 <= 0)
m.e956 = Constraint(expr= m.x794 - 3.04984759446376 * m.b998 <= 0)
m.e957 = Constraint(expr= m.x795 - 3.04984759446376 * m.b999 <= 0)
m.e958 = Constraint(expr= m.x796 + 3.04984759446376 * m.b997
<= 3.04984759446376)
m.e959 = Constraint(expr= m.x797 + 3.04984759446376 * m.b998
<= 3.04984759446376)
m.e960 = Constraint(expr= m.x798 + 3.04984759446376 * m.b999
<= 3.04984759446376)
# Two-feed linear disjunct guarded by b1000-b1002: the output x853-x855 is
# pinned both to 0.9 of the first feed (x853 = 0.9*x754) and equal to the
# second feed (x853 = x811); three off-copy groups are zeroed; three
# disaggregation balances; big-M bounds per copy.
m.e961 = Constraint(expr= -0.9 * m.x754 + m.x853 == 0)
m.e962 = Constraint(expr= -0.9 * m.x755 + m.x854 == 0)
m.e963 = Constraint(expr= -0.9 * m.x756 + m.x855 == 0)
m.e964 = Constraint(expr= -m.x811 + m.x853 == 0)
m.e965 = Constraint(expr= -m.x812 + m.x854 == 0)
m.e966 = Constraint(expr= -m.x813 + m.x855 == 0)
m.e967 = Constraint(expr= m.x760 == 0)
m.e968 = Constraint(expr= m.x761 == 0)
m.e969 = Constraint(expr= m.x762 == 0)
m.e970 = Constraint(expr= m.x814 == 0)
m.e971 = Constraint(expr= m.x815 == 0)
m.e972 = Constraint(expr= m.x816 == 0)
m.e973 = Constraint(expr= m.x856 == 0)
m.e974 = Constraint(expr= m.x857 == 0)
m.e975 = Constraint(expr= m.x858 == 0)
m.e976 = Constraint(expr= m.x316 - m.x754 - m.x760 == 0)
m.e977 = Constraint(expr= m.x317 - m.x755 - m.x761 == 0)
m.e978 = Constraint(expr= m.x318 - m.x756 - m.x762 == 0)
m.e979 = Constraint(expr= m.x340 - m.x811 - m.x814 == 0)
m.e980 = Constraint(expr= m.x341 - m.x812 - m.x815 == 0)
m.e981 = Constraint(expr= m.x342 - m.x813 - m.x816 == 0)
m.e982 = Constraint(expr= m.x364 - m.x853 - m.x856 == 0)
m.e983 = Constraint(expr= m.x365 - m.x854 - m.x857 == 0)
m.e984 = Constraint(expr= m.x366 - m.x855 - m.x858 == 0)
m.e985 = Constraint(expr= m.x754 - 0.666992981045719 * m.b1000 <= 0)
m.e986 = Constraint(expr= m.x755 - 0.666992981045719 * m.b1001 <= 0)
m.e987 = Constraint(expr= m.x756 - 0.666992981045719 * m.b1002 <= 0)
m.e988 = Constraint(expr= m.x760 + 0.666992981045719 * m.b1000
<= 0.666992981045719)
m.e989 = Constraint(expr= m.x761 + 0.666992981045719 * m.b1001
<= 0.666992981045719)
m.e990 = Constraint(expr= m.x762 + 0.666992981045719 * m.b1002
<= 0.666992981045719)
m.e991 = Constraint(expr= m.x811 - 25 * m.b1000 <= 0)
m.e992 = Constraint(expr= m.x812 - 25 * m.b1001 <= 0)
m.e993 = Constraint(expr= m.x813 - 25 * m.b1002 <= 0)
m.e994 = Constraint(expr= m.x814 + 25 * m.b1000 <= 25)
m.e995 = Constraint(expr= m.x815 + 25 * m.b1001 <= 25)
m.e996 = Constraint(expr= m.x816 + 25 * m.b1002 <= 25)
m.e997 = Constraint(expr= m.x853 - 25 * m.b1000 <= 0)
m.e998 = Constraint(expr= m.x854 - 25 * m.b1001 <= 0)
m.e999 = Constraint(expr= m.x855 - 25 * m.b1002 <= 0)
m.e1000 = Constraint(expr= m.x856 + 25 * m.b1000 <= 25)
m.e1001 = Constraint(expr= m.x857 + 25 * m.b1001 <= 25)
m.e1002 = Constraint(expr= m.x858 + 25 * m.b1002 <= 25)
# Disjunct guarded by b1003-b1005: output <= log(input + 1) (guarded
# division as in earlier groups), off copies zeroed, balances, big-M bounds.
m.e1003 = Constraint(expr= (m.x859 / (0.001 + 0.999 * m.b1003) - log(m.x766 / (
0.001 + 0.999 * m.b1003) + 1)) * (0.001 + 0.999 * m.b1003) <= 0)
m.e1004 = Constraint(expr= (m.x860 / (0.001 + 0.999 * m.b1004) - log(m.x767 / (
0.001 + 0.999 * m.b1004) + 1)) * (0.001 + 0.999 * m.b1004) <= 0)
m.e1005 = Constraint(expr= (m.x861 / (0.001 + 0.999 * m.b1005) - log(m.x768 / (
0.001 + 0.999 * m.b1005) + 1)) * (0.001 + 0.999 * m.b1005) <= 0)
m.e1006 = Constraint(expr= m.x772 == 0)
m.e1007 = Constraint(expr= m.x773 == 0)
m.e1008 = Constraint(expr= m.x774 == 0)
m.e1009 = Constraint(expr= m.x862 == 0)
m.e1010 = Constraint(expr= m.x863 == 0)
m.e1011 = Constraint(expr= m.x864 == 0)
m.e1012 = Constraint(expr= m.x319 - m.x766 - m.x772 == 0)
m.e1013 = Constraint(expr= m.x320 - m.x767 - m.x773 == 0)
m.e1014 = Constraint(expr= m.x321 - m.x768 - m.x774 == 0)
m.e1015 = Constraint(expr= m.x367 - m.x859 - m.x862 == 0)
m.e1016 = Constraint(expr= m.x368 - m.x860 - m.x863 == 0)
m.e1017 = Constraint(expr= m.x369 - m.x861 - m.x864 == 0)
m.e1018 = Constraint(expr= m.x766 - 0.480234946352917 * m.b1003 <= 0)
m.e1019 = Constraint(expr= m.x767 - 0.480234946352917 * m.b1004 <= 0)
m.e1020 = Constraint(expr= m.x768 - 0.480234946352917 * m.b1005 <= 0)
m.e1021 = Constraint(expr= m.x772 + 0.480234946352917 * m.b1003
<= 0.480234946352917)
m.e1022 = Constraint(expr= m.x773 + 0.480234946352917 * m.b1004
<= 0.480234946352917)
m.e1023 = Constraint(expr= m.x774 + 0.480234946352917 * m.b1005
<= 0.480234946352917)
m.e1024 = Constraint(expr= m.x859 - 0.392200822712722 * m.b1003 <= 0)
m.e1025 = Constraint(expr= m.x860 - 0.392200822712722 * m.b1004 <= 0)
m.e1026 = Constraint(expr= m.x861 - 0.392200822712722 * m.b1005 <= 0)
m.e1027 = Constraint(expr= m.x862 + 0.392200822712722 * m.b1003
<= 0.392200822712722)
m.e1028 = Constraint(expr= m.x863 + 0.392200822712722 * m.b1004
<= 0.392200822712722)
m.e1029 = Constraint(expr= m.x864 + 0.392200822712722 * m.b1005
<= 0.392200822712722)
# Disjunct guarded by b1006-b1008: output <= 0.7*log(input + 1).
m.e1030 = Constraint(expr= (m.x865 / (0.001 + 0.999 * m.b1006) - 0.7 * log(
m.x799 / (0.001 + 0.999 * m.b1006) + 1)) * (0.001 + 0.999 * m.b1006) <= 0)
m.e1031 = Constraint(expr= (m.x866 / (0.001 + 0.999 * m.b1007) - 0.7 * log(
m.x800 / (0.001 + 0.999 * m.b1007) + 1)) * (0.001 + 0.999 * m.b1007) <= 0)
m.e1032 = Constraint(expr= (m.x867 / (0.001 + 0.999 * m.b1008) - 0.7 * log(
m.x801 / (0.001 + 0.999 * m.b1008) + 1)) * (0.001 + 0.999 * m.b1008) <= 0)
m.e1033 = Constraint(expr= m.x802 == 0)
m.e1034 = Constraint(expr= m.x803 == 0)
m.e1035 = Constraint(expr= m.x804 == 0)
m.e1036 = Constraint(expr= m.x868 == 0)
m.e1037 = Constraint(expr= m.x869 == 0)
m.e1038 = Constraint(expr= m.x870 == 0)
m.e1039 = Constraint(expr= m.x334 - m.x799 - m.x802 == 0)
m.e1040 = Constraint(expr= m.x335 - m.x800 - m.x803 == 0)
m.e1041 = Constraint(expr= m.x336 - m.x801 - m.x804 == 0)
m.e1042 = Constraint(expr= m.x370 - m.x865 - m.x868 == 0)
m.e1043 = Constraint(expr= m.x371 - m.x866 - m.x869 == 0)
m.e1044 = Constraint(expr= m.x372 - m.x867 - m.x870 == 0)
m.e1045 = Constraint(expr= m.x799 - 0.690184503917672 * m.b1006 <= 0)
m.e1046 = Constraint(expr= m.x800 - 0.690184503917672 * m.b1007 <= 0)
m.e1047 = Constraint(expr= m.x801 - 0.690184503917672 * m.b1008 <= 0)
m.e1048 = Constraint(expr= m.x802 + 0.690184503917672 * m.b1006
<= 0.690184503917672)
m.e1049 = Constraint(expr= m.x803 + 0.690184503917672 * m.b1007
<= 0.690184503917672)
m.e1050 = Constraint(expr= m.x804 + 0.690184503917672 * m.b1008
<= 0.690184503917672)
m.e1051 = Constraint(expr= m.x865 - 0.367386387824208 * m.b1006 <= 0)
m.e1052 = Constraint(expr= m.x866 - 0.367386387824208 * m.b1007 <= 0)
m.e1053 = Constraint(expr= m.x867 - 0.367386387824208 * m.b1008 <= 0)
m.e1054 = Constraint(expr= m.x868 + 0.367386387824208 * m.b1006
<= 0.367386387824208)
m.e1055 = Constraint(expr= m.x869 + 0.367386387824208 * m.b1007
<= 0.367386387824208)
m.e1056 = Constraint(expr= m.x870 + 0.367386387824208 * m.b1008
<= 0.367386387824208)
# Two-feed disjunct guarded by b1009-b1011: the same output x871-x873 is
# bounded by 0.65*log(feed + 1) for BOTH feeds (x805-x807 and x817-x819);
# three off-copy groups zeroed; three balances; big-M bounds per copy.
m.e1057 = Constraint(expr= (m.x871 / (0.001 + 0.999 * m.b1009) - 0.65 * log(
m.x805 / (0.001 + 0.999 * m.b1009) + 1)) * (0.001 + 0.999 * m.b1009) <= 0)
m.e1058 = Constraint(expr= (m.x872 / (0.001 + 0.999 * m.b1010) - 0.65 * log(
m.x806 / (0.001 + 0.999 * m.b1010) + 1)) * (0.001 + 0.999 * m.b1010) <= 0)
m.e1059 = Constraint(expr= (m.x873 / (0.001 + 0.999 * m.b1011) - 0.65 * log(
m.x807 / (0.001 + 0.999 * m.b1011) + 1)) * (0.001 + 0.999 * m.b1011) <= 0)
m.e1060 = Constraint(expr= (m.x871 / (0.001 + 0.999 * m.b1009) - 0.65 * log(
m.x817 / (0.001 + 0.999 * m.b1009) + 1)) * (0.001 + 0.999 * m.b1009) <= 0)
m.e1061 = Constraint(expr= (m.x872 / (0.001 + 0.999 * m.b1010) - 0.65 * log(
m.x818 / (0.001 + 0.999 * m.b1010) + 1)) * (0.001 + 0.999 * m.b1010) <= 0)
m.e1062 = Constraint(expr= (m.x873 / (0.001 + 0.999 * m.b1011) - 0.65 * log(
m.x819 / (0.001 + 0.999 * m.b1011) + 1)) * (0.001 + 0.999 * m.b1011) <= 0)
m.e1063 = Constraint(expr= m.x808 == 0)
m.e1064 = Constraint(expr= m.x809 == 0)
m.e1065 = Constraint(expr= m.x810 == 0)
m.e1066 = Constraint(expr= m.x820 == 0)
m.e1067 = Constraint(expr= m.x821 == 0)
m.e1068 = Constraint(expr= m.x822 == 0)
m.e1069 = Constraint(expr= m.x874 == 0)
m.e1070 = Constraint(expr= m.x875 == 0)
m.e1071 = Constraint(expr= m.x876 == 0)
m.e1072 = Constraint(expr= m.x337 - m.x805 - m.x808 == 0)
m.e1073 = Constraint(expr= m.x338 - m.x806 - m.x809 == 0)
m.e1074 = Constraint(expr= m.x339 - m.x807 - m.x810 == 0)
m.e1075 = Constraint(expr= m.x346 - m.x817 - m.x820 == 0)
m.e1076 = Constraint(expr= m.x347 - m.x818 - m.x821 == 0)
m.e1077 = Constraint(expr= m.x348 - m.x819 - m.x822 == 0)
m.e1078 = Constraint(expr= m.x373 - m.x871 - m.x874 == 0)
m.e1079 = Constraint(expr= m.x374 - m.x872 - m.x875 == 0)
m.e1080 = Constraint(expr= m.x375 - m.x873 - m.x876 == 0)
m.e1081 = Constraint(expr= m.x805 - 0.690184503917672 * m.b1009 <= 0)
m.e1082 = Constraint(expr= m.x806 - 0.690184503917672 * m.b1010 <= 0)
m.e1083 = Constraint(expr= m.x807 - 0.690184503917672 * m.b1011 <= 0)
m.e1084 = Constraint(expr= m.x808 + 0.690184503917672 * m.b1009
<= 0.690184503917672)
m.e1085 = Constraint(expr= m.x809 + 0.690184503917672 * m.b1010
<= 0.690184503917672)
m.e1086 = Constraint(expr= m.x810 + 0.690184503917672 * m.b1011
<= 0.690184503917672)
m.e1087 = Constraint(expr= m.x817 - 38.5 * m.b1009 <= 0)
m.e1088 = Constraint(expr= m.x818 - 38.5 * m.b1010 <= 0)
m.e1089 = Constraint(expr= m.x819 - 38.5 * m.b1011 <= 0)
m.e1090 = Constraint(expr= m.x820 + 38.5 * m.b1009 <= 38.5)
m.e1091 = Constraint(expr= m.x821 + 38.5 * m.b1010 <= 38.5)
m.e1092 = Constraint(expr= m.x822 + 38.5 * m.b1011 <= 38.5)
m.e1093 = Constraint(expr= m.x871 - 2.3895954367396 * m.b1009 <= 0)
m.e1094 = Constraint(expr= m.x872 - 2.3895954367396 * m.b1010 <= 0)
m.e1095 = Constraint(expr= m.x873 - 2.3895954367396 * m.b1011 <= 0)
m.e1096 = Constraint(expr= m.x874 + 2.3895954367396 * m.b1009
<= 2.3895954367396)
m.e1097 = Constraint(expr= m.x875 + 2.3895954367396 * m.b1010
<= 2.3895954367396)
m.e1098 = Constraint(expr= m.x876 + 2.3895954367396 * m.b1011
<= 2.3895954367396)
# Pass-through disjunct guarded by b1012-b1014: output equals input
# (x877 = x823, etc.); off copies zeroed; balances; big-M = 9 on both sides.
m.e1099 = Constraint(expr= -m.x823 + m.x877 == 0)
m.e1100 = Constraint(expr= -m.x824 + m.x878 == 0)
m.e1101 = Constraint(expr= -m.x825 + m.x879 == 0)
m.e1102 = Constraint(expr= m.x826 == 0)
m.e1103 = Constraint(expr= m.x827 == 0)
m.e1104 = Constraint(expr= m.x828 == 0)
m.e1105 = Constraint(expr= m.x880 == 0)
m.e1106 = Constraint(expr= m.x881 == 0)
m.e1107 = Constraint(expr= m.x882 == 0)
m.e1108 = Constraint(expr= m.x349 - m.x823 - m.x826 == 0)
m.e1109 = Constraint(expr= m.x350 - m.x824 - m.x827 == 0)
m.e1110 = Constraint(expr= m.x351 - m.x825 - m.x828 == 0)
m.e1111 = Constraint(expr= m.x376 - m.x877 - m.x880 == 0)
m.e1112 = Constraint(expr= m.x377 - m.x878 - m.x881 == 0)
m.e1113 = Constraint(expr= m.x378 - m.x879 - m.x882 == 0)
m.e1114 = Constraint(expr= m.x823 - 9 * m.b1012 <= 0)
m.e1115 = Constraint(expr= m.x824 - 9 * m.b1013 <= 0)
m.e1116 = Constraint(expr= m.x825 - 9 * m.b1014 <= 0)
m.e1117 = Constraint(expr= m.x826 + 9 * m.b1012 <= 9)
m.e1118 = Constraint(expr= m.x827 + 9 * m.b1013 <= 9)
m.e1119 = Constraint(expr= m.x828 + 9 * m.b1014 <= 9)
m.e1120 = Constraint(expr= m.x877 - 9 * m.b1012 <= 0)
m.e1121 = Constraint(expr= m.x878 - 9 * m.b1013 <= 0)
m.e1122 = Constraint(expr= m.x879 - 9 * m.b1014 <= 0)
m.e1123 = Constraint(expr= m.x880 + 9 * m.b1012 <= 9)
m.e1124 = Constraint(expr= m.x881 + 9 * m.b1013 <= 9)
m.e1125 = Constraint(expr= m.x882 + 9 * m.b1014 <= 9)
# Pass-through disjunct guarded by b1015-b1017: same pattern
# (x883 = x829, etc.), big-M = 9.
m.e1126 = Constraint(expr= -m.x829 + m.x883 == 0)
m.e1127 = Constraint(expr= -m.x830 + m.x884 == 0)
m.e1128 = Constraint(expr= -m.x831 + m.x885 == 0)
m.e1129 = Constraint(expr= m.x832 == 0)
m.e1130 = Constraint(expr= m.x833 == 0)
m.e1131 = Constraint(expr= m.x834 == 0)
m.e1132 = Constraint(expr= m.x886 == 0)
m.e1133 = Constraint(expr= m.x887 == 0)
m.e1134 = Constraint(expr= m.x888 == 0)
m.e1135 = Constraint(expr= m.x352 - m.x829 - m.x832 == 0)
m.e1136 = Constraint(expr= m.x353 - m.x830 - m.x833 == 0)
m.e1137 = Constraint(expr= m.x354 - m.x831 - m.x834 == 0)
m.e1138 = Constraint(expr= m.x379 - m.x883 - m.x886 == 0)
m.e1139 = Constraint(expr= m.x380 - m.x884 - m.x887 == 0)
m.e1140 = Constraint(expr= m.x381 - m.x885 - m.x888 == 0)
m.e1141 = Constraint(expr= m.x829 - 9 * m.b1015 <= 0)
m.e1142 = Constraint(expr= m.x830 - 9 * m.b1016 <= 0)
m.e1143 = Constraint(expr= m.x831 - 9 * m.b1017 <= 0)
m.e1144 = Constraint(expr= m.x832 + 9 * m.b1015 <= 9)
m.e1145 = Constraint(expr= m.x833 + 9 * m.b1016 <= 9)
m.e1146 = Constraint(expr= m.x834 + 9 * m.b1017 <= 9)
m.e1147 = Constraint(expr= m.x883 - 9 * m.b1015 <= 0)
m.e1148 = Constraint(expr= m.x884 - 9 * m.b1016 <= 0)
m.e1149 = Constraint(expr= m.x885 - 9 * m.b1017 <= 0)
m.e1150 = Constraint(expr= m.x886 + 9 * m.b1015 <= 9)
m.e1151 = Constraint(expr= m.x887 + 9 * m.b1016 <= 9)
m.e1152 = Constraint(expr= m.x888 + 9 * m.b1017 <= 9)
# Three parallel log-conversion disjuncts, identical structure, differing
# only in the log coefficient (0.75 / 0.8 / 0.85) and the derived output
# big-M. Each follows the same generated pattern as earlier groups:
# guarded log relation, zeroed off copies, disaggregation balances,
# big-M bounds.
#
# Disjunct b1018-b1020: output <= 0.75*log(input + 1).
m.e1153 = Constraint(expr= (m.x889 / (0.001 + 0.999 * m.b1018) - 0.75 * log(
m.x835 / (0.001 + 0.999 * m.b1018) + 1)) * (0.001 + 0.999 * m.b1018) <= 0)
m.e1154 = Constraint(expr= (m.x890 / (0.001 + 0.999 * m.b1019) - 0.75 * log(
m.x836 / (0.001 + 0.999 * m.b1019) + 1)) * (0.001 + 0.999 * m.b1019) <= 0)
m.e1155 = Constraint(expr= (m.x891 / (0.001 + 0.999 * m.b1020) - 0.75 * log(
m.x837 / (0.001 + 0.999 * m.b1020) + 1)) * (0.001 + 0.999 * m.b1020) <= 0)
m.e1156 = Constraint(expr= m.x838 == 0)
m.e1157 = Constraint(expr= m.x839 == 0)
m.e1158 = Constraint(expr= m.x840 == 0)
m.e1159 = Constraint(expr= m.x892 == 0)
m.e1160 = Constraint(expr= m.x893 == 0)
m.e1161 = Constraint(expr= m.x894 == 0)
m.e1162 = Constraint(expr= m.x355 - m.x835 - m.x838 == 0)
m.e1163 = Constraint(expr= m.x356 - m.x836 - m.x839 == 0)
m.e1164 = Constraint(expr= m.x357 - m.x837 - m.x840 == 0)
m.e1165 = Constraint(expr= m.x382 - m.x889 - m.x892 == 0)
m.e1166 = Constraint(expr= m.x383 - m.x890 - m.x893 == 0)
m.e1167 = Constraint(expr= m.x384 - m.x891 - m.x894 == 0)
m.e1168 = Constraint(expr= m.x835 - 3.04984759446376 * m.b1018 <= 0)
m.e1169 = Constraint(expr= m.x836 - 3.04984759446376 * m.b1019 <= 0)
m.e1170 = Constraint(expr= m.x837 - 3.04984759446376 * m.b1020 <= 0)
m.e1171 = Constraint(expr= m.x838 + 3.04984759446376 * m.b1018
<= 3.04984759446376)
m.e1172 = Constraint(expr= m.x839 + 3.04984759446376 * m.b1019
<= 3.04984759446376)
m.e1173 = Constraint(expr= m.x840 + 3.04984759446376 * m.b1020
<= 3.04984759446376)
m.e1174 = Constraint(expr= m.x889 - 1.04900943706034 * m.b1018 <= 0)
m.e1175 = Constraint(expr= m.x890 - 1.04900943706034 * m.b1019 <= 0)
m.e1176 = Constraint(expr= m.x891 - 1.04900943706034 * m.b1020 <= 0)
m.e1177 = Constraint(expr= m.x892 + 1.04900943706034 * m.b1018
<= 1.04900943706034)
m.e1178 = Constraint(expr= m.x893 + 1.04900943706034 * m.b1019
<= 1.04900943706034)
m.e1179 = Constraint(expr= m.x894 + 1.04900943706034 * m.b1020
<= 1.04900943706034)
# Disjunct b1021-b1023: output <= 0.8*log(input + 1).
m.e1180 = Constraint(expr= (m.x895 / (0.001 + 0.999 * m.b1021) - 0.8 * log(
m.x841 / (0.001 + 0.999 * m.b1021) + 1)) * (0.001 + 0.999 * m.b1021) <= 0)
m.e1181 = Constraint(expr= (m.x896 / (0.001 + 0.999 * m.b1022) - 0.8 * log(
m.x842 / (0.001 + 0.999 * m.b1022) + 1)) * (0.001 + 0.999 * m.b1022) <= 0)
m.e1182 = Constraint(expr= (m.x897 / (0.001 + 0.999 * m.b1023) - 0.8 * log(
m.x843 / (0.001 + 0.999 * m.b1023) + 1)) * (0.001 + 0.999 * m.b1023) <= 0)
m.e1183 = Constraint(expr= m.x844 == 0)
m.e1184 = Constraint(expr= m.x845 == 0)
m.e1185 = Constraint(expr= m.x846 == 0)
m.e1186 = Constraint(expr= m.x898 == 0)
m.e1187 = Constraint(expr= m.x899 == 0)
m.e1188 = Constraint(expr= m.x900 == 0)
m.e1189 = Constraint(expr= m.x358 - m.x841 - m.x844 == 0)
m.e1190 = Constraint(expr= m.x359 - m.x842 - m.x845 == 0)
m.e1191 = Constraint(expr= m.x360 - m.x843 - m.x846 == 0)
m.e1192 = Constraint(expr= m.x385 - m.x895 - m.x898 == 0)
m.e1193 = Constraint(expr= m.x386 - m.x896 - m.x899 == 0)
m.e1194 = Constraint(expr= m.x387 - m.x897 - m.x900 == 0)
m.e1195 = Constraint(expr= m.x841 - 3.04984759446376 * m.b1021 <= 0)
m.e1196 = Constraint(expr= m.x842 - 3.04984759446376 * m.b1022 <= 0)
m.e1197 = Constraint(expr= m.x843 - 3.04984759446376 * m.b1023 <= 0)
m.e1198 = Constraint(expr= m.x844 + 3.04984759446376 * m.b1021
<= 3.04984759446376)
m.e1199 = Constraint(expr= m.x845 + 3.04984759446376 * m.b1022
<= 3.04984759446376)
m.e1200 = Constraint(expr= m.x846 + 3.04984759446376 * m.b1023
<= 3.04984759446376)
m.e1201 = Constraint(expr= m.x895 - 1.11894339953103 * m.b1021 <= 0)
m.e1202 = Constraint(expr= m.x896 - 1.11894339953103 * m.b1022 <= 0)
m.e1203 = Constraint(expr= m.x897 - 1.11894339953103 * m.b1023 <= 0)
m.e1204 = Constraint(expr= m.x898 + 1.11894339953103 * m.b1021
<= 1.11894339953103)
m.e1205 = Constraint(expr= m.x899 + 1.11894339953103 * m.b1022
<= 1.11894339953103)
m.e1206 = Constraint(expr= m.x900 + 1.11894339953103 * m.b1023
<= 1.11894339953103)
# Disjunct b1024-b1026: output <= 0.85*log(input + 1).
m.e1207 = Constraint(expr= (m.x901 / (0.001 + 0.999 * m.b1024) - 0.85 * log(
m.x847 / (0.001 + 0.999 * m.b1024) + 1)) * (0.001 + 0.999 * m.b1024) <= 0)
m.e1208 = Constraint(expr= (m.x902 / (0.001 + 0.999 * m.b1025) - 0.85 * log(
m.x848 / (0.001 + 0.999 * m.b1025) + 1)) * (0.001 + 0.999 * m.b1025) <= 0)
m.e1209 = Constraint(expr= (m.x903 / (0.001 + 0.999 * m.b1026) - 0.85 * log(
m.x849 / (0.001 + 0.999 * m.b1026) + 1)) * (0.001 + 0.999 * m.b1026) <= 0)
m.e1210 = Constraint(expr= m.x850 == 0)
m.e1211 = Constraint(expr= m.x851 == 0)
m.e1212 = Constraint(expr= m.x852 == 0)
m.e1213 = Constraint(expr= m.x904 == 0)
m.e1214 = Constraint(expr= m.x905 == 0)
m.e1215 = Constraint(expr= m.x906 == 0)
m.e1216 = Constraint(expr= m.x361 - m.x847 - m.x850 == 0)
m.e1217 = Constraint(expr= m.x362 - m.x848 - m.x851 == 0)
m.e1218 = Constraint(expr= m.x363 - m.x849 - m.x852 == 0)
m.e1219 = Constraint(expr= m.x388 - m.x901 - m.x904 == 0)
m.e1220 = Constraint(expr= m.x389 - m.x902 - m.x905 == 0)
m.e1221 = Constraint(expr= m.x390 - m.x903 - m.x906 == 0)
m.e1222 = Constraint(expr= m.x847 - 3.04984759446376 * m.b1024 <= 0)
m.e1223 = Constraint(expr= m.x848 - 3.04984759446376 * m.b1025 <= 0)
m.e1224 = Constraint(expr= m.x849 - 3.04984759446376 * m.b1026 <= 0)
m.e1225 = Constraint(expr= m.x850 + 3.04984759446376 * m.b1024
<= 3.04984759446376)
m.e1226 = Constraint(expr= m.x851 + 3.04984759446376 * m.b1025
<= 3.04984759446376)
m.e1227 = Constraint(expr= m.x852 + 3.04984759446376 * m.b1026
<= 3.04984759446376)
m.e1228 = Constraint(expr= m.x901 - 1.18887736200171 * m.b1024 <= 0)
m.e1229 = Constraint(expr= m.x902 - 1.18887736200171 * m.b1025 <= 0)
m.e1230 = Constraint(expr= m.x903 - 1.18887736200171 * m.b1026 <= 0)
m.e1231 = Constraint(expr= m.x904 + 1.18887736200171 * m.b1024
<= 1.18887736200171)
m.e1232 = Constraint(expr= m.x905 + 1.18887736200171 * m.b1025
<= 1.18887736200171)
m.e1233 = Constraint(expr= m.x906 + 1.18887736200171 * m.b1026
<= 1.18887736200171)
# Linear links x{k} = -c * b{j} for x1..x120 and b1027..b1146: each xk takes
# the value -c when its binary is on and 0 otherwise. NOTE(review): the
# negative sign suggests these are cost terms credited/charged through the
# objective (typical fixed-charge modeling) — confirm against the original
# model; the coefficients are model data and must not be edited by hand.
m.e1234 = Constraint(expr= m.x1 + 5 * m.b1027 == 0)
m.e1235 = Constraint(expr= m.x2 + 4 * m.b1028 == 0)
m.e1236 = Constraint(expr= m.x3 + 6 * m.b1029 == 0)
m.e1237 = Constraint(expr= m.x4 + 8 * m.b1030 == 0)
m.e1238 = Constraint(expr= m.x5 + 7 * m.b1031 == 0)
m.e1239 = Constraint(expr= m.x6 + 6 * m.b1032 == 0)
m.e1240 = Constraint(expr= m.x7 + 6 * m.b1033 == 0)
m.e1241 = Constraint(expr= m.x8 + 9 * m.b1034 == 0)
m.e1242 = Constraint(expr= m.x9 + 4 * m.b1035 == 0)
m.e1243 = Constraint(expr= m.x10 + 10 * m.b1036 == 0)
m.e1244 = Constraint(expr= m.x11 + 9 * m.b1037 == 0)
m.e1245 = Constraint(expr= m.x12 + 5 * m.b1038 == 0)
m.e1246 = Constraint(expr= m.x13 + 6 * m.b1039 == 0)
m.e1247 = Constraint(expr= m.x14 + 10 * m.b1040 == 0)
m.e1248 = Constraint(expr= m.x15 + 6 * m.b1041 == 0)
m.e1249 = Constraint(expr= m.x16 + 7 * m.b1042 == 0)
m.e1250 = Constraint(expr= m.x17 + 7 * m.b1043 == 0)
m.e1251 = Constraint(expr= m.x18 + 4 * m.b1044 == 0)
m.e1252 = Constraint(expr= m.x19 + 4 * m.b1045 == 0)
m.e1253 = Constraint(expr= m.x20 + 3 * m.b1046 == 0)
m.e1254 = Constraint(expr= m.x21 + 2 * m.b1047 == 0)
m.e1255 = Constraint(expr= m.x22 + 5 * m.b1048 == 0)
m.e1256 = Constraint(expr= m.x23 + 6 * m.b1049 == 0)
m.e1257 = Constraint(expr= m.x24 + 7 * m.b1050 == 0)
m.e1258 = Constraint(expr= m.x25 + 2 * m.b1051 == 0)
m.e1259 = Constraint(expr= m.x26 + 5 * m.b1052 == 0)
m.e1260 = Constraint(expr= m.x27 + 2 * m.b1053 == 0)
m.e1261 = Constraint(expr= m.x28 + 4 * m.b1054 == 0)
m.e1262 = Constraint(expr= m.x29 + 7 * m.b1055 == 0)
m.e1263 = Constraint(expr= m.x30 + 4 * m.b1056 == 0)
m.e1264 = Constraint(expr= m.x31 + 3 * m.b1057 == 0)
m.e1265 = Constraint(expr= m.x32 + 9 * m.b1058 == 0)
m.e1266 = Constraint(expr= m.x33 + 3 * m.b1059 == 0)
m.e1267 = Constraint(expr= m.x34 + 7 * m.b1060 == 0)
m.e1268 = Constraint(expr= m.x35 + 2 * m.b1061 == 0)
m.e1269 = Constraint(expr= m.x36 + 9 * m.b1062 == 0)
m.e1270 = Constraint(expr= m.x37 + 3 * m.b1063 == 0)
# Coefficient 1 is implicit where the binary appears without a multiplier.
m.e1271 = Constraint(expr= m.x38 + m.b1064 == 0)
m.e1272 = Constraint(expr= m.x39 + 9 * m.b1065 == 0)
m.e1273 = Constraint(expr= m.x40 + 2 * m.b1066 == 0)
m.e1274 = Constraint(expr= m.x41 + 6 * m.b1067 == 0)
m.e1275 = Constraint(expr= m.x42 + 3 * m.b1068 == 0)
m.e1276 = Constraint(expr= m.x43 + 4 * m.b1069 == 0)
m.e1277 = Constraint(expr= m.x44 + 8 * m.b1070 == 0)
m.e1278 = Constraint(expr= m.x45 + m.b1071 == 0)
m.e1279 = Constraint(expr= m.x46 + 2 * m.b1072 == 0)
m.e1280 = Constraint(expr= m.x47 + 5 * m.b1073 == 0)
m.e1281 = Constraint(expr= m.x48 + 2 * m.b1074 == 0)
m.e1282 = Constraint(expr= m.x49 + 3 * m.b1075 == 0)
m.e1283 = Constraint(expr= m.x50 + 4 * m.b1076 == 0)
m.e1284 = Constraint(expr= m.x51 + 3 * m.b1077 == 0)
m.e1285 = Constraint(expr= m.x52 + 5 * m.b1078 == 0)
m.e1286 = Constraint(expr= m.x53 + 7 * m.b1079 == 0)
m.e1287 = Constraint(expr= m.x54 + 6 * m.b1080 == 0)
m.e1288 = Constraint(expr= m.x55 + 2 * m.b1081 == 0)
m.e1289 = Constraint(expr= m.x56 + 8 * m.b1082 == 0)
m.e1290 = Constraint(expr= m.x57 + 4 * m.b1083 == 0)
m.e1291 = Constraint(expr= m.x58 + m.b1084 == 0)
m.e1292 = Constraint(expr= m.x59 + 4 * m.b1085 == 0)
m.e1293 = Constraint(expr= m.x60 + m.b1086 == 0)
m.e1294 = Constraint(expr= m.x61 + 2 * m.b1087 == 0)
m.e1295 = Constraint(expr= m.x62 + 5 * m.b1088 == 0)
m.e1296 = Constraint(expr= m.x63 + 2 * m.b1089 == 0)
m.e1297 = Constraint(expr= m.x64 + 9 * m.b1090 == 0)
m.e1298 = Constraint(expr= m.x65 + 2 * m.b1091 == 0)
m.e1299 = Constraint(expr= m.x66 + 9 * m.b1092 == 0)
m.e1300 = Constraint(expr= m.x67 + 5 * m.b1093 == 0)
m.e1301 = Constraint(expr= m.x68 + 8 * m.b1094 == 0)
m.e1302 = Constraint(expr= m.x69 + 4 * m.b1095 == 0)
m.e1303 = Constraint(expr= m.x70 + 2 * m.b1096 == 0)
m.e1304 = Constraint(expr= m.x71 + 3 * m.b1097 == 0)
m.e1305 = Constraint(expr= m.x72 + 8 * m.b1098 == 0)
m.e1306 = Constraint(expr= m.x73 + 10 * m.b1099 == 0)
m.e1307 = Constraint(expr= m.x74 + 6 * m.b1100 == 0)
m.e1308 = Constraint(expr= m.x75 + 3 * m.b1101 == 0)
m.e1309 = Constraint(expr= m.x76 + 4 * m.b1102 == 0)
m.e1310 = Constraint(expr= m.x77 + 8 * m.b1103 == 0)
m.e1311 = Constraint(expr= m.x78 + 7 * m.b1104 == 0)
m.e1312 = Constraint(expr= m.x79 + 7 * m.b1105 == 0)
m.e1313 = Constraint(expr= m.x80 + 3 * m.b1106 == 0)
m.e1314 = Constraint(expr= m.x81 + 9 * m.b1107 == 0)
m.e1315 = Constraint(expr= m.x82 + 4 * m.b1108 == 0)
m.e1316 = Constraint(expr= m.x83 + 8 * m.b1109 == 0)
m.e1317 = Constraint(expr= m.x84 + 6 * m.b1110 == 0)
m.e1318 = Constraint(expr= m.x85 + 2 * m.b1111 == 0)
m.e1319 = Constraint(expr= m.x86 + m.b1112 == 0)
m.e1320 = Constraint(expr= m.x87 + 3 * m.b1113 == 0)
m.e1321 = Constraint(expr= m.x88 + 8 * m.b1114 == 0)
m.e1322 = Constraint(expr= m.x89 + 3 * m.b1115 == 0)
m.e1323 = Constraint(expr= m.x90 + 4 * m.b1116 == 0)
m.e1324 = Constraint(expr= m.x91 + 9 * m.b1117 == 0)
m.e1325 = Constraint(expr= m.x92 + 5 * m.b1118 == 0)
m.e1326 = Constraint(expr= m.x93 + m.b1119 == 0)
m.e1327 = Constraint(expr= m.x94 + 3 * m.b1120 == 0)
m.e1328 = Constraint(expr= m.x95 + 9 * m.b1121 == 0)
m.e1329 = Constraint(expr= m.x96 + 5 * m.b1122 == 0)
m.e1330 = Constraint(expr= m.x97 + 5 * m.b1123 == 0)
m.e1331 = Constraint(expr= m.x98 + 3 * m.b1124 == 0)
m.e1332 = Constraint(expr= m.x99 + 3 * m.b1125 == 0)
m.e1333 = Constraint(expr= m.x100 + 5 * m.b1126 == 0)
m.e1334 = Constraint(expr= m.x101 + 3 * m.b1127 == 0)
m.e1335 = Constraint(expr= m.x102 + 2 * m.b1128 == 0)
m.e1336 = Constraint(expr= m.x103 + 6 * m.b1129 == 0)
m.e1337 = Constraint(expr= m.x104 + 4 * m.b1130 == 0)
m.e1338 = Constraint(expr= m.x105 + 6 * m.b1131 == 0)
m.e1339 = Constraint(expr= m.x106 + 2 * m.b1132 == 0)
m.e1340 = Constraint(expr= m.x107 + 6 * m.b1133 == 0)
m.e1341 = Constraint(expr= m.x108 + 6 * m.b1134 == 0)
m.e1342 = Constraint(expr= m.x109 + 6 * m.b1135 == 0)
m.e1343 = Constraint(expr= m.x110 + 4 * m.b1136 == 0)
m.e1344 = Constraint(expr= m.x111 + 3 * m.b1137 == 0)
m.e1345 = Constraint(expr= m.x112 + 3 * m.b1138 == 0)
m.e1346 = Constraint(expr= m.x113 + 2 * m.b1139 == 0)
m.e1347 = Constraint(expr= m.x114 + m.b1140 == 0)
m.e1348 = Constraint(expr= m.x115 + 5 * m.b1141 == 0)
m.e1349 = Constraint(expr= m.x116 + 8 * m.b1142 == 0)
m.e1350 = Constraint(expr= m.x117 + 6 * m.b1143 == 0)
m.e1351 = Constraint(expr= m.x118 + 9 * m.b1144 == 0)
m.e1352 = Constraint(expr= m.x119 + 5 * m.b1145 == 0)
m.e1353 = Constraint(expr= m.x120 + 2 * m.b1146 == 0)
m.e1354 = Constraint(expr= m.b907 - m.b908 <= 0)
m.e1355 = Constraint(expr= m.b907 - m.b909 <= 0)
m.e1356 = Constraint(expr= m.b908 - m.b909 <= 0)
m.e1357 = Constraint(expr= m.b910 - m.b911 <= 0)
m.e1358 = Constraint(expr= m.b910 - m.b912 <= 0)
m.e1359 = Constraint(expr= m.b911 - m.b912 <= 0)
m.e1360 = Constraint(expr= m.b913 - m.b914 <= 0)
m.e1361 = Constraint(expr= m.b913 - m.b915 <= 0)
m.e1362 = Constraint(expr= m.b914 - m.b915 <= 0)
m.e1363 = Constraint(expr= m.b916 - m.b917 <= 0)
m.e1364 = Constraint(expr= m.b916 - m.b918 <= 0)
m.e1365 = Constraint(expr= m.b917 - m.b918 <= 0)
m.e1366 = Constraint(expr= m.b919 - m.b920 <= 0)
m.e1367 = Constraint(expr= m.b919 - m.b921 <= 0)
m.e1368 = Constraint(expr= m.b920 - m.b921 <= 0)
m.e1369 = Constraint(expr= m.b922 - m.b923 <= 0)
m.e1370 = Constraint(expr= m.b922 - m.b924 <= 0)
m.e1371 = Constraint(expr= m.b923 - m.b924 <= 0)
m.e1372 = Constraint(expr= m.b925 - m.b926 <= 0)
m.e1373 = Constraint(expr= m.b925 - m.b927 <= 0)
m.e1374 = Constraint(expr= m.b926 - m.b927 <= 0)
m.e1375 = Constraint(expr= m.b928 - m.b929 <= 0)
m.e1376 = Constraint(expr= m.b928 - m.b930 <= 0)
m.e1377 = Constraint(expr= m.b929 - m.b930 <= 0)
m.e1378 = Constraint(expr= m.b931 - m.b932 <= 0)
m.e1379 = Constraint(expr= m.b931 - m.b933 <= 0)
m.e1380 = Constraint(expr= m.b932 - m.b933 <= 0)
m.e1381 = Constraint(expr= m.b934 - m.b935 <= 0)
m.e1382 = Constraint(expr= m.b934 - m.b936 <= 0)
m.e1383 = Constraint(expr= m.b935 - m.b936 <= 0)
m.e1384 = Constraint(expr= m.b937 - m.b938 <= 0)
m.e1385 = Constraint(expr= m.b937 - m.b939 <= 0)
m.e1386 = Constraint(expr= m.b938 - m.b939 <= 0)
m.e1387 = Constraint(expr= m.b940 - m.b941 <= 0)
m.e1388 = Constraint(expr= m.b940 - m.b942 <= 0)
m.e1389 = Constraint(expr= m.b941 - m.b942 <= 0)
m.e1390 = Constraint(expr= m.b943 - m.b944 <= 0)
m.e1391 = Constraint(expr= m.b943 - m.b945 <= 0)
m.e1392 = Constraint(expr= m.b944 - m.b945 <= 0)
m.e1393 = Constraint(expr= m.b946 - m.b947 <= 0)
m.e1394 = Constraint(expr= m.b946 - m.b948 <= 0)
m.e1395 = Constraint(expr= m.b947 - m.b948 <= 0)
m.e1396 = Constraint(expr= m.b949 - m.b950 <= 0)
m.e1397 = Constraint(expr= m.b949 - m.b951 <= 0)
m.e1398 = Constraint(expr= m.b950 - m.b951 <= 0)
m.e1399 = Constraint(expr= m.b952 - m.b953 <= 0)
m.e1400 = Constraint(expr= m.b952 - m.b954 <= 0)
m.e1401 = Constraint(expr= m.b953 - m.b954 <= 0)
m.e1402 = Constraint(expr= m.b955 - m.b956 <= 0)
m.e1403 = Constraint(expr= m.b955 - m.b957 <= 0)
m.e1404 = Constraint(expr= m.b956 - m.b957 <= 0)
m.e1405 = Constraint(expr= m.b958 - m.b959 <= 0)
m.e1406 = Constraint(expr= m.b958 - m.b960 <= 0)
m.e1407 = Constraint(expr= m.b959 - m.b960 <= 0)
m.e1408 = Constraint(expr= m.b961 - m.b962 <= 0)
m.e1409 = Constraint(expr= m.b961 - m.b963 <= 0)
m.e1410 = Constraint(expr= m.b962 - m.b963 <= 0)
m.e1411 = Constraint(expr= m.b964 - m.b965 <= 0)
m.e1412 = Constraint(expr= m.b964 - m.b966 <= 0)
m.e1413 = Constraint(expr= m.b965 - m.b966 <= 0)
m.e1414 = Constraint(expr= m.b967 - m.b968 <= 0)
m.e1415 = Constraint(expr= m.b967 - m.b969 <= 0)
m.e1416 = Constraint(expr= m.b968 - m.b969 <= 0)
m.e1417 = Constraint(expr= m.b970 - m.b971 <= 0)
m.e1418 = Constraint(expr= m.b970 - m.b972 <= 0)
m.e1419 = Constraint(expr= m.b971 - m.b972 <= 0)
m.e1420 = Constraint(expr= m.b973 - m.b974 <= 0)
m.e1421 = Constraint(expr= m.b973 - m.b975 <= 0)
m.e1422 = Constraint(expr= m.b974 - m.b975 <= 0)
m.e1423 = Constraint(expr= m.b976 - m.b977 <= 0)
m.e1424 = Constraint(expr= m.b976 - m.b978 <= 0)
m.e1425 = Constraint(expr= m.b977 - m.b978 <= 0)
m.e1426 = Constraint(expr= m.b979 - m.b980 <= 0)
m.e1427 = Constraint(expr= m.b979 - m.b981 <= 0)
m.e1428 = Constraint(expr= m.b980 - m.b981 <= 0)
m.e1429 = Constraint(expr= m.b982 - m.b983 <= 0)
m.e1430 = Constraint(expr= m.b982 - m.b984 <= 0)
m.e1431 = Constraint(expr= m.b983 - m.b984 <= 0)
m.e1432 = Constraint(expr= m.b985 - m.b986 <= 0)
m.e1433 = Constraint(expr= m.b985 - m.b987 <= 0)
m.e1434 = Constraint(expr= m.b986 - m.b987 <= 0)
m.e1435 = Constraint(expr= m.b988 - m.b989 <= 0)
m.e1436 = Constraint(expr= m.b988 - m.b990 <= 0)
m.e1437 = Constraint(expr= m.b989 - m.b990 <= 0)
m.e1438 = Constraint(expr= m.b991 - m.b992 <= 0)
m.e1439 = Constraint(expr= m.b991 - m.b993 <= 0)
m.e1440 = Constraint(expr= m.b992 - m.b993 <= 0)
m.e1441 = Constraint(expr= m.b994 - m.b995 <= 0)
m.e1442 = Constraint(expr= m.b994 - m.b996 <= 0)
m.e1443 = Constraint(expr= m.b995 - m.b996 <= 0)
m.e1444 = Constraint(expr= m.b997 - m.b998 <= 0)
m.e1445 = Constraint(expr= m.b997 - m.b999 <= 0)
m.e1446 = Constraint(expr= m.b998 - m.b999 <= 0)
m.e1447 = Constraint(expr= m.b1000 - m.b1001 <= 0)
m.e1448 = Constraint(expr= m.b1000 - m.b1002 <= 0)
m.e1449 = Constraint(expr= m.b1001 - m.b1002 <= 0)
m.e1450 = Constraint(expr= m.b1003 - m.b1004 <= 0)
m.e1451 = Constraint(expr= m.b1003 - m.b1005 <= 0)
m.e1452 = Constraint(expr= m.b1004 - m.b1005 <= 0)
m.e1453 = Constraint(expr= m.b1006 - m.b1007 <= 0)
m.e1454 = Constraint(expr= m.b1006 - m.b1008 <= 0)
m.e1455 = Constraint(expr= m.b1007 - m.b1008 <= 0)
m.e1456 = Constraint(expr= m.b1009 - m.b1010 <= 0)
m.e1457 = Constraint(expr= m.b1009 - m.b1011 <= 0)
m.e1458 = Constraint(expr= m.b1010 - m.b1011 <= 0)
m.e1459 = Constraint(expr= m.b1012 - m.b1013 <= 0)
m.e1460 = Constraint(expr= m.b1012 - m.b1014 <= 0)
m.e1461 = Constraint(expr= m.b1013 - m.b1014 <= 0)
m.e1462 = Constraint(expr= m.b1015 - m.b1016 <= 0)
m.e1463 = Constraint(expr= m.b1015 - m.b1017 <= 0)
m.e1464 = Constraint(expr= m.b1016 - m.b1017 <= 0)
m.e1465 = Constraint(expr= m.b1018 - m.b1019 <= 0)
m.e1466 = Constraint(expr= m.b1018 - m.b1020 <= 0)
m.e1467 = Constraint(expr= m.b1019 - m.b1020 <= 0)
m.e1468 = Constraint(expr= m.b1021 - m.b1022 <= 0)
m.e1469 = Constraint(expr= m.b1021 - m.b1023 <= 0)
m.e1470 = Constraint(expr= m.b1022 - m.b1023 <= 0)
m.e1471 = Constraint(expr= m.b1024 - m.b1025 <= 0)
m.e1472 = Constraint(expr= m.b1024 - m.b1026 <= 0)
m.e1473 = Constraint(expr= m.b1025 - m.b1026 <= 0)
m.e1474 = Constraint(expr= m.b1027 + m.b1028 <= 1)
m.e1475 = Constraint(expr= m.b1027 + m.b1029 <= 1)
m.e1476 = Constraint(expr= m.b1027 + m.b1028 <= 1)
m.e1477 = Constraint(expr= m.b1028 + m.b1029 <= 1)
m.e1478 = Constraint(expr= m.b1027 + m.b1029 <= 1)
m.e1479 = Constraint(expr= m.b1028 + m.b1029 <= 1)
m.e1480 = Constraint(expr= m.b1030 + m.b1031 <= 1)
m.e1481 = Constraint(expr= m.b1030 + m.b1032 <= 1)
m.e1482 = Constraint(expr= m.b1030 + m.b1031 <= 1)
m.e1483 = Constraint(expr= m.b1031 + m.b1032 <= 1)
m.e1484 = Constraint(expr= m.b1030 + m.b1032 <= 1)
m.e1485 = Constraint(expr= m.b1031 + m.b1032 <= 1)
m.e1486 = Constraint(expr= m.b1033 + m.b1034 <= 1)
m.e1487 = Constraint(expr= m.b1033 + m.b1035 <= 1)
m.e1488 = Constraint(expr= m.b1033 + m.b1034 <= 1)
m.e1489 = Constraint(expr= m.b1034 + m.b1035 <= 1)
m.e1490 = Constraint(expr= m.b1033 + m.b1035 <= 1)
m.e1491 = Constraint(expr= m.b1034 + m.b1035 <= 1)
m.e1492 = Constraint(expr= m.b1036 + m.b1037 <= 1)
m.e1493 = Constraint(expr= m.b1036 + m.b1038 <= 1)
m.e1494 = Constraint(expr= m.b1036 + m.b1037 <= 1)
m.e1495 = Constraint(expr= m.b1037 + m.b1038 <= 1)
m.e1496 = Constraint(expr= m.b1036 + m.b1038 <= 1)
m.e1497 = Constraint(expr= m.b1037 + m.b1038 <= 1)
m.e1498 = Constraint(expr= m.b1039 + m.b1040 <= 1)
m.e1499 = Constraint(expr= m.b1039 + m.b1041 <= 1)
m.e1500 = Constraint(expr= m.b1039 + m.b1040 <= 1)
m.e1501 = Constraint(expr= m.b1040 + m.b1041 <= 1)
m.e1502 = Constraint(expr= m.b1039 + m.b1041 <= 1)
m.e1503 = Constraint(expr= m.b1040 + m.b1041 <= 1)
m.e1504 = Constraint(expr= m.b1042 + m.b1043 <= 1)
m.e1505 = Constraint(expr= m.b1042 + m.b1044 <= 1)
m.e1506 = Constraint(expr= m.b1042 + m.b1043 <= 1)
m.e1507 = Constraint(expr= m.b1043 + m.b1044 <= 1)
m.e1508 = Constraint(expr= m.b1042 + m.b1044 <= 1)
m.e1509 = Constraint(expr= m.b1043 + m.b1044 <= 1)
m.e1510 = Constraint(expr= m.b1045 + m.b1046 <= 1)
m.e1511 = Constraint(expr= m.b1045 + m.b1047 <= 1)
m.e1512 = Constraint(expr= m.b1045 + m.b1046 <= 1)
m.e1513 = Constraint(expr= m.b1046 + m.b1047 <= 1)
m.e1514 = Constraint(expr= m.b1045 + m.b1047 <= 1)
m.e1515 = Constraint(expr= m.b1046 + m.b1047 <= 1)
m.e1516 = Constraint(expr= m.b1048 + m.b1049 <= 1)
m.e1517 = Constraint(expr= m.b1048 + m.b1050 <= 1)
m.e1518 = Constraint(expr= m.b1048 + m.b1049 <= 1)
m.e1519 = Constraint(expr= m.b1049 + m.b1050 <= 1)
m.e1520 = Constraint(expr= m.b1048 + m.b1050 <= 1)
m.e1521 = Constraint(expr= m.b1049 + m.b1050 <= 1)
m.e1522 = Constraint(expr= m.b1051 + m.b1052 <= 1)
m.e1523 = Constraint(expr= m.b1051 + m.b1053 <= 1)
m.e1524 = Constraint(expr= m.b1051 + m.b1052 <= 1)
m.e1525 = Constraint(expr= m.b1052 + m.b1053 <= 1)
m.e1526 = Constraint(expr= m.b1051 + m.b1053 <= 1)
m.e1527 = Constraint(expr= m.b1052 + m.b1053 <= 1)
m.e1528 = Constraint(expr= m.b1054 + m.b1055 <= 1)
m.e1529 = Constraint(expr= m.b1054 + m.b1056 <= 1)
m.e1530 = Constraint(expr= m.b1054 + m.b1055 <= 1)
m.e1531 = Constraint(expr= m.b1055 + m.b1056 <= 1)
m.e1532 = Constraint(expr= m.b1054 + m.b1056 <= 1)
m.e1533 = Constraint(expr= m.b1055 + m.b1056 <= 1)
m.e1534 = Constraint(expr= m.b1057 + m.b1058 <= 1)
m.e1535 = Constraint(expr= m.b1057 + m.b1059 <= 1)
m.e1536 = Constraint(expr= m.b1057 + m.b1058 <= 1)
m.e1537 = Constraint(expr= m.b1058 + m.b1059 <= 1)
m.e1538 = Constraint(expr= m.b1057 + m.b1059 <= 1)
m.e1539 = Constraint(expr= m.b1058 + m.b1059 <= 1)
m.e1540 = Constraint(expr= m.b1060 + m.b1061 <= 1)
m.e1541 = Constraint(expr= m.b1060 + m.b1062 <= 1)
m.e1542 = Constraint(expr= m.b1060 + m.b1061 <= 1)
m.e1543 = Constraint(expr= m.b1061 + m.b1062 <= 1)
m.e1544 = Constraint(expr= m.b1060 + m.b1062 <= 1)
m.e1545 = Constraint(expr= m.b1061 + m.b1062 <= 1)
m.e1546 = Constraint(expr= m.b1063 + m.b1064 <= 1)
m.e1547 = Constraint(expr= m.b1063 + m.b1065 <= 1)
m.e1548 = Constraint(expr= m.b1063 + m.b1064 <= 1)
m.e1549 = Constraint(expr= m.b1064 + m.b1065 <= 1)
m.e1550 = Constraint(expr= m.b1063 + m.b1065 <= 1)
m.e1551 = Constraint(expr= m.b1064 + m.b1065 <= 1)
m.e1552 = Constraint(expr= m.b1066 + m.b1067 <= 1)
m.e1553 = Constraint(expr= m.b1066 + m.b1068 <= 1)
m.e1554 = Constraint(expr= m.b1066 + m.b1067 <= 1)
m.e1555 = Constraint(expr= m.b1067 + m.b1068 <= 1)
m.e1556 = Constraint(expr= m.b1066 + m.b1068 <= 1)
m.e1557 = Constraint(expr= m.b1067 + m.b1068 <= 1)
m.e1558 = Constraint(expr= m.b1069 + m.b1070 <= 1)
m.e1559 = Constraint(expr= m.b1069 + m.b1071 <= 1)
m.e1560 = Constraint(expr= m.b1069 + m.b1070 <= 1)
m.e1561 = Constraint(expr= m.b1070 + m.b1071 <= 1)
m.e1562 = Constraint(expr= m.b1069 + m.b1071 <= 1)
m.e1563 = Constraint(expr= m.b1070 + m.b1071 <= 1)
m.e1564 = Constraint(expr= m.b1072 + m.b1073 <= 1)
m.e1565 = Constraint(expr= m.b1072 + m.b1074 <= 1)
m.e1566 = Constraint(expr= m.b1072 + m.b1073 <= 1)
m.e1567 = Constraint(expr= m.b1073 + m.b1074 <= 1)
m.e1568 = Constraint(expr= m.b1072 + m.b1074 <= 1)
m.e1569 = Constraint(expr= m.b1073 + m.b1074 <= 1)
m.e1570 = Constraint(expr= m.b1075 + m.b1076 <= 1)
m.e1571 = Constraint(expr= m.b1075 + m.b1077 <= 1)
m.e1572 = Constraint(expr= m.b1075 + m.b1076 <= 1)
m.e1573 = Constraint(expr= m.b1076 + m.b1077 <= 1)
m.e1574 = Constraint(expr= m.b1075 + m.b1077 <= 1)
m.e1575 = Constraint(expr= m.b1076 + m.b1077 <= 1)
m.e1576 = Constraint(expr= m.b1078 + m.b1079 <= 1)
m.e1577 = Constraint(expr= m.b1078 + m.b1080 <= 1)
m.e1578 = Constraint(expr= m.b1078 + m.b1079 <= 1)
m.e1579 = Constraint(expr= m.b1079 + m.b1080 <= 1)
m.e1580 = Constraint(expr= m.b1078 + m.b1080 <= 1)
m.e1581 = Constraint(expr= m.b1079 + m.b1080 <= 1)
m.e1582 = Constraint(expr= m.b1081 + m.b1082 <= 1)
m.e1583 = Constraint(expr= m.b1081 + m.b1083 <= 1)
m.e1584 = Constraint(expr= m.b1081 + m.b1082 <= 1)
m.e1585 = Constraint(expr= m.b1082 + m.b1083 <= 1)
m.e1586 = Constraint(expr= m.b1081 + m.b1083 <= 1)
m.e1587 = Constraint(expr= m.b1082 + m.b1083 <= 1)
m.e1588 = Constraint(expr= m.b1084 + m.b1085 <= 1)
m.e1589 = Constraint(expr= m.b1084 + m.b1086 <= 1)
m.e1590 = Constraint(expr= m.b1084 + m.b1085 <= 1)
m.e1591 = Constraint(expr= m.b1085 + m.b1086 <= 1)
m.e1592 = Constraint(expr= m.b1084 + m.b1086 <= 1)
m.e1593 = Constraint(expr= m.b1085 + m.b1086 <= 1)
m.e1594 = Constraint(expr= m.b1087 + m.b1088 <= 1)
m.e1595 = Constraint(expr= m.b1087 + m.b1089 <= 1)
m.e1596 = Constraint(expr= m.b1087 + m.b1088 <= 1)
m.e1597 = Constraint(expr= m.b1088 + m.b1089 <= 1)
m.e1598 = Constraint(expr= m.b1087 + m.b1089 <= 1)
m.e1599 = Constraint(expr= m.b1088 + m.b1089 <= 1)
m.e1600 = Constraint(expr= m.b1090 + m.b1091 <= 1)
m.e1601 = Constraint(expr= m.b1090 + m.b1092 <= 1)
m.e1602 = Constraint(expr= m.b1090 + m.b1091 <= 1)
m.e1603 = Constraint(expr= m.b1091 + m.b1092 <= 1)
m.e1604 = Constraint(expr= m.b1090 + m.b1092 <= 1)
m.e1605 = Constraint(expr= m.b1091 + m.b1092 <= 1)
m.e1606 = Constraint(expr= m.b1093 + m.b1094 <= 1)
m.e1607 = Constraint(expr= m.b1093 + m.b1095 <= 1)
m.e1608 = Constraint(expr= m.b1093 + m.b1094 <= 1)
m.e1609 = Constraint(expr= m.b1094 + m.b1095 <= 1)
m.e1610 = Constraint(expr= m.b1093 + m.b1095 <= 1)
m.e1611 = Constraint(expr= m.b1094 + m.b1095 <= 1)
m.e1612 = Constraint(expr= m.b1096 + m.b1097 <= 1)
m.e1613 = Constraint(expr= m.b1096 + m.b1098 <= 1)
m.e1614 = Constraint(expr= m.b1096 + m.b1097 <= 1)
m.e1615 = Constraint(expr= m.b1097 + m.b1098 <= 1)
m.e1616 = Constraint(expr= m.b1096 + m.b1098 <= 1)
m.e1617 = Constraint(expr= m.b1097 + m.b1098 <= 1)
m.e1618 = Constraint(expr= m.b1099 + m.b1100 <= 1)
m.e1619 = Constraint(expr= m.b1099 + m.b1101 <= 1)
m.e1620 = Constraint(expr= m.b1099 + m.b1100 <= 1)
m.e1621 = Constraint(expr= m.b1100 + m.b1101 <= 1)
m.e1622 = Constraint(expr= m.b1099 + m.b1101 <= 1)
m.e1623 = Constraint(expr= m.b1100 + m.b1101 <= 1)
m.e1624 = Constraint(expr= m.b1102 + m.b1103 <= 1)
m.e1625 = Constraint(expr= m.b1102 + m.b1104 <= 1)
m.e1626 = Constraint(expr= m.b1102 + m.b1103 <= 1)
m.e1627 = Constraint(expr= m.b1103 + m.b1104 <= 1)
m.e1628 = Constraint(expr= m.b1102 + m.b1104 <= 1)
m.e1629 = Constraint(expr= m.b1103 + m.b1104 <= 1)
m.e1630 = Constraint(expr= m.b1105 + m.b1106 <= 1)
m.e1631 = Constraint(expr= m.b1105 + m.b1107 <= 1)
m.e1632 = Constraint(expr= m.b1105 + m.b1106 <= 1)
m.e1633 = Constraint(expr= m.b1106 + m.b1107 <= 1)
m.e1634 = Constraint(expr= m.b1105 + m.b1107 <= 1)
m.e1635 = Constraint(expr= m.b1106 + m.b1107 <= 1)
m.e1636 = Constraint(expr= m.b1108 + m.b1109 <= 1)
m.e1637 = Constraint(expr= m.b1108 + m.b1110 <= 1)
m.e1638 = Constraint(expr= m.b1108 + m.b1109 <= 1)
m.e1639 = Constraint(expr= m.b1109 + m.b1110 <= 1)
m.e1640 = Constraint(expr= m.b1108 + m.b1110 <= 1)
m.e1641 = Constraint(expr= m.b1109 + m.b1110 <= 1)
m.e1642 = Constraint(expr= m.b1111 + m.b1112 <= 1)
m.e1643 = Constraint(expr= m.b1111 + m.b1113 <= 1)
m.e1644 = Constraint(expr= m.b1111 + m.b1112 <= 1)
m.e1645 = Constraint(expr= m.b1112 + m.b1113 <= 1)
m.e1646 = Constraint(expr= m.b1111 + m.b1113 <= 1)
m.e1647 = Constraint(expr= m.b1112 + m.b1113 <= 1)
m.e1648 = Constraint(expr= m.b1114 + m.b1115 <= 1)
m.e1649 = Constraint(expr= m.b1114 + m.b1116 <= 1)
m.e1650 = Constraint(expr= m.b1114 + m.b1115 <= 1)
m.e1651 = Constraint(expr= m.b1115 + m.b1116 <= 1)
m.e1652 = Constraint(expr= m.b1114 + m.b1116 <= 1)
m.e1653 = Constraint(expr= m.b1115 + m.b1116 <= 1)
m.e1654 = Constraint(expr= m.b1117 + m.b1118 <= 1)
m.e1655 = Constraint(expr= m.b1117 + m.b1119 <= 1)
m.e1656 = Constraint(expr= m.b1117 + m.b1118 <= 1)
m.e1657 = Constraint(expr= m.b1118 + m.b1119 <= 1)
m.e1658 = Constraint(expr= m.b1117 + m.b1119 <= 1)
m.e1659 = Constraint(expr= m.b1118 + m.b1119 <= 1)
m.e1660 = Constraint(expr= m.b1120 + m.b1121 <= 1)
m.e1661 = Constraint(expr= m.b1120 + m.b1122 <= 1)
m.e1662 = Constraint(expr= m.b1120 + m.b1121 <= 1)
m.e1663 = Constraint(expr= m.b1121 + m.b1122 <= 1)
m.e1664 = Constraint(expr= m.b1120 + m.b1122 <= 1)
m.e1665 = Constraint(expr= m.b1121 + m.b1122 <= 1)
m.e1666 = Constraint(expr= m.b1123 + m.b1124 <= 1)
m.e1667 = Constraint(expr= m.b1123 + m.b1125 <= 1)
m.e1668 = Constraint(expr= m.b1123 + m.b1124 <= 1)
m.e1669 = Constraint(expr= m.b1124 + m.b1125 <= 1)
m.e1670 = Constraint(expr= m.b1123 + m.b1125 <= 1)
m.e1671 = Constraint(expr= m.b1124 + m.b1125 <= 1)
m.e1672 = Constraint(expr= m.b1126 + m.b1127 <= 1)
m.e1673 = Constraint(expr= m.b1126 + m.b1128 <= 1)
m.e1674 = Constraint(expr= m.b1126 + m.b1127 <= 1)
m.e1675 = Constraint(expr= m.b1127 + m.b1128 <= 1)
m.e1676 = Constraint(expr= m.b1126 + m.b1128 <= 1)
m.e1677 = Constraint(expr= m.b1127 + m.b1128 <= 1)
m.e1678 = Constraint(expr= m.b1129 + m.b1130 <= 1)
m.e1679 = Constraint(expr= m.b1129 + m.b1131 <= 1)
m.e1680 = Constraint(expr= m.b1129 + m.b1130 <= 1)
m.e1681 = Constraint(expr= m.b1130 + m.b1131 <= 1)
m.e1682 = Constraint(expr= m.b1129 + m.b1131 <= 1)
m.e1683 = Constraint(expr= m.b1130 + m.b1131 <= 1)
m.e1684 = Constraint(expr= m.b1132 + m.b1133 <= 1)
m.e1685 = Constraint(expr= m.b1132 + m.b1134 <= 1)
m.e1686 = Constraint(expr= m.b1132 + m.b1133 <= 1)
m.e1687 = Constraint(expr= m.b1133 + m.b1134 <= 1)
m.e1688 = Constraint(expr= m.b1132 + m.b1134 <= 1)
m.e1689 = Constraint(expr= m.b1133 + m.b1134 <= 1)
m.e1690 = Constraint(expr= m.b1135 + m.b1136 <= 1)
m.e1691 = Constraint(expr= m.b1135 + m.b1137 <= 1)
m.e1692 = Constraint(expr= m.b1135 + m.b1136 <= 1)
m.e1693 = Constraint(expr= m.b1136 + m.b1137 <= 1)
m.e1694 = Constraint(expr= m.b1135 + m.b1137 <= 1)
m.e1695 = Constraint(expr= m.b1136 + m.b1137 <= 1)
m.e1696 = Constraint(expr= m.b1138 + m.b1139 <= 1)
m.e1697 = Constraint(expr= m.b1138 + m.b1140 <= 1)
m.e1698 = Constraint(expr= m.b1138 + m.b1139 <= 1)
m.e1699 = Constraint(expr= m.b1139 + m.b1140 <= 1)
m.e1700 = Constraint(expr= m.b1138 + m.b1140 <= 1)
m.e1701 = Constraint(expr= m.b1139 + m.b1140 <= 1)
m.e1702 = Constraint(expr= m.b1141 + m.b1142 <= 1)
m.e1703 = Constraint(expr= m.b1141 + m.b1143 <= 1)
m.e1704 = Constraint(expr= m.b1141 + m.b1142 <= 1)
m.e1705 = Constraint(expr= m.b1142 + m.b1143 <= 1)
m.e1706 = Constraint(expr= m.b1141 + m.b1143 <= 1)
m.e1707 = Constraint(expr= m.b1142 + m.b1143 <= 1)
m.e1708 = Constraint(expr= m.b1144 + m.b1145 <= 1)
m.e1709 = Constraint(expr= m.b1144 + m.b1146 <= 1)
m.e1710 = Constraint(expr= m.b1144 + m.b1145 <= 1)
m.e1711 = Constraint(expr= m.b1145 + m.b1146 <= 1)
m.e1712 = Constraint(expr= m.b1144 + m.b1146 <= 1)
m.e1713 = Constraint(expr= m.b1145 + m.b1146 <= 1)
m.e1714 = Constraint(expr= m.b907 - m.b1027 <= 0)
m.e1715 = Constraint(expr= -m.b907 + m.b908 - m.b1028 <= 0)
m.e1716 = Constraint(expr= -m.b907 - m.b908 + m.b909 - m.b1029 <= 0)
m.e1717 = Constraint(expr= m.b910 - m.b1030 <= 0)
m.e1718 = Constraint(expr= -m.b910 + m.b911 - m.b1031 <= 0)
m.e1719 = Constraint(expr= -m.b910 - m.b911 + m.b912 - m.b1032 <= 0)
m.e1720 = Constraint(expr= m.b913 - m.b1033 <= 0)
m.e1721 = Constraint(expr= -m.b913 + m.b914 - m.b1034 <= 0)
m.e1722 = Constraint(expr= -m.b913 - m.b914 + m.b915 - m.b1035 <= 0)
m.e1723 = Constraint(expr= m.b916 - m.b1036 <= 0)
m.e1724 = Constraint(expr= -m.b916 + m.b917 - m.b1037 <= 0)
m.e1725 = Constraint(expr= -m.b916 - m.b917 + m.b918 - m.b1038 <= 0)
m.e1726 = Constraint(expr= m.b919 - m.b1039 <= 0)
m.e1727 = Constraint(expr= -m.b919 + m.b920 - m.b1040 <= 0)
m.e1728 = Constraint(expr= -m.b919 - m.b920 + m.b921 - m.b1041 <= 0)
m.e1729 = Constraint(expr= m.b922 - m.b1042 <= 0)
m.e1730 = Constraint(expr= -m.b922 + m.b923 - m.b1043 <= 0)
m.e1731 = Constraint(expr= -m.b922 - m.b923 + m.b924 - m.b1044 <= 0)
m.e1732 = Constraint(expr= m.b925 - m.b1045 <= 0)
m.e1733 = Constraint(expr= -m.b925 + m.b926 - m.b1046 <= 0)
m.e1734 = Constraint(expr= -m.b925 - m.b926 + m.b927 - m.b1047 <= 0)
m.e1735 = Constraint(expr= m.b928 - m.b1048 <= 0)
m.e1736 = Constraint(expr= -m.b928 + m.b929 - m.b1049 <= 0)
m.e1737 = Constraint(expr= -m.b928 - m.b929 + m.b930 - m.b1050 <= 0)
m.e1738 = Constraint(expr= m.b931 - m.b1051 <= 0)
m.e1739 = Constraint(expr= -m.b931 + m.b932 - m.b1052 <= 0)
m.e1740 = Constraint(expr= -m.b931 - m.b932 + m.b933 - m.b1053 <= 0)
m.e1741 = Constraint(expr= m.b934 - m.b1054 <= 0)
m.e1742 = Constraint(expr= -m.b934 + m.b935 - m.b1055 <= 0)
m.e1743 = Constraint(expr= -m.b934 - m.b935 + m.b936 - m.b1056 <= 0)
m.e1744 = Constraint(expr= m.b937 - m.b1057 <= 0)
m.e1745 = Constraint(expr= -m.b937 + m.b938 - m.b1058 <= 0)
m.e1746 = Constraint(expr= -m.b937 - m.b938 + m.b939 - m.b1059 <= 0)
m.e1747 = Constraint(expr= m.b940 - m.b1060 <= 0)
m.e1748 = Constraint(expr= -m.b940 + m.b941 - m.b1061 <= 0)
m.e1749 = Constraint(expr= -m.b940 - m.b941 + m.b942 - m.b1062 <= 0)
m.e1750 = Constraint(expr= m.b943 - m.b1063 <= 0)
m.e1751 = Constraint(expr= -m.b943 + m.b944 - m.b1064 <= 0)
m.e1752 = Constraint(expr= -m.b943 - m.b944 + m.b945 - m.b1065 <= 0)
m.e1753 = Constraint(expr= m.b946 - m.b1066 <= 0)
m.e1754 = Constraint(expr= -m.b946 + m.b947 - m.b1067 <= 0)
m.e1755 = Constraint(expr= -m.b946 - m.b947 + m.b948 - m.b1068 <= 0)
m.e1756 = Constraint(expr= m.b949 - m.b1069 <= 0)
m.e1757 = Constraint(expr= -m.b949 + m.b950 - m.b1070 <= 0)
m.e1758 = Constraint(expr= -m.b949 - m.b950 + m.b951 - m.b1071 <= 0)
m.e1759 = Constraint(expr= m.b952 - m.b1072 <= 0)
m.e1760 = Constraint(expr= -m.b952 + m.b953 - m.b1073 <= 0)
m.e1761 = Constraint(expr= -m.b952 - m.b953 + m.b954 - m.b1074 <= 0)
m.e1762 = Constraint(expr= m.b955 - m.b1075 <= 0)
m.e1763 = Constraint(expr= -m.b955 + m.b956 - m.b1076 <= 0)
m.e1764 = Constraint(expr= -m.b955 - m.b956 + m.b957 - m.b1077 <= 0)
m.e1765 = Constraint(expr= m.b958 - m.b1078 <= 0)
m.e1766 = Constraint(expr= -m.b958 + m.b959 - m.b1079 <= 0)
m.e1767 = Constraint(expr= -m.b958 - m.b959 + m.b960 - m.b1080 <= 0)
m.e1768 = Constraint(expr= m.b961 - m.b1081 <= 0)
m.e1769 = Constraint(expr= -m.b961 + m.b962 - m.b1082 <= 0)
m.e1770 = Constraint(expr= -m.b961 - m.b962 + m.b963 - m.b1083 <= 0)
m.e1771 = Constraint(expr= m.b964 - m.b1084 <= 0)
m.e1772 = Constraint(expr= -m.b964 + m.b965 - m.b1085 <= 0)
m.e1773 = Constraint(expr= -m.b964 - m.b965 + m.b966 - m.b1086 <= 0)
m.e1774 = Constraint(expr= m.b967 - m.b1087 <= 0)
m.e1775 = Constraint(expr= -m.b967 + m.b968 - m.b1088 <= 0)
m.e1776 = Constraint(expr= -m.b967 - m.b968 + m.b969 - m.b1089 <= 0)
m.e1777 = Constraint(expr= m.b970 - m.b1090 <= 0)
m.e1778 = Constraint(expr= -m.b970 + m.b971 - m.b1091 <= 0)
m.e1779 = Constraint(expr= -m.b970 - m.b971 + m.b972 - m.b1092 <= 0)
m.e1780 = Constraint(expr= m.b973 - m.b1093 <= 0)
m.e1781 = Constraint(expr= -m.b973 + m.b974 - m.b1094 <= 0)
m.e1782 = Constraint(expr= -m.b973 - m.b974 + m.b975 - m.b1095 <= 0)
m.e1783 = Constraint(expr= m.b976 - m.b1096 <= 0)
m.e1784 = Constraint(expr= -m.b976 + m.b977 - m.b1097 <= 0)
m.e1785 = Constraint(expr= -m.b976 - m.b977 + m.b978 - m.b1098 <= 0)
m.e1786 = Constraint(expr= m.b979 - m.b1099 <= 0)
m.e1787 = Constraint(expr= -m.b979 + m.b980 - m.b1100 <= 0)
m.e1788 = Constraint(expr= -m.b979 - m.b980 + m.b981 - m.b1101 <= 0)
m.e1789 = Constraint(expr= m.b982 - m.b1102 <= 0)
m.e1790 = Constraint(expr= -m.b982 + m.b983 - m.b1103 <= 0)
m.e1791 = Constraint(expr= -m.b982 - m.b983 + m.b984 - m.b1104 <= 0)
m.e1792 = Constraint(expr= m.b985 - m.b1105 <= 0)
m.e1793 = Constraint(expr= -m.b985 + m.b986 - m.b1106 <= 0)
m.e1794 = Constraint(expr= -m.b985 - m.b986 + m.b987 - m.b1107 <= 0)
m.e1795 = Constraint(expr= m.b988 - m.b1108 <= 0)
m.e1796 = Constraint(expr= -m.b988 + m.b989 - m.b1109 <= 0)
m.e1797 = Constraint(expr= -m.b988 - m.b989 + m.b990 - m.b1110 <= 0)
m.e1798 = Constraint(expr= m.b991 - m.b1111 <= 0)
m.e1799 = Constraint(expr= -m.b991 + m.b992 - m.b1112 <= 0)
m.e1800 = Constraint(expr= -m.b991 - m.b992 + m.b993 - m.b1113 <= 0)
m.e1801 = Constraint(expr= m.b994 - m.b1114 <= 0)
m.e1802 = Constraint(expr= -m.b994 + m.b995 - m.b1115 <= 0)
m.e1803 = Constraint(expr= -m.b994 - m.b995 + m.b996 - m.b1116 <= 0)
m.e1804 = Constraint(expr= m.b997 - m.b1117 <= 0)
m.e1805 = Constraint(expr= -m.b997 + m.b998 - m.b1118 <= 0)
m.e1806 = Constraint(expr= -m.b997 - m.b998 + m.b999 - m.b1119 <= 0)
m.e1807 = Constraint(expr= m.b1000 - m.b1120 <= 0)
m.e1808 = Constraint(expr= -m.b1000 + m.b1001 - m.b1121 <= 0)
m.e1809 = Constraint(expr= -m.b1000 - m.b1001 + m.b1002 - m.b1122 <= 0)
m.e1810 = Constraint(expr= m.b1003 - m.b1123 <= 0)
m.e1811 = Constraint(expr= -m.b1003 + m.b1004 - m.b1124 <= 0)
m.e1812 = Constraint(expr= -m.b1003 - m.b1004 + m.b1005 - m.b1125 <= 0)
m.e1813 = Constraint(expr= m.b1006 - m.b1126 <= 0)
m.e1814 = Constraint(expr= -m.b1006 + m.b1007 - m.b1127 <= 0)
m.e1815 = Constraint(expr= -m.b1006 - m.b1007 + m.b1008 - m.b1128 <= 0)
m.e1816 = Constraint(expr= m.b1009 - m.b1129 <= 0)
m.e1817 = Constraint(expr= -m.b1009 + m.b1010 - m.b1130 <= 0)
m.e1818 = Constraint(expr= -m.b1009 - m.b1010 + m.b1011 - m.b1131 <= 0)
m.e1819 = Constraint(expr= m.b1012 - m.b1132 <= 0)
m.e1820 = Constraint(expr= -m.b1012 + m.b1013 - m.b1133 <= 0)
m.e1821 = Constraint(expr= -m.b1012 - m.b1013 + m.b1014 - m.b1134 <= 0)
m.e1822 = Constraint(expr= m.b1015 - m.b1135 <= 0)
m.e1823 = Constraint(expr= -m.b1015 + m.b1016 - m.b1136 <= 0)
m.e1824 = Constraint(expr= -m.b1015 - m.b1016 + m.b1017 - m.b1137 <= 0)
m.e1825 = Constraint(expr= m.b1018 - m.b1138 <= 0)
m.e1826 = Constraint(expr= -m.b1018 + m.b1019 - m.b1139 <= 0)
m.e1827 = Constraint(expr= -m.b1018 - m.b1019 + m.b1020 - m.b1140 <= 0)
m.e1828 = Constraint(expr= m.b1021 - m.b1141 <= 0)
m.e1829 = Constraint(expr= -m.b1021 + m.b1022 - m.b1142 <= 0)
m.e1830 = Constraint(expr= -m.b1021 - m.b1022 + m.b1023 - m.b1143 <= 0)
m.e1831 = Constraint(expr= m.b1024 - m.b1144 <= 0)
m.e1832 = Constraint(expr= -m.b1024 + m.b1025 - m.b1145 <= 0)
m.e1833 = Constraint(expr= -m.b1024 - m.b1025 + m.b1026 - m.b1146 <= 0)
m.e1834 = Constraint(expr= m.b907 + m.b910 == 1)
m.e1835 = Constraint(expr= m.b908 + m.b911 == 1)
m.e1836 = Constraint(expr= m.b909 + m.b912 == 1)
m.e1837 = Constraint(expr= -m.b913 + m.b922 + m.b925 >= 0)
m.e1838 = Constraint(expr= -m.b914 + m.b923 + m.b926 >= 0)
m.e1839 = Constraint(expr= -m.b915 + m.b924 + m.b927 >= 0)
m.e1840 = Constraint(expr= -m.b922 + m.b940 >= 0)
m.e1841 = Constraint(expr= -m.b923 + m.b941 >= 0)
m.e1842 = Constraint(expr= -m.b924 + m.b942 >= 0)
m.e1843 = Constraint(expr= -m.b925 + m.b943 >= 0)
m.e1844 = Constraint(expr= -m.b926 + m.b944 >= 0)
m.e1845 = Constraint(expr= -m.b927 + m.b945 >= 0)
m.e1846 = Constraint(expr= -m.b916 + m.b928 >= 0)
m.e1847 = Constraint(expr= -m.b917 + m.b929 >= 0)
m.e1848 = Constraint(expr= -m.b918 + m.b930 >= 0)
m.e1849 = Constraint(expr= -m.b928 + m.b946 + m.b949 >= 0)
m.e1850 = Constraint(expr= -m.b929 + m.b947 + m.b950 >= 0)
m.e1851 = Constraint(expr= -m.b930 + m.b948 + m.b951 >= 0)
m.e1852 = Constraint(expr= -m.b919 + m.b931 + m.b934 + m.b937 >= 0)
m.e1853 = Constraint(expr= -m.b920 + m.b932 + m.b935 + m.b938 >= 0)
m.e1854 = Constraint(expr= -m.b921 + m.b933 + m.b936 + m.b939 >= 0)
m.e1855 = Constraint(expr= -m.b931 + m.b949 >= 0)
m.e1856 = Constraint(expr= -m.b932 + m.b950 >= 0)
m.e1857 = Constraint(expr= -m.b933 + m.b951 >= 0)
m.e1858 = Constraint(expr= -m.b934 + m.b952 + m.b955 >= 0)
m.e1859 = Constraint(expr= -m.b935 + m.b953 + m.b956 >= 0)
m.e1860 = Constraint(expr= -m.b936 + m.b954 + m.b957 >= 0)
m.e1861 = Constraint(expr= -m.b937 + m.b958 + m.b961 + m.b964 >= 0)
m.e1862 = Constraint(expr= -m.b938 + m.b959 + m.b962 + m.b965 >= 0)
m.e1863 = Constraint(expr= -m.b939 + m.b960 + m.b963 + m.b966 >= 0)
m.e1864 = Constraint(expr= m.b913 - m.b922 >= 0)
m.e1865 = Constraint(expr= m.b914 - m.b923 >= 0)
m.e1866 = Constraint(expr= m.b915 - m.b924 >= 0)
m.e1867 = Constraint(expr= m.b913 - m.b925 >= 0)
m.e1868 = Constraint(expr= m.b914 - m.b926 >= 0)
m.e1869 = Constraint(expr= m.b915 - m.b927 >= 0)
m.e1870 = Constraint(expr= m.b916 - m.b928 >= 0)
m.e1871 = Constraint(expr= m.b917 - m.b929 >= 0)
m.e1872 = Constraint(expr= m.b918 - m.b930 >= 0)
m.e1873 = Constraint(expr= m.b919 - m.b931 >= 0)
m.e1874 = Constraint(expr= m.b920 - m.b932 >= 0)
m.e1875 = Constraint(expr= m.b921 - m.b933 >= 0)
m.e1876 = Constraint(expr= m.b919 - m.b934 >= 0)
m.e1877 = Constraint(expr= m.b920 - m.b935 >= 0)
m.e1878 = Constraint(expr= m.b921 - m.b936 >= 0)
m.e1879 = Constraint(expr= m.b919 - m.b937 >= 0)
m.e1880 = Constraint(expr= m.b920 - m.b938 >= 0)
m.e1881 = Constraint(expr= m.b921 - m.b939 >= 0)
m.e1882 = Constraint(expr= m.b922 - m.b940 >= 0)
m.e1883 = Constraint(expr= m.b923 - m.b941 >= 0)
m.e1884 = Constraint(expr= m.b924 - m.b942 >= 0)
m.e1885 = Constraint(expr= m.b925 - m.b943 >= 0)
m.e1886 = Constraint(expr= m.b926 - m.b944 >= 0)
m.e1887 = Constraint(expr= m.b927 - m.b945 >= 0)
m.e1888 = Constraint(expr= m.b928 - m.b946 >= 0)
m.e1889 = Constraint(expr= m.b929 - m.b947 >= 0)
m.e1890 = Constraint(expr= m.b930 - m.b948 >= 0)
m.e1891 = Constraint(expr= m.b928 - m.b949 >= 0)
m.e1892 = Constraint(expr= m.b929 - m.b950 >= 0)
m.e1893 = Constraint(expr= m.b930 - m.b951 >= 0)
m.e1894 = Constraint(expr= m.b934 - m.b952 >= 0)
m.e1895 = Constraint(expr= m.b935 - m.b953 >= 0)
m.e1896 = Constraint(expr= m.b936 - m.b954 >= 0)
m.e1897 = Constraint(expr= m.b934 - m.b955 >= 0)
m.e1898 = Constraint(expr= m.b935 - m.b956 >= 0)
m.e1899 = Constraint(expr= m.b936 - m.b957 >= 0)
m.e1900 = Constraint(expr= m.b937 - m.b958 >= 0)
m.e1901 = Constraint(expr= m.b938 - m.b959 >= 0)
m.e1902 = Constraint(expr= m.b939 - m.b960 >= 0)
m.e1903 = Constraint(expr= m.b937 - m.b961 >= 0)
m.e1904 = Constraint(expr= m.b938 - m.b962 >= 0)
m.e1905 = Constraint(expr= m.b939 - m.b963 >= 0)
m.e1906 = Constraint(expr= m.b937 - m.b964 >= 0)
m.e1907 = Constraint(expr= m.b938 - m.b965 >= 0)
m.e1908 = Constraint(expr= m.b939 - m.b966 >= 0)
m.e1909 = Constraint(expr= -m.b964 + m.b967 + m.b970 >= 0)
m.e1910 = Constraint(expr= -m.b965 + m.b968 + m.b971 >= 0)
m.e1911 = Constraint(expr= -m.b966 + m.b969 + m.b972 >= 0)
m.e1912 = Constraint(expr= -m.b973 + m.b982 + m.b985 >= 0)
m.e1913 = Constraint(expr= -m.b974 + m.b983 + m.b986 >= 0)
m.e1914 = Constraint(expr= -m.b975 + m.b984 + m.b987 >= 0)
m.e1915 = Constraint(expr= -m.b982 + m.b1000 >= 0)
m.e1916 = Constraint(expr= -m.b983 + m.b1001 >= 0)
m.e1917 = Constraint(expr= -m.b984 + m.b1002 >= 0)
m.e1918 = Constraint(expr= -m.b985 + m.b1003 >= 0)
m.e1919 = Constraint(expr= -m.b986 + m.b1004 >= 0)
m.e1920 = Constraint(expr= -m.b987 + m.b1005 >= 0)
m.e1921 = Constraint(expr= -m.b976 + m.b988 >= 0)
m.e1922 = Constraint(expr= -m.b977 + m.b989 >= 0)
m.e1923 = Constraint(expr= -m.b978 + m.b990 >= 0)
m.e1924 = Constraint(expr= -m.b988 + m.b1006 + m.b1009 >= 0)
m.e1925 = Constraint(expr= -m.b989 + m.b1007 + m.b1010 >= 0)
m.e1926 = Constraint(expr= -m.b990 + m.b1008 + m.b1011 >= 0)
m.e1927 = Constraint(expr= -m.b979 + m.b991 + m.b994 + m.b997 >= 0)
m.e1928 = Constraint(expr= -m.b980 + m.b992 + m.b995 + m.b998 >= 0)
m.e1929 = Constraint(expr= -m.b981 + m.b993 + m.b996 + m.b999 >= 0)
m.e1930 = Constraint(expr= -m.b991 + m.b1009 >= 0)
m.e1931 = Constraint(expr= -m.b992 + m.b1010 >= 0)
m.e1932 = Constraint(expr= -m.b993 + m.b1011 >= 0)
m.e1933 = Constraint(expr= -m.b994 + m.b1012 + m.b1015 >= 0)
m.e1934 = Constraint(expr= -m.b995 + m.b1013 + m.b1016 >= 0)
m.e1935 = Constraint(expr= -m.b996 + m.b1014 + m.b1017 >= 0)
m.e1936 = Constraint(expr= -m.b997 + m.b1018 + m.b1021 + m.b1024 >= 0)
m.e1937 = Constraint(expr= -m.b998 + m.b1019 + m.b1022 + m.b1025 >= 0)
m.e1938 = Constraint(expr= -m.b999 + m.b1020 + m.b1023 + m.b1026 >= 0)
m.e1939 = Constraint(expr= m.b973 - m.b982 >= 0)
m.e1940 = Constraint(expr= m.b974 - m.b983 >= 0)
m.e1941 = Constraint(expr= m.b975 - m.b984 >= 0)
m.e1942 = Constraint(expr= m.b973 - m.b985 >= 0)
m.e1943 = Constraint(expr= m.b974 - m.b986 >= 0)
m.e1944 = Constraint(expr= m.b975 - m.b987 >= 0)
m.e1945 = Constraint(expr= m.b982 - m.b1000 >= 0)
m.e1946 = Constraint(expr= m.b983 - m.b1001 >= 0)
m.e1947 = Constraint(expr= m.b984 - m.b1002 >= 0)
m.e1948 = Constraint(expr= m.b985 - m.b1003 >= 0)
m.e1949 = Constraint(expr= m.b986 - m.b1004 >= 0)
m.e1950 = Constraint(expr= m.b987 - m.b1005 >= 0)
m.e1951 = Constraint(expr= m.b976 - m.b988 >= 0)
m.e1952 = Constraint(expr= m.b977 - m.b989 >= 0)
m.e1953 = Constraint(expr= m.b978 - m.b990 >= 0)
m.e1954 = Constraint(expr= m.b988 - m.b1006 >= 0)
m.e1955 = Constraint(expr= m.b989 - m.b1007 >= 0)
m.e1956 = Constraint(expr= m.b990 - m.b1008 >= 0)
m.e1957 = Constraint(expr= m.b988 - m.b1009 >= 0)
m.e1958 = Constraint(expr= m.b989 - m.b1010 >= 0)
m.e1959 = Constraint(expr= m.b990 - m.b1011 >= 0)
m.e1960 = Constraint(expr= m.b979 - m.b991 >= 0)
m.e1961 = Constraint(expr= m.b980 - m.b992 >= 0)
m.e1962 = Constraint(expr= m.b981 - m.b993 >= 0)
m.e1963 = Constraint(expr= m.b979 - m.b994 >= 0)
m.e1964 = Constraint(expr= m.b980 - m.b995 >= 0)
m.e1965 = Constraint(expr= m.b981 - m.b996 >= 0)
m.e1966 = Constraint(expr= m.b979 - m.b997 >= 0)
m.e1967 = Constraint(expr= m.b980 - m.b998 >= 0)
m.e1968 = Constraint(expr= m.b981 - m.b999 >= 0)
m.e1969 = Constraint(expr= m.b994 - m.b1012 >= 0)
m.e1970 = Constraint(expr= m.b995 - m.b1013 >= 0)
m.e1971 = Constraint(expr= m.b996 - m.b1014 >= 0)
m.e1972 = Constraint(expr= m.b994 - m.b1015 >= 0)
m.e1973 = Constraint(expr= m.b995 - m.b1016 >= 0)
m.e1974 = Constraint(expr= m.b996 - m.b1017 >= 0)
m.e1975 = Constraint(expr= m.b997 - m.b1018 >= 0)
m.e1976 = Constraint(expr= m.b998 - m.b1019 >= 0)
m.e1977 = Constraint(expr= m.b999 - m.b1020 >= 0)
m.e1978 = Constraint(expr= m.b997 - m.b1021 >= 0)
m.e1979 = Constraint(expr= m.b998 - m.b1022 >= 0)
m.e1980 = Constraint(expr= m.b999 - m.b1023 >= 0)
m.e1981 = Constraint(expr= m.b997 - m.b1024 >= 0)
m.e1982 = Constraint(expr= m.b998 - m.b1025 >= 0)
m.e1983 = Constraint(expr= m.b999 - m.b1026 >= 0)
m.e1984 = Constraint(expr= m.b907 + m.b910 - m.b913 >= 0)
m.e1985 = Constraint(expr= m.b908 + m.b911 - m.b914 >= 0)
m.e1986 = Constraint(expr= m.b909 + m.b912 - m.b915 >= 0)
m.e1987 = Constraint(expr= m.b907 + m.b910 - m.b916 >= 0)
m.e1988 = Constraint(expr= m.b908 + m.b911 - m.b917 >= 0)
m.e1989 = Constraint(expr= m.b909 + m.b912 - m.b918 >= 0)
m.e1990 = Constraint(expr= m.b907 + m.b910 - m.b919 >= 0)
m.e1991 = Constraint(expr= m.b908 + m.b911 - m.b920 >= 0)
m.e1992 = Constraint(expr= m.b909 + m.b912 - m.b921 >= 0)
m.e1993 = Constraint(expr= m.b964 - m.b967 >= 0)
m.e1994 = Constraint(expr= m.b965 - m.b968 >= 0)
m.e1995 = Constraint(expr= m.b966 - m.b969 >= 0)
m.e1996 = Constraint(expr= m.b964 - m.b970 >= 0)
m.e1997 = Constraint(expr= m.b965 - m.b971 >= 0)
m.e1998 = Constraint(expr= m.b966 - m.b972 >= 0)
# Scan Gatherer to find out which cards are in a set, and create a list of
# multiverse ids for those cards.
# Lists of ids are stored in ids/FOO.txt, where FOO is the short code for an
# expansion set (see sets.py).
import os
import re
import sys
import urllib
#
import sets
import tools
# Gatherer search URL template. `page` and `escaped_set_name` are filled in via
# %-formatting (see grab_page); the %%5B/%%22 sequences are URL-encoded [ and ".
SEARCH_URL = "http://gatherer.wizards.com/Pages/Search/Default.aspx?"\
    "page=%(page)s&set=%%5B%%22%(escaped_set_name)s%%22%%5D"\
    "&special=true"
#URL_RE = re.compile(r"../Card/Details\.aspx\?multiverseid=(\d+)")
# Matches the card-image handler URL, capturing the multiverse id.
URL_RE = re.compile(r"../../Handlers/Image\.ashx\?multiverseid=(\d+)")
# only pick up cards that have their image shown
# Template regex for alternate printings; '{0}' is replaced with the set code
# (or its alias) before compiling — see scan_page.
ALT_VERSION_RE = \
    r"../Card/Details\.aspx\?multiverseid=(\d+)\"><img[^>]+set={0}&.*?>"
class ScannerError(Exception): pass
def scan_set(short_set):
    """ Fetch and scan search result pages for the given set until we don't
    find any more new cards. Return a list of card ids. """
    # NOTE: this module is Python 2 code (print statements, urllib.quote).
    try:
        full_set_name = sets.set_info[short_set].name
    except KeyError:
        raise ScannerError("Unknown set code: %s" % short_set)
    escaped_set_name = urllib.quote(full_set_name)
    ids = []
    page = 0
    print "Scanning cards for set: %s (%s)" % (short_set, full_set_name)
    # Keep fetching result pages until a page contributes no new ids.
    while True:
        print "Fetching search results, page", page, "..."
        html = grab_page(page, escaped_set_name)
        new_ids = scan_page(html, short_set)
        old_length = len(ids)
        # De-duplicate while preserving first-seen order.
        for new_id in new_ids:
            if new_id not in ids:
                ids.append(new_id)
        if old_length == len(ids):
            break # no new cards found, we're done
        else:
            page += 1
    # Sanity check against the known card count for this set.
    if len(ids) != sets.set_info[short_set].cards:
        print "WARNING: Expected %d cards, got %d instead" % (
            sets.set_info[short_set].cards, len(ids))
    print "Done;", len(ids), "found"
    return ids
def grab_page(page, escaped_set_name):
    """Fetch one Gatherer search-results page and return its HTML.

    SEARCH_URL is filled via ``% locals()``: the local names `page` and
    `escaped_set_name` supply the template's %(...)s placeholders.
    """
    url = SEARCH_URL % locals()
    return tools.grab_url(url)
def scan_page(html, short_set):
    """ Scan the given HTML for URLs to cards, collect their ids, and return
    these ids. """
    # Primary pass: every card whose image is shown on the results page.
    found = [match.group(1) for match in URL_RE.finditer(html)]
    # Alternate versions (basic lands etc) are matched per set code; some sets
    # use an alias in these URLs (e.g. 'OD' instead of 'ODY'), so try both.
    codes = [short_set]
    alias = sets.set_info[short_set].alias  # may be None
    if alias:
        codes.append(alias)
    for code in codes:
        pattern = re.compile(ALT_VERSION_RE.replace('{0}', code))
        found.extend(match.group(1) for match in pattern.finditer(html))
    return found
def write_ids(short_set, ids):
    """Write the collected multiverse ids to ids/<short_set>.txt, one per line."""
    filename = os.path.join('ids', "%s.txt" % short_set)
    if not os.path.exists('ids'): os.mkdir('ids')
    f = open(filename, 'wb')
    for id in ids:
        # Python 2 "print chevron" syntax: redirect print output to file f.
        print >> f, id
    f.close()
if __name__ == "__main__":
    # Each command-line argument is a short set code; scan it and persist ids.
    for short_set in sys.argv[1:]:
        ids = scan_set(short_set)
        write_ids(short_set, ids)
| StarcoderdataPython |
4855431 | <gh_stars>10-100
from datasets import load_dataset
import os
import fire
# Ensure the directory for exported corpus text files exists.
if not os.path.exists('data'):
    os.makedirs('data')
# Single epoch of fine-tuning by default.
NUM_EPOCHS = 1
# Where run_language_modeling.py writes model checkpoints.
CKPT_DIR = 'ckpts'
if not os.path.exists(CKPT_DIR):
    os.makedirs(CKPT_DIR)
def main(train_file=None, val_file=None, dataset_name=None, dataset_config_name=None, key='text', val_name='validation', key2=None, conditional=False, do_val=False, cache_dir='cache/huggingface/transformers', fname=None, version='gpt2'):
if train_file is None:
if dataset_config_name is not None:
fname = dataset_name + '_' + dataset_config_name
else:
fname = dataset_name
if do_val:
train, val = load_dataset(dataset_name, dataset_config_name, split=['train', val_name], cache_dir=cache_dir)
else:
train = load_dataset(dataset_name, dataset_config_name, split='train', cache_dir=cache_dir)
print(f"Processing dataset {fname}...")
train_str = ""
for ex in train:
if conditional:
line = f"{ex['label']} "
else:
line = ""
line += f"{ex[key]}"
if key2 is not None:
line += f" {ex[key2]}"
train_str += f"{line} <|endoftext|>\n"
if do_val:
val_str = ""
for ex in val:
if conditional:
line = f"{ex['label']} "
else:
line = ""
line += f"{ex[key]}"
if key2 is not None:
line += f" {ex[key2]}"
val_str += f"{line} <|endoftext|>\n"
if conditional:
fname_train = f'data/{fname}_conditional_train.txt'
fname_val = f'data/{fname}_conditional_val.txt'
else:
fname_train = f'data/{fname}_train.txt'
fname_val = f'data/{fname}_val.txt'
with open (fname_train, 'w') as f:
f.write(train_str)
if do_val:
with open (fname_val, 'w') as f:
f.write(val_str)
else:
fname_train = train_file
fname_val = val_file
print(f"Running fine-tuning from {fname_train}...")
if conditional == False:
output_dir = f'--output_dir {CKPT_DIR}/{version}-{fname} '
else:
output_dir = f'--output_dir {CKPT_DIR}/{version}-{fname}-conditional '
if do_val:
cmd = 'python run_language_modeling.py ' + \
f'--train_data_file {fname_train} ' + \
f'--eval_data_file {fname_val} ' + \
output_dir + \
f'--model_type {version} ' + \
f'--model_name_or_path {version} ' + \
'--save_total_limit 1 ' + \
f'--num_train_epochs {NUM_EPOCHS} ' + \
'--do_train \
--evaluate_during_training \
--logging_steps 500 \
--save_steps 500 \
--do_eval \
--per_gpu_train_batch_size 8 \
--per_gpu_eval_batch_size 8 \
--line_by_line \
--gradient_accumulation_steps 1'
else:
cmd = 'python run_language_modeling.py ' + \
f'--train_data_file {fname_train} ' + \
output_dir + \
f'--model_type {version} ' + \
f'--model_name_or_path {version} ' + \
'--save_total_limit 1 ' + \
f'--num_train_epochs {NUM_EPOCHS} ' + \
'--do_train \
--per_gpu_train_batch_size 8 \
--per_gpu_eval_batch_size 8 \
--line_by_line \
--gradient_accumulation_steps 1'
if cache_dir is not None:
cmd += f' --cache_dir {cache_dir}'
cmd += ' --overwrite_output_dir'
os.system(cmd)
if __name__ == '__main__':
    # Expose main()'s keyword arguments as a command-line interface.
    fire.Fire(main)
    print("\n\n--------DONE--------")
| StarcoderdataPython |
4825929 | <filename>cysecuretools/execute/provision_device_mxs40v1.py<gh_stars>1-10
"""
Copyright (c) 2018-2020 Cypress Semiconductor Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import json
import logging
from time import sleep
import cysecuretools.execute.provisioning_packet_mxs40v1 as prov_packet
from cysecuretools.core.target_director import Target
from cysecuretools.core.enums import (ProtectionState, EntranceExamStatus,
ProvisioningStatus, KeyId)
from cysecuretools.execute.entrance_exam.exam_mxs40v1 import \
EntranceExamMXS40v1
from cysecuretools.execute.provisioning_lib.cyprov_pem import PemKey
from cysecuretools.execute.sys_call \
import (provision_keys_and_policies, read_lifecycle, dap_control,
get_prov_details, FB_POLICY_JWT)
from cysecuretools.execute.programmer.base import AP
from cysecuretools.execute.programmer.pyocd_wrapper import ResetType
from cysecuretools.core.strategy_context.provisioning_strategy_ctx import \
ProvisioningStrategy
from cysecuretools.core.strategy_context.encrypted_programming_strategy_ctx \
import EncryptedProgrammingContext
from cysecuretools.execute.encrypted_programming.aes_header_strategy import \
AesHeaderStrategy
from cysecuretools.data.mxs40v1.mxs40v1_sfb_status_codes import \
sfb_status_codes
from cysecuretools.targets.common.policy_parser import PolicyParser
from cysecuretools.execute.provisioning_lib.cyprov_crypto import Crypto
logger = logging.getLogger(__name__)
class ProvisioningMXS40V1(ProvisioningStrategy):
    """MXS40v1 provisioning strategy: programs Cypress Bootloader and drives
    the device provisioning / re-provisioning system calls."""

    def provision(self, tool,
                  target: Target, entrance_exam: EntranceExamMXS40v1,
                  bootloader, **kwargs) -> ProvisioningStatus:
        """
        Programs Cypress Bootloader and calls system calls for device
        provisioning.
        :param tool: Programming/debugging tool used for communication
        :param target: The target object.
        :param entrance_exam: The object used to execute entrance exam
               before provisioning.
        :param bootloader: Path to Cypress Bootloader program file.
        :param kwargs: Dictionary with the following fields:
               - ap: Access port to use
               - skip_prompts: Skip interactive user prompts
               - probe_id: Probe ID to use
        :return: Provisioning status.
        """
        # kwargs.get replaces the previous "if key in kwargs" blocks;
        # defaults are unchanged.
        ap = kwargs.get('ap', 'cm0')
        skip_prompts = kwargs.get('skip_prompts')
        probe_id = kwargs.get('probe_id')
        prov_packets = self._get_provisioning_packet(target)
        status = _provision_identity(tool, target, entrance_exam,
                                     prov_packets['prov_identity'],
                                     skip_prompts)
        if status == ProvisioningStatus.OK:
            status = _provision_complete(tool, target,
                                         prov_packets['prov_cmd'], bootloader,
                                         False, ap=ap, probe_id=probe_id)
        if status == ProvisioningStatus.OK:
            logger.info('*****************************************')
            logger.info('       PROVISIONING PASSED               ')
            logger.info('*****************************************\n')
        return status

    def re_provision(self, tool, target: Target, bootloader, **kwargs) \
            -> ProvisioningStatus:
        """
        Programs Cypress Bootloader and calls system calls for device
        re-provisioning.
        :param tool: Programming/debugging tool used for communication
        :param target: The target object.
        :param bootloader: Path to Cypress Bootloader program file.
        :param kwargs: Dictionary with the following fields:
               - erase_boot: Indicates whether to erase BOOT slot
               - control_dap_cert: Certificate for AP control
               - ap: Access port to use
               - probe_id: Probe ID to use
        :return: Provisioning status.
        """
        erase_boot = kwargs.get('erase_boot', False)
        control_dap_cert = kwargs.get('control_dap_cert')
        ap = kwargs.get('ap', 'cm0')
        probe_id = kwargs.get('probe_id')
        prov_packets = self._get_re_provisioning_packet(target)
        tool.reset_and_halt(ResetType.HW)
        status = _provision_complete(
            tool, target, prov_packets['prov_cmd'], bootloader, True,
            erase_boot, control_dap_cert, ap, probe_id)
        if status == ProvisioningStatus.OK:
            logger.info('*****************************************')
            logger.info('       RE-PROVISIONING PASSED            ')
            logger.info('*****************************************\n')
        return status

    def erase_flash(self, tool, target):
        """
        Erases allowed (w/o bootloader, data only) flash area
        :param tool: Programming/debugging tool used for communication
        :param target: The target object
        """
        # Delegates to the module-level erase_flash() helper.
        erase_flash(tool, target)

    @staticmethod
    def _get_provisioning_packet(target):
        """Return paths of the identity and command JWT packets, or False if
        either file is missing.

        NOTE(review): callers index the returned dict immediately, so a False
        return surfaces as a TypeError — confirm whether raising would be the
        intended behavior.
        """
        packet_dir = target.policy_parser.get_provisioning_packet_dir()
        prov_identity = os.path.join(packet_dir, prov_packet.PROV_IDENTITY_JWT)
        prov_cmd = os.path.join(packet_dir, prov_packet.PROV_CMD_JWT)
        if not os.path.isfile(prov_identity):
            logger.error(f'Cannot find provisioning packet {prov_identity}')
            return False
        if not os.path.isfile(prov_cmd):
            logger.error(f'Cannot find provisioning packet {prov_cmd}')
            return False
        return {'prov_identity': prov_identity, 'prov_cmd': prov_cmd}

    @staticmethod
    def _get_re_provisioning_packet(target):
        """Return path of the command JWT packet, or False if it is missing
        (see the NOTE on _get_provisioning_packet)."""
        packet_dir = target.policy_parser.get_provisioning_packet_dir()
        prov_cmd = os.path.join(packet_dir, prov_packet.PROV_CMD_JWT)
        if not os.path.isfile(prov_cmd):
            logger.error(f'Cannot find provisioning packet {prov_cmd}')
            return False
        return {'prov_cmd': prov_cmd}
def read_silicon_data(tool, target: Target):
    """
    Reads silicon data from device
    :param tool: Programming/debugging tool used for communication
    :param target: The target object.
    :return: Device response
    """
    logger.debug('Read silicon data')
    tool.reset(ResetType.HW)
    # The syscall's boolean result is not needed here, only the response body.
    _passed, response = provision_keys_and_policies(tool, None,
                                                    target.register_map)
    return response
def erase_flash(tool, target):
    """Erase the whole main flash region, then any external (SMIF) regions."""
    logger.info('Erase main flash:')
    start = target.memory_map.FLASH_ADDRESS
    length = target.memory_map.FLASH_SIZE
    logger.info(f'erasing address {hex(start)}, size {hex(length)} ...')
    previous_ap = tool.get_ap()
    tool.set_ap(AP.CMx)
    tool.halt()
    tool.erase(start, length)
    logger.info('Erasing complete')
    # Restore whichever access port the caller was using.
    tool.set_ap(previous_ap)
    erase_smif(tool, target)
def erase_smif(tool, target):
    """Erase every external-memory (SMIF) region listed in the policy, if any."""
    regions = target.policy_parser.get_smif_resources()
    if not regions:
        return
    logger.info('Erase main smif slots:')
    previous_ap = tool.get_ap()
    tool.set_ap(AP.CMx)
    for start, length in regions:
        logger.info(f'erasing address {hex(start)}, size {hex(length)} ...')
        tool.erase(start, length)
    logger.info('Erasing complete')
    tool.set_ap(previous_ap)
def erase_status_partition(tool, target):
    """Erase the SWAP status partition declared in the policy, if present."""
    region = target.policy_parser.status_partition()
    if region is None:
        return
    logger.info('Erase SWAP status partition memory region:')
    previous_ap = tool.get_ap()
    tool.set_ap(AP.CMx)
    logger.info(f'erasing address {hex(region.address)}, '
                f'size {hex(region.size)} ...')
    tool.erase(region.address, region.size)
    logger.info('Erasing complete')
    tool.set_ap(previous_ap)
def erase_scratch_area(tool, target):
    """Erase the SCRATCH region declared in the policy, if present."""
    region = target.policy_parser.scratch_area()
    if region is None:
        return
    logger.info('Erase SCRATCH memory region:')
    previous_ap = tool.get_ap()
    tool.set_ap(AP.CMx)
    logger.info(f'erasing address {hex(region.address)}, '
                f'size {hex(region.size)} ...')
    tool.erase(region.address, region.size)
    logger.info('Erasing complete')
    tool.set_ap(previous_ap)
def erase_slots(tool, target, slot_type, first_only=False):
    """
    Erases slot(s) of specific type.
    :param tool: Programming/debugging tool
    :param target: The target object
    :param slot_type: Slot type - BOOT, UPGRADE
    :param first_only: For performance, erase first image only, it is
           enough to prevent application from starting
    """
    slots = target.policy_parser.get_image_data(slot_type)
    logger.info(f'Erase {slot_type} slot:')
    for start, length in slots:
        logger.info(f'erasing address {hex(start)}, size {hex(length)} ...')
        previous_ap = tool.get_ap()
        tool.set_ap(AP.CMx)
        tool.halt()
        tool.erase(start, length)
        logger.info('Erasing complete')
        tool.set_ap(previous_ap)
        if first_only:
            break
def _provision_identity(tool, target: Target,
                        entrance_exam: EntranceExamMXS40v1,
                        prov_identity_jwt, skip_prompts) -> ProvisioningStatus:
    # Stage 1 of provisioning: ensure flash is in a known-clean state, then
    # inject the device identity via the ProvisionKeysAndPolicies syscall.
    lifecycle = read_lifecycle(tool, target.register_map)
    if lifecycle == ProtectionState.secure:
        # In SECURE lifecycle the entrance exam decides whether user firmware
        # is present; only erase with the user's consent (or never, if
        # skip_prompts is set).
        status = entrance_exam.execute(tool)
        if status == EntranceExamStatus.FLASH_NOT_EMPTY:
            if skip_prompts:
                logger.error('Cannot start provisioning. '
                             'User firmware running on chip detected')
                return ProvisioningStatus.FAIL
            else:
                answer = input('Erase user firmware running on chip? (y/n): ')
                if answer.lower() == 'y':
                    erase_flash(tool, target)
                else:
                    return ProvisioningStatus.TERMINATED
        elif status != EntranceExamStatus.OK:
            return ProvisioningStatus.FAIL
    else:
        # Non-secure lifecycle: always start from erased flash.
        erase_flash(tool, target)
    tool.reset_and_halt()
    sleep(0.2)
    is_exam_pass, response = provision_keys_and_policies(
        tool, prov_identity_jwt, target.register_map)
    # Persist the device's JWT response regardless of outcome, for diagnostics.
    _save_device_response(target, response)
    if not is_exam_pass:
        logger.error('Unexpected ProvisionKeysAndPolicies syscall response')
        return ProvisioningStatus.FAIL
    else:
        return ProvisioningStatus.OK
def _provision_complete(tool, target: Target, prov_cmd_jwt, bootloader,
                        re_provision, erase_boot=False,
                        control_dap_cert=None, ap='cm0', probe_id=None) \
        -> ProvisioningStatus:
    # Stage 2 of (re-)provisioning: optionally erase slots, program the user
    # application and bootloader, run the provisioning syscall, then verify
    # that FlashBoot reports firmware running.
    flash_ops_allowed = True
    if re_provision:
        # Check whether cm0 is open
        cm0_open = read_cm0_permissions(tool, target.register_map)
        if cm0_open:
            tool.disconnect()
            tool.connect(target.name, probe_id=probe_id, ap='cm0')
            tool.reset_and_halt(ResetType.HW)
        flash_ops_allowed = cm0_open or ap == 'cm4'
    reg_map = target.register_map
    if flash_ops_allowed:
        erase_status_partition(tool, target)
        erase_scratch_area(tool, target)
    # Read firmware status
    logger.info('Read FlashBoot firmware status:')
    sfb_fw_status = tool.read32(reg_map.ENTRANCE_EXAM_FW_STATUS_REG)
    if re_provision:
        expected = reg_map.ENTRANCE_EXAM_FW_STATUS_RE_VAL
    else:
        expected = reg_map.ENTRANCE_EXAM_FW_STATUS_VAL
    received = sfb_fw_status & reg_map.ENTRANCE_EXAM_FW_STATUS_MASK
    logger.info(f'FlashBoot firmware status = {hex(sfb_fw_status)}')
    logger.info(f'Received FB_FW_STATUS = {hex(received)}')
    logger.info(f'Expected FB_FW_STATUS = {hex(expected)}')
    if expected != received:
        # Mismatch is reported but does not abort; decode it when known.
        try:
            status = sfb_status_codes[received]
            logger.info(f'SFB status: {status["status"]}: {status["desc"]}')
        except KeyError:
            logger.debug(f'Unexpected SFB status {hex(received)}')
    # Open cm0 AP
    if control_dap_cert:
        logger.info('Opening cm0 AP')
        cm_open = dap_control(tool, reg_map, 0, 1, False, control_dap_cert)
        logger.info(f'cm0 AP {"open" if cm_open else "closed"}')
        if cm_open:
            logger.info('Use cm0 AP')
            tool.disconnect()
            tool.connect(target.name, probe_id=probe_id, ap='cm0')
            tool.set_skip_reset_and_halt(True)
        flash_ops_allowed = cm_open
    if erase_boot:
        if flash_ops_allowed:
            erase_slots(tool, target, 'BOOT')
        else:
            logger.warning('Skip erasing BOOT slot, AP cm0 is closed')
    else:
        logger.info('BOOT slot will remain the same and can affect '
                    'rollback counter')
    smif_enabled = len(target.policy_parser.get_smif_resources()) > 0
    if smif_enabled:
        if flash_ops_allowed:
            erase_smif(tool, target)
        else:
            logger.warning('Skip erasing external memory, AP cm0 is closed')
    context = EncryptedProgrammingContext(AesHeaderStrategy)
    # Program user application
    for encrypted, app in target.policy_parser.get_user_apps():
        # Relative app paths are resolved against the policy directory.
        if not os.path.isabs(app):
            app = os.path.join(target.policy_parser.policy_dir, app)
        if encrypted:
            logger.info(f'Programming encrypted user application \'{app}\':')
            result = context.program(tool, target, app)
            if not result:
                logger.error('User application encrypted programming failed')
                return ProvisioningStatus.FAIL
        else:
            if flash_ops_allowed:
                current_ap = tool.get_ap()
                tool.set_ap(AP.CMx)
                logger.info(f'Programming user application \'{app}\':')
                tool.halt()
                tool.program(app)
                tool.set_ap(current_ap)
            else:
                logger.warning('Skip programming user application, '
                               'AP cm0 is closed')
    # Program bootloader
    is_custom_bootloader = target.policy_parser.is_custom_bootloader()
    is_encrypted_bootloader = target.policy_parser.is_encrypted_bootloader()
    if is_custom_bootloader and is_encrypted_bootloader:
        cy_bootloader_hex = target.policy_parser.get_cybootloader_hex()
        logger.info(f'Programming encrypted bootloader '
                    f'\'{cy_bootloader_hex}\':')
        result = context.program(tool, target, cy_bootloader_hex)
        if not result:
            logger.error('Bootloader encrypted programming failed')
            return ProvisioningStatus.FAIL
    else:
        if not flash_ops_allowed:
            logger.warning('Skip programming bootloader, AP cm0 is closed')
        elif bootloader is None:
            logger.warning('Skip programming bootloader')
        else:
            sleep(3)
            current_ap = tool.get_ap()
            tool.set_ap(AP.CMx)
            logger.info(f'Programming bootloader \'{bootloader}\':')
            tool.halt()
            tool.program(bootloader)
            logger.info('Programming bootloader complete')
            tool.set_ap(current_ap)
    if control_dap_cert:
        tool.set_skip_reset_and_halt(False)
    if flash_ops_allowed and re_provision:
        tool.disconnect()
        tool.connect(target.name, probe_id=probe_id, ap=ap)
        tool.reset(ResetType.HW)
        sleep(3)
    if not re_provision:
        # First-time provisioning also captures the device public key.
        _save_device_public_key(tool, target)
    # Run provisioning syscall
    logger.info('Run provisioning syscall:')
    is_exam_pass, response = provision_keys_and_policies(tool, prov_cmd_jwt,
                                                         target.register_map)
    if not is_exam_pass:
        return ProvisioningStatus.FAIL
    _save_device_response(target, response)
    tool.reset()
    if not target.policy_parser.is_sys_ap_enabled():
        if not target.policy_parser.is_cmx_ap_enabled(re_provision):
            logger.info('All APs closed by policy. Final verification is '
                        'unavailable.')
            return ProvisioningStatus.OK
        else:
            tool.set_ap(AP.CMx)
            logger.debug(f'Access through {tool.get_ap()}')
    # Final verification: FlashBoot must report firmware running on CM0.
    sleep(3)
    sfb_fw_status = tool.read32(reg_map.ENTRANCE_EXAM_FW_STATUS_REG)
    logger.info(f'FlashBoot firmware status = {hex(sfb_fw_status)}')
    is_exam_pass = sfb_fw_status == reg_map.FB_FW_STATUS_FIRMWARE_RUNNING_CM0
    if not is_exam_pass:
        logger.error('FlashBoot firmware status is not as expected')
    return ProvisioningStatus.OK if is_exam_pass else ProvisioningStatus.FAIL
def read_cm0_permissions(tool, reg_map):
    """Return True if the policy stored on the device leaves the cm0 access
    port open; False when the policy cannot be read."""
    logger.info('Checking cm0 AP permissions')
    passed, data = get_prov_details(tool, reg_map, FB_POLICY_JWT)
    if not passed or len(data) == 0:
        logger.error('Failed to read policy from device while getting AP '
                     'permission')
        logger.warning('Flash operations will be skipped')
        return False
    jwt_body = Crypto.readable_jwt(data)
    silicon_policy = PolicyParser(jwt_body['payload'])
    cm0_open = silicon_policy.is_cmx_ap_enabled(True)
    logger.info(f'cm0 AP {"open" if cm0_open else "closed"}')
    return cm0_open
def _save_device_public_key(tool, target):
    """Read the device public key and persist it in both JWK and PEM form.

    Failures are logged and swallowed: a missing public key file should not
    abort provisioning.
    """
    try:
        jwk_path, pem_path = target.policy_parser.device_public_key_path()
        key = target.key_reader.read_public_key(tool, KeyId.DEVICE, 'jwk')
        if key:
            with open(jwk_path, "w") as f:
                f.write(json.dumps(key, indent=4))
            # Convert the saved JWK to PEM (public part only).
            pem = PemKey(jwk_path)
            pem.save(pem_path, private_key=False)
    except Exception as e:
        logger.error('Failed to save device public key')
        logger.error(e)
def _save_device_response(target, response):
    """Persist the device's provisioning-response JWT next to the
    provisioning packets.

    Failures are logged and swallowed so a diagnostics write cannot abort
    provisioning. Nothing is written (or logged) when `response` is empty.
    """
    try:
        packet_dir = target.policy_parser.get_provisioning_packet_dir()
        filename = os.path.join(packet_dir, prov_packet.DEVICE_RESPONSE_JWT)
        if response:
            with open(filename, 'w') as f:
                f.write(response)
            # Fix: the message previously logged a literal "(unknown)"
            # placeholder instead of interpolating the destination path.
            logger.info(f'Saved device response to \'{filename}\'')
    except Exception as e:
        logger.error('Failed to save device response')
        logger.error(e)
| StarcoderdataPython |
159111 | <filename>src/covid19model/ABC/distances.py
# -*- coding: utf-8 -*-
"""
Library of distance functions.
"""
import numpy as np
from numba import jit
# %% FUNCTION DEFINITIONS
###############################################################################
def Euclidean(s_param_dist, s_obs):
    """
    Euclidean distance function, used for SMC ABC.
    Calculate the Euclidean distance between the observed summary statistics (`s_obs`) and the
    simulated summary statistics (`s_theta_dist`) generated with the `N` parameter
    particles in the SMC distribution.
    Parameters
    ----------
    s_param_dist : ndarray
        simulated summary statistic vector; dimensions: `(N, n_draws_per_parameter, n_summary_stat)`
    s_obs : ndarray
        observed summary statistic vector; dimensions: `(n_summary_stat,)`
    Returns
    -------
    ndarray
        Euclidean distances; dimensions: `(N, n_draws_per_parameter)`
    """
    # 2-norm of the residuals along the summary-statistic axis.
    residuals = s_param_dist - s_obs
    return np.sqrt(np.sum(residuals ** 2, axis=2))
def SSRE(s_param_dist, s_obs):
    """
    Sum of squared relative errors, used for SMC ABC.
    Calculate a sum of squared errors, relative to the magnitude of the observed summary statistics.
    If the absolute value of the summary statistic is smaller than 1, the squared error is taken.
    Parameters
    ----------
    s_param_dist : ndarray
        simulated summary statistic vector; dimensions: `(N, n_draws_per_parameter, n_summary_stat)`
    s_obs : ndarray
        observed summary statistic vector; dimensions: `(n_summary_stat,)`
    Returns
    -------
    ndarray
        Euclidean distances; dimensions: `(N, n_draws_per_parameter)`
    """
    squared_errors = (s_param_dist - s_obs) ** 2
    # Scale each squared error by |s_obs|, but never divide by less than 1.
    scale = np.maximum(np.abs(s_obs), 1)
    return np.sum(squared_errors / scale, axis=2)
# =============================================================================
# # Compositional Data
# =============================================================================
@jit(nopython = True)
def logratio(x):
    """
    Compute log ratios of a compositional vector.
    Parameters
    ----------
    x : ndarray
        d-part compositional vector (on the reduced simplex).
    Returns
    -------
    lr: ndarray
        log ratios of compositional vector. 1-D array size d**2
    """
    # Explicit double loop is kept deliberately: the vectorized meshgrid
    # variant below is not supported in numba nopython mode.
    d = x.shape[0]
    lr = np.empty((d,d))
    for i in range(d):
        for j in range(d):
            lr[i,j] = np.log(x[i]/x[j])
    return lr.ravel()
    ## no numba JIT:
    ##--------------
    # x_i,x_j = np.meshgrid(x,x)
    # ratios = np.ravel(x_i/x_j)
    # return np.log(ratios)
@jit(nopython = True)
def Aitchison(s_param_dist,s_obs):
    """
    Aitchison distance function, used for SMC ABC.
    Calculate the Aitchison distance between the observed summary statistic (`s_obs`),
    simulated summary statistics (`s_theta_dist`) generated with the `N` parameter
    particles in the SMC distribution. This function assumes that the summary
    statistics are d-part compositional vectors (on the reduced simplex).
    Parameters
    ----------
    s_param_dist : ndarray
        simulated summary statistic vector; dimensions: `(N, n_draws_per_parameter, d)`
    s_obs : ndarray
        observed summary statistic vector; dimensions: `(d,)`
    Returns
    -------
    ndarray
        Aitchison distances; dimensions: `(N, n_draws_per_parameter)`
    """
    # Loops (not broadcasting) are used so the function stays numba-compatible.
    N, n_draws_per_parameter, d = s_param_dist.shape
    lr_obs = logratio(s_obs) # compute logratios of observation d-part comp
    distances = np.empty((N,n_draws_per_parameter)) #initialise distance array
    for i_par in range(N):
        for i_draw in range(n_draws_per_parameter):
            lr_sim = logratio(s_param_dist[i_par,i_draw,:]) # compute logratios of simulation d-part comp
            distances[i_par,i_draw] = np.sqrt(np.sum((lr_obs-lr_sim)**2)/(2*d)) #compute Aitchison dist
    return distances
@jit(nopython = True)
def Aitchison_timeseries(s_param_dist,s_obs):
    """
    Aitchison distance function on timeseries of comp. data, used for SMC ABC.
    Calculate the Aitchison distance between the observed summary statistic (`s_obs`),
    simulated summary statistics (`s_param_dist`) generated with the `N` parameter
    particles in the SMC distribution. This function assumes that the summary
    statistics are timeseries of d-part compositional vectors (on the reduced simplex) and the distance is computed as the
    sum of the Aitchison distances at every t.
    Parameters
    ----------
    s_param_dist : ndarray
        simulated summary statistic vector; dimensions: `(N, n_draws_per_parameter, n_t, d)`
    s_obs : ndarray
        observed summary statistic vector; dimensions: `(nt,d)`
    Returns
    -------
    ndarray
        sum of Aitchison distances; dimensions: `(N, n_draws_per_parameter)`
    """
    N, n_draws_per_parameter, n_t, d = s_param_dist.shape
    # Accumulate the per-timestep Aitchison distances into one total per draw.
    distances = np.zeros((N,n_draws_per_parameter))
    for t in range(n_t): # at every timeste:
        lr_obs = logratio(s_obs[t,:]) # compute logratios of observation d-part comp
        for i_par in range(N):
            for i_draw in range(n_draws_per_parameter):
                lr_sim = logratio(s_param_dist[i_par,i_draw,t,:]) # compute logratios of simulation d-part comp
                distances[i_par,i_draw] += np.sqrt(np.sum((lr_obs-lr_sim)**2)/(2*d)) # add Aitchison @ t to total dist
    return distances
# %% TEST FUNCTIONS
###############################################################################
# compositional vectors
# NOTE(review): these smoke tests execute (and trigger numba compilation) at
# import time, and their results are discarded — confirm whether they should
# live under an `if __name__ == "__main__":` guard instead.
x = np.array([0.8 ,0.15, 0.04, 0.01])
y_array = np.array([[0.01 ,0.04 ,0.15 ,0.8 ],
                    [0.1  ,0.3  ,0.4  ,0.2 ],
                    [0.25,0.25,0.25,0.25],
                    [0.8 ,0.15, 0.04, 0.01]]).reshape((1,4,4))
Aitchison(y_array,x)
# time series of compositional vectors
x_ts = np.array([[0.5 ,0.25, 0.15, 0.1],
                 [0.8 ,0.15, 0.04, 0.01]])
y_array_ts = np.array([[[0.1 ,0.15, 0.25, 0.5],[0.01 ,0.04 ,0.15 ,0.8]],
                       [[0.25,0.25,0.25,0.25],[0.5 ,0.25, 0.15, 0.1]],
                       [[0.25,0.25,0.25,0.25],[0.8 ,0.15, 0.04, 0.01]],
                       [[0.5 ,0.25, 0.15, 0.1],[0.8 ,0.15, 0.04, 0.01]]]).reshape((1,4,2,4))
Aitchison_timeseries(y_array_ts,x_ts)
| StarcoderdataPython |
1846692 | <filename>tests/settings.py
# -*- coding: utf-8 -*-
# Standard library imports
import os
# Third party imports
from django import VERSION as DJANGO_VERSION
from django.conf import global_settings as default_settings
# Local application / specific library imports
# Absolute directory containing this settings module; test paths hang off it.
TEST_ROOT = os.path.abspath(os.path.dirname(__file__))


def location(x):
    """Return the absolute path of *x*, resolved relative to TEST_ROOT."""
    return os.path.join(TEST_ROOT, x)
class DisableMigrations(object):
    """Mapping stand-in for MIGRATION_MODULES that disables migrations for
    every app during tests."""

    def __contains__(self, item):
        # Pretend to hold an entry for every app label.
        return True

    def __getitem__(self, item):
        # Django >= 1.9 disables migrations with None; older versions need a
        # bogus module name instead.
        return None if DJANGO_VERSION >= (1, 9) else "notmigrations"
# Minimal Django settings for running the djangocms_htmlsitemap test suite.
DEBUG = False
TEMPLATE_DEBUG = False
SECRET_KEY = "key"
# In-memory SQLite keeps the test run self-contained and fast.
DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}}
# Django 1.8 introduced the TEMPLATES setting; older versions use the legacy
# TEMPLATE_* settings below.
if DJANGO_VERSION >= (1, 8):
    TEMPLATES = [
        {
            "BACKEND": "django.template.backends.django.DjangoTemplates",
            "DIRS": (location("_testsite/templates"),),
            "APP_DIRS": True,
            "OPTIONS": {
                "context_processors": [
                    "django.contrib.auth.context_processors.auth",
                    "django.template.context_processors.debug",
                    "django.template.context_processors.i18n",
                    "django.template.context_processors.media",
                    "django.template.context_processors.request",
                    "django.template.context_processors.static",
                    "django.template.context_processors.tz",
                    "django.contrib.messages.context_processors.messages",
                    "cms.context_processors.cms_settings",
                    "sekizai.context_processors.sekizai",
                ]
            },
        }
    ]
else:
    TEMPLATE_CONTEXT_PROCESSORS = default_settings.TEMPLATE_CONTEXT_PROCESSORS + (
        "django.core.context_processors.request",
        "cms.context_processors.cms_settings",
    )
    TEMPLATE_DIRS = (location("_testsite/templates"),)
INSTALLED_APPS = [
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "django.contrib.messages",
    "django.contrib.staticfiles",
    "django.contrib.sessions",
    "django.contrib.sites",
    "cms",
    "treebeard",
    "menus",
    "sekizai",
    "djangocms_admin_style",
    "djangocms_htmlsitemap",
    "tests",
] + ["django.contrib.admin"]
# Skip all app migrations during tests (see DisableMigrations above).
MIGRATION_MODULES = DisableMigrations()
TEST_RUNNER = "django.test.runner.DiscoverRunner"  # Hide checks
CMS_TEMPLATES = (("index.html", "Index"), ("simple.html", "Simple"))
MIDDLEWARE_CLASSES = (
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.middleware.csrf.CsrfViewMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",
    "django.contrib.messages.middleware.MessageMiddleware",
    "django.middleware.locale.LocaleMiddleware",
    "django.middleware.common.CommonMiddleware",
    "cms.middleware.user.CurrentUserMiddleware",
    "cms.middleware.page.CurrentPageMiddleware",
    "cms.middleware.toolbar.ToolbarMiddleware",
    "cms.middleware.language.LanguageCookieMiddleware",
)
MEDIA_ROOT = "/media/"
STATIC_URL = "/static/"
USE_TZ = True
LANGUAGE_CODE = "en"
LANGUAGES = (("en", "English"), ("fr", "Français"))
SITE_ID = 1
ROOT_URLCONF = "tests._testsite.urls"
# Setting this explicitly prevents Django 1.7+ from showing a
# warning regarding a changed default test runner. The test
# suite is run with py.test, so it does not matter.
SILENCED_SYSTEM_CHECKS = ["1_6.W001"]
| StarcoderdataPython |
11249782 | <filename>dataviva/utils/title_case.py<gh_stars>0
import re
''' Titlecase Function '''
def title_case(string):
    """Convert *string* to title case.

    Small connector words (English and Portuguese) stay lowercase, known
    acronyms become uppercase, everything else is capitalized — except the
    very first word, which is always re-capitalized at the end.
    """
    exceptions = ['A', 'An', 'And', 'As', 'At', 'But', 'By', 'For', 'From', 'If', \
                  'In', 'Into', 'Near', 'Nor', 'Of', 'On', 'Onto', 'Or', 'That', \
                  'The', 'To', 'With', 'Via', 'Vs', 'Vs.', \
                  'Um', 'Uma', 'E', 'Como', 'Em', 'No', 'Na', 'Mas', 'Por', \
                  'Para', 'Pelo', 'Pela', 'De', 'Do', 'Da', 'Se', 'Perto', 'Nem', \
                  'Ou', 'Que', 'O', 'A', 'Com']
    uppers = ['Id', 'Tv', 'R&d', "P&d", "It", "Ti"]

    def transform(word):
        # Delimiters captured by the split pass through unchanged here.
        capitalized = word.capitalize()
        if word in exceptions or capitalized in exceptions:
            return word.lower()
        if word in uppers or capitalized in uppers:
            return word.upper()
        return capitalized

    words = [transform(w) for w in re.split('(\s|-|\/|\()', string)]
    # The leading word is always capitalized, even if it was an exception.
    words[0] = words[0].capitalize()
    return "".join(words)
| StarcoderdataPython |
8050023 | <filename>library_api/models.py
from datetime import date
from decimal import Decimal, getcontext
from django.db import models
from django.db.models import Lookup, Field
from rest_framework.exceptions import ValidationError
from library_api.util import Penalty, InterestPerDay
@Field.register_lookup
class NotEqualLookup(Lookup):
    """Custom ``__ne`` queryset lookup: registered on every Field so querysets
    can filter with ``field__ne=value`` (compiles to SQL ``lhs <> rhs``)."""
    lookup_name = 'ne'
    def as_sql(self, compiler, connection):
        lhs, lhs_params = self.process_lhs(compiler, connection)
        rhs, rhs_params = self.process_rhs(compiler, connection)
        params = lhs_params + rhs_params
        return '%s <> %s' % (lhs, rhs), params
class Book(models.Model):
    """Catalogue entry for a book; `reserved` tracks whether it is currently
    out on a reservation."""
    title = models.CharField(max_length=255)
    subtitle = models.CharField(max_length=255, null=True, blank=True)
    author = models.CharField(max_length=255)
    isbn = models.CharField(max_length=20, null=True, blank=True)
    edition = models.SmallIntegerField(default=1)
    pages = models.SmallIntegerField(default=0, null=True)
    reservation_price = models.DecimalField(max_digits=8, decimal_places=2)
    reserved = models.BooleanField(default=False)
    class Meta:
        db_table = 'tbl_book'
        managed = True
        verbose_name = 'Book'
        verbose_name_plural = 'Books'
        unique_together = ('title', 'author', 'edition'),
        ordering = ['title', 'author', 'edition']
    def save(self, *args, **kwargs):
        """Uppercase every CharField value, reject duplicates, then persist."""
        for f in self._meta.fields:
            if isinstance(f, models.CharField):
                field_name = f.attname
                val = getattr(self, field_name, False)
                if val:
                    setattr(self, field_name, val.upper())
        # NOTE(review): this duplicate check does not exclude self.pk, so
        # re-saving an existing row whose (title, author, edition, reserved)
        # combination is already stored raises ValidationError — confirm
        # whether updates are intended to be blocked here.
        if Book.objects.filter(title=self.title, author=self.author, edition=self.edition,
                               reserved=self.reserved).first():
            raise ValidationError(detail="Book already exists!")
        super(Book, self).save(*args, **kwargs)
    def __str__(self):
        return f'{self.title}, {self.author}, Ed {self.edition}'
class Client(models.Model):
    """A library member who can reserve books."""
    name = models.CharField(max_length=100)
    username = models.CharField(max_length=80, unique=True)
    email = models.EmailField(unique=True)
    class Meta:
        db_table = 'tbl_client'
        managed = True
        verbose_name = "Client"
        verbose_name_plural = "Clients"
        unique_together = ('name', 'email'),
        ordering = ['name', 'username']
    def __str__(self):
        return f'{self.username}'
class Reservation(models.Model):
    """A client's loan of a book.

    Saving a reservation flags the related book as reserved. The ``tax``
    property computes the amount owed (base price + penalty + daily
    interest) once the loan is at least one day old.
    """
    # Maximum number of loan days before penalties should apply.
    # NOTE(review): MAX_DAYS is currently never referenced by delayed_days
    # or tax -- confirm whether a grace period was intended.
    MAX_DAYS = 3
    # FIX: removed the dead ``tax = 0`` class attribute that used to live
    # here; it was unconditionally shadowed by the ``tax`` property below
    # and therefore unreachable.
    book = models.ForeignKey('Book', on_delete=models.PROTECT, related_name='reservation_books')
    client = models.ForeignKey('Client', on_delete=models.PROTECT, related_name='client_reservation')
    reserved_at = models.DateField(auto_now_add=True)
    returned_at = models.DateField(null=True, blank=True)
    active = models.BooleanField(default=True)
    class Meta:
        db_table = 'tbl_reservation'
        managed = True
        verbose_name = "Reservation"
        verbose_name_plural = "Reservations"
        ordering = ['-reserved_at', 'book__title']
    def save(self, *args, **kwargs):
        """Persist the reservation, then mark the related book reserved."""
        super(Reservation, self).save(*args, **kwargs)
        book = self.book
        book.reserved = True
        book.save()
    @property
    def delayed_days(self):
        """Days elapsed since the reservation was made.

        NOTE(review): despite the name this is the total loan duration,
        not days past MAX_DAYS -- confirm intended semantics.
        """
        today = date.today()
        loan_days = today - self.reserved_at
        return loan_days.days
    @property
    def tax(self):
        """Amount owed: price plus penalty and per-day interest when the
        loan is at least one day old; Decimal(0) otherwise."""
        getcontext().prec = 4
        if self.delayed_days > 0:
            reservation_price = self.book.reservation_price
            penalty = Penalty(self.delayed_days).calculate(reservation_price) - reservation_price
            interest_per_day = InterestPerDay(self.delayed_days).calculate(reservation_price) - reservation_price
            return reservation_price + penalty + interest_per_day
        return Decimal(0)
    def __str__(self):
        return f'{self.client.username}: {self.book.__str__()}'
| StarcoderdataPython |
4936117 | #!/usr/bin/env python
import sys
import re
from neopixel import *
# LED strip configuration:
LED_COUNT = 300 # Number of LED pixels.
LED_PIN = 18 # GPIO pin connected to the pixels (must support PWM!).
LED_FREQ_HZ = 800000 # LED signal frequency in hertz (usually 800khz)
LED_DMA = 5 # DMA channel to use for generating signal (try 5)
LED_INVERT = False # True to invert the signal (when using NPN transistor level shift)
LED_BRIGHTNESS = 255 # Set to 0 for darkest and 255 for brightest
if __name__ == "__main__":
opts = []
if len(sys.argv) > 1:
for i in range(1,len(sys.argv)):
if sys.argv[i] == '-h' or sys.argv[i] == '--help':
opts.append('h')
if 'h' in opts:
print "Usage:"
print "<led_index | \"all\">"
print "\tLight given LED or all LEDs to blue (0,0,255)."
print "<led_index | \"all\"> 0"
print "\tSet given LED or all LEDs to off (0,0,0)."
print "<led_index | \"all\"> <r> <g> <b>"
print "\tSet given LED or all LEDs to the given color (r,g,b)."
print "\"q\""
print "\tQuit."
exit()
strip = Adafruit_NeoPixel(LED_COUNT, LED_PIN, LED_FREQ_HZ, LED_DMA, LED_INVERT, LED_BRIGHTNESS)
strip.begin()
while True:
sys.stdout.write(">")
sys.stdout.flush
line = sys.stdin.readline().rstrip()
tokens = line.split(" ")
if tokens[0] == "q":
break
try:
color = Color(0,0,255)
if len(tokens) == 2 and int(tokens[1]) == 0:
color = Color(0,0,0)
if len(tokens) == 4:
try:
color = Color(int(tokens[1]),int(tokens[2]),int(tokens[3]))
except ValueError:
pass
if tokens[0] == "all":
for i in range(strip.numPixels()):
strip.setPixelColor(i,color)
else:
p = re.compile('\d+-\d+')
m = p.match(tokens[0])
if m:
indices = m.group().split('-')
for i in range(int(indices[0]),int(indices[1])+1):
strip.setPixelColor(i,color)
else:
strip.setPixelColor(int(tokens[0]),color)
strip.show()
except ValueError:
pass
# for i in range(0, strip.numPixels()):
# strip.setPixelColor(i,Color(0,0,0))
# strip.show()
print "Done!"
| StarcoderdataPython |
8041730 | import torch
import torch.nn as nn
import math
import torch.nn.functional as F
class Triangle_transform(nn.Module):
    """Triangle point transformation.

    Each of the ``output_dim`` learnable positions ``t`` maps an input
    point ``(b, d)`` to ``relu(d - |t - b|)``: a triangular bump centred
    at ``t`` whose height is given by the point's second coordinate.
    """
    def __init__(self, output_dim):
        """``output_dim``: number of learnable t positions."""
        super().__init__()
        self.output_dim = output_dim
        # Small random initialisation around zero.
        self.t_param = torch.nn.Parameter(
            torch.randn(output_dim)*0.1, requires_grad=True)
    def forward(self, x):
        """Map points of shape [N, 2] to activations of shape [N, output_dim]."""
        first_coord = x[:, 0].unsqueeze(1)   # [N, 1]
        second_coord = x[:, 1].unsqueeze(1)  # [N, 1]
        return torch.nn.functional.relu(second_coord - torch.abs(self.t_param - first_coord))
def batch_to_tensor(batch, external_tensor, attribute='x'):
    """Convert a PyTorch-Geometric style batch into a dense, zero-padded tensor.

    ``external_tensor`` is sliced per graph using the batch's ``__slices__``
    offsets for ``attribute``, and the per-graph chunks are padded to a
    common length.

    Returns:
        stacked_tensor: [num_graphs, max_num_nodes, D], zero-padded.
        mask: bool [num_graphs, max_num_nodes], True on real (non-padded) rows.
        mask_zeros: bool [num_graphs, max_num_nodes], True where a row has
            any non-zero entry (all-zero rows, padded or real, are False).
    """
    offsets = batch.__slices__[attribute]
    num_graphs = len(batch.y)
    chunks = [external_tensor[offsets[g]:offsets[g + 1]] for g in range(num_graphs)]
    stacked_tensor = torch.nn.utils.rnn.pad_sequence(chunks, batch_first=True)
    mask = torch.zeros(stacked_tensor.shape[:2])
    for g in range(num_graphs):
        mask[g, :(offsets[g + 1] - offsets[g])] = 1
    mask_zeros = (stacked_tensor != 0).any(2)
    return stacked_tensor, mask.to(bool), mask_zeros.to(bool)
class Gaussian_transform(nn.Module):
    """Gaussian point transformation.

    Each point is scored against ``output_dim`` learnable centres ``t``
    with an isotropic Gaussian:
    ``exp(-((x0 - t)^2 + (x1 - t)^2) / (2 * sigma^2))``.
    Note the same scalar ``t`` is shared by both coordinates of a centre.
    """
    def __init__(self, output_dim):
        """``output_dim``: number of learnable Gaussian centres."""
        super().__init__()
        self.output_dim = output_dim
        self.t_param = torch.nn.Parameter(
            torch.randn(output_dim)*0.1, requires_grad=True)
        # Shared learnable bandwidth, initialised to 1.
        self.sigma = torch.nn.Parameter(torch.ones(1), requires_grad=True)
    def forward(self, x):
        """Map points of shape [N, 2] to scores of shape [N, output_dim]."""
        diff = x.unsqueeze(2) - self.t_param       # [N, 2, output_dim]
        sq_dist = diff.pow(2).sum(dim=1)           # [N, output_dim]
        denom = 2 * self.sigma.pow(2)
        return torch.exp(-sq_dist / denom)
class Line_transform(nn.Module):
    """Line point transformation: one learned affine projection per line.

    Each point (x0, x1) is projected onto ``output_dim`` lines through a
    single ``nn.Linear(2, output_dim)`` layer (weights plus bias).
    """
    def __init__(self, output_dim):
        """``output_dim``: number of lines to project onto."""
        super().__init__()
        self.output_dim = output_dim
        self.lin_mod = torch.nn.Linear(2, output_dim)
    def forward(self, x):
        """Map points of shape [N, 2] to projections of shape [N, output_dim]."""
        projected = self.lin_mod(x)
        return projected
class RationalHat_transform(nn.Module):
    """Rational-hat coordinate function.

    As defined in Hofer et al., "Learning representations of persistence
    barcodes", JMLR 20(126):1-45, 2019: for a learnable centre ``c`` and
    radius ``r``, a point ``x`` is mapped to
    ``1 / (1 + ||x - c||_1)  -  1 / (1 + | |r| - ||x - c||_1 |)``.
    """
    def __init__(self, output_dim, input_dim = 1):
        """``output_dim``: number of hat functions; ``input_dim``: point dimension."""
        super().__init__()
        self.output_dim = output_dim
        self.c_param = torch.nn.Parameter(
            torch.randn(input_dim, output_dim)*0.1, requires_grad=True)
        self.r_param = torch.nn.Parameter(
            torch.randn(1, output_dim)*0.1, requires_grad=True)
    def forward(self, x):
        """Map points of shape [N, input_dim] to values of shape [N, output_dim]."""
        # L1 distance from every point to every centre: [N, output_dim].
        l1_dist = torch.norm(x[:, :, None] - self.c_param, p=1, dim=1)
        near_term = 1 + l1_dist
        ring_term = 1 + torch.abs(torch.abs(self.r_param) - l1_dist)
        return (1 / near_term) - (1 / ring_term)
class MAB(nn.Module):
    """Multihead Attention Block (as in the Set Transformer architecture).

    Multi-head attention of Q over K (keys and values are both projected
    from K), a residual connection, a residual ReLU feed-forward layer,
    optional LayerNorms, and optional key-side masking.
    """
    def __init__(self, dim_Q, dim_K, dim_V, num_heads, ln=False):
        super(MAB, self).__init__()
        self.dim_V = dim_V
        self.num_heads = num_heads
        self.fc_q = nn.Linear(dim_Q, dim_V)  # * num_heads)
        self.fc_k = nn.Linear(dim_K, dim_V)  # * num_heads)
        self.fc_v = nn.Linear(dim_K, dim_V)  # * num_heads)
        if ln:
            # Only created when ln=True; forward probes with getattr.
            self.ln0 = nn.LayerNorm(dim_V)
            self.ln1 = nn.LayerNorm(dim_V)
        self.fc_o = nn.Linear(dim_V, dim_V)
    def forward(self, Q, K, mask=None):
        """
        Q: [batch, m, dim_Q]; K: [batch, n, dim_K].
        mask should be of shape [batch, length]; truthy entries mark valid
        key positions (masked-out keys receive ~zero attention weight).
        Returns [batch, m, dim_V].
        """
        Q = self.fc_q(Q)
        K, V = self.fc_k(K), self.fc_v(K)
        # Split the feature dimension into heads and fold the heads into
        # the batch dimension so a single bmm handles all heads.
        dim_split = self.dim_V // self.num_heads
        Q_ = torch.cat(Q.split(dim_split, 2), 0)
        K_ = torch.cat(K.split(dim_split, 2), 0)
        V_ = torch.cat(V.split(dim_split, 2), 0)
        # Modification to handle masking: masked key positions get a large
        # negative logit so softmax assigns them ~zero weight.
        if mask is not None:
            mask_repeat = mask[:, None, :].repeat(
                self.num_heads, Q.shape[1], 1)
            before_softmax = Q_.bmm(K_.transpose(1, 2))/math.sqrt(self.dim_V)
            before_softmax[~mask_repeat] = -1e10
        else:
            before_softmax = Q_.bmm(K_.transpose(1, 2))/math.sqrt(self.dim_V)
        A = torch.softmax(before_softmax, 2)
        # Residual attention output; un-fold heads back into the feature dim.
        O = torch.cat((Q_ + A.bmm(V_)).split(Q.size(0), 0), 2)
        O = O if getattr(self, 'ln0', None) is None else self.ln0(O)
        O = O + F.relu(self.fc_o(O))
        O = O if getattr(self, 'ln1', None) is None else self.ln1(O)
        return O
class ISAB(nn.Module):
    """Induced Set Attention Block: attention routed through ``num_inds``
    learnable inducing points, costing O(n * num_inds) instead of O(n^2)."""
    def __init__(self, dim_in, dim_out, num_heads, num_inds, ln=False):
        super(ISAB, self).__init__()
        # Learnable inducing points, shared across the batch.
        self.I = nn.Parameter(torch.Tensor(1, num_inds, dim_out))
        nn.init.xavier_uniform_(self.I)
        self.mab0 = MAB(dim_out, dim_in, dim_out, num_heads, ln=ln)
        self.mab1 = MAB(dim_in, dim_out, dim_out, num_heads, ln=ln)
    def forward(self, X, mask):
        # Inducing points attend to X (mask hides padded elements), then X
        # attends back to the induced summary H.
        H = self.mab0(self.I.repeat(X.size(0), 1, 1), X, mask)
        return self.mab1(X, H)
class Set2SetMod(torch.nn.Module):
    """Applies an ISAB set-transform to the per-graph sets of a PyG-style
    batch, returning per-element outputs in flat (concatenated) form."""
    def __init__(self, dim_in, dim_out, num_heads, num_inds):
        super().__init__()
        self.set_transform = ISAB(dim_in=dim_in,
                                  dim_out=dim_out,
                                  num_heads=num_heads,
                                  num_inds=num_inds)
    def forward(self, x, batch, dim1_flag=False):
        # dim1_flag selects edge-indexed slicing ("edge_index" offsets)
        # instead of the default node-wise 'x' slicing.
        if dim1_flag:
            stacked_tensor, mask, mask_zeros = batch_to_tensor(
                batch, x, attribute="edge_index")
            out_ = self.set_transform(stacked_tensor, mask)
            # NOTE(review): this zeroes positions where the INPUT row was
            # non-zero (mask_zeros is True for non-zero rows) -- confirm
            # whether ~mask_zeros was intended here.
            out_[mask_zeros] = 0
            out = out_[mask]
        else:
            stacked_tensor, mask, mask_zeros = batch_to_tensor(batch, x)
            out_ = self.set_transform(stacked_tensor, mask)
            out = out_[mask]
        return out
#mod = ISAB(dim_in = 2, dim_out = 32, num_heads = 4, num_inds = 6, ln = False)
#x = torch.randn((2,12,2))
#y = mod(x, mask = torch.randint(high=2, size = (2,12)))
| StarcoderdataPython |
6685186 | import datetime
import hashlib
from django.db import models
from requests import get
from django.db import models # new
from django.shortcuts import reverse # new
from django.contrib.auth.models import AbstractUser
from django_mysql.models import ListCharField
from django.conf import settings
# Create your models here.
from rest_framework import serializers
class Movie(models.Model):
    """Movie metadata record (apparently mirroring an external feed;
    ``torrents`` and ``images`` hold serialized payloads as text)."""
    id = models.CharField(max_length=20, primary_key=True)
    imdb_id = models.CharField(max_length=10)
    title = models.CharField(max_length=255)
    year = models.CharField(max_length=4)
    slug = models.CharField(max_length=150)
    synopsis = models.TextField()
    runtime = models.CharField(max_length=4)
    country = models.CharField(max_length=4)
    last_updated = models.FloatField(max_length=16)
    released = models.IntegerField()
    certification = models.CharField(max_length=255)
    torrents = models.TextField()
    trailer = models.CharField(max_length=255)
    genres = models.CharField(max_length=255)
    images = models.TextField()
    rating = models.CharField(max_length=255)
    # _v: document version counter (presumably carried over from a
    # MongoDB-style "__v" field -- TODO confirm).
    _v = models.PositiveSmallIntegerField()
    def __str__(self):
        # BUG FIX: this method was copied from another model and referenced
        # self.author / self.edition, which do not exist on Movie and raised
        # AttributeError whenever the object was rendered as a string.
        return f'{self.title} ({self.year})'
class Ip(object):
    """Wrapper around an IP address that eagerly looks up its metadata via
    the public ipinfo.io HTTP API at construction time.

    Attributes:
        ip: the queried IP address string.
        response: parsed JSON dict returned by ipinfo.io.
    """
    def __init__(self, ip):
        self.ip = ip
        # Performs a blocking HTTP request on construction.
        self.response = self.ipinfo(ip)
    @staticmethod
    def ipinfo(ip):
        # BUG FIX: this method was declared without ``self``, so the
        # ``self.ipinfo(ip)`` call in __init__ passed the instance as ``ip``
        # and raised TypeError. @staticmethod keeps both ``self.ipinfo(ip)``
        # and ``Ip.ipinfo(ip)`` call styles working.
        url = "https://ipinfo.io/"
        resp = get(url + ip)
        return resp.json()
class User(AbstractUser):
    """Project user model; extends Django's AbstractUser unchanged so extra
    fields can be added later without a painful user-model migration."""
    pass
class Task(models.Model):
    """A kanban-style task with status/priority lanes and an md5-derived
    stable identifier generated on first save."""
    # Status lane constants and the choices tuple built from them.
    BACKLOG = 'BACKLOG'
    UP_NEXT = 'UP_NEXT'
    TODO = 'TODO'
    IN_PROGRESS = 'IN_PROGRESS'
    COMPLETED = 'COMPLETED'
    STATUSES = (
        (BACKLOG, BACKLOG),
        (UP_NEXT, UP_NEXT),
        (TODO, TODO),
        (IN_PROGRESS, IN_PROGRESS),
        (COMPLETED, COMPLETED)
    )
    # Priority constants and choices.
    LOW = 'LOW'
    MEDIUM = 'MEDIUM'
    HIGH = 'HIGH'
    PRIORITIES = (
        (LOW, LOW),
        (MEDIUM, MEDIUM),
        (HIGH, HIGH)
    )
    # Opaque 32-char hex identifier; generated in save() on first persist.
    ident = models.CharField(max_length=32, unique=True, db_index=True)
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)
    start_date = models.DateField(blank=True, null=True)
    due_date = models.DateField(blank=True, null=True)
    title = models.CharField(max_length=150)
    description = models.TextField(max_length=400)
    priority = models.CharField(max_length=15, choices=PRIORITIES, default=LOW)
    status = models.CharField(max_length=30, choices=STATUSES, default=BACKLOG)
    tags = ListCharField(
        base_field=models.CharField(max_length=10),
        size=6,
        max_length=(6 * 11)
    )
    owner = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        null = False,
        blank=False,
        on_delete=models.DO_NOTHING,
        related_name='tasks_as_owner'
    )
    def __str__(self):
        return self.title
    def get_absolute_url(self):
        """URL of this task's detail view, addressed by its ident."""
        return reverse('task:task_detail', kwargs={'task_ident': self.ident})
    def save(self, **kwargs):
        """Generate ``ident`` from the current time and title on first save.

        md5 is used purely as a compact id generator here, not for security.
        """
        if not self.ident:
            now = datetime.datetime.now()
            secure_hash = hashlib.md5()
            secure_hash.update(f'{now}:{self.title}'.encode('utf-8'))
            self.ident = secure_hash.hexdigest()
        super().save(**kwargs)
| StarcoderdataPython |
397211 | <filename>data_munging.py
import numpy as np
import scipy.misc
import matplotlib.pyplot as plt
# import matplotlib as mpl
import os
import colorsys
import cv2
import logging
import itertools
from colorcorrect.algorithm import grey_world
from annotation import get_annotation, get_bbs
from tools_plot import dispims
from contours import get_contours
# from pprint import pprint
from scipy.misc import imresize
logger = logging.getLogger(__name__)
plt.ion()
imargs = {'cmap': 'gray', 'interpolation': 'none'}
def crop_centered_box(im, xy, width, height, target_width=32, target_height=32):
    """Crop a fixed-size window centred on a bounding box.

    Takes the same input/output format as crop_and_rescale (hence some of
    the variable names): given an image and a bounding box described by its
    top-left corner ``xy = (x, y)``, ``width`` and ``height``, return the
    ``target_height x target_width`` window sharing the box's centre,
    shifted as needed to stay inside the image bounds.
    """
    # MODERNIZATION FIX: ``np.cast['int32'](...)`` was deprecated and removed
    # in NumPy 2.0; asarray + astype has identical semantics.
    yx = np.asarray(xy[::-1]).astype('int32')
    shape = (int(height), int(width))
    # Top-left corner of the target window so both boxes share a centre.
    target_x = int(np.floor(yx[1] + shape[1] / 2.0 - target_width / 2.0))
    target_y = int(np.floor(yx[0] + shape[0] / 2.0 - target_height / 2.0))
    # Clamp the window against the image borders.
    if target_x < 0:
        target_x = 0
    elif target_x + target_width >= im.shape[1]:
        target_x = im.shape[1] - target_width
    if target_y < 0:
        target_y = 0
    elif target_y + target_height >= im.shape[0]:
        target_y = im.shape[0] - target_height
    im_crop = np.take(np.take(im,
                              np.arange(target_y, target_y + target_height),
                              axis=0),
                      np.arange(target_x, target_x + target_width), axis=1)
    return im_crop
# end def crop_centered_box
def crop_and_rescale_nearest(im, xy, width, height,
                             target_width=32, target_height=32,
                             detect_width_list=[8, 16, 32, 64],
                             detect_height_list=[8, 16, 32, 64],
                             ):
    '''
    For multiple scale detection, the cropped box for each groundtruth is the
    smallest box that can fully cover the groundtruth label.
    And then scaled to the fixed size: target_width * target_height
    '''
    # NOTE: the mutable default lists are kept for interface compatibility;
    # they are converted to arrays and never mutated.
    detect_width_list = np.array(detect_width_list)
    detect_height_list = np.array(detect_height_list)
    def find_ind(value, value_list):
        # Index of the smallest candidate size >= value, or the largest
        # candidate when value exceeds them all.
        if value < max(value_list):
            ind = np.where(value_list >= value)[0][0]
        else:
            ind = len(value_list) - 1
        return ind
    ind_width = find_ind(width, detect_width_list)
    ind_height = find_ind(height, detect_height_list)
    # Use the larger of the two indices so the crop covers both dimensions.
    ind = max(ind_width, ind_height)
    im_crop = crop_centered_box(im, xy, width, height,
                                target_width=detect_width_list[ind],
                                target_height=detect_height_list[ind])
    im_resized = imresize(im_crop, (target_height, target_width))
    # FIX: removed a dead "if False:" debug block that used Python-2-only
    # print statements (a syntax error under Python 3).
    return im_resized
# end def crop_and_rescale_nearest
def crop_and_rescale(im, xy, width, height, target_width=32, target_height=32):
    """Crop the bounding box, pad it square, and rescale.

    Given image and bounding box specified by xy (top-left corner), width,
    height: the shorter box dimension is padded so the crop is square, the
    window is clamped to the image bounds, and the crop is resized to
    (target_height, target_width).

    NOTE(review): pad_before/pad_after use the ``/`` operator on ints and
    this module is Python-2 style elsewhere -- under Python 3 these become
    floats and would break np.arange bounds; confirm intended interpreter.
    """
    yx = np.cast['int32'](xy[::-1])
    shape = (int(height), int(width))
    small_dim = np.argmin(shape)  # pad smaller dimension
    large_dim = 1 - small_dim
    # pad up small dim so that we have square image
    pad_size = shape[large_dim] - shape[small_dim]
    pad_before = pad_size / 2
    pad_after = (pad_size / 2) + (pad_size % 2)  # extra goes at end
    small_bounds = (yx[small_dim] - pad_before,
                    yx[small_dim] + shape[small_dim] + pad_after)
    # bounds checking: did padding mean we exceed image dimensions?
    # if so, make window tight up against boundary
    if small_bounds[0] < 0:
        small_bounds = (0, shape[large_dim])
    if small_bounds[1] > im.shape[small_dim]:
        small_bounds = (im.shape[small_dim] - shape[large_dim],
                        im.shape[small_dim])
    # the min here is a fix for at least one of the annotations
    # which exceeds the image bounds
    large_bounds = (yx[large_dim], min(yx[large_dim] + shape[large_dim],
                                       im.shape[large_dim]))
    im_crop = np.take(np.take(im, np.arange(*small_bounds), axis=small_dim),
                      np.arange(*large_bounds), axis=large_dim)
    im_resized = imresize(im_crop, (target_height, target_width))
    return im_resized
# end def crop_and_rescale
def augment_bbs_by_trans(bbs, dist_trans_list):
    """Translation augmentation for bounding boxes.

    Every box ``((x, y), width, height)`` is replicated once per offset in
    the Cartesian product ``dist_trans_list x dist_trans_list``, with the
    offset added to its top-left corner. Returns a new list; the input
    list is left untouched.
    """
    offsets = tuple(itertools.product(dist_trans_list, dist_trans_list))
    return [((x + dx, y + dy), width, height)
            for (x, y), width, height in bbs
            for dx, dy in offsets]
def get_pos(data_path, target_height, target_width,
            flag_rescale=False, flag_multiscale=False, flag_rgb=True,
            detect_width_list=[8, 16, 32, 64],
            detect_height_list=[8, 16, 32, 64],
            flag_trans_aug=False,
            dist_trans_list=(-2, 0, 2),
            ):
    """ Get positive training examples.

    Reads every *.jpg under data_path, applies grey-world colour
    correction, and extracts one patch per annotated bounding box,
    each sized (target_height, target_width).

    Assumes the annotation file has the same name as the image but with
    no extension.

    flag_multiscale: crop the nearest detect_*_list size and rescale.
    flag_rescale: crop the box itself (square-padded) and rescale.
    otherwise: crop a fixed-size window centred on the box.
    flag_rgb: keep colour; if False, convert to gray via YIQ's Y channel.
    flag_trans_aug: if do translation augmentation with dist_trans_list.
    """
    jpg_train = [f for f in os.listdir(data_path) if f.find('.jpg') > 0]
    # moths = []
    moth_resized_list = []
    for i, j in enumerate(jpg_train):
        try:
            im = scipy.misc.imread(os.path.join(data_path, j))
        except IOError:
            # Skip unreadable images rather than aborting the whole run.
            logger.warn("There was a problem reading the jpg: %s." % j)
            continue
        im = grey_world(im)
        if not flag_rgb:
            # im will be assigned to the new gray image
            # the rollaxis command rolls the last (-1) axis back until the start
            # do a colourspace conversion
            im, im_i, im_q = colorsys.rgb_to_yiq(
                *np.rollaxis(im[..., :3], axis=-1))
        ann_file = j.split('.')[0]
        ann_path = os.path.join(data_path, ann_file)
        annotation = get_annotation(ann_path)
        # get all bbs for this image
        bbs = get_bbs(annotation)
        if flag_trans_aug:
            bbs = augment_bbs_by_trans(bbs, dist_trans_list)
        for xy, width, height in bbs:
            x, y = xy
            # determine if the xy, width, height are postive and within range
            values_with_in_range = width > 0 and height > 0 \
                and y >= 0 and y + height < im.shape[0] \
                and x >= 0 and x + width < im.shape[1]
            if not values_with_in_range:
                print "Bad boundingbox, ignored"
                print xy, width, height
                continue
            # remember y is indexed first in image
            # moth = im[y:(y + height), x:(x + width)]
            # moths.append(moth)
            # print moth.shape
            if flag_multiscale:
                moth_resized = crop_and_rescale_nearest(im, xy, width, height,
                                                        target_width, target_height,
                                                        detect_width_list=detect_width_list,
                                                        detect_height_list=detect_height_list)
            elif flag_rescale:
                moth_resized = crop_and_rescale(im, xy, width, height,
                                                target_width, target_height)
            else:
                moth_resized = crop_centered_box(im, xy, width, height,
                                                 target_width, target_height)
            moth_resized_list.append(moth_resized)
    return moth_resized_list
# end def get_pos
def get_neg(data_path, target_height, target_width, flag_rescale=True,
            flag_rgb=True, num_appr=2500,
            flag_trans_aug=False,
            dist_trans_list=(-2, 0, 2),
            ):
    '''
    Generate negative training examples (background patches that do not
    contain moths), harvested from the largest contours of each image.

    Approximately num_appr patches are produced in total, each of shape
    (target_height, target_width[, 3]).
    '''
    jpg_train = [f for f in os.listdir(data_path) if f.find('.jpg') > 0]
    blobs = []
    blob_resized_list = []
    for i, j in enumerate(jpg_train):
        try:
            im = scipy.misc.imread(os.path.join(data_path, j))
        except IOError:
            logger.warn("There was a problem reading the jpg: %s." % j)
            continue
        im = grey_world(im)
        # negative patches are extracted on color input image
        contours, c_area = get_contours(im)
        c_idx = np.argsort(c_area)[::-1]  # largest first
        contours = contours[c_idx]
        c_area = c_area[c_idx]
        if not flag_rgb:
            # convert to gray via the Y channel of a YIQ colourspace
            # conversion (rollaxis moves the channel axis to the front).
            im, im_i, im_q = colorsys.rgb_to_yiq(
                *np.rollaxis(im[..., :3], axis=-1))
        # Number of patches to take from this image so the total is
        # approximately num_appr. Floor division: must be an int slice.
        num_per_image = num_appr // len(jpg_train)
        if flag_trans_aug:
            num_per_image = num_per_image // (len(dist_trans_list) ** 2)
        bbs = []
        for c in contours[:num_per_image]:
            # boundingRect returns top-left corner (x, y), width and height
            bx, by, bw, bh = cv2.boundingRect(c)
            # BUG FIX: the box used to be stored as [xy, bx, by], i.e. the
            # corner coordinates were reused as width/height downstream.
            bbs.append([(bx, by), bw, bh])
        if flag_trans_aug:
            bbs = augment_bbs_by_trans(bbs, dist_trans_list)
        for (bx, by), bw, bh in bbs:
            # remember y is indexed first in image
            blob = im[by:(by + bh), bx:(bx + bw)]
            blobs.append(blob)
            # BUG FIX: these crops used a stale ``xy`` left over from the
            # box-collection loop above; use the current box corner instead.
            if flag_rescale:
                blob_resized = crop_and_rescale(im, (bx, by), bw, bh,
                                                target_width, target_height)
            else:
                blob_resized = crop_centered_box(im, (bx, by), bw, bh,
                                                 target_width, target_height)
            blob_resized_list.append(blob_resized)
    return blob_resized_list
# end def get_neg
def dispims_new(M, height, width, border=0, bordercolor=0.0, layout=None, **kwargs):
    """ Display a whole stack (columnwise) of vectorized matrices. Useful
    eg. to display the weights of a neural network layer.

    M: array of shape [height*width, num_images] (gray) or
       [channels, height*width, num_images] (colour, channels == 3).
    layout: optional (rows, cols) grid; defaults to a near-square grid.
    Extra kwargs are forwarded to plt.imshow.
    """
    numimages = M.shape[1]
    if layout is None:
        # Near-square default grid.
        n0 = int(np.ceil(np.sqrt(numimages)))
        n1 = int(np.ceil(np.sqrt(numimages)))
    else:
        n0, n1 = layout
    def gen_one_channel(M, height, width, bordercolor, border, n0, n1):
        # Build one big canvas filled with the border colour, then paste
        # each reshaped column into its grid cell (with border strips).
        im = bordercolor * \
            np.ones(
                ((height + border) * n0 + border, (width + border) * n1 + border), dtype='<f8')
        for i in range(n0):
            for j in range(n1):
                if i * n1 + j < M.shape[1]:
                    im[i * (height + border) + border:(i + 1) * (height + border) + border,
                       j * (width + border) + border:(j + 1) * (width + border) + border] = \
                        np.vstack((np.hstack((np.reshape(M[:, i * n1 + j], (height, width)),
                                              bordercolor * np.ones((height, border), dtype=float))),
                                   bordercolor *
                                   np.ones(
                                       (border, width + border), dtype=float)
                                   )
                                  )
        return im
    if M.ndim < 3:
        # the case of gray image or empty
        im = gen_one_channel(M, height, width, bordercolor, border, n0, n1)
        plt.imshow(im, cmap=plt.cm.gray, interpolation='nearest', **kwargs)
    elif M.ndim == 3:
        # Colour: render each channel separately, then stack channel-last.
        im_list = []
        for ind in range(M.shape[0]):
            im_list.append(gen_one_channel(M[ind], height, width,
                                           bordercolor, border, n0, n1))
        im = np.transpose(np.array(im_list), axes=(1, 2, 0))
        # FIXME, the color display is not correct
        # im[:, :, 0], im[:, :, 1], im[:, :, 2] = \
        #     im[:, :, 0], im[:, :, 2], im[:, :, 1]
        plt.imshow(im, interpolation='nearest', **kwargs)
    # plt.show()
# end def dispims
if __name__ == "__main__":
    # Smoke-test driver: extract and display positive/negative examples
    # from the annotated bug datasets.
    def show_examples(data_path, func,
                      target_height=32, target_width=32, flag_rgb=True):
        """Run extractor ``func`` on data_path, display the patches in a
        grid, and return them as an array."""
        data_array = func(data_path, target_height, target_width)
        n_moths = len(data_array)
        data_array = np.asarray(data_array)
        # Flatten each patch to a vector; keep the colour axis if present.
        if data_array.ndim < 4:
            m = data_array.reshape((n_moths, target_height * target_width))
        elif data_array.ndim == 4:
            m = data_array.reshape((n_moths, target_height * target_width, 3))
        plt.figure()
        dispims_new(m.T, target_height, target_width, border=2,
                    vmin=data_array.min(), vmax=data_array.max())
        plt.title('{} examples ({})'.format(data_path.split('/')[-1],
                                            data_path.split('/')[-2]))
        return data_array
    train_path_pos = '/mnt/data/datasets/bugs_annotated_2014/new_separation/train/withmoth'
    train_path_neg = '/mnt/data/datasets/bugs_annotated_2014/new_separation/train/nomoth'
    test_path_pos = '/mnt/data/datasets/bugs_annotated_2014/new_separation/test/withmoth'
    # FIXME, the color is incorrect
    data_train_pos = show_examples(train_path_pos, get_pos)
    data_train_neg = show_examples(train_path_neg, get_neg)
    data_test_pos = show_examples(test_path_pos, get_pos)
| StarcoderdataPython |
8106369 | <filename>src/test/demo.py
import pytest
import pathlib
import pickle
from pathlib import Path
import numpy as np
def test_demo_results():
    """Regression test: run the pymor-deal.II demo and compare selected
    result arrays against a pickled reference (demo_result.pickle)."""
    fn = Path(__file__).resolve().parent / 'demo_result.pickle'
    good_result = pickle.load(open(str(fn), 'rb'))
    from pymor_dealii.pymor.demo import run
    result, _, _, _ = run(plot_error=False)
    # Persist the freshly computed results next to the reference so a
    # failing run can be inspected offline.
    pickle.dump(result, open(Path(__file__).resolve().parent / 'actual_demo_result.pickle', 'wb'))
    compare = ['errors', 'basis_sizes', 'rel_errors']
    for key in compare:
        assert np.allclose(result[key], good_result[key])
| StarcoderdataPython |
6637853 | from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.urls import include, path
# URL routing table: admin site, the REST API apps, CKEditor uploads, and
# media files served from MEDIA_ROOT (the static() helper is a no-op
# outside DEBUG mode).
urlpatterns = [
    path('admin/', admin.site.urls),
    path('api/user/', include('user.urls')),
    path('api/profile/', include('eprofile.urls')),
    path('api/vote/', include('vote.urls')),
    path('api/candidates/', include('candidate.urls')),
    path('api/parties/', include('party.urls')),
    path('api/state/', include('state.urls')),
    path('api/stats/', include('stats.urls')),
    path('ckeditor/', include('ckeditor_uploader.urls')),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| StarcoderdataPython |
9671546 | <gh_stars>1-10
from ircu import consts
from ircu import p10
class MessageHandler(p10.MessageHandler):
    """Handles the P10 END_OF_BURST ("EB") server-to-server message by
    acknowledging the completed netburst."""
    token = 'EB'
    command = 'END_OF_BURST'
    def __init__(self, *args, **kwargs):
        super(MessageHandler, self).__init__(*args, **kwargs)
    def server(self, client, source, args):
        # MH EB -- reply with an END_OF_BURST_ACK, identified by our
        # server numeric.
        self.network.send(consts.FMT_ENDOFBURST_ACK,
                          self.service.server.num)
| StarcoderdataPython |
8050542 | <reponame>hockeyprincess/google-api-dfp-python
#!/usr/bin/python
#
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Methods to access ApiService."""
__author__ = '<EMAIL> (<NAME>)'
from adspygoogle.common import SOAPPY
from adspygoogle.common import ZSI
from adspygoogle.common.Errors import ValidationError
class ApiService(object):
  """Wrapper for ApiService.

  Python 2 code: dynamically imports the SOAP toolkit (SOAPpy or ZSI)
  modules for the product package named by import_chain, using exec
  statements because the package path is only known at runtime.
  """
  def __init__(self, headers, config, op_config, url, import_chain, lock,
               logger):
    """Inits ApiService.
    Args:
      headers: dict Dictionary object with populated authentication
               credentials.
      config: dict Dictionary object with populated configuration values.
      op_config: dict Dictionary object with additional configuration values for
                 this operation.
      url: str URL for the web service.
      import_chain: str Import chain of the wrapper for web service.
      lock: thread.lock Thread lock
      logger: Logger Instance of Logger
    """
    # Placeholders; rebound below by the exec'd dynamic imports.
    ToolkitSanityCheck = None
    API_VERSIONS = []
    self._config = config
    self._op_config = op_config
    if config['soap_lib'] == SOAPPY:
      # SOAPpy backend: resolve the toolkit's SanityCheck dynamically.
      from adspygoogle.common.soappy import MessageHandler
      exec ('from %s.soappy import SanityCheck as ToolkitSanityCheck'
            % import_chain)
      self._web_services = None
      self._message_handler = MessageHandler
    elif config['soap_lib'] == ZSI:
      # ZSI backend: locate the generated *_services module for the
      # requested API version (dots map to underscores in module names).
      exec 'from %s import API_VERSIONS' % import_chain
      exec 'from %s.zsi import SanityCheck as ToolkitSanityCheck' % import_chain
      if op_config['version'] in API_VERSIONS:
        module = '%s_services' % self.__class__.__name__
        try:
          version = op_config['version']
          if version.find('.') > -1: version = version.replace('.', '_')
          web_services = __import__('%s.zsi.%s.%s'
                                    % (import_chain, version, module),
                                    globals(), locals(), [''])
        except ImportError, e:
          # If one of library's required modules is missing, re raise exception.
          if str(e).find(module) < 0:
            raise ImportError(e)
          msg = ('The version \'%s\' is not compatible with \'%s\'.'
                 % (op_config['version'], self.__class__.__name__))
          raise ValidationError(msg)
      else:
        msg = 'Invalid API version, not one of %s.' % str(list(API_VERSIONS))
        raise ValidationError(msg)
      self._web_services = web_services
      self._loc = eval('web_services.%sLocator()' % self.__class__.__name__)
    self._sanity_check = ToolkitSanityCheck
1762027 | from math import sqrt
def apply_move(x, y, direction, dist):
    """Apply one movement token to position (x, y); return the new position.

    ``direction`` is a compass string such as "N", "E", or a diagonal like
    "NE"/"SW". For multi-letter directions the total displacement has
    magnitude ``dist``, so each axis moves by sqrt(dist**2 / 2).
    """
    if len(direction) > 1:
        # Diagonal move: split the distance evenly between both axes.
        dist = sqrt(dist * dist / 2)
    for c in direction:
        if c == "N":
            y += dist
        elif c == "S":
            y -= dist
        elif c == "W":
            x -= dist
        elif c == "E":
            x += dist
    return x, y


def main():
    """Read a starting point and n-1 moves from stdin; print the end point.

    Refactored from a flat script: the duplicated N/S/W/E branches (single-
    and multi-letter variants) are unified in apply_move, the inner loop no
    longer shadows the outer loop variable, and the __main__ guard keeps
    input() from running at import time.
    """
    n = int(input())
    x, y = map(float, input().split())
    for _ in range(n - 1):
        a, b = input().split()
        x, y = apply_move(x, y, a, float(b))
    print(x, y)


if __name__ == "__main__":
    main()
5169767 | <filename>game/schema.py
import graphene
from graphene_django import DjangoObjectType
from graphql import GraphQLError
from .models import Channel, Game, Leaderboard, LeaderboardRow, ValidatePost
from users.models import Profile
from posts.models import Post
from tags.models import Tag
from chat.models import ChatRoom
from django.dispatch import Signal
post_added_to_game = Signal()
from posts.schema import ModifierEnumsType
class ValidatorEnumsType(graphene.Enum):
    """GraphQL enum for post-validation verdicts: accept (1) or reject (0)."""
    ACCEPT = 1
    REJECT = 0
class ChannelType(DjangoObjectType):
    """GraphQL type for Channel, exposing all model fields; image fields
    are rewritten to absolute URLs when present."""
    def resolve_cover_image(self, info):
        """Resolve cover image absolute path"""
        if self.cover_image:
            # Replaces the in-memory file field with its absolute URL string
            # before returning it.
            self.cover_image = info.context.build_absolute_uri(self.cover_image.url)
        return self.cover_image
    def resolve_avatar(self, info):
        """Resolve avatar image absolute path"""
        if self.avatar:
            self.avatar = info.context.build_absolute_uri(self.avatar.url)
        return self.avatar
    class Meta:
        model = Channel
        fields = "__all__"
class GameType(DjangoObjectType):
    """GraphQL type for Game, exposing all model fields; the image field
    is rewritten to an absolute URL when present."""
    def resolve_image(self, info):
        """Resolve image absolute path"""
        if self.image:
            self.image = info.context.build_absolute_uri(self.image.url)
        return self.image
    class Meta:
        model = Game
        fields = "__all__"
class LeaderboardType(DjangoObjectType):
    """GraphQL type exposing all Leaderboard fields."""
    class Meta:
        model = Leaderboard
        fields = "__all__"
class LeaderboardRowType(DjangoObjectType):
    """GraphQL type exposing all LeaderboardRow fields."""
    class Meta:
        model = LeaderboardRow
        fields = "__all__"
class ValidatePostType(DjangoObjectType):
    """GraphQL type exposing all ValidatePost fields."""
    class Meta:
        model = ValidatePost
        fields = "__all__"
class ChannelQuery(graphene.AbstractType):
    """Read-only GraphQL queries for channels; every resolver requires an
    authenticated user."""
    channel = graphene.Field(ChannelType, id=graphene.ID(required=True), description="Get one channel based on given id")
    channelname = graphene.Field(ChannelType, name=graphene.String(required=True), description="Get one channel based on given name")
    channels = graphene.List(ChannelType, description="Get all channels")
    channels_by_tag = graphene.List(ChannelType, tags=graphene.List(graphene.String, required=True) ,description="Gets all channels based on given tags")
    def resolve_channel(self, info, id):
        if not info.context.user.is_authenticated:
            raise GraphQLError('You must be logged to get channel by channel_id!')
        else:
            return Channel.objects.get(id=id)
    def resolve_channelname(self, info, name):
        if not info.context.user.is_authenticated:
            raise GraphQLError('You must be logged to get channel by channel_id!')
        else:
            return Channel.objects.get(name=name)
    def resolve_channels(self, info):
        if not info.context.user.is_authenticated:
            raise GraphQLError('You must be logged to get channels!')
        else:
            return Channel.objects.all()
    def resolve_channels_by_tag(self, info, tags=[]):
        if not info.context.user.is_authenticated:
            raise GraphQLError('You must be logged to get channels!')
        else:
            # Any channel tagged with at least one of the given tag names.
            tagobjects = Tag.objects.filter(name__in=tags)
            return Channel.objects.filter(tags__in=tagobjects)
class GameQuery(graphene.AbstractType):
    """Read-only GraphQL queries for games; every resolver requires an
    authenticated user."""
    game = graphene.Field(GameType, id=graphene.ID(required=True), description="Get one game based on given id")
    gamename = graphene.Field(GameType, name=graphene.String(required=True), description="Get one game based on given name")
    games = graphene.List(GameType, description="Get all games")
    games_by_tag = graphene.List(GameType, tags=graphene.List(graphene.String, required=True) ,description="Gets all games based on given tags")
    def resolve_game(self, info, id):
        if not info.context.user.is_authenticated:
            raise GraphQLError('You must be logged to get game by id!')
        else:
            return Game.objects.get(id=id)
    def resolve_gamename(self, info, name):
        if not info.context.user.is_authenticated:
            raise GraphQLError('You must be logged to get game by name!')
        else:
            return Game.objects.get(name=name)
    def resolve_games(self, info):
        if not info.context.user.is_authenticated:
            raise GraphQLError('You must be logged to get games!')
        else:
            return Game.objects.all()
    def resolve_games_by_tag(self, info, tags=[]):
        if not info.context.user.is_authenticated:
            raise GraphQLError('You must be logged to get games!')
        else:
            # Any game tagged with at least one of the given tag names.
            tagobjects = Tag.objects.filter(name__in=tags)
            return Game.objects.filter(tags__in=tagobjects)
class ValidatePostQuery(graphene.AbstractType):
    """Read-only queries over posts awaiting validation; login required."""
    validate_post = graphene.Field(ValidatePostType, id=graphene.ID(required=True), description="Get one post to be validated based on given id")
    validate_posts = graphene.List(ValidatePostType, description="Get all posts to be validated")
    validate_posts_by_game = graphene.List(ValidatePostType, game=graphene.String(required=True), channel=graphene.String(required=True), description="Gets all posts to be validated for given game in channel")
    validate_posts_by_channel = graphene.List(ValidatePostType, channel=graphene.String(required=True), description="Gets all posts to be validated for given channel")

    def resolve_validate_post(self, info, id):
        """Fetch a single pending-validation record by primary key."""
        if not info.context.user.is_authenticated:
            raise GraphQLError('You must be logged to get post to be validated by id!')
        return ValidatePost.objects.get(id=id)

    def resolve_validate_posts(self, info):
        """List every pending-validation record."""
        if not info.context.user.is_authenticated:
            raise GraphQLError('You must be logged to get all posts to be validated!')
        return ValidatePost.objects.all()

    def resolve_validate_posts_by_game(self, info, game, channel):
        """List pending validations for one game within one channel.

        Bug fix: the channel *name* string was previously passed straight
        to Game.objects.get(channel=...), which expects a Channel instance
        (the pattern used everywhere else in this schema).
        """
        if not info.context.user.is_authenticated:
            raise GraphQLError('You must be logged to get post to be validated by game!')
        channelobj = Channel.objects.get(name=channel)
        return ValidatePost.objects.filter(game=Game.objects.get(name=game, channel=channelobj))

    def resolve_validate_posts_by_channel(self, info, channel):
        """List pending validations for a whole channel."""
        if not info.context.user.is_authenticated:
            raise GraphQLError('You must be logged to get post to be validated by channel!')
        return ValidatePost.objects.filter(channel=Channel.objects.get(name=channel))
class CreateChannel(graphene.Mutation):
    """Create a Channel plus a companion ChatRoom, subscribing the creator.

    Side effects: creates Channel, ChatRoom and any missing Tag rows, and
    subscribes the requesting user's Profile to both channel and chatroom.
    """
    class Arguments:
        name = graphene.String(required=True, description="Name of Channel.")
        description = graphene.String(default_value="", description="Description of the Channel.")
        cover_image = graphene.String(default_value="", description="Cover image media for Channel.")
        avatar_image = graphene.String(default_value="", description="Avatar image media for Channel.")
        tags = graphene.List(graphene.String, description="List of tags asscoiated with the Channel.")

    channel = graphene.Field(ChannelType, description="Returns the new channel that was created successfully.")
    success = graphene.Boolean(default_value=False, description="Returns whether the chatroom was created successfully.")

    def mutate(self, info, name, description, cover_image, avatar_image, tags=None):
        """Create the channel; raises GraphQLError on auth/name-clash problems."""
        if not info.context.user.is_authenticated:
            raise GraphQLError('You must be logged to create chatroom!')
        current_user_profile = Profile.objects.get(user=info.context.user)
        if Channel.objects.filter(name=name).exists():
            raise GraphQLError('Channel with same name exists. PLease try another name!')
        channel = Channel(name=name, description=description)
        channel.save()
        channel.subscribers.add(current_user_profile)
        channel.save()
        if cover_image != "":
            # Bug fix: dropped the stray chained "image =" assignment that
            # only created an unused local.
            channel.cover_image = info.context.FILES[cover_image]
            channel.save()
        if avatar_image != "":
            channel.avatar = info.context.FILES[avatar_image]
            channel.save()
        # `tags=None` avoids the mutable-default-argument pitfall.
        for tag in (tags or []):
            if not Tag.objects.filter(name=tag).exists():
                Tag(name=tag).save()
            channel.tags.add(Tag.objects.get(name=tag))
            channel.save()
        # Every channel gets a dedicated chatroom named "<channel>-chatroom".
        chatroomname = '{}-chatroom'.format(name)
        chatroom = ChatRoom(created_by=current_user_profile, name=chatroomname)
        chatroom.save()
        chatroom.members.add(current_user_profile)
        chatroom.save()
        channel.chatroom = chatroom
        channel.save()
        return CreateChannel(
            channel,
            success=True
        )
class DeleteChannel(graphene.Mutation):
    """Mutation that removes a Channel identified by its unique name."""
    class Arguments:
        name = graphene.String(required=True, description="Unique name of channel to be deleted")

    success = graphene.Boolean(default_value=False, description="Returns whether the channel was deleted successfully.")

    def mutate(self, info, name):
        if not info.context.user.is_authenticated:
            raise GraphQLError('You must be logged to delete on posts!')
        doomed = Channel.objects.get(name=name)
        doomed.delete()
        return DeleteChannel(success=True)
class ChannelChangeDescription(graphene.Mutation):
    """Update the free-text description of an existing Channel."""
    class Arguments:
        name = graphene.String(required=True, description="Unique name for channel to be edited")
        description = graphene.String(required=True, description="New description")

    success = graphene.Boolean(default_value=False, description="Returns whether the description was changed successfully.")

    def mutate(self, info, name, description):
        if not info.context.user.is_authenticated:
            raise GraphQLError('You must be logged to change channel description!')
        target = Channel.objects.get(name=name)
        target.description = description
        target.save()
        return ChannelChangeDescription(success=True)
class ChannelChangeCoverImage(graphene.Mutation):
    """Replace a Channel's cover image with an uploaded file."""
    class Arguments:
        name = graphene.String(required=True, description="Unique name for channel to be edited")
        image = graphene.String(required=True, description="New image")

    success = graphene.Boolean(default_value=False, description="Returns whether the cover image was changed successfully.")

    def mutate(self, info, name, image):
        if not info.context.user.is_authenticated:
            raise GraphQLError('You must be logged to change channel cover image!')
        target = Channel.objects.get(name=name)
        # `image` is the key of the uploaded file in the request's FILES map.
        target.cover_image = info.context.FILES[image]
        target.save()
        return ChannelChangeCoverImage(success=True)
class ChannelChangeAvatarImage(graphene.Mutation):
    """Replace a Channel's avatar image with an uploaded file."""
    class Arguments:
        name = graphene.String(required=True, description="Unique name for channel to be edited")
        image = graphene.String(required=True, description="New image")

    success = graphene.Boolean(default_value=False, description="Returns whether the avatar image was changed successfully.")

    def mutate(self, info, name, image):
        if not info.context.user.is_authenticated:
            raise GraphQLError('You must be logged to change channel avatar image!')
        channel = Channel.objects.get(name=name)
        # `image` is the key of the uploaded file in the request's FILES map.
        channel.avatar = info.context.FILES[image]
        channel.save()
        # Bug fix: previously returned ChannelChangeCoverImage, producing
        # the wrong GraphQL payload type for this mutation.
        return ChannelChangeAvatarImage(
            success=True
        )
class ChannelSubscription(graphene.Mutation):
    """Add or remove the current user's Profile from a Channel's subscribers.

    Membership in the channel's companion chatroom is kept in sync.
    """
    class Arguments:
        name = graphene.ID(required=True, description="Unique name of Channel to be change membership")
        modifier = ModifierEnumsType(required=True, description="Add or remove")

    success = graphene.Boolean(default_value=False, description="Returns whether the post was upvoted successfully.")

    def mutate(self, info, name, modifier):
        if not info.context.user.is_authenticated:
            raise GraphQLError('You must be logged to add/remove channel memberships!')
        channel = Channel.objects.get(name=name)
        profile = Profile.objects.get(user=info.context.user)
        # NOTE(review): subscribers holds Profile rows, yet membership is
        # tested with filter(user=profile) -- confirm against the Profile
        # model that this lookup matches as intended.
        already_member = channel.subscribers.filter(user=profile).exists()
        if modifier == ModifierEnumsType.ADD and not already_member:
            channel.subscribers.add(profile)
            channel.chatroom.members.add(profile)
            channel.chatroom.save()
            channel.save()
        elif modifier == ModifierEnumsType.REMOVE and already_member:
            channel.subscribers.remove(profile)
            channel.chatroom.members.remove(profile)
            channel.chatroom.save()
            channel.save()
        return ChannelSubscription(success=True)
class CreateGame(graphene.Mutation):
    """Create a Game inside a Channel the requesting user subscribes to.

    Side effects: creates the Game and any missing Tag rows, subscribes
    the creator, and attaches the given existing posts.
    """
    class Arguments:
        name = graphene.String(required=True, description="Name of Game.")
        channel = graphene.String(required=True, description="Name of Channel.")
        description = graphene.String(default_value="", description="Description of the Game.")
        game_image = graphene.String(default_value="", description="Game image media for Game.")
        tags = graphene.List(graphene.String, description="List of tags asscoiated with the Game.")
        posts = graphene.List(graphene.ID, required=True, description="List of post_id to be added to the Game.")

    game = graphene.Field(GameType, description="Returns the new game that was created successfully.")
    success = graphene.Boolean(default_value=False, description="Returns whether the game was created successfully.")

    def mutate(self, info, name, description, game_image, channel, tags=None, posts=None):
        """Create the game; raises GraphQLError on auth/subscription/name-clash problems."""
        if not info.context.user.is_authenticated:
            raise GraphQLError('You must be logged to create game!')
        current_user_profile = Profile.objects.get(user=info.context.user)
        channel = Channel.objects.get(name=channel)
        if not channel.subscribers.filter(user=current_user_profile).exists():
            raise GraphQLError('You must be suscribed to channel to add games!')
        # Game names only need to be unique within their channel.
        if Game.objects.filter(name=name, channel=channel).exists():
            raise GraphQLError('Game with same name exists in Channel. PLease try another name!')
        game = Game(name=name, channel=channel, description=description, creator=current_user_profile)
        game.save()
        game.subscribers.add(current_user_profile)
        game.save()
        if game_image != "":
            # Bug fix: dropped the stray chained "image =" assignment that
            # only created an unused local.
            game.image = info.context.FILES[game_image]
            game.save()
        # `tags=None`/`posts=None` avoid the mutable-default-argument pitfall.
        for tag in (tags or []):
            if not Tag.objects.filter(name=tag).exists():
                Tag(name=tag).save()
            game.tags.add(Tag.objects.get(name=tag))
            game.save()
        for post_id in (posts or []):
            game.posts.add(Post.objects.get(post_id=post_id))
            game.save()
        return CreateGame(
            game,
            success=True
        )
class DeleteGame(graphene.Mutation):
    """Mutation that removes a Game identified by name."""
    class Arguments:
        name = graphene.String(required=True, description="Unique name of game to be deleted")

    success = graphene.Boolean(default_value=False, description="Returns whether the game was deleted successfully.")

    def mutate(self, info, name):
        if not info.context.user.is_authenticated:
            raise GraphQLError('You must be logged to delete game!')
        # NOTE(review): elsewhere in this schema game names are only unique
        # per channel; get(name=...) alone may raise MultipleObjectsReturned.
        doomed = Game.objects.get(name=name)
        doomed.delete()
        return DeleteGame(success=True)
class GameChangeDescription(graphene.Mutation):
    """Update the free-text description of an existing Game."""
    class Arguments:
        name = graphene.String(required=True, description="Unique name for game to be edited")
        description = graphene.String(required=True, description="New description")

    success = graphene.Boolean(default_value=False, description="Returns whether the description was changed successfully.")

    def mutate(self, info, name, description):
        if not info.context.user.is_authenticated:
            raise GraphQLError('You must be logged to change game description!')
        target = Game.objects.get(name=name)
        target.description = description
        target.save()
        return GameChangeDescription(success=True)
class GameChangeImage(graphene.Mutation):
    """Replace a Game's image with an uploaded file."""
    class Arguments:
        name = graphene.String(required=True, description="Unique name for game to be edited")
        image = graphene.String(required=True, description="New image")

    success = graphene.Boolean(default_value=False, description="Returns whether the image was changed successfully.")

    def mutate(self, info, name, image):
        if not info.context.user.is_authenticated:
            raise GraphQLError('You must be logged to change game image!')
        # Bug fix: previously looked up Channel.objects instead of
        # Game.objects, so the image was applied to the wrong model (or the
        # lookup failed outright when no channel shared the game's name).
        game = Game.objects.get(name=name)
        game.image = info.context.FILES[image]
        game.save()
        return GameChangeImage(
            success=True
        )
class AddGamePosts(graphene.Mutation):
    """Submit a post for validation before it is attached to a game.

    The post is not added directly: a ValidatePost record is created so
    that another channel member can accept or reject it.
    """
    class Arguments:
        name = graphene.String(required=True, description="Unique name of Game to be add post too")
        post_id = graphene.ID(required=True, description="Unique ID for post to be added")
        original_post_id = graphene.ID(required=True, description="Unique ID for the original post")

    success = graphene.Boolean(default_value=False, description="Returns whether the post was added successfully.")

    def mutate(self, info, name, post_id, original_post_id):
        if not info.context.user.is_authenticated:
            raise GraphQLError('You must be logged to add posts to games!')
        game = Game.objects.get(name=name)
        current_user_profile = Profile.objects.get(user=info.context.user)
        if not game.channel.subscribers.filter(user=current_user_profile).exists():
            raise GraphQLError('You must be suscribed to channel to add post to game!')
        if not game.subscribers.filter(user=current_user_profile).exists():
            raise GraphQLError('You must be suscribed to game to add post to game!')
        post = Post.objects.get(post_id=post_id)
        original_post = Post.objects.get(post_id=original_post_id)
        if post.author != current_user_profile:
            raise GraphQLError('You must be post author to add post to game!')
        # Reuse the already-fetched game instead of re-querying by name.
        validate_post = ValidatePost(game=game, post=post, channel=game.channel, creator_post=original_post)
        validate_post.save()
        game.save()
        # Bug fix: previously returned RemoveGamePosts, which produced the
        # wrong GraphQL payload type for this mutation.
        return AddGamePosts(
            success=True
        )
class RemoveGamePosts(graphene.Mutation):
    """Detach a post from a game; only the post's author may do so."""
    class Arguments:
        name = graphene.ID(required=True, description="Unique name of Game to be remove post from")
        post_id = graphene.ID(required=True, description="Unique ID for post to be removed")

    success = graphene.Boolean(default_value=False, description="Returns whether the post was removed successfully.")

    def mutate(self, info, name, post_id):
        if not info.context.user.is_authenticated:
            raise GraphQLError('You must be logged to remove posts to games!')
        game = Game.objects.get(name=name)
        requester = Profile.objects.get(user=info.context.user)
        if not game.channel.subscribers.filter(user=requester).exists():
            raise GraphQLError('You must be suscribed to channel to remove post to game!')
        post = Post.objects.get(post_id=post_id)
        if post.author != requester:
            raise GraphQLError('You must be post author to remove post to game!')
        game.posts.remove(post)
        game.save()
        return RemoveGamePosts(success=True)
class EditChannelTags(graphene.Mutation):
    """Attach or detach a list of tag names on a Channel.

    When adding, missing Tag rows are created on the fly; when removing,
    unknown tag names are silently skipped.
    """
    class Arguments:
        name = graphene.ID(required=True, description="Unique name of Channel to be change tags")
        modifier = ModifierEnumsType(required=True, description="Add or remove")
        tags = graphene.List(graphene.String, required=True, description="List of tags of to be/removed in Channel.")

    success = graphene.Boolean(default_value=False, description="Returns whether the Channel was edited successfully.")

    def mutate(self, info, name, modifier, tags):
        if not info.context.user.is_authenticated:
            raise GraphQLError('You must be logged to edit channels!')
        channel = Channel.objects.get(name=name)
        if modifier == ModifierEnumsType.ADD:
            for tagname in tags:
                if not Tag.objects.filter(name=tagname).exists():
                    Tag(name=tagname).save()
                channel.tags.add(Tag.objects.get(name=tagname))
                channel.save()
        elif modifier == ModifierEnumsType.REMOVE:
            for tagname in tags:
                if Tag.objects.filter(name=tagname).exists():
                    channel.tags.remove(Tag.objects.get(name=tagname))
                    channel.save()
        return EditChannelTags(success=True)
class EditGameTags(graphene.Mutation):
    """Attach or detach a list of tag names on a Game.

    When adding, missing Tag rows are created on the fly; when removing,
    unknown tag names are silently skipped.
    """
    class Arguments:
        name = graphene.ID(required=True, description="Unique name of game to be change tags")
        modifier = ModifierEnumsType(required=True, description="Add or remove")
        tags = graphene.List(graphene.String, required=True, description="List of tags of to be/removed in game.")

    success = graphene.Boolean(default_value=False, description="Returns whether the game was edited successfully.")

    def mutate(self, info, name, modifier, tags):
        if not info.context.user.is_authenticated:
            raise GraphQLError('You must be logged to edit game!')
        game = Game.objects.get(name=name)
        if modifier == ModifierEnumsType.ADD:
            for tagname in tags:
                if not Tag.objects.filter(name=tagname).exists():
                    Tag(name=tagname).save()
                game.tags.add(Tag.objects.get(name=tagname))
                game.save()
        elif modifier == ModifierEnumsType.REMOVE:
            for tagname in tags:
                if Tag.objects.filter(name=tagname).exists():
                    game.tags.remove(Tag.objects.get(name=tagname))
                    game.save()
        return EditGameTags(success=True)
class ValidatePostMutationMethod(graphene.Mutation):
    """Accept or reject a post that is pending validation for a game.

    Accepting attaches the post to the game, rewards author and validator,
    and fires the post_added_to_game signal; rejecting deletes the pending
    record and rewards the validator.
    """
    class Arguments:
        post_id = graphene.ID(required=True, description="post_id in Game to be validated")
        game = graphene.String(required=True, description="Unique name for game in which post to be validated")
        modifier = ValidatorEnumsType(required=True, description="Accept or Reject")

    success = graphene.Boolean(default_value=False, description="Returns whether the post was validated successfully.")

    def mutate(self, info, post_id, game, modifier):
        if not info.context.user.is_authenticated:
            raise GraphQLError('You must be logged to add posts to games!')
        game = Game.objects.get(name=game)
        validator_profile = Profile.objects.get(user=info.context.user)
        if not game.channel.subscribers.filter(user=validator_profile).exists():
            raise GraphQLError('You must be suscribed to channel to validate posts for game!')
        post = Post.objects.get(post_id=post_id)
        validate_post = ValidatePost.objects.get(game=game, post=post)
        if modifier == ValidatorEnumsType.ACCEPT:
            game.posts.add(post)
            game.save()
            # Author earns 100 points, validator earns 50.
            post.author.points += 100
            post.author.save()
            validator_profile.points += 50
            validator_profile.save()
            validate_post.delete()
            post_added_to_game.send(sender=self.__class__, post_author=post.author.user.username, gamename=game.name, channelname=game.channel.name)
        elif modifier == ValidatorEnumsType.REJECT:
            # NOTE(review): rejecting awards the validator more points (+200)
            # than accepting (+50) -- confirm this asymmetry is intentional.
            validate_post.delete()
            validator_profile.points += 200
            validator_profile.save()
        game.save()
        return ValidatePostMutationMethod(success=True)
class GameSubscription(graphene.Mutation):
    """Subscribe or unsubscribe the current user's Profile to/from a game."""
    class Arguments:
        channel = graphene.String(required=True, description="Unique name of Channel of game")
        game = graphene.String(required=True, description="Unique name of game to be change membership")
        modifier = ModifierEnumsType(required=True, description="Add or remove")

    success = graphene.Boolean(default_value=False, description="Returns whether the post was upvoted successfully.")

    def mutate(self, info, channel, game, modifier):
        if not info.context.user.is_authenticated:
            raise GraphQLError('You must be logged to add/remove channel memberships!')
        channelobj = Channel.objects.get(name=channel)
        # Bug fix: .get() instead of .filter(...)[0], so a missing game
        # raises Game.DoesNotExist (consistent with the other mutations)
        # instead of an opaque IndexError.
        gameobj = Game.objects.get(channel=channelobj, name=game)
        current_user_profile = Profile.objects.get(user=info.context.user)
        if modifier == ModifierEnumsType.ADD:
            if not gameobj.subscribers.filter(user=current_user_profile).exists():
                gameobj.subscribers.add(current_user_profile)
                gameobj.save()
        if modifier == ModifierEnumsType.REMOVE:
            if gameobj.subscribers.filter(user=current_user_profile).exists():
                gameobj.subscribers.remove(current_user_profile)
                gameobj.save()
        return GameSubscription(
            success=True
        )
class ChannelMutation(graphene.ObjectType):
    """Root mutation fields for creating, editing and subscribing to Channels."""
    create_channel = CreateChannel.Field()
    delete_channel = DeleteChannel.Field()
    channel_change_description = ChannelChangeDescription.Field()
    channel_change_cover_image = ChannelChangeCoverImage.Field()
    channel_change_avatar_image = ChannelChangeAvatarImage.Field()
    channel_subscription = ChannelSubscription.Field()
    edit_channel_tags = EditChannelTags.Field()
class GameMutation(graphene.ObjectType):
    """Root mutation fields for creating, editing and subscribing to Games."""
    create_game = CreateGame.Field()
    delete_game = DeleteGame.Field()
    game_change_description = GameChangeDescription.Field()
    game_change_image = GameChangeImage.Field()
    game_add_post = AddGamePosts.Field()
    game_remove_post = RemoveGamePosts.Field()
    edit_game_tags = EditGameTags.Field()
    game_subscription = GameSubscription.Field()
class ValidatePostMutation(graphene.ObjectType):
    """Root mutation field for accepting/rejecting posts pending validation."""
    validate_post = ValidatePostMutationMethod.Field()
| StarcoderdataPython |
"""PUMA Noise simulator
Follows https://arxiv.org/abs/1810.09572.
Includes a general RadioTelescope class that defines a telescope in terms
of various dish, packing, and instrumental noise properties, as well as
instances of this class for the full and petite configurations of PUMA
(see https://arxiv.org/abs/1907.12559).
All spatial units are Mpc, not Mpc/h !!
"""
import numpy as np
from .castorina import castorinaBias,castorinaPn
import pyccl as ccl
class RadioTelescope:
    """Class for computing signal and noise properties of a radio telescope.

    Uses signal and noise models from Appendices B and D of the Cosmic
    Visions 21cm white paper, https://arxiv.org/pdf/1810.09572v3.pdf.

    Attributes
    ----------
    C : ccl.Cosmology class
        CCL class defining the background cosmology.
    Nside : int, optional
        Number of receivers per side of square array (default: 256)
    D : float, optional
        Physical diameter of dishes, in m (default: 6)
    tint : float, optional
        Integration time of survey, in y (default: 5)
    fsky : float, optional
        Observed sky fraction (default: 0.5)
    effic : float, optional
        Dish aperture efficiency factor, such that the effective
        dish area is A_eff = effic * A_phys (default: 0.7)
    Tampl : float, optional
        Amplifier noise temperature, in K (default: 50)
    Tground : float, optional
        Ground temperature, in K (default: 300)
    omtcoupling : float, optional
        Optical efficiency of receivers, which boosts the effective
        Tampl by 1/omtcoupling (default: 0.9)
    skycoupling : float, optional
        Coupling of the primary beam to the sky, such that a fraction
        (1-skycoupling) of the beam hits the ground instead of the sky
        (default: 0.9)
    hexpack : bool, optional
        True if dishes are hex-packed, False if they are square-packed
        (default: True)
    """

    def __init__(self, C, Nside=256, D=6, tint=5, fsky=0.5, effic=0.7, Tampl=50., Tground=300., omtcoupling=0.9, skycoupling=0.9, hexpack=True):
        # CCL cosmology class
        self.C = C
        # Number of dishes per array side
        self.Nside = Nside
        # Total number of dishes
        self.Nd = Nside**2
        # Roughly, maximum baseline length in square array
        self.Dmax = Nside * np.sqrt(2) * D  # m
        # Physical dish diameter
        self.D = D  # m
        # Effective dish diameter
        self.Deff = self.D * np.sqrt(effic)  # m
        # Total integration time
        self.ttotal = tint * 365 * 24 * 3600  # s
        # Sky area
        self.Sarea = 4 * np.pi * fsky  # sr
        # Sky fraction
        self.fsky = fsky
        # Effective dish area
        self.Ae = np.pi / 4 * D**2 * effic  # m^2
        # Contribution to system temperature from amplifier and groundspill
        # (Eq. D1 in paper)
        self.Tscope = Tampl / omtcoupling / skycoupling + Tground * (1 - skycoupling) / skycoupling  # K
        # Hex packing setting
        self.hexpack = hexpack

    def nofl(self, x):
        """Number density of baselines on the ground.

        Parameters
        ----------
        x : float or array
            Baseline length(s), in m.

        Returns
        -------
        res : float or array
            Number density of baselines of given length(s), in m^-2.
        """
        if (not self.hexpack):
            ### square packing
            a, b, B, C, D = 0.4847, -0.330, 1.3157, 1.5975, 6.8390
        else:
            ### hexagonal packing
            a, b, B, C, D = 0.56981864, -0.52741196, 0.8358006, 1.66354748, 7.31776875
        # Scale physical distances by Nside*D
        xn = np.asarray(x) / (self.Nside * self.D)
        # Fitting function prefactor
        n0 = (self.Nside / self.D)**2  # m^-2
        # Fitting formula evaluation
        res = np.asarray(n0 * (a + b * xn) / (1 + B * xn**C) * np.exp(-(xn)**D))  # m^-2
        # Impose numerical floor on result
        if (res.shape == ()):
            res = np.max([res, 1e-10])
        else:
            res[res < 1e-10] = 1e-10
        return res

    def PNoise(self, z, kperp):
        """Thermal noise power spectrum.

        Parameters
        ----------
        z : float
            Redshift.
        kperp : float or array
            kperp value(s), in Mpc^-1.

        Returns
        -------
        Pn : float or array
            Thermal noise power spectrum, in K^2 Mpc^3.
        """
        # Observed wavelength
        lam = 0.21 * (1 + z)  # m
        # Comoving radial distance to redshift z
        r = ccl.comoving_radial_distance(self.C, 1 / (1. + z))  # Mpc
        # Conversion between kperp and uv-plane (vector norm) u
        u = np.asarray(kperp) * r / (2 * np.pi)
        # Baseline length corresponding to u
        l = u * lam  # m
        # Number density of baselines in uv plane
        Nu = self.nofl(l) * lam**2
        # Inaccurate approximation for uv-plane baseline density:
        # umax=self.Dmax/lam
        # Nu=self.Nd**2/(2*np.pi*umax**2)
        # Field of view of single dish
        FOV = (lam / self.Deff)**2  # sr
        # Hubble parameter H(z)
        Hz = self.C['H0'] * ccl.h_over_h0(self.C, 1. / (1. + z))  # km s^-1 Mpc^-1
        # Conversion factor from frequency to physical space
        y = 3e5 * (1 + z)**2 / (1420e6 * Hz)  # Mpc s
        # System temperature (sum of telescope and sky temperatures)
        Tsys = self.Tsky(1420. / (1 + z)) + self.Tscope  # K
        # 21cm noise power spectrum (Eq. D4 of paper).
        # Hard-codes 2 polarizations
        Pn = Tsys**2 * r**2 * y * (lam**4 / self.Ae**2) * 1 / (2 * Nu * self.ttotal) * (self.Sarea / FOV)  # K^2 Mpc^3
        if np.any(Pn < 0):
            # Bug fix: the original printed diagnostics and then called the
            # undefined name stop(), which crashed with a NameError. Raise a
            # descriptive error carrying the same diagnostics instead.
            raise ValueError(
                'Negative 21cm noise power: Nu=%s Pn=%s l=%s nofl(l)=%s nofl(l/2)=%s'
                % (Nu, Pn, l, self.nofl(l), self.nofl(l / 2)))
        return Pn

    def PNoiseShot(self, z, Tb):
        """21cm shot noise power spectrum.

        Parameters
        ----------
        z : float
            Redshift.
        Tb : float
            Mean 21cm brightness temperature (your choice of units).

        Returns
        -------
        pn : float or array
            Shot noise power spectrum, in Mpc^3 times square of input Tb units.
        """
        return Tb**2 * castorinaPn(z) / (self.C['h'])**3

    def PNoiseKFull(self, z, kperp, kpar, Tb=None, kparcut=0.01 * 0.7):
        """Full 21cm noise power spectrum, with specified kpar cut.

        Parameters
        ----------
        z : float
            Redshift.
        kperp : array[nkpar,nkperp]
            2d array where columns are kperp values (in Mpc^-1) and rows are identical.
            Generate e.g. with np.outer(np.ones(nkpar),kperp_vec) where kperp_vec
            is a list of kperp values.
        kpar : array[nkpar,nkperp]
            2d array where rows are kpar values (in Mpc^-1) and columns are identical.
        Tb : float, optional
            Mean 21cm brightness temperature, in K (default: computed automatically).
        kparcut : float, optional
            Set Pnoise to large value if kpar<kparcut, in Mpc^-1 (default: 0.007).

        Returns
        -------
        Pn : array[nkpar,nkperp]
            Array of sums of 21cm thermal noise and shot noise power spectra, in K^2 Mpc^3.
        """
        assert(len(kperp.shape) == 2)
        assert(len(kpar.shape) == 2)
        if Tb is None:
            Tb = self.Tb(z)
        Pn = self.PNoise(z, kperp) + self.PNoiseShot(z, Tb)
        # Foreground-contaminated low-kpar modes are given effectively
        # infinite noise.
        Pn[kpar < kparcut] = 1e30
        return Pn

    def PHINoiseKFull(self, z, kperp, kpar, Tb=None, kparcut=0.01 * 0.7):
        """Full HI noise power spectrum.

        This has units of Mpc^3, and is therefore the noise power spectrum
        of P_HI(k), incorporating both HI shot noise and telescope thermal
        noise.

        Input parameters are same as PNoiseKFull.

        Returns
        -------
        Pn : array[nkpar,nkperp]
            Array of effective HI noise power spectrum, in Mpc^3.
        """
        if Tb is None:
            Tb = self.Tb(z)
        return self.PNoiseKFull(z, kperp, kpar, Tb=Tb, kparcut=kparcut) / Tb**2

    def bias(self, z):
        """HI bias with redshift.

        Parameters
        ----------
        z : float or array
            Redshift(s).

        Returns
        -------
        b : float or array
            b_HI(z) values.
        """
        return castorinaBias(z)

    def Tsky(self, f):
        """Mean sky temperature, including Galactic synchrotron and CMB.

        Parameters
        ----------
        f : float or array
            Frequency or array of frequencies, in MHz.

        Returns
        -------
        Tsky : float or array
            Sky temperature(s), in K.
        """
        # return (f/100.)**(-2.4)*2000+2.7 ## from CVFisher
        return 25. * (np.asarray(f) / 400.)**(-2.75) + 2.75

    def TbTZ(self, z):
        """Approximation for mean 21cm brightness temperature.

        This is from Chang et al. 2008, https://arxiv.org/pdf/0709.3672.pdf,
        Eq. 1.

        Parameters
        ----------
        z : float or array
            Redshift(s).

        Returns
        -------
        Tb : float or array
            Temperature values, in K.
        """
        OmegaM = 0.31
        z = np.asarray(z)
        return 0.3e-3 * np.sqrt((1 + z) / (2.5) * 0.29 / (OmegaM + (1. - OmegaM) / (1 + z)**3))

    def Tb(self, z):
        """Approximation for mean 21cm brightness temperature.

        This is reasonably up-to-date, and comes from Eq. B1
        in the CV 21cm paper.

        Parameters
        ----------
        z : float or array
            Redshift(s).

        Returns
        -------
        Tb : float or array
            Temperature value(s), in K.
        """
        z = np.asarray(z)
        Ez = ccl.h_over_h0(self.C, 1. / (1. + z))
        # Note potentially misleading notation:
        # Ohi = (comoving density at z) / (critical density at z=0)
        Ohi = 4e-4 * (1 + z)**0.6
        Tb = 188e-3 * self.C['h'] / Ez * Ohi * (1 + z)**2
        return Tb

    def cutWedge(self, noise, kperp, kpar, z, NW=3.0):
        """Cut the foreground wedge from a 2d noise power spectrum.

        Parameters
        ----------
        noise : array[nkpar,nkperp]
            2d noise power spectrum.
        kperp : array[nkpar,nkperp]
            2d array where columns are kperp values (in Mpc^-1) and rows are identical.
        kpar : array[nkpar,nkperp]
            2d array where rows are kpar values (in Mpc^-1) and columns are identical.
        z : float
            Redshift.
        NW : float, optional
            Multiplier defining wedge in terms of primary beam.
            (default = 3)

        Returns
        -------
        Pn : array[nkpar,nkperp]
            2d noise power spectrum where modes within wedge have noise set to
            large value.
        """
        # Comoving radial distance to redshift z
        r = ccl.comoving_radial_distance(self.C, 1 / (1. + z))  # Mpc
        # Hubble parameter H(z)
        H = self.C['H0'] * ccl.h_over_h0(self.C, 1. / (1. + z))  # km s^-1 Mpc^-1
        # Slope that defines wedge as kpar < kperp * slope.
        # See Eq. C1 from the CV 21cm paper.
        slope = r * H / 3e5 * 1.22 * 0.21 / self.D * NW / 2.0  # dimensionless
        # Boost noise for modes within wedge
        noiseout = np.copy(noise)
        noiseout[np.where(kpar < kperp * slope)] = 1e30
        return noiseout

    def PSSensitivityTransit(self, freq=600, bandwidth=900):
        """One sigma point source transit sensitivity.

        Also prints some quantities for comparison: Tsys, t_eff
        for the input telescope and CHIME, and the point source
        sensitivity for CHIME.

        Parameters
        ----------
        freq : float, optional
            Frequency, in MHz (default = 600).
        bandwidth : float, optional
            Bandwidth, in MHz (default = 900).

        Returns
        -------
        onesigma : float
            Point source sensitivity, in Jy.
        """
        # Boltzmann constant
        kB = 1.38064852e-23  # J K^-1
        # Observed wavelength
        lam = 3e8 / (freq * 1e6)  # m
        # Total instrument collecting area
        Acoll = self.Ae * self.Nd  # m^2
        # Dish field of view (fixed unit comment: this is a solid angle)
        FOV = (lam / self.Deff)**2  # sr
        # Effective transit times for specified telescope and CHIME (both in s)
        teff = np.sqrt(FOV) / (2 * np.pi * np.cos(30 / 180 * np.pi)) * 24 * 3600  ## 30 deg south
        teffchime = (lam / 20) / (2 * np.pi * np.cos(50 / 180 * np.pi)) * 24 * 3600  ## 50 deg north
        print("Acoll*np.sqrt(teff*bandwidth*1e6)", Acoll * np.sqrt(2 * teff * bandwidth * 1e6))
        # System temperature
        Tsys = self.Tsky(freq) + self.Tscope  # K
        # One sigma sensitivity
        onesigma = 2 * kB * Tsys / (Acoll * np.sqrt(2 * teff * bandwidth * 1e6)) / 1e-26  ## to Jy
        print("Tsys", Tsys)
        print("teff=", teff, teffchime)
        print("CHIME:", 10 * 2 * kB * Tsys / (0.7 * 80 * 100 * np.sqrt(2 * teffchime * 400e6)) / 1e-26)
        return onesigma
class PUMA(RadioTelescope):
    """Specs for full PUMA telescope (see https://arxiv.org/pdf/1907.12559.pdf).

    Survey time is given as 5/4 years to account for 0.5 filling factor.
    """

    def __init__(self, C):
        super().__init__(
            C,
            Nside=256,
            D=6,
            tint=5 / 4,
            fsky=0.5,
            effic=0.7,
            Tampl=50.,
            Tground=300.,
            omtcoupling=0.9,
            skycoupling=0.9,
            hexpack=True,
        )
class PUMAPetite(RadioTelescope):
    """Specs for petite PUMA telescope (see https://arxiv.org/pdf/1907.12559.pdf).

    Survey time is given as 5/4 years to account for 0.5 filling factor.
    """

    def __init__(self, C):
        super().__init__(
            C,
            Nside=100,
            D=6,
            tint=5 / 4,
            fsky=0.5,
            effic=0.7,
            Tampl=50.,
            Tground=300.,
            omtcoupling=0.9,
            skycoupling=0.9,
            hexpack=True,
        )
class CHORD(RadioTelescope):
    """Specs for CHORD core array (see https://arxiv.org/pdf/1911.01777.pdf).

    For simplicity, we use 23^2 = 529 instead of 512 dishes, taken to be
    a square array. Table 1 in the CHORD white paper lists a 30K system
    temperature, and elsewhere a nominal 5-year duration is quoted.
    """

    def __init__(self, C):
        super().__init__(
            C,
            Nside=23,
            D=6,
            tint=5,
            fsky=0.5,
            effic=0.7,
            Tampl=30.,
            Tground=300.,
            omtcoupling=0.9,
            skycoupling=0.9,
            hexpack=False,
        )
class HIRAX(RadioTelescope):
    """Specs for HIRAX (see https://arxiv.org/pdf/2109.13755.pdf)."""

    def __init__(self, C):
        super().__init__(
            C,
            Nside=32,
            D=6,
            tint=4,
            fsky=0.5,
            effic=0.7,
            Tampl=50.,
            Tground=300.,
            omtcoupling=0.9,
            skycoupling=0.9,
            hexpack=False,
        )
| StarcoderdataPython |
from django.test import TestCase
from django.test import override_settings
from mock import MagicMock
import mock
import datetime
import pam
import copy
from django.conf import settings
from tests.utilities.ldap import (
LdapTestCase,
build_mock_rcldap_user
)
from accounts.forms import (
AccountRequestVerifyUcbForm,
AccountRequestVerifyCsuForm,
)
from accounts.admin import AccountRequestAdminForm
from accounts.models import (
CuLdapUser,
CsuLdapUser,
AccountRequest
)
# Attribute defaults used to build MagicMock stand-ins for CU LDAP users.
mock_cu_user_defaults = {
    'username': 'testuser',
    'first_name': 'Test',
    'last_name': 'User',
    'email': '<EMAIL>',
    'edu_affiliation': 'faculty',
}

# Attribute defaults used to build MagicMock stand-ins for CSU LDAP users.
mock_csu_user_defaults = {
    'username': 'testuser',
    'first_name': 'Test',
    'last_name': 'User',
    'email': '<EMAIL>',
}
class AccountRequestVerifyUcbFormTestCase(LdapTestCase):
def test_form_valid(self):
    """Form validates when the CU LDAP lookup and authentication both succeed."""
    ldap_user = mock.MagicMock(**mock_cu_user_defaults)
    ldap_user.authenticate.return_value = True
    payload = {
        'username': 'testuser',
        'password': '<PASSWORD>',
        'role': 'faculty',
        'department': 'physics',
    }
    form = AccountRequestVerifyUcbForm(data=payload)
    with mock.patch('accounts.models.CuLdapUser.objects.get', return_value=ldap_user):
        self.assertTrue(form.is_valid())
def test_form_invalid_bad_user(self):
    """Form is rejected when the username is not found in CU LDAP."""
    ldap_user = mock.MagicMock(**mock_cu_user_defaults)
    ldap_user.authenticate.return_value = True
    payload = {
        'username': 'wronguser',
        'password': '<PASSWORD>',
        'role': 'faculty',
        'department': 'physics',
    }
    form = AccountRequestVerifyUcbForm(data=payload)
    with mock.patch('accounts.models.CuLdapUser.objects.get', side_effect=[CuLdapUser.DoesNotExist]):
        self.assertFalse(form.is_valid())
def test_form_invalid_bad_password(self):
mock_cu_user = mock.MagicMock(**mock_cu_user_defaults)
mock_cu_user.authenticate.return_value = False
form_data = {
'username': 'testuser',
'password': '<PASSWORD>',
'role': 'faculty',
'department': 'physics',
}
form = AccountRequestVerifyUcbForm(data=form_data)
with mock.patch('accounts.models.CuLdapUser.objects.get',return_value=mock_cu_user):
self.assertFalse(form.is_valid())
def test_form_invalid_missing_fields(self):
mock_cu_user = mock.MagicMock(**mock_cu_user_defaults)
mock_cu_user.authenticate.return_value = True
form_data = {
'username': 'testuser',
'role': 'faculty',
'department': 'physics',
}
form = AccountRequestVerifyUcbForm(data=form_data)
with mock.patch('accounts.models.CuLdapUser.objects.get',return_value=mock_cu_user):
self.assertFalse(form.is_valid())
form_data = {
'password': '<PASSWORD>',
'role': 'faculty',
'department': 'physics',
}
form = AccountRequestVerifyUcbForm(data=form_data)
with mock.patch('accounts.models.CuLdapUser.objects.get',return_value=mock_cu_user):
self.assertFalse(form.is_valid())
form_data = {
'username': 'testuser',
'password': '<PASSWORD>',
}
form = AccountRequestVerifyUcbForm(data=form_data)
with mock.patch('accounts.models.CuLdapUser.objects.get',return_value=mock_cu_user):
self.assertFalse(form.is_valid())
def test_form_invalid_user_exists(self):
mock_cu_user = mock.MagicMock(**mock_cu_user_defaults)
mock_cu_user.authenticate.return_value = True
form_data = {
'username': 'testuser',
'password': '<PASSWORD>',
'role': 'faculty',
'department': 'physics',
}
form = AccountRequestVerifyUcbForm(data=form_data)
with mock.patch('accounts.models.CuLdapUser.objects.get',return_value=mock_cu_user),mock.patch('accounts.models.RcLdapUser.objects.get_user_from_suffixed_username',return_value=mock_cu_user):
self.assertFalse(form.is_valid())
def test_form_invalid_accountrequest_exists(self):
mock_cu_user = mock.MagicMock(**mock_cu_user_defaults)
mock_cu_user.authenticate.return_value = True
ar_dict = {
'username': 'testuser',
'first_name': 'Test',
'last_name': 'User',
'email': '<EMAIL>',
'organization': 'ucb',
'role': 'faculty',
'department': 'physics',
}
ar = AccountRequest.objects.create(**ar_dict)
form_data = {
'username': 'testuser',
'password': '<PASSWORD>',
'role': 'faculty',
'department': 'physics',
}
form = AccountRequestVerifyUcbForm(data=form_data)
with mock.patch('accounts.models.CuLdapUser.objects.get',return_value=mock_cu_user):
self.assertFalse(form.is_valid())
class AccountRequestVerifyCsuFormTestCase(LdapTestCase):
    """Validation tests for AccountRequestVerifyCsuForm (CSU users).

    CSU LDAP access is mocked via ``accounts.models.CsuLdapUser.objects.get``.
    """
    def test_csu_form_valid(self):
        # Happy path: the CSU LDAP user exists and authenticates.
        mock_csu_user = mock.MagicMock(**mock_csu_user_defaults)
        mock_csu_user.authenticate.return_value = True
        form_data = {
            'username': 'testuser',
            'password': '<PASSWORD>',
            'role': 'faculty',
            'department': 'physics',
        }
        form = AccountRequestVerifyCsuForm(data=form_data)
        with mock.patch('accounts.models.CsuLdapUser.objects.get',return_value=mock_csu_user):
            self.assertTrue(form.is_valid())
    def test_csu_form_invalid_bad_creds(self):
        # LDAP authentication fails, so the form must be invalid.
        mock_csu_user = mock.MagicMock(**mock_csu_user_defaults)
        mock_csu_user.authenticate.return_value = False
        form_data = {
            'username': 'wronguser',
            'password': '<PASSWORD>',
            'role': 'faculty',
            'department': 'physics',
        }
        form = AccountRequestVerifyCsuForm(data=form_data)
        with mock.patch('accounts.models.CsuLdapUser.objects.get',return_value=mock_csu_user):
            self.assertFalse(form.is_valid())
# This test case covers the functionality of the account request form
# provided in the admin interface.
class AccountRequestAdminFormTestCase(LdapTestCase):
    """Tests for the account request form in the Django admin interface."""
    def setUp(self):
        # Baseline pending ('p') request payload reused by the tests below.
        super(AccountRequestAdminFormTestCase,self).setUp()
        self.ar_dict = {
            'organization': 'ucb',
            'username': 'testuser',
            'first_name': 'test',
            'last_name': 'user',
            'email': '<EMAIL>',
            'login_shell': '/bin/bash',
            'status': 'p'
        }
    def test_form_valid_create_approve_request(self):
        # Create a pending request, then flip its status to approved ('a');
        # both form states must validate.
        mock_cu_user = mock.MagicMock(**mock_cu_user_defaults)
        form_data = {
            'organization': 'ucb',
            'username': 'newtestuser',
            'first_name': 'test',
            'last_name': 'user',
            'email': '<EMAIL>',
            'role': 'faculty',
            'department': 'physics',
            'login_shell': '/bin/bash',
            'status': 'p'
        }
        with mock.patch('accounts.models.CuLdapUser.objects.get',return_value=mock_cu_user):
            form = AccountRequestAdminForm(data=form_data)
            self.assertTrue(form.is_valid())
            ar = AccountRequest.objects.create(**form_data)
            form_data['status'] = 'a'
            form = AccountRequestAdminForm(data=form_data,instance=ar)
            self.assertTrue(form.is_valid())
    def test_form_valid_request_modified(self):
        # Editing a still-pending request is allowed.
        ar = AccountRequest.objects.create(**self.ar_dict)
        form_data = copy.deepcopy(self.ar_dict)
        form_data['role'] = 'faculty'
        form = AccountRequestAdminForm(data=form_data,instance=ar)
        self.assertTrue(form.is_valid())
    @mock.patch('accounts.models.RcLdapUser.objects.filter',return_value=[build_mock_rcldap_user(organization='ucb')])
    def test_form_invalid_approval_account_exists(self,mock_get):
        # Approval must be rejected when an RC LDAP account already exists
        # for the requested username/organization.
        ar = AccountRequest.objects.create(**self.ar_dict)
        form_data = copy.deepcopy(self.ar_dict)
        form_data['username'] = 'testuser'
        form_data['status'] = 'a'
        form = AccountRequestAdminForm(data=form_data,instance=ar)
        self.assertFalse(form.is_valid())
| StarcoderdataPython |
3206939 | <reponame>haomingw/xmchat
def hello():
    """Print the xmchat greeting to stdout."""
    greeting = "Hi xmchat!"
    print(greeting)
| StarcoderdataPython |
6579580 | """*****************************************************************************
* Copyright (C) 2019 Microchip Technology Inc. and its subsidiaries.
*
* Subject to your compliance with these terms, you may use Microchip software
* and any derivatives exclusively with Microchip products. It is your
* responsibility to comply with third party license terms applicable to your
* use of third party software (including open source software) that may
* accompany Microchip software.
*
* THIS SOFTWARE IS SUPPLIED BY MICROCHIP "AS IS". NO WARRANTIES, WHETHER
* EXPRESS, IMPLIED OR STATUTORY, APPLY TO THIS SOFTWARE, INCLUDING ANY IMPLIED
* WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY, AND FITNESS FOR A
* PARTICULAR PURPOSE.
*
* IN NO EVENT WILL MICROCHIP BE LIABLE FOR ANY INDIRECT, SPECIAL, PUNITIVE,
* INCIDENTAL OR CONSEQUENTIAL LOSS, DAMAGE, COST OR EXPENSE OF ANY KIND
* WHATSOEVER RELATED TO THE SOFTWARE, HOWEVER CAUSED, EVEN IF MICROCHIP HAS
* BEEN ADVISED OF THE POSSIBILITY OR THE DAMAGES ARE FORESEEABLE. TO THE
* FULLEST EXTENT ALLOWED BY LAW, MICROCHIP'S TOTAL LIABILITY ON ALL CLAIMS IN
* ANY WAY RELATED TO THIS SOFTWARE WILL NOT EXCEED THE AMOUNT OF FEES, IF ANY,
* THAT YOU HAVE PAID DIRECTLY TO MICROCHIP FOR THIS SOFTWARE.
*****************************************************************************"""
import os
_CALIB_SUPPORTED_DEVICES = ['ATECC108A', 'ATECC508A', 'ATECC608', 'ATSHA204A']
_TALIB_SUPPORTED_DEVICES = ['TA100']
def loadModule():
    """Register the cryptoauthlib Harmony components and their dependencies.

    NOTE(review): ``Module`` is a global object injected by the MPLAB
    Harmony framework when it executes this script; it is not defined or
    imported in this file -- confirm against the Harmony module loader.
    """
    # Core cryptoauthlib component with optional RTOS and wolfcrypt hooks.
    cryptoAuthLib = Module.CreateSharedComponent("cryptoauthlib", "Core", "/Libraries/Cryptoauthlib", "/harmony/config/cryptoauthlib.py")
    cryptoAuthLib.setDisplayType("Crypto Authentication Library")
    cryptoAuthLib.addCapability("CAL_LIB_CAP", "CA_LIB", True)
    cryptoAuthLib.addDependency("FreeRTOS", "RTOS", True, False)
    cryptoAuthLib.addDependency("WolfSSL_Crypto_Dependency", "LIB_WOLFCRYPT", None, False, False)
    # Trust&Go certificate support (depends on the core library capability).
    cryptoAuthLibTng = Module.CreateSharedComponent("cryptoauthlib_tng", "Trust&Go", "/Libraries/Cryptoauthlib", "/harmony/config/tng.py")
    cryptoAuthLibTng.setDisplayType("TNGTLS & TNGLORA Certificates")
    cryptoAuthLibTng.addDependency("CAL_LIB_CAP", "CA_LIB", True, False)
    # PKCS#11 interface layer.
    cryptoAuthLibPkcs11 = Module.CreateSharedComponent("cryptoauthlib_pkcs11", "PKCS11", "/Libraries/Cryptoauthlib", "/harmony/config/pkcs11.py")
    cryptoAuthLibPkcs11.setDisplayType("PKCS#11 Interface")
    cryptoAuthLibPkcs11.addDependency("CAL_LIB_CAP", "CA_LIB", True, False)
    # Library self-test application.
    cryptoAuthLibTest = Module.CreateSharedComponent("cryptoauthlib_test", "Tester", "/Libraries/Cryptoauthlib", "/harmony/config/test_app.py")
    cryptoAuthLibTest.setDisplayType("Library Testing Application")
    cryptoAuthLibTest.addDependency("CAL_LIB_CAP", "CA_LIB", True, False)
    # One generator component per CryptoAuth (CA) device; I2C is mandatory.
    for dev in _CALIB_SUPPORTED_DEVICES:
        comp = Module.CreateGeneratorComponent(dev.lower(), dev, "/Harmony/Drivers/Crypto", "/harmony/config/device_common.py", "/harmony/config/device_instance.py")
        comp.addDependency("cryptoauthlib", "CA_LIB", True, False)
        comp.addMultiDependency('{}_DEP_PLIB_I2C'.format(dev.upper()), 'I2C', 'I2C', True)
    # Trust Anchor (TA) devices are only registered when the talib sources
    # exist in this checkout; they expose both I2C and SPI (both optional).
    if os.path.exists(Module.getPath() + 'lib/talib/talib_basic.h'):
        for dev in _TALIB_SUPPORTED_DEVICES:
            comp = Module.CreateGeneratorComponent(dev.lower(), dev, "/Harmony/Drivers/Crypto", "/harmony/config/device_common.py", "/harmony/config/device_instance.py")
            comp.addDependency("cryptoauthlib", "CA_LIB", True, False)
            comp.addMultiDependency('{}_DEP_PLIB_I2C'.format(dev.upper()), 'I2C', 'I2C', False)
            comp.addMultiDependency('{}_DEP_PLIB_SPI'.format(dev.upper()), 'SPI', 'SPI', False)
| StarcoderdataPython |
5116637 | <filename>hash-tables/ex1/ex1.py<gh_stars>0
# Hint: You may not need all of these. Remove the unused functions.
from hashtables import (HashTable,
hash_table_insert,
hash_table_retrieve)
def get_indices_of_item_weights(weights, length, limit):
    """
    Return a pair of indices [i, j] (i > j) such that
    weights[i] + weights[j] == limit, or None when no such pair exists.

    Single pass: for each index, check whether the complement weight was
    seen earlier; if not, record the current weight -> index mapping.

    Fixes: removed the dead ``found`` flag and the unreachable
    ``comp_index > i`` branch (a retrieved index is always from an earlier
    iteration, hence smaller than ``i``); made the no-match return explicit.

    :param weights: sequence of item weights
    :param length: number of entries of ``weights`` to consider
    :param limit: target sum of the two weights
    """
    ht = HashTable(16)
    for i in range(length):
        # Weight needed to complete the pair with weights[i].
        comp = limit - weights[i]
        comp_index = hash_table_retrieve(ht, comp)
        if comp_index is not None:
            # comp_index was stored on an earlier iteration, so it is
            # always smaller than i; the later index goes first, matching
            # the original ordering.
            return [i, comp_index]
        hash_table_insert(ht, weights[i], i)
    # No two weights sum to `limit`.
    return None
def print_answer(answer):
    """
    Print the two indices separated by a space, or "None" when no pair
    was found.

    Bug fix: the original did ``str(answer[0] + " " + answer[1])``, which
    raises TypeError because the indices are ints; each index must be
    converted to str before concatenation.

    :param answer: two-element list of indices, or None
    """
    if answer is not None:
        print(str(answer[0]) + " " + str(answer[1]))
    else:
        print("None")
| StarcoderdataPython |
11270196 | # coding: utf-8
"""
Created on 24.11.2011
@author: prefer
"""
from abc import ABCMeta, abstractmethod
class IReport(object):
    """Base interface for report builders."""
    # NOTE(review): unlike the subclasses below, this class does not set
    # __metaclass__ = ABCMeta, so @abstractmethod is not actually enforced
    # for direct IReport subclasses -- confirm whether that is intentional.
    def show(self, *args, **kwargs):
        """
        Deprecated: use build
        """
        self.build(*args, **kwargs)
    @abstractmethod
    def build(self, *args, **kwargs):
        u"""
        Build the report.
        """
"""
class IDocumentReport(IReport):
    """Interface for word-processor style (text document) report builders."""
    __metaclass__ = ABCMeta
    @abstractmethod
    def build(self, dst_file_path, params, file_type):
        """
        Generate the output file in the requested format.

        :param dst_file_path: path to the output file
        :type dst_file_path: str
        :param params: dictionary mapping template parameter name to its
            replacement string
        :type params: dict
        :param file_type: output file type
        :type file_type: str
        """
    @abstractmethod
    def get_all_parameters(self):
        u"""
        Return all parameters of the document.
        """
class ISpreadsheetReport(IReport):
    """Interface for spreadsheet report builders composed of sections."""
    __metaclass__ = ABCMeta
    @abstractmethod
    def get_sections(self):
        u"""
        Return all sections.
        """
    @abstractmethod
    def get_section(self, section_name):
        """
        Return a section by its name.

        :param section_name: section name
        :type section_name: str
        """
    @abstractmethod
    def build(self, dst_file_path, file_type):
        """
        Generate the output file in the requested format.

        :param dst_file_path: path to the output file
        :type dst_file_path: str
        :param file_type: output file type
        :type file_type: str
        """
class ISpreadsheetSection(object):
    """Interface for a single section of a spreadsheet report."""
    __metaclass__ = ABCMeta
    # Section layout direction: how the next flushed section is positioned
    # relative to the previous one.
    VERTICAL = 0
    HORIZONTAL = 1
    RIGHT_UP = 2
    LEFT_DOWN = 3
    RIGHT = 4
    HIERARCHICAL = 5
    @abstractmethod
    def flush(self, params, oriented=LEFT_DOWN):
        """
        Write data into the section.

        :param params: dictionary of parameters
        :type params: dict
        :param oriented: direction in which sections are laid out
        :type oriented: int
        """
    @abstractmethod
    def get_all_parameters(self):
        u"""
        Return all parameters of the section.
        """
| StarcoderdataPython |
3402215 | """
Decision variables definition.
Decision variables are variables for which optimal values (in certain criteria) are searched.
Decision variables are main part of optimization problem model.
"""
__all__ = ["DecisionVariable", "IntegerVariable", "DiscreteVariable", "FloatVariable", "ChoiceVariable"]
from typing import Any, Union, Dict, Iterable
from abc import ABC, abstractmethod
from optimization.utilities.random_values import generate_random_int, generate_random_float, choose_random_value
class DecisionVariable(ABC):
    """Abstract definition of decision variable."""
    # Concrete variable types (Integer/Discrete/Float/Choice below) must
    # implement all three methods.
    @abstractmethod
    def generate_random_value(self) -> Any:
        """:return: Random value according to this Decision Variable definition."""
        ...
    @abstractmethod
    def is_proper_value(self, value: Any) -> bool:
        """:return: True if value is compatible with this Decision Variable definition, False otherwise."""
        ...
    @abstractmethod
    def get_log_data(self) -> Dict[str, Any]:
        """
        Gets data for logging purposes.

        :return: Dictionary with this Decision Variable crucial data.
        """
        ...
class IntegerVariable(DecisionVariable):
    """
    Integer Decision Variable definition.

    A decision variable that may only take int values from the inclusive
    range [min_value, max_value] (step 1).
    """
    def __init__(self, min_value: int, max_value: int) -> None:
        """
        Creates definition of Integer Decision Variable.

        :param min_value: Minimal value that this variable can store.
        :param max_value: Maximal value that this variable can store.

        :raise TypeError: Parameter 'min_value' or 'max_value' is not int type.
        :raise ValueError: Value of parameter 'min_value' is greater or equal value of 'max_value'.
        """
        for param_name, param_value in (("min_value", min_value), ("max_value", max_value)):
            if not isinstance(param_value, int):
                raise TypeError(f"Value of '{param_name}' parameter is not int type. "
                                f"Actual value: '{param_value}'.")
        if min_value >= max_value:
            raise ValueError(f"Value of 'min_value' parameter is not less than value of 'max_value' parameter. "
                             f"Actual values: min_value={min_value}, max_value={max_value}.")
        self.min_value = min_value
        self.max_value = max_value
    def generate_random_value(self) -> int:
        """:return: Random value according to this Integer Variable definition."""
        return generate_random_int(self.min_value, self.max_value)
    def is_proper_value(self, value: Any) -> bool:
        """:return: True if value is compatible with this Integer Variable definition, False otherwise."""
        if not isinstance(value, int):
            return False
        return self.min_value <= value <= self.max_value
    def get_log_data(self) -> Dict[str, Union[str, int]]:
        """
        Gets data for logging purposes.

        :return: Dictionary with this Integer Variable crucial data.
        """
        return dict(
            type=self.__class__.__name__,
            min_value=self.min_value,
            max_value=self.max_value,
        )
class DiscreteVariable(DecisionVariable):
    """
    Discrete Decision Variable definition.

    A decision variable restricted to the arithmetic sequence
    min_value, min_value + step, min_value + 2*step, ... capped at max_value.
    Examples:
     - odd integers 1, 3, ..., 99: min_value=1, max_value=99, step=2
     - even integers 2, 4, ..., 10: min_value=2, max_value=10, step=2
     - values 0, 0.1, 0.2, ..., 10: min_value=0, max_value=10, step=0.1

    Note: If step==1 and min_value is int type, then 'IntegerVariable' can be used instead.

    !WARNING! This variable has precision issues, you can use 'IntegerVariable' and update objective function to
    properly recalculate the result.
    """
    def __init__(self, min_value: Union[int, float], max_value: Union[int, float], step: Union[int, float]) -> None:
        """
        Creates definition of Discrete Decision Variable.

        Possible values are: [min_value] + [i]*[step]
        for every i such that: [min_value] + [i]*[step] <= [max_value]

        :param min_value: Minimal value that this variable can store.
        :param max_value: Maximal value that this variable can store.
        :param step: Difference between following possible values.

        :raise TypeError: Parameter 'min_value', 'max_value' or 'step' is not int or float type.
        :raise ValueError: 'min_value' is greater or equal 'max_value', or 'step' is lower or equal 0.
        """
        for param_name, param_value in (("min_value", min_value), ("max_value", max_value), ("step", step)):
            if not isinstance(param_value, (int, float)):
                raise TypeError(f"Value of '{param_name}' parameter is not int nor float type. "
                                f"Actual value: '{param_value}'.")
        if min_value >= max_value:
            raise ValueError(f"Value of 'min_value' parameter is not less than value of 'max_value' parameter. "
                             f"Actual values: min_value={min_value}, max_value={max_value}.")
        if step <= 0:
            raise ValueError(f"Value of 'step' parameter less or equal 0. Actual value: {step}.")
        self.min_value = min_value
        self.max_value = max_value
        self.step = step
        # Largest i such that min_value + i*step <= max_value.
        self._max_rand = int((self.max_value - self.min_value) // self.step)
    def generate_random_value(self) -> Union[int, float]:
        """:return: Random value according to this Discrete Variable definition."""
        return self.min_value + generate_random_int(0, self._max_rand)*self.step
    def is_proper_value(self, value: Any) -> bool:
        """:return: True if value is compatible with this Discrete Variable definition, False otherwise."""
        if not isinstance(value, (int, float)):
            return False
        if not self.min_value <= value <= self.max_value:
            return False
        # Accept values that land on the grid within 15-decimal rounding;
        # a remainder equal to `step` can appear due to float round-off.
        remainder = round((value - self.min_value) % self.step, 15)
        return remainder in (self.step, 0.)
    def get_log_data(self) -> Dict[str, Union[str, float, int]]:
        """
        Gets data for logging purposes.

        :return: Dictionary with this Discrete Variable crucial data.
        """
        return dict(
            type=self.__class__.__name__,
            min_value=self.min_value,
            max_value=self.max_value,
            step=self.step,
        )
class FloatVariable(DecisionVariable):
    """
    Float Decision Variable definition.

    A decision variable that may only take float values from the inclusive
    range [min_value, max_value].
    """
    def __init__(self, min_value: float, max_value: float) -> None:
        """
        Creates definition of Float Decision Variable.

        :param min_value: Minimal value that this variable can store.
        :param max_value: Maximal value that this variable can store.

        :raise TypeError: Parameter 'min_value' or 'max_value' is not float type.
        :raise ValueError: Value of parameter 'min_value' is greater or equal value of 'max_value'.
        """
        for param_name, param_value in (("min_value", min_value), ("max_value", max_value)):
            if not isinstance(param_value, float):
                raise TypeError(f"Value of '{param_name}' parameter is not float type. "
                                f"Actual value: '{param_value}'.")
        if min_value >= max_value:
            raise ValueError(f"Value of 'min_value' parameter is not less than value of 'max_value' parameter. "
                             f"Actual values: min_value={min_value}, max_value={max_value}.")
        self.min_value = min_value
        self.max_value = max_value
    def generate_random_value(self) -> float:
        """:return: Random value according to this Float Variable definition."""
        return generate_random_float(self.min_value, self.max_value)
    def is_proper_value(self, value: Any) -> bool:
        """:return: True if value is compatible with this Float Variable definition, False otherwise."""
        if not isinstance(value, float):
            return False
        return self.min_value <= value <= self.max_value
    def get_log_data(self) -> Dict[str, Union[str, float]]:
        """
        Gets data for logging purposes.

        :return: Dictionary with this Float Variable crucial data.
        """
        return dict(
            type=self.__class__.__name__,
            min_value=self.min_value,
            max_value=self.max_value,
        )
class ChoiceVariable(DecisionVariable):
    """
    Choice Decision Variable definition.

    A decision variable that may take any (hashable) value from a fixed
    pool of possible values.
    """
    def __init__(self, possible_values: Iterable[Any]) -> None:
        """
        Creates definition of Choice Decision Variable.

        :param possible_values: Iterable with possible values to set for this Decision Variable.
        """
        # Deduplicate and enable O(1) membership checks.
        self.possible_values = set(possible_values)
    def generate_random_value(self) -> Any:
        """:return: Random value according to this Choice Variable definition."""
        return choose_random_value(self.possible_values)
    def is_proper_value(self, value: Any) -> bool:
        """:return: True if value is compatible with this Choice Variable definition, False otherwise."""
        return value in self.possible_values
    def get_log_data(self) -> Dict[str, str]:
        """
        Gets data for logging purposes.

        :return: Crucial data of this object.
        """
        formatted_values = (repr(possible) for possible in self.possible_values)
        return dict(
            type=self.__class__.__name__,
            possible_values=", ".join(formatted_values),
        )
| StarcoderdataPython |
8145374 | import unittest
from datainput import data_load
import pandas as pd
class HelloworldTests(unittest.TestCase):
    """Smoke test for datainput.data_load."""
    def test_get_helloworld(self):
        # NOTE(review): hard-coded absolute Windows path -- this test can
        # only run on the original author's machine; consider shipping a
        # small fixture CSV relative to the repository instead.
        data = pd.read_csv(r"C:\Users\Dell\Niologic\Seattle_Real_Time_Fire_911_Calls-v.csv")
        self.assertEqual(data_load(data), 'Data loaded')
if __name__ == '__main__':
unittest.main() | StarcoderdataPython |
9766600 | import PIL
import math
MAX = 4 * 10 ** 6
# Project Euler problem 2: even Fibonacci numbers not exceeding MAX.
# Build every Fibonacci term up to and including the largest one <= MAX.
fib = [1, 1]
while fib[-1] + fib[-2] <= MAX:
    fib.append(fib[-1] + fib[-2])
# Print the terms ten per line, each followed by a single trailing space.
for c, term in enumerate(fib, start=1):
    print(str(term) + " ", end='')
    if c % 10 == 0:
        print()
print()
print("The sum is: " + str(sum(fib)))
# Sum only the even-valued terms (the actual Euler #2 answer).
s = sum(term for term in fib if term % 2 == 0)
print("The sum of the even terms is: " + str(s))
| StarcoderdataPython |
3438393 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from senlin.common import utils
from senlin.events import base
from senlin.objects import notification as nobj
class MessageEvent(base.EventBackend):
    """Message driver for event dumping"""
    @classmethod
    def _notify_cluster_action(cls, ctx, level, cluster, action, **kwargs):
        """Build and emit a versioned notification for a cluster action.

        :param ctx: Request context used to build and emit the notification.
        :param level: Integer level as defined by the logging module,
            mapped to a lowercase priority string.
        :param cluster: Cluster object the action operates on.
        :param action: Action object for the current operation.
        :param kwargs: Optional data; only 'phase' is consumed here.
        """
        action_name = cls._get_action_name(action)
        # e.g. logging.INFO -> 'info'
        priority = utils.level_from_number(level).lower()
        publisher = nobj.NotificationPublisher(
            host=cfg.CONF.host, binary='senlin-engine')
        publisher.obj_set_defaults()
        phase = kwargs.get('phase')
        event_type = nobj.EventType(
            object='cluster', action=action_name, phase=phase)
        payload = nobj.ClusterActionPayload(cluster, action)
        notification = nobj.ClusterActionNotification(
            context=ctx, priority=priority, publisher=publisher,
            event_type=event_type, payload=payload)
        notification.emit(ctx)
    @classmethod
    def _notify_node_action(cls, ctx, level, node, action, **kwargs):
        """Build and emit a versioned notification for a node action.

        Mirrors _notify_cluster_action but uses the node-specific event
        type, payload and notification classes.
        """
        action_name = cls._get_action_name(action)
        priority = utils.level_from_number(level).lower()
        publisher = nobj.NotificationPublisher(
            host=cfg.CONF.host, binary='senlin-engine')
        publisher.obj_set_defaults()
        phase = kwargs.get('phase')
        event_type = nobj.EventType(
            object='node', action=action_name, phase=phase)
        payload = nobj.NodeActionPayload(node, action)
        notification = nobj.NodeActionNotification(
            context=ctx, priority=priority, publisher=publisher,
            event_type=event_type, payload=payload)
        notification.emit(ctx)
    @classmethod
    def dump(cls, level, action, **kwargs):
        """Dump the provided event into message queue.

        :param level: An integer as defined by python logging module.
        :param action: An action object for the current operation.
        :param dict kwargs: Other keyword arguments for the operation.
        """
        ctx = action.context
        entity = action.entity
        # _check_entity classifies the action target; anything that is not
        # a cluster is dispatched as a node notification.
        etype = cls._check_entity(entity)
        if etype == 'CLUSTER':
            cls._notify_cluster_action(ctx, level, entity, action, **kwargs)
        else:
            cls._notify_node_action(ctx, level, entity, action, **kwargs)
| StarcoderdataPython |
3219228 | <gh_stars>0
import asyncio
import random
import time
from types import SimpleNamespace
from typing import Union, List, Tuple
import aiohttp
from aiohttp import ClientSession, TraceRequestStartParams, TraceRequestEndParams
Number = Union[int, float]
class FlowController(aiohttp.TraceConfig):
    """aiohttp TraceConfig that throttles requests sharing the same key.

    Requests are grouped by the request URL's host by default, or by the
    value of ``trace_request_ctx[ctx_key]`` when provided. A new request
    for a key is delayed until at least ``interval`` seconds have passed
    since the previous request for that key finished.
    """
    def __init__(self,
                 interval: Union[Number, List[Number], Tuple[Number, Number]] = 1,
                 ctx_key = 'host',
                 ):
        super().__init__()
        # Either a fixed delay, or a (lo, hi) pair sampled uniformly per wait.
        self.interval = interval
        # Name of the trace_request_ctx entry used as the grouping key.
        self.ctx_key = ctx_key
        # Per-key timing state: {key: {'last_start_time', 'last_end_time'}}.
        # NOTE(review): entries are never evicted, so memory grows with the
        # number of distinct keys -- confirm this is acceptable for long runs.
        self.store = {}
        self.on_request_start.append(self.__on_request_start)
        self.on_request_end.append(self.__on_request_end)
    def _get_key(self, trace_config_ctx: SimpleNamespace, params: Union[TraceRequestStartParams, TraceRequestEndParams]):
        # Prefer an explicit per-request key; fall back to the URL's host.
        key = trace_config_ctx.trace_request_ctx and trace_config_ctx.trace_request_ctx.get(self.ctx_key)
        return key or params.url.host
    async def __on_request_start(self, session: ClientSession, trace_config_ctx: SimpleNamespace, params: TraceRequestStartParams):
        """Delay the request until the key's cool-down interval has elapsed."""
        key = self._get_key(trace_config_ctx, params)
        if key:
            if not self.store.get(key):
                # First request for this key: record the start and proceed.
                self.store[key] = {
                    'last_start_time': time.time(),
                    'last_end_time': None,
                }
            else:
                interval = self.interval if isinstance(self.interval, (int, float)) else random.uniform(*self.interval)
                start_time = time.time()
                # Poll until the previous request for this key has finished
                # and `interval` seconds have elapsed since its end.
                while True:
                    store = self.store[key]
                    if store.get('last_end_time') and store.get('last_start_time') < store.get('last_end_time') and store.get('last_end_time') + interval < time.time():
                        store['last_start_time'] = time.time()
                        break
                    # set max interval, avoid endless loop on some condition when error occurs
                    if time.time() - start_time > 10 * interval:
                        print(f'warning: "{key}" store may not be set properly (url: {params.url})')
                        store['last_start_time'] = time.time()
                        break
                    await asyncio.sleep(min(1, interval / 5))
    async def __on_request_end(self, session: ClientSession, trace_config_ctx: SimpleNamespace, params: TraceRequestEndParams):
        """Record the finish time that __on_request_start waits on."""
        key = self._get_key(trace_config_ctx, params)
        if key:
            assert self.store[key] is not None
            self.store[key]['last_end_time'] = time.time()
| StarcoderdataPython |
171614 | <reponame>cosmicc/docker-postprocess
#!/usr/bin/env python
import os
# All Sonarr-provided environment variables, in the exact order they are
# written to the drop file below (order must match the downstream parser).
_SONARR_KEYS = (
    'sonarr_eventtype',
    'sonarr_isupgrade',
    'sonarr_series_id',
    'sonarr_series_title',
    'sonarr_series_path',
    'sonarr_series_tvdbid',
    'sonarr_series_tvmazeid',
    'sonarr_series_imdb',
    'sonarr_series_type',
    'sonarr_episodefile_id',
    'sonarr_episodefile_relativepath',
    'sonarr_episodefile_path',
    'sonarr_episodefile_episodecount',
    'sonarr_episodefile_seasonnumber',
    'sonarr_episodefile_episodenumbers',
    'sonarr_episodefile_episodeairdates',
    'sonarr_episodefile_episodeairdatesutc',
    'sonarr_episodefile_episodetitles',
    'sonarr_episodefile_quality',
    'sonarr_episodefile_qualityversion',
    'sonarr_episodefile_releasegroup',
    'sonarr_episodefile_scenename',
    'sonarr_episodefile_sourcepath',
    'sonarr_episodefile_sourcefolder',
    'sonarr_deletedrelativepaths',
    'sonarr_deletedpaths',
    'sonarr_download_id',
)
# os.environ.get replaces the 27 copy-pasted try/except KeyError blocks;
# a missing variable still defaults to '' exactly as before.
_values = {key: os.environ.get(key, '') for key in _SONARR_KEYS}
# Drop a newline-separated record for the post-processing poller, named
# after the episode-file id (e.g. n123.sonarr).
with open('/downloads/process/poll/n%s.sonarr' % (_values['sonarr_episodefile_id'],), 'w') as f:
    f.write('\n'.join(_values[key] for key in _SONARR_KEYS))
| StarcoderdataPython |
4865684 | from . import utils
from . import tomo_functions
from . import labber_processing | StarcoderdataPython |
1946740 | <reponame>kikacaty/AgentFormer<filename>model/agentformer_loss.py
def compute_motion_mse(data, cfg):
    """Weighted squared error between ground-truth and decoded future motion.

    :param data: dict with 'fut_motion_orig', 'train_dec_motion' tensors and
        (when masking) 'fut_mask'.
    :param cfg: config supporting .get('mask'/'normalize') and ['weight'].
    :return: (weighted loss, unweighted loss) tensor pair.
    """
    residual = data['fut_motion_orig'] - data['train_dec_motion']
    if cfg.get('mask', True):
        # Zero out padded (invalid) future timesteps before summing.
        residual = residual * data['fut_mask'].unsqueeze(2)
    loss_unweighted = residual.pow(2).sum()
    if cfg.get('normalize', True):
        # Average over the leading (agent) dimension -- presumably batch
        # of agents; confirm against callers.
        loss_unweighted = loss_unweighted / residual.shape[0]
    return loss_unweighted * cfg['weight'], loss_unweighted
def compute_z_kld(data, cfg):
    """KL divergence between the posterior q(z) and prior p(z) latents.

    :param data: dict with 'q_z_dist', 'p_z_dist' distribution objects
        (q must expose .kl(p)) and 'batch_size'.
    :param cfg: config supporting .get('normalize'), ['weight'] and the
        attribute .min_clip.
    :return: (weighted loss, unweighted loss) tensor pair.
    """
    kld = data['q_z_dist'].kl(data['p_z_dist']).sum()
    if cfg.get('normalize', True):
        kld = kld / data['batch_size']
    # Floor the KL term so it never drops below cfg.min_clip (free bits).
    loss_unweighted = kld.clamp_min_(cfg.min_clip)
    return loss_unweighted * cfg['weight'], loss_unweighted
def compute_sample_loss(data, cfg):
    """Best-of-K sample ("variety") loss over decoded motion samples.

    Only the sample closest to the ground truth contributes per agent.

    :param data: dict with 'infer_dec_motion' (extra sample dim at index 1
        -- presumably K samples; confirm against the sampler),
        'fut_motion_orig' and (when masking) 'fut_mask'.
    :param cfg: config supporting .get('mask'/'normalize') and ['weight'].
    :return: (weighted loss, unweighted loss) tensor pair.
    """
    residual = data['infer_dec_motion'] - data['fut_motion_orig'].unsqueeze(1)
    if cfg.get('mask', True):
        # Broadcast the timestep mask across samples and coordinates.
        residual = residual * data['fut_mask'].unsqueeze(1).unsqueeze(-1)
    # Squared error per sample, then keep only the closest sample.
    per_sample = residual.pow(2).sum(dim=-1).sum(dim=-1)
    best = per_sample.min(dim=1).values
    if cfg.get('normalize', True):
        loss_unweighted = best.mean()
    else:
        loss_unweighted = best.sum()
    return loss_unweighted * cfg['weight'], loss_unweighted
# Registry mapping loss-name keys (as referenced by the training config)
# to the loss implementations defined above.
loss_func = {
    'mse': compute_motion_mse,
    'kld': compute_z_kld,
    'sample': compute_sample_loss
} | StarcoderdataPython |
5070849 | # coding: utf-8
from django.apps import AppConfig
class RecruitmanageConfig(AppConfig):
    """Django AppConfig for the ``RecruitManage`` app."""
    name = 'RecruitManage'
    # Human-readable name shown in the Django admin; '招聘管理' means
    # "recruitment management".
    verbose_name = u'招聘管理'
| StarcoderdataPython |
223375 | <reponame>facelessuser/coloraide<filename>tests/test_ictcp.py
"""Test ICtCp library."""
import unittest
from . import util
from coloraide import Color
class TestICtCpInputOutput(util.ColorAsserts, unittest.TestCase):
    """Input/output and serialization tests for the ICtCp color space."""
    def test_input_raw(self):
        """Test raw input."""
        self.assertColorEqual(Color("ictcp", [1, 0.5, 0.5]), Color('color(--ictcp 100% 0.5 0.5)'))
    def test_color_class(self):
        """Test construction from an existing Color instance."""
        self.assertColorEqual(Color(Color("ictcp", [1, 0.5, 0.5])), Color('color(--ictcp 100% 0.5 0.5)'))
    def test_color(self):
        """Test color input/output format."""
        args = {"color": True}
        color = "color(--ictcp 1 0.2 -0.3)"
        self.assertEqual(Color(color).to_string(**args), 'color(--ictcp 1 0.2 -0.3)')
        color = "color(--ictcp 1 0.2 -0.3 / 0.5)"
        self.assertEqual(Color(color).to_string(**args), 'color(--ictcp 1 0.2 -0.3 / 0.5)')
        color = "color(--ictcp 100% 0.2 -0.3 / 50%)"
        self.assertEqual(Color(color).to_string(**args), 'color(--ictcp 1 0.2 -0.3 / 0.5)')
    def test_no_alpha(self):
        """Test that alpha is dropped when serializing with alpha=False."""
        args = {"alpha": False}
        color = "color(--ictcp 1 0.2 -0.3 / 0.5)"
        ictcp = Color(color)
        self.assertEqual("color(--ictcp 1 0.2 -0.3)", ictcp.to_string(**args))
    def test_force_alpha(self):
        """Test that alpha is emitted even at 100% when alpha=True."""
        args = {"alpha": True}
        color = "color(--ictcp 1 0.2 -0.3 / 100%)"
        ictcp = Color(color)
        self.assertEqual("color(--ictcp 1 0.2 -0.3 / 1)", ictcp.to_string(**args))
    def test_precision(self):
        """Test output rounding at various precision settings."""
        color = 'color(--ictcp 0.123456 0.123456 -0.123456)'
        self.assertEqual(Color(color).to_string(), 'color(--ictcp 0.12346 0.12346 -0.12346)')
        self.assertEqual(Color(color).to_string(precision=3), 'color(--ictcp 0.123 0.123 -0.123)')
        self.assertEqual(Color(color).to_string(precision=0), 'color(--ictcp 0 0 0)')
        # precision=-1 disables rounding entirely (full double precision).
        self.assertEqual(
            Color(color).to_string(precision=-1),
            'color(--ictcp 0.12345599999999999629718416827017790637910366058349609 0.12345599999999999629718416827017790637910366058349609 -0.12345599999999999629718416827017790637910366058349609)' # noqa: E501
        )
    def test_fit(self):
        """Test that out-of-gamut ICtCp values are not fitted or clipped."""
        self.assertEqual(
            Color('color(--ictcp 2 0.6 -0.6)').to_string(),
            'color(--ictcp 2 0.6 -0.6)'
        )
        self.assertEqual(
            Color('color(--ictcp 2 0.6 -0.6)').to_string(fit="clip"),
            'color(--ictcp 2 0.6 -0.6)'
        )
        self.assertEqual(
            Color('color(--ictcp 2 0.6 -0.6)').to_string(fit=False),
            'color(--ictcp 2 0.6 -0.6)'
        )
class TestICtCpProperties(util.ColorAsserts, unittest.TestCase):
    """Test read/write access to the ICtCp channel properties."""

    def test_i(self):
        """Test `i` (intensity) channel get/set."""
        c = Color('color(--ictcp 1 0.2 -0.3 / 1)')
        self.assertEqual(c.i, 1)
        c.i = 0.2
        self.assertEqual(c.i, 0.2)

    def test_ct(self):
        """Test `ct` (tritan) channel get/set."""
        c = Color('color(--ictcp 1 0.2 -0.3 / 1)')
        self.assertEqual(c.ct, 0.2)
        c.ct = 0.1
        self.assertEqual(c.ct, 0.1)

    def test_cp(self):
        """Test `cp` (protan) channel get/set."""
        c = Color('color(--ictcp 1 0.2 -0.3 / 1)')
        self.assertEqual(c.cp, -0.3)
        c.cp = 0.1
        self.assertEqual(c.cp, 0.1)

    def test_alpha(self):
        """Test `alpha` channel get/set."""
        c = Color('color(--ictcp 1 0.2 -0.3 / 1)')
        self.assertEqual(c.alpha, 1)
        c.alpha = 0.5
        self.assertEqual(c.alpha, 0.5)
# ---- end of file (concatenated dump boundary) ----
# tchakravarty/PythonExamples
#====================================================================
# Purpose: Examples and exercises in Chapter 2 of 'A First Course in Statistical
# Programming with R', Braun & Murdoch (2007)
# Author: <NAME>
# Created: 23rd May 2015
# Revised:
# Comments:
#====================================================================
# Example 2.1
def calculate_emi(principal, interest_rate, num_months):
    """Return the fixed monthly payment (EMI) for an amortizing loan.

    Standard annuity formula: EMI = P * r / (1 - (1 + r) ** -n), where P is
    the principal, r the per-month interest rate (e.g. 0.01 for 1%) and n
    the number of monthly payments.  Requires interest_rate != 0.
    """
    annuity_factor = 1 - (1 + interest_rate) ** -num_months
    return principal * interest_rate * (1 / annuity_factor)


# Quick check: EMI for a 1500 loan at 1% per month over 10 months.
print(calculate_emi(1500, 0.01, 10))
# Example
# rgd_3d URL configuration
from django.urls import path
from rest_framework.routers import SimpleRouter
from rgd_3d import models, views
from rgd_3d.rest import viewsets
# REST API routes, registered on a DRF router (no trailing slashes).
router = SimpleRouter(trailing_slash=False)
router.register(r'api/rgd_3d/mesh', viewsets.Mesh3DViewSet, basename='mesh-3d')
router.register(r'api/rgd_3d/tiles3d', viewsets.Tiles3DViewSet, basename='tiles-3d')

# HTML page routes; the router-generated API routes are appended at the end.
urlpatterns = [
    # Pages
    path('rgd_3d/mesh/', views.Mesh3DListView.as_view(), name='meshes'),
    path(
        'rgd_3d/mesh/<int:pk>/',
        views.Mesh3DDetailView.as_view(),
        # Route names come from the model classes so model helpers can
        # reverse their own detail pages.
        name=models.Mesh3D.detail_view_name,
    ),
    path(
        'rgd_3d/tiles3d/<int:pk>/',
        views.Tiles3DDetailView.as_view(),
        name=models.Tiles3D.detail_view_name,
    ),
] + router.urls
# ---- end of file (concatenated dump boundary) ----
# smbprotocol/tree.py
# -*- coding: utf-8 -*-
# Copyright: (c) 2019, <NAME> (@jborean93) <<EMAIL>>
# MIT License (see LICENSE or https://opensource.org/licenses/MIT)
import logging
from collections import (
OrderedDict,
)
from smbprotocol import (
Commands,
Dialects,
)
from smbprotocol.exceptions import (
SMBException,
)
from smbprotocol.ioctl import (
CtlCode,
IOCTLFlags,
SMB2IOCTLRequest,
SMB2IOCTLResponse,
SMB2ValidateNegotiateInfoRequest,
SMB2ValidateNegotiateInfoResponse,
)
from smbprotocol.structure import (
BytesField,
EnumField,
FlagField,
IntField,
Structure,
)
log = logging.getLogger(__name__)
class TreeFlags(object):
    """
    [MS-SMB2] v53.0 2017-09-15
    2.2.9 SMB2 TREE_CONNECT Response Flags

    Flags used in SMB 3.1.1 to indicate how to process the operation.
    """
    # Values are taken verbatim from the spec section cited above.
    SMB2_TREE_CONNECT_FLAG_CLUSTER_RECONNECT = 0x0004
    SMB2_TREE_CONNECT_FLAG_REDIRECT_TO_OWNER = 0x0002
    SMB2_TREE_CONNECT_FLAG_EXTENSION_PRESENT = 0x0001
class ShareType(object):
    """
    [MS-SMB2] v53.0 2017-09-15
    2.2.10 SMB2 TREE_CONNECT Response Capabilities

    The type of share being accessed
    """
    SMB2_SHARE_TYPE_DISK = 0x01   # physical disk share
    SMB2_SHARE_TYPE_PIPE = 0x02   # named pipe share
    SMB2_SHARE_TYPE_PRINT = 0x03  # printer share
class ShareFlags(object):
    """
    [MS-SMB2] v53.0 2017-09-15
    2.2.10 SMB2 TREE_CONNECT Response Capabilities

    Properties for the share
    """
    # Client-side caching policy (mutually exclusive values, not bit flags).
    SMB2_SHAREFLAG_MANUAL_CACHING = 0x00000000
    SMB2_SHAREFLAG_AUTO_CACHING = 0x00000010
    SMB2_SHAREFLAG_VDO_CACHING = 0x00000020
    SMB2_SHAREFLAG_NO_CACHING = 0x00000030
    # DFS namespace membership.
    SMB2_SHAREFLAG_DFS = 0x00000001
    SMB2_SHAREFLAG_DFS_ROOT = 0x00000002
    # Miscellaneous share properties (true bit flags).
    SMB2_SHAREFLAG_RESTRICT_EXCLUSIVE_OPENS = 0x00000100
    SMB2_SHAREFLAG_FORCE_SHARED_DELETE = 0x00000200
    SMB2_SHAREFLAG_ALLOW_NAMESPACE_CACHING = 0x00000400
    SMB2_SHAREFLAG_ACCESS_BASED_DIRECTORY_ENUM = 0x00000800
    SMB2_SHAREFLAG_FORCE_LEVELII_OPLOCK = 0x00001000
    SMB2_SHAREFLAG_ENABLE_HASH_V1 = 0x00002000
    SMB2_SHAREFLAG_ENABLE_HASH_V2 = 0x00004000
    # Set when all data on the share must be encrypted (checked in
    # TreeConnect.connect below).
    SMB2_SHAREFLAG_ENCRYPT_DATA = 0x00008000
    SMB2_SHAREFLAG_IDENTITY_REMOTING = 0x00040000
class ShareCapabilities(object):
    """
    [MS-SMB2] v53.0 2017-09-15
    2.2.10 SMB2 TREE_CONNECT Response Capabilities

    Indicates various capabilities for a share
    """
    # Values are taken verbatim from the spec section cited above.
    SMB2_SHARE_CAP_DFS = 0x00000008
    SMB2_SHARE_CAP_CONTINUOUS_AVAILABILITY = 0x00000010
    SMB2_SHARE_CAP_SCALEOUT = 0x00000020
    SMB2_SHARE_CAP_CLUSTER = 0x00000040
    SMB2_SHARE_CAP_ASYMMETRIC = 0x00000080
    SMB2_SHARE_CAP_REDIRECT_TO_OWNER = 0x00000100
class SMB2TreeConnectRequest(Structure):
    """
    [MS-SMB2] v53.0 2017-09-15
    2.2.9 SMB2 TREE_CONNECT Request

    Sent by the client to request access to a particular share on the server
    """
    COMMAND = Commands.SMB2_TREE_CONNECT

    def __init__(self):
        self.fields = OrderedDict([
            # Fixed structure size per the spec (always 9).
            ('structure_size', IntField(
                size=2,
                default=9
            )),
            ('flags', FlagField(
                size=2,
                flag_type=TreeFlags,
            )),
            # Offset of the share path from the start of the SMB2 header:
            # 64-byte header + 8-byte fixed portion of this structure.
            ('path_offset', IntField(
                size=2,
                default=64 + 8,
            )),
            ('path_length', IntField(
                size=2,
                default=lambda s: len(s['buffer']),
            )),
            # UTF-16-LE encoded share path (set by TreeConnect.connect).
            ('buffer', BytesField(
                size=lambda s: s['path_length'].get_value()
            ))
        ])
        super(SMB2TreeConnectRequest, self).__init__()
class SMB2TreeConnectResponse(Structure):
    """
    [MS-SMB2] v53.0 2017-09-15
    2.2.10 SMB2 TREE_CONNECT Response

    Sent by the server when an SMB2 TREE_CONNECT request is processed
    successfully.
    """
    COMMAND = Commands.SMB2_TREE_CONNECT

    def __init__(self):
        self.fields = OrderedDict([
            # Fixed structure size per the spec (always 16).
            ('structure_size', IntField(
                size=2,
                default=16
            )),
            # One of the ShareType values (disk, pipe, print).
            ('share_type', EnumField(
                size=1,
                enum_type=ShareType,
            )),
            ('reserved', IntField(size=1)),
            ('share_flags', FlagField(
                size=4,
                flag_type=ShareFlags,
            )),
            ('capabilities', FlagField(
                size=4,
                flag_type=ShareCapabilities,
            )),
            # Maximal access rights granted to the caller on this share.
            ('maximal_access', IntField(size=4))
        ])
        super(SMB2TreeConnectResponse, self).__init__()
class SMB2TreeDisconnect(Structure):
    """
    [MS-SMB2] v53.0 2017-09-15
    2.2.11/12 SMB2 TREE_DISCONNECT Request and Response

    Sent by the client to request that the tree connect specific by tree_id in
    the header is disconnected.  The request and response share the same
    4-byte layout, so one class models both.
    """
    COMMAND = Commands.SMB2_TREE_DISCONNECT

    def __init__(self):
        self.fields = OrderedDict([
            ('structure_size', IntField(
                size=2,
                default=4,
            )),
            ('reserved', IntField(size=2))
        ])
        super(SMB2TreeDisconnect, self).__init__()
class TreeConnect(object):

    def __init__(self, session, share_name):
        """
        [MS-SMB2] v53.0 2017-09-15
        3.2.1.4 Per Tree Connect

        Attributes per Tree Connect (share connections)

        :param session: The Session to connect to the tree with.
        :param share_name: The name of the share, including the server name.
        """
        self._connected = False
        self.open_table = {}
        self.share_name = share_name
        # Assigned from the server's tree_id once connect() succeeds.
        self.tree_connect_id = None
        self.session = session
        self.is_dfs_share = None
        # SMB 3.x+ only; stay None on older dialects.
        self.is_ca_share = None
        self.encrypt_data = None
        self.is_scaleout_share = None

    def connect(self, require_secure_negotiate=True):
        """
        Connect to the share.

        :param require_secure_negotiate: For Dialects 3.0 and 3.0.2, will
            verify the negotiation parameters with the server to prevent
            SMB downgrade attacks
        """
        log.info("Session: %s - Creating connection to share %s"
                 % (self.session.username, self.share_name))
        # The share path travels on the wire as UTF-16-LE.
        utf_share_name = self.share_name.encode('utf-16-le')
        connect = SMB2TreeConnectRequest()
        connect['buffer'] = utf_share_name
        log.info("Session: %s - Sending Tree Connect message"
                 % self.session.username)
        log.debug(connect)
        request = self.session.connection.send(connect,
                                               sid=self.session.session_id)
        log.info("Session: %s - Receiving Tree Connect response"
                 % self.session.username)
        response = self.session.connection.receive(request)
        tree_response = SMB2TreeConnectResponse()
        tree_response.unpack(response['data'].get_value())
        log.debug(tree_response)
        # https://msdn.microsoft.com/en-us/library/cc246687.aspx
        self.tree_connect_id = response['tree_id'].get_value()
        log.info("Session: %s - Created tree connection with ID %d"
                 % (self.session.username, self.tree_connect_id))
        self._connected = True
        # Register on the session so the tree can be looked up by id.
        self.session.tree_connect_table[self.tree_connect_id] = self
        capabilities = tree_response['capabilities']
        self.is_dfs_share = capabilities.has_flag(
            ShareCapabilities.SMB2_SHARE_CAP_DFS)
        self.is_ca_share = capabilities.has_flag(
            ShareCapabilities.SMB2_SHARE_CAP_CONTINUOUS_AVAILABILITY)
        dialect = self.session.connection.dialect
        if dialect >= Dialects.SMB_3_0_0 and \
                self.session.connection.supports_encryption:
            # Per-share encryption requirement advertised by the server.
            self.encrypt_data = tree_response['share_flags'].has_flag(
                ShareFlags.SMB2_SHAREFLAG_ENCRYPT_DATA)
            self.is_scaleout_share = capabilities.has_flag(
                ShareCapabilities.SMB2_SHARE_CAP_SCALEOUT)
        # secure negotiate is only valid for SMB 3 dialects before 3.1.1
        if dialect < Dialects.SMB_3_1_1 and require_secure_negotiate:
            self._verify_dialect_negotiate()

    def disconnect(self):
        """
        Disconnects the tree connection.
        """
        # No-op when connect() never completed (or disconnect ran already).
        if not self._connected:
            return
        log.info("Session: %s, Tree: %s - Disconnecting from Tree Connect"
                 % (self.session.username, self.share_name))
        req = SMB2TreeDisconnect()
        log.info("Session: %s, Tree: %s - Sending Tree Disconnect message"
                 % (self.session.username, self.share_name))
        log.debug(req)
        request = self.session.connection.send(req,
                                               sid=self.session.session_id,
                                               tid=self.tree_connect_id)
        log.info("Session: %s, Tree: %s - Receiving Tree Disconnect response"
                 % (self.session.username, self.share_name))
        res = self.session.connection.receive(request)
        res_disconnect = SMB2TreeDisconnect()
        res_disconnect.unpack(res['data'].get_value())
        log.debug(res_disconnect)
        self._connected = False
        del self.session.tree_connect_table[self.tree_connect_id]

    def _verify_dialect_negotiate(self):
        """Run FSCTL_VALIDATE_NEGOTIATE_INFO against the server and raise
        SMBException (via _verify) if the echoed negotiation parameters do
        not match what was originally negotiated (downgrade protection)."""
        log_header = "Session: %s, Tree: %s" \
                     % (self.session.username, self.share_name)
        log.info("%s - Running secure negotiate process" % log_header)
        ioctl_request = SMB2IOCTLRequest()
        ioctl_request['ctl_code'] = \
            CtlCode.FSCTL_VALIDATE_NEGOTIATE_INFO
        # All-0xFF file id targets the tree itself rather than an open file.
        ioctl_request['file_id'] = b"\xff" * 16
        val_neg = SMB2ValidateNegotiateInfoRequest()
        val_neg['capabilities'] = \
            self.session.connection.client_capabilities
        val_neg['guid'] = self.session.connection.client_guid
        val_neg['security_mode'] = \
            self.session.connection.client_security_mode
        val_neg['dialects'] = \
            self.session.connection.negotiated_dialects
        ioctl_request['buffer'] = val_neg
        ioctl_request['max_output_response'] = len(val_neg)
        ioctl_request['flags'] = IOCTLFlags.SMB2_0_IOCTL_IS_FSCTL
        log.info("%s - Sending Secure Negotiate Validation message"
                 % log_header)
        log.debug(ioctl_request)
        request = self.session.connection.send(ioctl_request,
                                               sid=self.session.session_id,
                                               tid=self.tree_connect_id)
        log.info("%s - Receiving secure negotiation response" % log_header)
        response = self.session.connection.receive(request)
        ioctl_resp = SMB2IOCTLResponse()
        ioctl_resp.unpack(response['data'].get_value())
        log.debug(ioctl_resp)
        log.info("%s - Unpacking secure negotiate response info" % log_header)
        val_resp = SMB2ValidateNegotiateInfoResponse()
        val_resp.unpack(ioctl_resp['buffer'].get_value())
        log.debug(val_resp)
        # Each mismatch below indicates a possible downgrade attack.
        self._verify("server capabilities",
                     val_resp['capabilities'].get_value(),
                     self.session.connection.server_capabilities.get_value())
        self._verify("server guid",
                     val_resp['guid'].get_value(),
                     self.session.connection.server_guid)
        self._verify("server security mode",
                     val_resp['security_mode'].get_value(),
                     self.session.connection.server_security_mode)
        self._verify("server dialect",
                     val_resp['dialect'].get_value(),
                     self.session.connection.dialect)
        log.info("Session: %d, Tree: %d - Secure negotiate complete"
                 % (self.session.session_id, self.tree_connect_id))

    def _verify(self, check, actual, expected):
        """Raise SMBException if *actual* != *expected* for the named check."""
        log_header = "Session: %d, Tree: %d"\
                     % (self.session.session_id, self.tree_connect_id)
        if actual != expected:
            raise SMBException("%s - Secure negotiate failed to verify %s, "
                               "Actual: %s, Expected: %s"
                               % (log_header, check, actual, expected))
# ---- end of file (concatenated dump boundary) ----
218214 | """zgrab2 scanner http"""
# -*- coding:utf-8 -*-
import os
import subprocess
import sys
import traceback
import uuid
from commonbaby.helpers import helper_dir, helper_file
from commonbaby.mslog import MsLogger, MsLogManager
from datacontract.iscoutdataset import IscoutTask
from .....clientdatafeedback.scoutdatafeedback import PortInfo
from .....config_output import tmpdir
class Zgrab2ScannerBase(object):
    """Base class for zgrab2-backed scanners: owns a per-tool temp directory
    and provides helpers to write host lists and spawn external processes."""

    def __init__(self, toolmark: str):
        # `toolmark` names this scanner and becomes the per-tool temp
        # sub-directory, which is wiped and recreated on construction.
        if not isinstance(toolmark, str) or toolmark == "":
            raise Exception("Zgrab2 scanner toolmark is invalid")
        self._logger: MsLogger = MsLogManager.get_logger(type(self).__name__)
        self._toolmark: str = toolmark
        self._tmpdir: str = os.path.abspath(tmpdir)
        # NOTE(review): os.path.abspath returns str for str input, so this
        # fallback only triggers if the configured `tmpdir` is not a str —
        # confirm against config_output.
        if not isinstance(self._tmpdir, str):
            self._tmpdir = os.path.abspath("./_clienttmpdir")
        self._tmpdir = os.path.abspath(os.path.join(self._tmpdir, self._toolmark))
        if os.path.isdir(self._tmpdir):
            helper_dir.remove_dirs(self._tmpdir)
        os.makedirs(self._tmpdir)

    def _write_hosts_to_file(self, task: IscoutTask, hosts: list) -> str:
        """Write one host per line (UTF-8) to a temp file named after the
        task id; return the file path, or None on failure."""
        fi: str = None
        try:
            fi = os.path.join(self._tmpdir, task.taskid)
            # Avoid clobbering an existing file by falling back to uuid names.
            while os.path.isfile(fi):
                fi = os.path.join(self._tmpdir, str(uuid.uuid1()))
            with open(fi, mode="w", encoding="utf-8") as fs:
                for h in hosts:
                    fs.write("{}\n".format(h))
        except Exception:
            # Best-effort cleanup; signal failure with None and log the error.
            if not fi is None and os.path.isfile(fi):
                os.remove(fi)
            fi = None
            self._logger.error(
                "Write hosts to file error:\ntaskid:{}\nerror:{}".format(
                    task.taskid, traceback.format_exc()
                )
            )
        return fi

    def _run_process(
        self, executable: str, *args, sudo: bool = False, rootDir: str = "./",
    ) -> subprocess.Popen:
        """run process under current operation system.
        executable: the executable binary path.
        arg: all args in a str.
        sudo: True if sudo
        rootDir: current work dir"""
        try:
            p = None
            if not os.path.exists(rootDir):
                os.makedirs(rootDir)
            # NOTE(review): the command is a single shell string run with
            # shell=True; callers must never pass untrusted input in `args`.
            cmd = ""
            if sudo and not sys.platform.startswith("win32"):
                cmd = "sudo "
            params = " ".join(args)
            params = "%s %s" % (executable, params)
            cmd = cmd + params
            # logmsg = ''
            # if not is_none_or_empty(taskid):
            #     logmsg += 'taskid=%s: ' % taskid
            # logmsg += cmd
            self._logger.info(cmd)
            if (
                sys.platform.startswith("freebsd")
                or sys.platform.startswith("linux")
                or sys.platform.startswith("darwin")
            ):
                # POSIX: capture stdin/stdout/stderr as text streams.
                p = subprocess.Popen(
                    cmd,
                    shell=True,
                    cwd=rootDir,
                    bufsize=100000,
                    stdin=subprocess.PIPE,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE,
                    close_fds=True,
                    universal_newlines=True,
                )
            else:
                # Windows (and anything else): only stdout is captured.
                p = subprocess.Popen(
                    cmd,
                    cwd=rootDir,
                    shell=True,
                    bufsize=10000,
                    stdout=subprocess.PIPE,
                    universal_newlines=True,
                )
            return p
        except Exception as ex:
            # NOTE(review): a bare `raise` would be the idiomatic re-raise.
            raise ex

    def _is_cmd_not_found(self, p: subprocess.Popen) -> (str, str, bool):
        """judge the output text if it says 'command not found.
        return (strstdout,strstderr,is_not_found)'"""
        is_not_found = False
        # communicate() waits for the process and drains both streams.
        output, error = p.communicate()  # sav stdout
        if not output is None and not output == "":
            if "command not found" in output:
                is_not_found = True
        elif not error is None and not error == "":
            if "command not found" in error:
                is_not_found = True
        return output, error, is_not_found
# ---- end of file (concatenated dump boundary) ----
# notoraptor/pysaurus — pysaurus/core/jsonable.py
import types
from typing import Dict, Sequence
from pysaurus.core.functions import is_valid_attribute_name
from pysaurus.core.override import Override
# Callable/descriptor types that must NOT be treated as data attributes when
# scanning a class namespace (see is_attribute): plain and bound functions,
# builtins, the classmethod descriptor type, and the classmethod wrapper.
__fn_types__ = (
    types.FunctionType,
    types.MethodType,
    types.BuiltinMethodType,
    types.BuiltinFunctionType,
    types.ClassMethodDescriptorType,
    classmethod,
)
def is_attribute(key, value):
    """Return True when *key*/*value* form a plain data attribute: a valid
    attribute name bound to a non-callable value."""
    if not is_valid_attribute_name(key):
        return False
    return not isinstance(value, __fn_types__)
class _Checker:
    """Base value validator/normalizer with an optional default.

    NOTE(review): ``Override`` is a project helper that appears to dispatch
    between the decorated signatures by arity — confirm against
    pysaurus.core.override.
    """

    __slots__ = ("default",)
    __init__ = Override("_Checker.__init__")

    @__init__.override
    def __init__(self):
        # No default: an empty tuple means "call validate() with no args".
        self.default = ()

    @__init__.override
    def __init__(self, value: object):
        # None means "nullable, default None"; otherwise store a 1-tuple so
        # the default can be splatted into validate().
        self.default = None if value is None else (value,)

    __call__ = Override("_Checker.__call__")

    @__call__.override
    def __call__(self):
        # Produce the default value (None for nullable checkers).
        return None if self.default is None else self.validate(*self.default)

    @__call__.override
    def __call__(self, value: object):
        # Chained comparison: value is None AND self.default is None, i.e.
        # None is accepted verbatim only on nullable checkers.
        return None if value is self.default is None else self.validate(value)

    def __str__(self):
        return f"${type(self).__name__}" f"({', '.join(str(d) for d in self.default)})"

    __repr__ = __str__

    validate = Override("_Checker.validate")

    @validate.override
    def validate(self):
        raise NotImplementedError()

    @validate.override
    def validate(self, value: object):
        raise NotImplementedError()

    def to_dict(self, value):
        # Serialization hook; identity by default.
        return value
class _ClassChecker(_Checker):
    """Checker for arbitrary (non-Jsonable) classes: coerces via ``cls(value)``."""

    __slots__ = ("cls",)

    def __init__(self, cls, *args):
        assert isinstance(cls, type)
        super().__init__(*args)
        self.cls = cls

    @_Checker.validate.override
    def validate(self):
        # Zero-arg default: instantiate the class with no arguments.
        return self.cls()

    @_Checker.validate.override
    def validate(self, value: object):
        # Pass instances through; coerce anything else via the constructor.
        return value if isinstance(value, self.cls) else self.cls(value)
class _JsonableChecker(_Checker):
    """Checker for Jsonable subclasses; defaults are stored as plain dicts."""

    __slots__ = ("cls",)

    def __init__(self, cls, *args):
        assert issubclass(cls, Jsonable)
        if args:
            (default,) = args
            if isinstance(default, cls):
                # Normalize an instance default to its dict form.
                default = default.to_dict()
            else:
                assert isinstance(default, dict) or default is None
        else:
            # With no explicit default, default to an empty dict
            # (i.e. cls() built from no keys).
            default = {}
        super().__init__(default)
        self.cls = cls

    @_Checker.validate.override
    def validate(self, value: object):
        return value if isinstance(value, self.cls) else self.cls.from_dict(value)

    def to_dict(self, value):
        return value.to_dict()
def _get_checker(cls, *args):
    """Build the checker appropriate for *cls*: a _JsonableChecker for
    Jsonable subclasses, a plain _ClassChecker otherwise."""
    checker_type = _JsonableChecker if issubclass(cls, Jsonable) else _ClassChecker
    return checker_type(cls, *args)
class ShortFunctor:
    """Translate dict keys between long field names and short aliases.

    Built from a sequence of long field names and a long->short mapping
    that must cover exactly those fields.
    """

    __slots__ = ("__to_short", "__to_long")

    def __init__(self, fields: Sequence[str], long_to_short: Dict[str, str]):
        assert len(fields) == len(long_to_short)
        assert all(field in long_to_short for field in fields)
        self.__to_short = long_to_short
        self.__to_long = {alias: name for name, alias in long_to_short.items()}

    def to_short(self, dct_long_keys: dict):
        """Return *dct_long_keys* with every key renamed to its short alias."""
        table = self.__to_short
        return {table[key]: value for key, value in dct_long_keys.items()}

    def from_short(self, dct_short_keys: dict):
        """Return *dct_short_keys* with every key renamed back to its long name."""
        table = self.__to_long
        return {table[key]: value for key, value in dct_short_keys.items()}
class NoShortFunctor:
    """Identity stand-in used when a Jsonable class defines no key shortener."""

    __slots__ = ()

    @classmethod
    def to_short(cls, dct):
        """Return *dct* unchanged."""
        return dct

    @classmethod
    def from_short(cls, dct):
        """Return *dct* unchanged."""
        return dct
def get_bases(bases: tuple):
    """Return the MRO of the single allowed base class, stripped of the
    trailing (Jsonable, object) pair; empty tuple when there is no base."""
    if not bases:
        return ()
    assert len(bases) == 1
    mro = bases[0].__mro__
    assert mro[-1] is object
    assert mro[-2] is Jsonable
    return mro[:-2]
def gen_get(namespace: dict, key: str):
    """Return the getter for *key*: a user-supplied ``get_<key>`` popped out
    of *namespace* when present, otherwise a generated one that reads the
    instance's ``__json__`` dict."""
    accessor_name = f"get_{key}"
    try:
        return namespace.pop(accessor_name)
    except KeyError:
        pass

    def getter(self):
        return self.__json__[key]

    getter.__name__ = accessor_name
    return getter
def gen_set(namespace: dict, key: str):
    """Return the setter for *key*: a user-supplied ``set_<key>`` popped out
    of *namespace* when present, otherwise a generated one that writes the
    instance's ``__json__`` dict."""
    mutator_name = f"set_{key}"
    try:
        return namespace.pop(mutator_name)
    except KeyError:
        pass

    def setter(self, value):
        self.__json__[key] = value

    setter.__name__ = mutator_name
    return setter
class _MetaJSON(type):
    """Metaclass that turns annotated/default attribute declarations into
    checker-backed properties stored per-instance in ``__json__``."""

    __slots__ = ()

    def __new__(cls, name, bases, namespace):
        assert "__definitions__" not in namespace, "Reserved attribute: __definitions__"
        annotations = namespace.get("__annotations__", {})
        # Candidate data attributes: valid names bound to non-callable values.
        attributes = {
            key: value for key, value in namespace.items() if is_attribute(key, value)
        }
        original_attributes = list(attributes)
        definitions = {}
        # Inherit checkers from the (single) Jsonable base chain.
        for base in get_bases(bases):
            definitions.update(base.__definitions__)
        for key, value in attributes.items():
            if isinstance(value, _Checker):
                # Explicit checker instance: must not also carry an annotation.
                assert key not in annotations
                definitions[key] = value
            elif key in annotations:
                annotation = annotations[key]
                assert isinstance(annotation, type)
                definitions[key] = _get_checker(annotation, value)
            else:
                # Un-annotated default: infer the type from the default value.
                definitions[key] = _get_checker(type(value), value)
        # Annotation-only declarations (no default value given).
        for key, annotation in annotations.items():
            if key not in definitions:
                original_attributes.append(key)
                assert isinstance(annotation, type)
                definitions[key] = _get_checker(annotation)
        # Optional long->short key mapping for the compact dict form.
        short = namespace.get("__short__", {})
        shortener = (
            ShortFunctor(tuple(definitions), short) if short else NoShortFunctor()
        )
        namespace["__definitions__"] = {
            key: definitions[key] for key in sorted(definitions)
        }
        namespace["__shortener__"] = shortener
        # Replace each declared attribute with a property over __json__,
        # honoring any user-supplied get_<key>/set_<key> in the namespace.
        for key in original_attributes:
            namespace[key] = property(gen_get(namespace, key), gen_set(namespace, key))
        return type.__new__(cls, name, bases, namespace)
class Jsonable(metaclass=_MetaJSON):
    """Base class whose declared attributes are validated by checkers and
    stored in ``__json__``; supports dict (de)serialization with optional
    short key aliases."""

    __slots__ = ("__json__",)

    def __init__(self, **kwargs):
        self.__json__ = {}
        for key, checker in self.__definitions__.items():
            if key in kwargs:
                value = checker(kwargs.pop(key))
            else:
                # Fall back to the checker's default value.
                value = checker()
            self.__json__[key] = value
        # Anything left over was not declared on the class.
        assert not kwargs, f"{type(self).__name__}: unknown keys: {tuple(kwargs)}"

    def __bool__(self):
        # Always truthy, even with zero fields (overrides __len__-based bool).
        return True

    def __len__(self):
        return len(self.__json__)

    def __iter__(self):
        # Iterates (key, value) pairs; equality/hash/str build on this.
        return iter(self.__json__.items())

    def __hash__(self):
        return hash(tuple(self))

    def __eq__(self, other):
        return type(self) is type(other) and all(a == b for a, b in zip(self, other))

    def __str__(self):
        fields = ", ".join(
            f"{key}={repr(value) if isinstance(value, str) else value}"
            for key, value in self
        )
        return f"{type(self).__name__}({fields})"

    __repr__ = __str__

    def update(self, dct: dict):
        """Re-validate and assign only the keys of *dct* that are declared."""
        assert isinstance(dct, dict)
        for key, checker in self.__definitions__.items():
            if key in dct:
                self.__json__[key] = checker(dct[key])

    def to_json(self):
        # Raw internal dict (values may still be rich objects).
        return self.__json__

    @classmethod
    def from_json(cls, dct):
        assert isinstance(dct, dict)
        return cls(**dct)

    def to_dict(self):
        """Serialize to a plain dict, applying per-field to_dict and the
        long->short key shortener."""
        return self.__shortener__.to_short(
            {key: self.__definitions__[key].to_dict(value) for key, value in self}
        )

    @classmethod
    def from_dict(cls, dct):
        """Inverse of to_dict: expand short keys and rebuild the instance."""
        assert isinstance(dct, dict)
        return cls(**cls.__shortener__.from_short(dct))
# ---- end of file (concatenated dump boundary) ----
import pygame
import random
#pylint: disable=no-member
class med:
    """A medium enemy wave: loads the sprite, rolls a random spawn pattern,
    and tracks simple left/right strafing state.

    The game loop reads ``the_wave``: a list of
    [x positions, y positions, health values], one entry per enemy.

    NOTE: the original last line carried a fused dataset marker that made the
    module unparseable; it has been removed.
    """

    def __init__(self):
        self.img = pygame.image.load('resorces/medium.png')  # path kept as shipped
        self.display_width = 600
        self.display_height = 700
        self.img_width = 70
        self.img_height = 80
        self.health = 60
        self.x_position = []
        # 'starfe' is a long-standing typo kept for caller compatibility.
        self.starfe_stage = 0   # -1 = drifted left, 0 = centred, 1 = drifted right
        self.strafe_value = 0   # current horizontal step direction (-1/0/1)
        self.spawn_pattern()

    def strafe(self, entry):
        """Update the strafing direction.

        When *entry* <= 0 a new random direction is rolled (choice biased
        towards moving); the stage bookkeeping then makes the wave drift one
        step away from centre and forces the next step back towards it.
        """
        if entry <= 0:
            self.strafe_value = random.choice([-1, 0, 1, 1, 1, -1, -1])
        if self.starfe_stage == 0:
            if self.strafe_value == -1:
                self.starfe_stage = -1
            elif self.strafe_value == 1:
                self.starfe_stage = 1
        elif self.starfe_stage == -1:
            # Already drifted left: force a step back to centre.
            self.strafe_value = 1
            self.starfe_stage = 0
        elif self.starfe_stage == 1:
            # Already drifted right: force a step back to centre.
            self.strafe_value = -1
            self.starfe_stage = 0

    def spawn_pattern(self):
        """Roll a random wave of 2 or 3 enemies mirrored around the screen
        centre and publish it as ``self.the_wave``."""
        self.no = random.randint(2, 3)
        x = random.choice([80, 100, 120])
        self.y_position = random.choice([50, 100, 150])
        if self.no == 2:
            left = x
            right = self.display_width - x - self.img_width
            self.x_position.append(left)
            self.x_position.append(right)
        elif self.no == 3:
            centre = 265
            left = x
            right = self.display_width - x - self.img_width
            self.x_position.append(centre)
            self.x_position.append(left)
            self.x_position.append(right)
        self.the_wave = [self.x_position,
                         [self.y_position] * self.no,
                         [self.health] * self.no]
# pycortex example script
"""
===================
Interpolate Data
===================
In this example we show how to interpolate data from a sparse collection of points
to all the points in the cortical surface.
The method used here is biharmonic interpolation, which finds the solution with
the minimum squared Laplacian (fourth derivative) that still passes through all
the selected points. This is similar to thin plate splines.
"""
import cortex
from cortex.polyutils import Surface
import numpy as np
np.random.seed(1234)
from matplotlib import pyplot as plt
subject = "S1"
# First we need to import the surfaces for this subject
lsurf, rsurf = [Surface(*d) for d in cortex.db.get_surf(subject, "fiducial")]
# Let's choose a few points and generate data for them
selected_pts = np.arange(len(lsurf.pts), step=5000)
num_selected_pts = len(selected_pts)
sparse_data = np.random.randn(num_selected_pts)
# Then interpolate
interp_data = lsurf.interp(selected_pts, sparse_data)
# Plot the result
# interp_data is only for the left hemisphere, but the Vertex constructor
# infers that and fills the right hemisphere with zeros
interp_vertex = cortex.Vertex(interp_data[:,0], subject,
vmin=-2, vmax=2, cmap='RdBu_r')
cortex.quickshow(interp_vertex, with_labels=False, with_rois=False)
# plot the locations of the points we selected originally
# nudge=True puts both left and right hemispheres in the same space, moving them
# so that they don't overlap. These are the coordinates used in quickflat
(lflatpts, lpolys), (rflatpts, rpolys) = cortex.db.get_surf(subject, "flat",
nudge=True)
ax = plt.gca()
# zorder is set to 10 to make sure points go on top of other quickflat layers
ax.scatter(lflatpts[selected_pts,0], lflatpts[selected_pts,1], s=50,
c=sparse_data, vmin=-2, vmax=2, cmap=plt.cm.RdBu_r, zorder=10)
# the interpolate function can also handle multiple dimensions at the same time
# (this takes a while to run for no plotting, and thus is commented out)
#sparse_data_2d = np.random.randn(10, num_selected_pts)
#interp_data_2d = lsurf.interp(selected_pts, sparse_data_2d)
# > interp_data_2d.shape
# (152893, 10)
plt.show() | StarcoderdataPython |
3449736 | """Top-level package for COCO-Assistant."""
__author__ = """<NAME>"""
__email__ = "<EMAIL>"
__version__ = "0.4.0"
from .coco_assistant import COCO_Assistant
# ---- end of file (concatenated dump boundary) ----
from classifier import get_tweets_predictions
from bs4 import BeautifulSoup
from urllib2 import urlopen
import re
import os
import nltk
from nltk.stem.porter import PorterStemmer
from sklearn.externals import joblib
import numpy as np
import pandas as pd
from textstat.textstat import *
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer as VS
import nltk.data
class Analyzer(object):
def __init__(self, ):
cur_path = os.path.dirname(os.path.abspath(__file__))
self.model = joblib.load(os.path.join(cur_path, 'final_model.pkl'))
self.tf_vectorizer = joblib.load(os.path.join(cur_path, 'final_tfidf.pkl'))
self.idf_vector = joblib.load(os.path.join(cur_path, 'final_idf.pkl'))
self.pos_vectorizer = joblib.load(os.path.join(cur_path, 'final_pos.pkl'))
self.stemmer = PorterStemmer()
self.sentiment_analyzer = VS()
@staticmethod
def preprocess(text_string):
"""
Accepts a text string and replaces:
1) urls with URLHERE
2) lots of whitespace with one instance
3) mentions with MENTIONHERE
This allows us to get standardized counts of urls and mentions
Without caring about specific people mentioned
"""
space_pattern = '\s+'
giant_url_regex = ('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|'
'[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+')
mention_regex = '@[\w\-]+'
parsed_text = re.sub(space_pattern, ' ', text_string)
parsed_text = re.sub(giant_url_regex, 'URLHERE', parsed_text)
parsed_text = re.sub(mention_regex, 'MENTIONHERE', parsed_text)
# parsed_text = parsed_text.code("utf-8", errors='ignore')
return parsed_text
def tokenize(self, tweet):
"""Removes punctuation & excess whitespace, sets to lowercase,
and stems tweets. Returns a list of stemmed tokens."""
tweet = " ".join(re.split("[^a-zA-Z]*", tweet.lower())).strip()
# tokens = re.split("[^a-zA-Z]*", tweet.lower())
tokens = [self.stemmer.stem(t) for t in tweet.split()]
return tokens
@staticmethod
def basic_tokenize(tweet):
"""Same as tokenize but without the stemming"""
tweet = " ".join(re.split("[^a-zA-Z.,!?]*", tweet.lower())).strip()
return tweet.split()
def get_pos_tags(self, contents):
tweet_tags = []
for t in contents:
tokens = self.basic_tokenize(self.preprocess(t))
tags = nltk.pos_tag(tokens)
tag_list = [x[1] for x in tags]
# for i in range(0, len(tokens)):
tag_str = " ".join(tag_list)
tweet_tags.append(tag_str)
return tweet_tags
def other_features_(self, tweet):
"""This function takes a string and returns a list of features.
These include Sentiment scores, Text and Readability scores,
as well as Twitter specific features.
This is modified to only include those features in the final
model."""
sentiment = self.sentiment_analyzer.polarity_scores(tweet)
words = self.preprocess(tweet) # Get text only
syllables = textstat.syllable_count(words) # count syllables in words
num_chars = sum(len(w) for w in words) # num chars in words
num_chars_total = len(tweet)
num_terms = len(tweet.split())
num_words = len(words.split())
avg_syl = round(float((syllables + 0.001)) / float(num_words + 0.001), 4)
num_unique_terms = len(set(words.split()))
# Modified FK grade, where avg words per sentence is just num words/1
FKRA = round(float(0.39 * float(num_words) / 1.0) + float(11.8 * avg_syl) - 15.59, 1)
# Modified FRE score, where sentence fixed to 1
FRE = round(206.835 - 1.015 * (float(num_words) / 1.0) - (84.6 * float(avg_syl)), 2)
twitter_objs = self.count_twitter_objs(tweet) # Count #, @, and http://
features = [FKRA, FRE, syllables, num_chars, num_chars_total, num_terms, num_words,
num_unique_terms, sentiment['compound'],
twitter_objs[2], twitter_objs[1], ]
# features = pandas.DataFrame(features)
return features
@staticmethod
def count_twitter_objs(text_string):
"""
Accepts a text string and replaces:
1) urls with URLHERE
2) lots of whitespace with one instance
3) mentions with MENTIONHERE
4) hashtags with HASHTAGHERE
This allows us to get standardized counts of urls and mentions
Without caring about specific people mentioned.
Returns counts of urls, mentions, and hashtags.
"""
space_pattern = '\s+'
giant_url_regex = ('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|'
'[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+')
mention_regex = '@[\w\-]+'
hashtag_regex = '#[\w\-]+'
parsed_text = re.sub(space_pattern, ' ', text_string)
parsed_text = re.sub(giant_url_regex, 'URLHERE', parsed_text)
parsed_text = re.sub(mention_regex, 'MENTIONHERE', parsed_text)
parsed_text = re.sub(hashtag_regex, 'HASHTAGHERE', parsed_text)
return parsed_text.count('URLHERE'), parsed_text.count('MENTIONHERE'), parsed_text.count('HASHTAGHERE')
def get_oth_features(self, tweets):
"""Takes a list of tweets, generates features for
each tweet, and returns a numpy array of tweet x features"""
feats = []
for t in tweets:
feats.append(self.other_features_(t))
return np.array(feats)
def transform_inputs(self, tweets):
"""
This function takes a list of tweets, along with used to
transform the tweets into the format accepted by the model.
Each tweet is decomposed into
(a) An array of TF-IDF scores for a set of n-grams in the tweet.
(b) An array of POS tag sequences in the tweet.
(c) An array of features including sentiment, vocab, and readability.
Returns a pandas dataframe where each row is the set of features
for a tweet. The features are a subset selected using a Logistic
Regression with L1-regularization on the training data.
"""
tf_array = self.tf_vectorizer.fit_transform(tweets).toarray()
tfidf_array = tf_array * self.idf_vector
print "Built TF-IDF array"
pos_tags = self.get_pos_tags(tweets)
pos_array = self.pos_vectorizer.fit_transform(pos_tags).toarray()
print "Built POS array"
oth_array = self.get_oth_features(tweets)
print "Built other feature array"
M = np.concatenate([tfidf_array, pos_array, oth_array], axis=1)
return pd.DataFrame(M)
def predictions(self, X):
"""
This function calls the predict function on
the trained model to generated a predicted y
value for each observation.
"""
return self.model.predict(X)
    def get_text_predictions(self, string, ignore_pos=True):
        """Split *string* into sentences and classify each one.

        Returns a list of (sentence, predicted_label) pairs; see
        _text_predictions for the ignore_pos semantics.
        """
        tokenizer = nltk.data.load('tokenizers/punkt/english.pickle')
        string = tokenizer.tokenize(string)
        print string
        return self._text_predictions(string, ignore_pos=ignore_pos)
    def _text_predictions(self, content, ignore_pos=True):
        """Classify each string in *content* (Python 2 code path).

        Normalises every item to unicode before feature extraction. When
        ignore_pos is True, items predicted as class 2 (presumably the
        "neither" class -- TODO confirm) are dropped from the result.
        Returns a list of (text, predicted_label) pairs.
        """
        fixed_content = []
        for idx, t_orig in enumerate(content):
            s = t_orig
            # Try latin1 first, then utf-8; undecodable bytes are ignored below.
            try:
                s = s.encode("latin1")
            except:
                try:
                    s = s.encode("utf-8")
                except UnicodeEncodeError as e:
                    raise e
            if type(s) != unicode:
                fixed_content.append(unicode(s, errors="ignore"))
            else:
                fixed_content.append(s)
        content = fixed_content
        X = self.transform_inputs(content)
        predicted_class = self.predictions(X).tolist()
        if ignore_pos:
            result = [(content[idx], label) for idx, label in enumerate(predicted_class) if label != 2]
        else:
            result = [(content[idx], label) for idx, label in enumerate(predicted_class)]
        return result
    def get_url_predictions(self, url, ignore_pos=True):
        """Fetch *url*, extract its visible text snippets, and classify each.

        Returns a list of (text, predicted_label) pairs, filtered as in
        _text_predictions when ignore_pos is True.
        """
        html = urlopen(url)
        soup = BeautifulSoup(html.read())
        data = []
        for string in soup.strings:
            # Keep only letters and basic punctuation, lower-cased.
            string = " ".join(re.split("[^a-zA-Z.,!?]*", string.lower())).strip()
            data.append(string)
        return self._text_predictions(data, ignore_pos=ignore_pos)
class WordMasker(object):
    """Masks known profanity in a piece of text."""
    def __init__(self):
        # TODO true corpus
        self.corpus = {'fuck', 'dick'}
    def get_masked_text(self, text, mask='*'):
        """For each corpus word, produce *text* with that word masked out.

        NOTE(review): each element masks only a single corpus word; the
        results are not combined into one fully-masked string -- confirm
        this is the intended contract.
        """
        def _mask_one(word):
            return text.replace(word, mask * len(word))
        return map(_mask_one, self.corpus)
def analyze_content(_str):
    """Deprecated: fetch a URL and classify its visible text snippets.

    NOTE(review): calls get_tweets_predictions, which is not defined in this
    module -- confirm it is imported elsewhere before relying on this.
    """
    print 'warning, deprecated'
    _str = str(_str)
    html = urlopen(_str)
    soup = BeautifulSoup(html.read())
    data = []
    for string in soup.strings:
        # Keep only letters and basic punctuation, lower-cased.
        string = " ".join(re.split("[^a-zA-Z.,!?]*", string.lower())).strip()
        data.append(string)
    return get_tweets_predictions(data).tolist()
if __name__ == '__main__':
    # Smoke test: classify the visible text of a Wikipedia page and print
    # every prediction that is not class 2, with its 1-based position.
    from classifier import tokenize as tokenize, preprocess
    test_str = "https://en.wikipedia.org/wiki/Twitter"
    print type(test_str)
    result = analyze_content(test_str)
    print result
    count = 0
    for i in result:
        count = count + 1
        if i != 2:
            print i, count
| StarcoderdataPython |
4859140 | from django.urls import path
from . import views
# URL namespace: reverse these routes as 'projects:<name>'.
app_name = 'projects'
urlpatterns = [
    path('', views.MyProjects.as_view(), name='index'),
    # Same list view, but with an explicit page number for pagination.
    path('page/<int:page>', views.MyProjects.as_view(), name='index_paginated'),
    path('new/', views.NewProject.as_view(), name='new'),
    path('edit/<int:id>', views.UpdateProject.as_view(), name='edit'),
    path('detail/<str:name>/<int:id>', views.ProjectDetail.as_view(), name='detail'),
    # categories() serves both the root list and a sub-category drill-down.
    path('categories/', views.categories, name='categories'),
    path('categories/<int:id>/', views.categories, name='sub_categories'),
    path('createExperiment/<slug:name>/<int:id>', views.createExperiment, name='createExperiment'),
    path('delete/<int:project_id>', views.delete_project, name='deleteProject'),
]
| StarcoderdataPython |
11230975 | <gh_stars>0
import numpy as np
import cv2
import torch
from .post_parser import remove_subjects
def padding_image_overlap(image, overlap_ratio=0.46):
    """Pad *image* horizontally on both sides by ``int(h * overlap_ratio)``.

    Returns (padded_image, image_pad_info, pad_length). image_pad_info is a
    torch.Tensor [top, bottom, left, right, h, w] describing where the
    original image sits, expressed as if it were padded vertically to a
    square (width x width) canvas, which is what BEV expects.
    """
    height, width = image.shape[0], image.shape[1]
    pad_length = int(height * overlap_ratio)
    padded = np.zeros((height, width + 2 * pad_length, 3), dtype=np.uint8)
    padded[:, pad_length:pad_length + width] = image
    # BEV takes a square input, so express top/bottom relative to a
    # width x width square centred on the image.
    vertical_pad = (width - height) // 2
    image_pad_info = torch.Tensor([
        vertical_pad,          # top
        width - vertical_pad,  # bottom
        0,                     # left
        width,                 # right
        height,                # original height
        width,                 # original width
    ])
    return padded, image_pad_info, pad_length
def get_image_split_plan(image, overlap_ratio=0.46):
    """Plan square crops [left, right, top, bottom] covering *image* width.

    Consecutive crops overlap by ``overlap_ratio`` of the image height; the
    final crop is flush with the right edge so the full width is covered.
    Returns an int32 array of shape (num_crops, 4).
    """
    height, width = image.shape[0], image.shape[1]
    aspect_ratio = width / height
    num_crops = int(np.ceil((aspect_ratio - 1) / (1 - overlap_ratio))) + 1
    step = (1 - overlap_ratio) * height
    boxes = []
    for idx in range(num_crops):
        left = width - height if idx == num_crops - 1 else step * idx
        boxes.append([left, left + height, 0, height])
    return np.array(boxes).astype(np.int32)
def exclude_boudary_subjects(outputs, drop_boundary_ratio, ptype='left', torlerance=0.05):
    """Drop subjects whose normalised camera x lies in the boundary band.

    NOTE(review): the name keeps the original spelling ("boudary") because
    callers may depend on it; ptype values other than 'left'/'right' raise.
    """
    if ptype == 'left':
        boundary_mask = outputs['cam'][:, 2] > (1 - drop_boundary_ratio + torlerance)
    elif ptype == 'right':
        boundary_mask = outputs['cam'][:, 2] < (drop_boundary_ratio - 1 - torlerance)
    remove_subjects(outputs, torch.where(boundary_mask)[0])
def convert_crop_cam_params2full_image(cam_params, crop_bbox, image_shape):
    """Map camera params estimated on a crop back to full-image coordinates.

    cam_params columns are (depth/scale, y, x) with x normalised to [-1, 1];
    the array is modified in place and also returned.
    """
    height, width = image_shape
    # Rescale so the crop's longer side is expressed relative to the
    # full image's longer side.
    crop_extent = (crop_bbox[[1, 3]] - crop_bbox[[0, 2]]).max()
    cam_params *= crop_extent / max(height, width)
    # Shift normalised x by the horizontal offset of the crop centre.
    cam_params[:, 2] += crop_bbox[:2].mean() / (width / 2) - 1
    return cam_params
def collect_outputs(outputs, all_outputs):
    """Accumulate one batch of model outputs into *all_outputs* in place.

    New keys are stored directly; existing keys are concatenated along the
    batch dimension (dim 3 for 'center_map', dim 2 for 'center_map_3d',
    dim 0 otherwise). 'smpl_face' is shared across batches and kept as-is.
    """
    cat_dims = {'center_map': 3, 'center_map_3d': 2}
    for key in list(outputs.keys()):
        if key not in all_outputs:
            all_outputs[key] = outputs[key]
        elif key == 'smpl_face':
            continue
        else:
            dim = cat_dims.get(key, 0)
            all_outputs[key] = torch.cat([all_outputs[key], outputs[key]], dim)
| StarcoderdataPython |
161941 | <gh_stars>0
import json  # NOTE(review): unused in this script -- confirm before removing
# Copy the plain-text help source into the site's markdown includes.
# NOTE(review): neither file handle is closed; `with open(...)` would be safer.
html = open("../../_includes/help.md", "w")
html.write(open("../resource/help.txt","r").read()) | StarcoderdataPython |
1983781 | <reponame>R1tschY/mydocpy
# -*- coding=utf-8 -*-
#
# Copyright 2017 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ast
from typing import Sequence, Union, TextIO, List, Text
from mydocpy.docstrings import DocString, FunctionDocString, ClassDocString, \
FuncType
def parse_file(filepath):
    # type: (Union[Text, TextIO]) -> Sequence[DocString]
    """
    Parse a Python source file and extract its docstrings.

    :param filepath: path to the file to process, or an open text stream
    :return: the collected docstring descriptors
    """
    # basestring makes this Python 2 only: a path is opened and read here,
    # anything else is treated as a file-like object.
    if isinstance(filepath, basestring):
        with open(filepath, "r") as f:
            content = f.read()
        return parse(content)
    else:
        return parse(filepath.read())
def parse(content):
    # type: (unicode) -> Sequence[DocString]
    """
    Extract all docstrings from Python source code.

    :param content: source to process
    :return: the collected docstring descriptors
    """
    visitor = DocStringExtractor()
    visitor.visit(ast.parse(content))
    # TODO: add a hook for use from the outside
    return visitor.doc_strings
def _get_node_docstring(node, cls=DocString):
    """
    Return the docstring wrapper for a ClassDef or FunctionDef node,
    or None when the first statement is not a string literal.
    """
    if not node.body:
        return None
    first_stmt = node.body[0]
    if isinstance(first_stmt, ast.Expr) and isinstance(first_stmt.value, ast.Str):
        return cls.from_node(node, first_stmt.value)
    return None
class DocStringExtractor(ast.NodeVisitor):
    """AST visitor that collects DocString entries for classes and functions."""
    doc_strings = None # type: List[DocString]
    def __init__(self):
        self.doc_strings = []
        # True while visiting nodes directly inside a class body; used to
        # tell methods apart from free functions.
        self.in_class = False
    def visit_ClassDef(self, node):
        docstring = _get_node_docstring(node, ClassDocString)
        if docstring:
            self.doc_strings.append(docstring)
        # inlined for performance
        in_class = self.in_class
        self.in_class = True
        self.generic_visit(node)
        self.in_class = in_class
    def visit_FunctionDef(self, node):
        docstring = _get_node_docstring(node, FunctionDocString)
        if docstring:
            # NOTE(review): arg.id is the Python 2 AST attribute; Python 3
            # uses arg.arg -- confirm the intended interpreter.
            docstring.args = [arg.id for arg in node.args.args]
            docstring.vaarg = node.args.vararg
            docstring.kwarg = node.args.kwarg
            if self.in_class:
                if node.decorator_list:
                    # FIXME: we hope nobody overwriten staticmethod or
                    # classmethod
                    decorators = [
                        decorator.id for decorator in node.decorator_list
                    ]
                    if "staticmethod" in decorators:
                        docstring.func_type = FuncType.STATIC
                    elif "classmethod" in decorators:
                        docstring.func_type = FuncType.CLASS
                    else:
                        docstring.func_type = FuncType.INSTANCE
                else:
                    docstring.func_type = FuncType.INSTANCE
            else:
                docstring.func_type = FuncType.FREE
            self.doc_strings.append(docstring)
        # inlined for performance: save/restore the flag around the recursion
        # (nested defs inside a method are free functions, not methods).
        in_class = self.in_class
        self.in_class = False
        self.generic_visit(node)
        self.in_class = in_class
| StarcoderdataPython |
8097134 | <reponame>Ezibenroc/crous_menu
from setuptools import setup
if __name__ == '__main__':
    # Packaging metadata for the crous_menu CLI; the entry point installs a
    # `crous_menu` command that dispatches to crous_menu.main().
    setup(
        name='crous_menu',
        py_modules=['crous_menu'],
        entry_points='''
            [console_scripts]
            crous_menu=crous_menu:main
        ''',
        description='Script to fetch the CROUS menu for Barnave',
        author="<NAME>",
        author_email="<EMAIL>",
        install_requires=[
            'requests',
            'beautifulsoup4',
            'colorama',
        ],
    )
| StarcoderdataPython |
11350199 | <filename>server_mysql.py
from flask import Flask, render_template, request, redirect, session, flash, get_flashed_messages
from flask_bcrypt import Bcrypt
from mysqlconnection import connectToMySQL
from datetime import datetime
import re
import os
import my_utils
app = Flask(__name__)
bcrypt = Bcrypt(app)
app.secret_key = 'darksecret'  # NOTE(review): hard-coded secret key; load from env/config
dbname = 'login_users'
not_logged_in = "You're not logged in"
# One throwaway connection loads the static form definitions and the
# language list at import time; the connection is discarded right after.
sql = {'db': connectToMySQL(dbname)}
EDITPASSWORD = sql['db'].query_db("SELECT * FROM editpassword_form ORDER BY itemid;")
REGISTRATION = sql['db'].query_db("SELECT * FROM registration_form ORDER BY itemid;")
EDITPROFILE = sql['db'].query_db("SELECT * FROM editprofile_form ORDER BY itemid;")
LANGUAGES = sql['db'].query_db("SELECT * FROM languages ORDER BY itemid;")
del sql['db']
@app.route("/")
def mainpage():
    """Landing page: login/registration forms, or redirect when logged in."""
    if 'id' in session:
        return redirect("/success")
    print(get_flashed_messages())
    # Seed empty registration defaults so the template can re-populate the
    # form after a failed validation round-trip.
    if 'reg' not in session:
        session['reg'] = {
            'firstname': "",
            'lastname': "",
            'email': ""
        }
    return render_template("index.html", REGISTRATION=REGISTRATION, LANGUAGES=LANGUAGES)
@app.route("/login", methods=["POST"])
def login():
    """Authenticate by email/password; on success store id + name in session."""
    mysql = connectToMySQL(dbname)
    users = mysql.query_db("SELECT * FROM users WHERE email = %(loginemail)s;", request.form)
    if len(users) > 0:
        if bcrypt.check_password_hash(users[0]['pswdhash'], request.form['loginpassword']):
            session['id'] = users[0]['id']
            session['logged_in'] = True
            session['firstname'] = users[0]['firstname']
            flash("You've been logged in", "success")
            return redirect("/success")
    # Same message for unknown email and wrong password (no user enumeration).
    flash("You could not be logged in", "error")
    return redirect("/")
def get_selected_languages(fields):
    """Return the names of every known language that appears in *fields*."""
    return [language['name'] for language in LANGUAGES if language['name'] in fields]
def validate_nonpassword(fields):
    """Validate the non-password registration/profile fields.

    Flashes one categorised error message per failing check and returns
    False when any check fails.
    """
    is_valid = True
    if not my_utils.EMAIL_REGEX.match(fields['email']):
        flash("Not a valid email", "email")
        is_valid = False
    if len(fields['firstname']) < 2:
        flash("First name must have at least two characters", "firstname")
        is_valid = False
    if len(fields['lastname']) < 2:
        flash("Last name must have at least two characters", "lastname")
        is_valid = False
    if not my_utils.is_age_over(10, fields['dob']):
        is_valid = False
        flash("You're too young to register", "dob")
    # At least two languages must be ticked on the form.
    if len(get_selected_languages(fields)) < 2:
        flash("Select at least two languages", "languages")
        is_valid = False
    return is_valid
def validate_all_fields(fields):
    """Validate profile fields, then the password (short-circuits on failure)."""
    if not validate_nonpassword(fields):
        return False
    return my_utils.validate_password(fields)
@app.route("/register", methods=["POST"])
def register():
    """Create a new user account from the registration form.

    On validation failure the entered names/email are stashed in the session
    so the landing page can re-populate the form.
    """
    # Debug print of the raw form removed: it leaked plaintext passwords
    # into the server log.
    is_valid = validate_all_fields(request.form)
    if is_valid:
        mysql = connectToMySQL(dbname)
        if len(mysql.query_db("SELECT * FROM users WHERE email = %(email)s", request.form)) == 0:
            dob = datetime.strptime(request.form['dob'], "%Y-%m-%d")
            data = dict()
            for name in request.form.keys():
                data[name] = request.form[name]
            # BUG FIX: restore the bcrypt call (same pattern as updatepassword);
            # the previous line was corrupted and would not run.
            data['pswdhash'] = bcrypt.generate_password_hash(request.form['password'])
            data['dob'] = dob
            data['languages'] = ', '.join(get_selected_languages(request.form))
            if mysql.query_db("INSERT INTO users ( firstname, lastname, email, pswdhash, created_at, updated_at, dob, languages ) VALUES ( %(firstname)s, %(lastname)s, %(email)s, %(pswdhash)s, NOW(), NOW(), %(dob)s, %(languages)s ) ;", data):
                flash("Successfully registered "+request.form['email']+". Try logging in!", "success")
                if 'reg' in session:
                    del session['reg']
            else:
                flash("Something went wrong. It is us, not you! Try in a few hours", "error")
        else:
            flash(request.form['email']+" is already a user. Please login instead.", "error")
    else:
        # Keep the (non-sensitive) entries so the form can be re-filled.
        session['reg'] = {
            'firstname': request.form['firstname'],
            'lastname': request.form['lastname'],
            'email': request.form['email']
        }
    return redirect("/")
@app.route("/success")
def success():
    """Dashboard: received messages (with sender and age), own sent
    messages, and the list of other users available to message."""
    if 'id' not in session:
        flash(not_logged_in, "error")
        return redirect("/")
    mysql = connectToMySQL(dbname)
    # Soft-deleted messages (recipient_del / sender_del = 1) are hidden.
    rec_msgs = mysql.query_db("SELECT users.firstname AS firstname, users.lastname AS lastname, sender_id, message_id, content, TIMEDIFF(NOW(), sent_at) AS message_age FROM messages JOIN users ON messages.sender_id = users.id WHERE messages.recipient_id = %(id)s AND messages.recipient_del = 0 ORDER BY sent_at DESC;", {'id': session['id']})
    sent_msgs = mysql.query_db("SELECT * FROM messages WHERE sender_id = %(id)s AND sender_del = 0;", {'id': session['id']})
    other_users = mysql.query_db("SELECT * FROM users WHERE id != %(id)s;", {'id': session['id']})
    return render_template("success.html", rec_msgs=rec_msgs, other_users=other_users, sent_msgs=sent_msgs)
@app.route("/logout")
def logout():
    """Clear the whole session (logs the user out) and return to the landing page."""
    session.clear()
    return redirect("/")
@app.route("/viewprofile")
def viewprofile():
    """Show the logged-in user's own profile."""
    if 'id' not in session:
        flash(not_logged_in, "error")
        return redirect("/")
    mysql = connectToMySQL(dbname)
    users = mysql.query_db("SELECT id, firstname, lastname, email, created_at, dob, languages FROM users WHERE id = %(id)s", {'id': session['id']})
    if len(users) > 0:
        return render_template("profile.html", user=users[0], user_age=my_utils.get_age(users[0]['dob'].strftime('%Y-%m-%d')))
    else:
        flash("Aw snap! Something went wrong. Try again in a few hours", "error")
        return redirect("/success")
@app.route("/viewprofile/<user_id>")
def viewprofile_user_id(user_id):
    """Show another user's profile by id.

    NOTE(review): near-duplicate of viewprofile; any logged-in user can view
    any profile -- confirm that is intended.
    """
    if 'id' not in session:
        flash(not_logged_in, "error")
        return redirect("/")
    mysql = connectToMySQL(dbname)
    users = mysql.query_db("SELECT id, firstname, lastname, email, created_at, dob, languages FROM users WHERE id = %(id)s", {'id': user_id})
    if len(users) > 0:
        return render_template("profile.html", user=users[0], user_age=my_utils.get_age(users[0]['dob'].strftime('%Y-%m-%d')))
    else:
        flash("Aw snap! Something went wrong. Try again in a few hours", "error")
        return redirect("/success")
@app.route("/editprofile")
def editprofile():
    """Render the edit-profile form pre-filled with the current user's data."""
    if 'id' not in session:
        flash(not_logged_in, "error")
        return redirect("/")
    mysql = connectToMySQL(dbname)
    users = mysql.query_db("SELECT id, firstname, lastname, email, created_at, languages, dob FROM users WHERE id = %(id)s", {'id': session['id']})
    userdata = dict()
    for key in users[0].keys():
        userdata[key] = users[0][key]
    # The template expects the date of birth as a plain YYYY-MM-DD string.
    userdata['dob'] = userdata['dob'].strftime("%Y-%m-%d")
    print(userdata)
    # Stored as a comma-separated string; split back into a list of names.
    checkedlanguages = userdata['languages'].split(', ')
    if len(users) > 0:
        return render_template("editprofile.html", user = userdata, EDITPROFILE=EDITPROFILE, LANGUAGES=LANGUAGES, checkedlanguages=checkedlanguages)
    else:
        flash("Aw snap! Something went wrong. Try again in a few hours", "error")
        return redirect("/success")
@app.route("/updateprofile", methods=['POST'])
def updateprofile():
    """Persist edited (non-password) profile fields for the logged-in user."""
    if 'id' not in session:
        flash(not_logged_in, "error")
        return redirect("/")
    if validate_nonpassword(request.form):
        mysql = connectToMySQL(dbname)
        data = dict()
        for key in request.form.keys():
            data[key] = request.form[key]
        data['dob'] = datetime.strptime(request.form['dob'], '%Y-%m-%d')
        # Languages are stored as one comma-separated string column.
        data['languages'] = ', '.join(get_selected_languages(request.form))
        data['id'] = session['id']
        if mysql.query_db("UPDATE users SET firstname = %(firstname)s, lastname = %(lastname)s, email = %(email)s, dob = %(dob)s, languages = %(languages)s, updated_at = NOW() WHERE id = %(id)s;", data):
            flash("Updated your profile", "success")
        else:
            flash("Something went wrong. It's us, not you. Try again later.", "error")
        return redirect("/success")
    else:
        return redirect("/editprofile")
@app.route("/changepasswd")
def changepasswd():
    """Render the change-password form."""
    if 'id' not in session:
        flash(not_logged_in, "error")
        return redirect("/")
    return render_template("change_password.html", EDITPASSWORD=EDITPASSWORD)
@app.route("/updatepassword", methods=['POST'])
def updatepassword():
    """Change the logged-in user's password after verifying the current one."""
    if 'id' not in session:
        flash(not_logged_in, "error")
        return redirect("/")
    mysql = connectToMySQL(dbname)
    rows = mysql.query_db("SELECT id, pswdhash FROM users WHERE id = %(id)s", {'id': session['id']})
    is_valid = True
    if len(rows) == 0:
        is_valid = False
        flash("Passwords cannot be updated now. Please try again later", "error")
        return redirect("/success")
    if not bcrypt.check_password_hash(rows[0]['pswdhash'], request.form['currentpassword']):
        is_valid = False
        flash('Current password does not match', 'currentpassword')
        return redirect("/changepasswd")
    # Only the new-password/confirm pair is checked below; the first two
    # failures return early, so is_valid here reflects just this check.
    if not my_utils.validate_password(request.form, categories=['newpassword', 'confirm']):
        is_valid = False
    if not is_valid:
        return redirect("/changepasswd")
    else:
        pswdhash = bcrypt.generate_password_hash(request.form['newpassword'])
        status = mysql.query_db("UPDATE users SET pswdhash = %(pswdhash)s WHERE id = %(id)s", {'id': rows[0]['id'], 'pswdhash': pswdhash})
        if status:
            flash('Password changed', 'success')
        else:
            flash("Something went wrong in saving your new password. Password did not change.", "error")
        return redirect("/success")
@app.route("/deleteprofile")
def deleteprofile():
    """Permanently delete the logged-in user's account, then log out.

    NOTE(review): no confirmation step before the hard delete -- confirm UX.
    """
    if 'id' not in session:
        flash(not_logged_in, "error")
        return redirect("/")
    mysql = connectToMySQL(dbname)
    mysql.query_db("DELETE FROM users WHERE id = %(id)s", {'id': session['id']})
    return redirect("/logout")
@app.route("/delmsg/<id>")
def delmsg(id):
    """Soft-delete a received message (sets recipient_del = 1).

    A delete attempt on a message not addressed to the current user is
    treated as tampering: first offence shows a warning page, a repeat
    logs the user out.
    """
    if 'id' not in session:
        flash(not_logged_in, 'error')
        return redirect("/")
    mysql = connectToMySQL(dbname)
    data = {'message_id': id, 'recipient_id': session['id']}
    messages = mysql.query_db("SELECT * FROM messages WHERE message_id = %(message_id)s AND recipient_id = %(recipient_id)s;", data)
    # Empty result means the message id was forged or belongs to someone else.
    if bool(messages) == 0:
        if 'mischief' not in session:
            session['mischief'] = datetime.now()
            flash("You can't delete that message", "error")
            return redirect("/hacker_alert")
        else:
            return redirect("/logout")
    elif not mysql.query_db("UPDATE messages SET recipient_del = 1 WHERE message_id = %(message_id)s AND recipient_id = %(recipient_id)s;", data):
        flash("Something went wrong in deleting the message. Server error.", "error")
    return redirect("/success")
@app.route("/hacker_alert")
def hacker_alert():
    """Warning page shown after a detected message-tampering attempt."""
    if 'id' not in session:
        flash(not_logged_in, 'error')
        return redirect("/")
    mysql = connectToMySQL(dbname)
    users = mysql.query_db("SELECT * FROM users WHERE id = %(id)s", {'id': session['id']})
    # NOTE(review): users[0] assumes the session id always resolves to a row.
    return render_template("mischief.html", user=users[0], ip_address=request.remote_addr)
@app.route("/sendmsg", methods=['POST'])
def sendmsg():
    """Insert a new message from the logged-in user to the chosen recipient."""
    if 'id' not in session:
        flash(not_logged_in, "error")
        return redirect("/")
    mysql = connectToMySQL(dbname)
    data = dict()
    for key in request.form.keys():
        data[key] = request.form[key]
    data['sender_id'] = session['id']
    if not mysql.query_db("INSERT INTO messages (content, recipient_id, sender_id, sent_at, recipient_del, sender_del) VALUES ( %(content)s, %(recipient_id)s, %(sender_id)s, NOW(), 0, 0 );", data):
        flash("Aw snap! Something is wrong at our end. Try in a few hours. Message was not sent", "error")
    else:
        # Look up the recipient's first name for the confirmation message.
        users = mysql.query_db("SELECT firstname FROM users WHERE id = %(recipient_id)s", request.form)
        # BUG FIX: was `len(users) < 0`, which is impossible, so the error
        # branch could never run and a missing recipient crashed on users[0].
        if len(users) == 0:
            flash("There was an error when you tried to send a message", "error")
        else:
            flash("Message sent to "+users[0]['firstname']+' successfully.', 'success')
    return redirect("/success")
if __name__ == "__main__":
app.run(debug=True) | StarcoderdataPython |
229695 | #!/usr/bin/python
from urllib.request import urlopen
from xml.etree import ElementTree as ET
# NOTE(review): the freegeoip.net service has been discontinued; this
# endpoint likely no longer resolves -- confirm/replace before use.
url = 'http://freegeoip.net/xml/'
try:
    with urlopen(url) as res:
        s = res.read()
    root = ET.fromstring(s)
    ip = root.find('IP').text
    country = root.find('CountryName').text
    print('%s (%s)' % (ip, country))
except Exception:
    # Best-effort: print an empty line on any network/parse failure.
    print('')
| StarcoderdataPython |
1822194 | # -*- coding: utf-8 -*-
'''
The high-level capsul.api module pre-imports the main objects from several sub-modules:
Classes
-------
* :class:`~capsul.process.process.Process`
* :class:`~capsul.process.process.NipypeProcess`
* :class:`~capsul.process.process.ProcessResult`
* :class:`~capsul.process.process.FileCopyProcess`
* :class:`~capsul.process.process.InteractiveProcess`
* :class:`~capsul.pipeline.pipeline.Pipeline`
* :class:`~capsul.pipeline.pipeline_nodes.Plug`
* :class:`~capsul.pipeline.pipeline_nodes.Node`
* :class:`~capsul.pipeline.pipeline_nodes.ProcessNode`
* :class:`~capsul.pipeline.pipeline_nodes.PipelineNode`
* :class:`~capsul.pipeline.pipeline_nodes.Switch`
* :class:`~capsul.pipeline.pipeline_nodes.OptionalOutputSwitch`
* :class:`~capsul.study_config.study_config.StudyConfig`
Functions
---------
* :func:`~capsul.engine.capsul_engine`
* :func:`~capsul.study_config.process_instance.get_process_instance`
* :func:`~capsul.utils.finder.find_processes`
'''
from __future__ import absolute_import
from capsul.process.process import (Process, NipypeProcess, ProcessResult,
FileCopyProcess, InteractiveProcess)
from capsul.pipeline.pipeline import Pipeline
from capsul.pipeline.pipeline_nodes import Plug
from capsul.pipeline.pipeline_nodes import Node
from capsul.pipeline.pipeline_nodes import ProcessNode
from capsul.pipeline.pipeline_nodes import PipelineNode
from capsul.pipeline.pipeline_nodes import Switch
from capsul.pipeline.pipeline_nodes import OptionalOutputSwitch
from capsul.engine import capsul_engine
from capsul.engine import activate_configuration
from capsul.study_config.process_instance import get_process_instance
from capsul.study_config.study_config import StudyConfig
from capsul.utils.finder import find_processes
| StarcoderdataPython |
1858827 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-07-03 10:59
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Drop the obsolete geography_curie field from OrganisationDivision."""
    dependencies = [("organisations", "0049_auto_20180703_1030")]
    operations = [
        migrations.RemoveField(
            model_name="organisationdivision", name="geography_curie"
        )
    ]
| StarcoderdataPython |
6568046 | #!/usr/bin/env python
from os import path
from setuptools import find_packages, setup
# Use the README as the long description shown on PyPI.
this_directory = path.abspath(path.dirname(__file__))
with open(path.join(this_directory, "README.rst")) as fd:
    long_description = fd.read()
requirements = [
    "click>=6.0",
    "pytest>=5.0",
]
# src-layout package; the pytest11 entry point registers the plugin with pytest.
setup(
    name="pytest_click",
    version="1.0.2",
    url="https://github.com/Stranger6667/pytest-click",
    license="MIT",
    author="<NAME>",
    author_email="<EMAIL>",
    maintainer="<NAME>",
    maintainer_email="<EMAIL>",
    description="Py.test plugin for Click",
    long_description=long_description,
    long_description_content_type="text/x-rst",
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Environment :: Console",
        "Intended Audience :: Developers",
        "Operating System :: OS Independent",
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: Implementation :: CPython",
        "Programming Language :: Python :: Implementation :: PyPy",
        "Topic :: Software Development :: Testing",
    ],
    include_package_data=True,
    packages=find_packages(where="src"),
    package_dir={"": "src"},
    install_requires=requirements,
    entry_points={
        "pytest11": [
            "pytest_click = pytest_click",
        ]
    },
)
| StarcoderdataPython |
9760616 | <filename>Example_Scripts/Ex3_User_Specific_Passes.py
# -*- coding: ISO-8859-1 -*-
##########################################
# Using the PassTools API
# Example3: Creating user-specific passes from a user DB
#
# In this example, we'll use a PassTools template to generate a set of passes, each of which is
# uniquely-created for one user from our customer DB. Of course, our example will be simple,
# but you have the freedom to create much more elaborate templates, and correspondingly-elaborate passes.
# For our example, we have prepared a basic 'generic' template (such as you might use for a club membership card.)
# In our template, we have added two secondary fields. We have given them custom keynames: 'fname' and 'lname'
# which are to contain the customer's first and last name, respectively.
# Our script, below, will generate the passes from our 'user DB', and download the passes so we can distribute them.
#
# Copyright 2013, Urban Airship, Inc.
##########################################
from passtools import PassTools
from passtools.pt_pass import Pass
from passtools.pt_template import Template
# NOTE(review): the `print "..."` statements below make this a Python 2
# script; it will not run under Python 3 without conversion.
# API User:
# STEP 1: Retrieve your API key from your Account view on the PassTools website
my_api_key = "your-key-goes-in-here"
# STEP 2:
# You'll always configure the api, providing your api key.
# This is required!
PassTools.configure(api_key = my_api_key)
# Our model DB...
user_db = [{"first_name": "James", "last_name":"Bond"},
           {"first_name": "Jimi", "last_name":"Hendrix"},
           {"first_name": "Johnny", "last_name":"Appleseed"}]
# You'll have selected the template you want to use...you can find the template ID in the Template Builder UI
selected_template_id = 604
# Retrieve your template, so you can modify the data and create passes from it
get_response = Template.get(selected_template_id)
the_fields_model = get_response["fieldsModel"]
# Now for each user in your DB, grab the user data, modify the template.fields_model, create a pass and download it:
for user_record in user_db:
    # Custom keys 'fname'/'lname' were defined on the template's
    # secondary fields; fill them with this user's data.
    the_fields_model["fname"]["value"] = user_record["first_name"]
    the_fields_model["lname"]["value"] = user_record["last_name"]
    create_response = Pass.create(selected_template_id, the_fields_model)
    new_pass_id = create_response["id"]
    print "NEW PASS CREATED. ID: %s, First: %s, Last: %s" % (new_pass_id, user_record["first_name"], user_record["last_name"])
    Pass.download(new_pass_id, "/tmp/%s_%s.pkpass" % (user_record["first_name"], user_record["last_name"]))
# Now distribute the passes to your users!
| StarcoderdataPython |
8190299 | <filename>legacy/utils/dashboard/pushtoDB.py
##############################################################################
# Copyright (c) 2017 ZTE Corporation and others.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
import requests
import json
import datetime
import os
import sys
from qtip.utils import logger_utils
logger = logger_utils.QtipLogger('push_db').get
TEST_DB = 'http://testresults.opnfv.org/test/api/v1'
suite_list = [('compute_result.json', 'compute_test_suite'),
('network_result.json', 'network_test_suite'),
('storage_result.json', 'storage_test_suite')]
payload_list = {}
def push_results_to_db(db_url, case_name, payload, installer, pod_name):
    """POST one qtip result record to the community test DB.

    Also mirrors the record to the (temporary) qtip testapi endpoint on a
    best-effort basis. Returns True when the main POST succeeds, False
    otherwise.
    """
    url = db_url + "/results"
    creation_date = str(datetime.datetime.utcnow().isoformat())
    params = {"project_name": "qtip", "case_name": case_name,
              "pod_name": pod_name, "installer": installer, "start_date": creation_date,
              "version": "test", "details": payload}
    headers = {'Content-Type': 'application/json'}
    logger.info('pod_name:{0},installer:{1},creation_data:{2}'.format(pod_name,
                                                                      installer,
                                                                      creation_date))
    # temporary code, will be deleted after Bigergia dashboard is ready
    try:
        qtip_testapi_url = "http://testapi.qtip.openzero.net/results"
        qtip_testapi_r = requests.post(qtip_testapi_url, data=json.dumps(params), headers=headers)
        # BUG FIX: was "'... %s'.format(r)", which never interpolates.
        logger.info('Pushing Results to qtip_testapi: {0}'.format(qtip_testapi_r))
    except Exception:  # narrowed from bare except: mirror push is best-effort
        logger.info("Pushing Results to qtip_testapi Error:{0}".format(sys.exc_info()[0]))
    try:
        r = requests.post(url, data=json.dumps(params), headers=headers)
        logger.info(r)
        return True
    except Exception:  # narrowed from bare except; failure reported via return value
        logger.info("Error:{0}".format(sys.exc_info()[0]))
        return False
def populate_payload(suite_list):
    """Record in the global payload_list each suite whose result file exists."""
    global payload_list
    for result_file, suite_name in suite_list:
        if os.path.isfile('results/' + str(result_file)):
            payload_list[result_file] = suite_name
def main():
    """Push every collected suite result file to the test DB.

    Requires INSTALLER_TYPE and NODE_NAME in the environment; reads the
    JSON result files from the relative results/ directory.
    """
    global payload_list
    populate_payload(suite_list)
    if payload_list:
        logger.info(payload_list)
        for suite, case in payload_list.items():
            with open('results/' + suite, 'r') as result_file:
                j = json.load(result_file)
                push_results_to_db(TEST_DB, case, j,
                                   os.environ['INSTALLER_TYPE'],
                                   os.environ['NODE_NAME'])
    elif not payload_list:
        logger.info('Results not found')
if __name__ == "__main__":
    main()
| StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.